# cmdutil.py - help for command processing in mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.

from node import hex, nullid, nullrev, short
from i18n import _
import os, sys, errno, re, glob
import mdiff, bdiff, util, templater, patch, error, encoding
import match as _match

revrangesep = ':'

def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}
    for e in table.keys():
        aliases = e.lstrip("^").split("|")
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is not None:
            if aliases[0].startswith("debug") or found.startswith("debug"):
                debugchoice[found] = (aliases, table[e])
            else:
                choice[found] = (aliases, table[e])

    if not choice and debugchoice:
        choice = debugchoice

    return choice

def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice = findpossible(cmd, table, strict)

    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        clist = choice.keys()
        clist.sort()
        raise error.AmbiguousCommand(cmd, clist)

    if choice:
        return choice.values()[0]

    raise error.UnknownCommand(cmd)

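# Usage sketch for findpossible/findcmd (the command table below is
# hypothetical; the real one is built in commands.py). Table keys may carry
# a leading "^" marker and "|"-separated aliases; unambiguous prefixes match
# unless strict=True, and debug commands only match when nothing else does:
#
#   table = {'^status|st': (None, [], ''), 'debugdata': (None, [], '')}
#   findpossible('st', table)             # {'st': (['status', 'st'], ...)}
#   findcmd('stat', table, strict=False)  # prefix match -> the status entry
#   findcmd('nosuch', table)              # raises error.UnknownCommand
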
def bail_if_changed(repo):
    if repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise util.Abort(_("outstanding uncommitted changes"))

def logmessage(opts):
    """get the log message according to the -m and -l options"""
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message

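# Sketch of how logmessage() resolves the commit message (hypothetical
# option dicts; a logfile of '-' reads the message from stdin):
#
#   logmessage({'message': 'fix bug', 'logfile': None})  # -> 'fix bug'
#   logmessage({'message': None, 'logfile': 'msg.txt'})  # -> contents of msg.txt
#   logmessage({'message': 'x', 'logfile': 'y'})         # raises util.Abort
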
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if limit:
        try:
            limit = int(limit)
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        limit = sys.maxint
    return limit

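# Sketch of loglimit() behaviour (hypothetical option dicts):
#
#   loglimit({'limit': '3'})   # -> 3
#   loglimit({})               # -> sys.maxint, i.e. effectively no limit
#   loglimit({'limit': '0'})   # raises util.Abort (limit must be positive)
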
def remoteui(src, opts):
    'build a remote ui from ui or repo and opts'
    if hasattr(src, 'baseui'): # looks like a repository
        dst = src.baseui.copy() # drop repo-specific config
        src = src.ui # copy target options from repo
    else: # assume it's a global ui object
        dst = src.copy() # keep all global options

    # copy ssh-specific options
    for o in 'ssh', 'remotecmd':
        v = opts.get(o) or src.config('ui', o)
        if v:
            dst.setconfig("ui", o, v)
    # copy bundle-specific options
    r = src.config('bundle', 'mainreporoot')
    if r:
        dst.setconfig('bundle', 'mainreporoot', r)

    return dst

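# Sketch of remoteui() use (hypothetical repo and opts). Given a repository
# it starts from repo.baseui, dropping repo-local configuration, and then
# copies across only the ssh/remotecmd and bundle settings needed for
# talking to a remote:
#
#   dst = remoteui(repo, {'ssh': 'ssh -C'})
#   dst.config('ui', 'ssh')    # -> 'ssh -C'
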
def revpair(repo, revs):
    '''return pair of nodes, given list of revisions. second item can
    be None, meaning use working dir.'''

    def revfix(repo, val, defval):
        if not val and val != 0 and defval is not None:
            val = defval
        return repo.lookup(val)

    if not revs:
        return repo.dirstate.parents()[0], None
    end = None
    if len(revs) == 1:
        if revrangesep in revs[0]:
            start, end = revs[0].split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, len(repo) - 1)
        else:
            start = revfix(repo, revs[0], None)
    elif len(revs) == 2:
        if revrangesep in revs[0] or revrangesep in revs[1]:
            raise util.Abort(_('too many revisions specified'))
        start = revfix(repo, revs[0], None)
        end = revfix(repo, revs[1], None)
    else:
        raise util.Abort(_('too many revisions specified'))
    return start, end

def revrange(repo, revs):
    """Return a list of revision numbers from a list of revision
    specifications."""

    def revfix(repo, val, defval):
        if not val and val != 0 and defval is not None:
            return defval
        return repo.changelog.rev(repo.lookup(val))

    seen, l = set(), []
    for spec in revs:
        if revrangesep in spec:
            start, end = spec.split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, len(repo) - 1)
            step = start > end and -1 or 1
            for rev in xrange(start, end+step, step):
                if rev in seen:
                    continue
                seen.add(rev)
                l.append(rev)
        else:
            rev = revfix(repo, spec, None)
            if rev in seen:
                continue
            seen.add(rev)
            l.append(rev)

    return l

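# Sketch of revrange() expansion (revision numbers are hypothetical; specs
# may also be tags or hashes, resolved through repo.lookup). Duplicates are
# skipped, and ranges use revrangesep (':') with open ends defaulting to 0
# and the last revision:
#
#   revrange(repo, ['2:5'])   # -> [2, 3, 4, 5]
#   revrange(repo, ['5:2'])   # -> [5, 4, 3, 2]    (descending ranges work)
#   revrange(repo, [':3'])    # -> [0, 1, 2, 3]    (open start defaults to 0)
#   revrange(repo, ['3:'])    # -> [3, ..., tip]   (open end defaults to tip)
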
def make_filename(repo, pat, node,
                  total=None, seqno=None, revwidth=None, pathname=None):
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node:
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        raise util.Abort(_("invalid format spec '%%%s' in output filename") %
                         inst.args[0])

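# Format specs expanded by make_filename(), as implemented above:
#   %%  literal '%'            %b  basename of the repository root
#   %H  full changeset hash    %h  short hash         %R  revision number
#   %r  zero-padded revision   %N  total count        %n  zero-padded seqno
#   %s  basename of pathname   %d  dirname of pathname %p  pathname itself
#
# Sketch (repository name and revision are hypothetical):
#
#   make_filename(repo, '%b-r%R.patch', node)   # -> 'myrepo-r42.patch'
#
# An unknown spec raises util.Abort ("invalid format spec ...").
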
def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):

    writable = 'w' in mode or 'a' in mode

    if not pat or pat == '-':
        return writable and sys.stdout or sys.stdin
    if hasattr(pat, 'write') and writable:
        return pat
    if hasattr(pat, 'read') and 'r' in mode:
        return pat
    return open(make_filename(repo, pat, node, total, seqno, revwidth,
                              pathname),
                mode)

def expandpats(pats):
    if not util.expandglobs:
        return list(pats)
    ret = []
    for p in pats:
        kind, name = _match._patsplit(p, None)
        if kind is None:
            try:
                globbed = glob.glob(name)
            except re.error:
                globbed = [name]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(p)
    return ret

def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])
    m = _match.match(repo.root, repo.getcwd(), pats,
                     opts.get('include'), opts.get('exclude'), default)
    def badfn(f, msg):
        repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
    m.bad = badfn
    return m

def matchall(repo):
    return _match.always(repo.root, repo.getcwd())

def matchfiles(repo, files):
    return _match.exact(repo.root, repo.getcwd(), files)

def findrenames(repo, added, removed, threshold):
    '''find renamed files -- yields (before, after, score) tuples'''
    ctx = repo['.']
    for a in added:
        aa = repo.wread(a)
        bestname, bestscore = None, threshold
        for r in removed:
            if r not in ctx:
                continue
            rr = ctx.filectx(r).data()

            # bdiff.blocks() returns blocks of matching lines
            # count the number of bytes in each
            equal = 0
            alines = mdiff.splitnewlines(aa)
            matches = bdiff.blocks(aa, rr)
            for x1,x2,y1,y2 in matches:
                for line in alines[x1:x2]:
                    equal += len(line)

            lengths = len(aa) + len(rr)
            if lengths:
                myscore = equal*2.0 / lengths
                if myscore >= bestscore:
                    bestname, bestscore = r, myscore
        if bestname:
            yield bestname, a, bestscore

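# Sketch of the similarity score used by findrenames(): for each added file
# it keeps the removed file with the highest ratio
#
#     score = 2.0 * (bytes in matching lines) / (len(added) + len(removed))
#
# and yields (removed, added, score) when the score reaches the threshold,
# a ratio between 0 and 1 (identical contents score 1.0). Filenames and the
# resulting score below are hypothetical:
#
#   list(findrenames(repo, ['new.py'], ['old.py'], 0.75))
#   # -> [('old.py', 'new.py', 0.96)]
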
def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    # we'd use status here, except handling of symlinks and ignore is tricky
    added, unknown, deleted, removed = [], [], [], []
    audit_path = util.path_auditor(repo.root)
    m = match(repo, pats, opts)
    for abs in repo.walk(m):
        target = repo.wjoin(abs)
        good = True
        try:
            audit_path(abs)
        except:
            good = False
        rel = m.rel(abs)
        exact = m.exact(abs)
        if good and abs not in repo.dirstate:
            unknown.append(abs)
            if repo.ui.verbose or not exact:
                repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
        elif repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
            or (os.path.isdir(target) and not os.path.islink(target))):
            deleted.append(abs)
            if repo.ui.verbose or not exact:
                repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
        # for finding renames
        elif repo.dirstate[abs] == 'r':
            removed.append(abs)
        elif repo.dirstate[abs] == 'a':
            added.append(abs)
    if not dry_run:
        repo.remove(deleted)
        repo.add(unknown)
    if similarity > 0:
        for old, new, score in findrenames(repo, added + unknown,
                                           removed + deleted, similarity):
            if repo.ui.verbose or not m.exact(old) or not m.exact(new):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (m.rel(old), m.rel(new), score * 100))
            if not dry_run:
                repo.copy(old, new)

def copy(ui, repo, pats, opts, rename=False):
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")

    def walkpat(pat):
        srcs = []
        m = match(repo, [pat], opts, globbed=True)
        for abs in repo.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in '?r':
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        abstarget = util.canonpath(repo.root, cwd, otarget)
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.exists(target)
        if not after and exists or after and state in 'mn':
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            if not exists:
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                util.copyfile(src, target)
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        origsrc = repo.dirstate.copied(abssrc) or abssrc
        if abstarget == origsrc: # copying back a copy?
            if state not in 'mn' and not dryrun:
                repo.dirstate.normallookup(abstarget)
        else:
            if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
                if not ui.quiet:
                    ui.warn(_("%s has not been committed yet, so no copy "
                              "data will be stored for %s.\n")
                            % (repo.pathto(origsrc, cwd), reltarget))
                if repo.dirstate[abstarget] in '?r' and not dryrun:
                    repo.add([abstarget])
            elif not dryrun:
                repo.copy(origsrc, abstarget)

        if rename and not dryrun:
            repo.remove([abssrc], not after)

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = util.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if _match.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = util.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest,
                                         util.localpath(s[0])[striplen:])
                        if os.path.exists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res


    pats = expandpats(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or _match.patkind(pats[0]):
            raise util.Abort(_('with multiple sources, destination must be an '
                               'existing directory'))
        if util.endswithsep(dest):
            raise util.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors

def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None):
    '''Run a command as a service.'''

    if opts['daemon'] and not opts['daemon_pipefds']:
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        # Don't pass --cwd to the child process, because we've already
        # changed directory.
        for i in xrange(1,len(args)):
            if args[i].startswith('--cwd='):
                del args[i]
                break
            elif args[i].startswith('--cwd'):
                del args[i:i+2]
                break
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        if parentfn:
            return parentfn(pid)
        else:
            os._exit(0)

    if initfn:
        initfn()

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        try:
            os.setsid()
        except AttributeError:
            pass
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()

        nullfd = os.open(util.nulldev, os.O_RDWR)
        logfilefd = nullfd
        if logfile:
            logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
        os.dup2(nullfd, 0)
        os.dup2(logfilefd, 1)
        os.dup2(logfilefd, 2)
        if nullfd not in (0, 1, 2):
            os.close(nullfd)
        if logfile and logfilefd not in (0, 1, 2):
            os.close(logfilefd)

    if runfn:
        return runfn()

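# Sketch of driving service() with hypothetical callbacks (this is similar
# to how a daemonized server command would use it). The opts dict must carry
# the 'daemon', 'daemon_pipefds' and 'pid_file' keys. With 'daemon' set and
# no pipe fds, the parent re-spawns itself with --daemon-pipefds and waits
# for the child to write one byte; the child then detaches via setsid(),
# redirects stdio to the logfile (or the null device) and calls runfn:
#
#   opts = {'daemon': True, 'daemon_pipefds': '', 'pid_file': 'hg.pid'}
#   service(opts, initfn=create_server, runfn=run_server,
#           logfile='access.log')
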
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, patch, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.patch = patch
        self.diffopts = diffopts
        self.header = {}
        self.hunk = {}
        self.lastheader = None

    def flush(self, rev):
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def show(self, ctx, copies=(), **props):
        if self.buffered:
            self.ui.pushbuffer()
            self._show(ctx, copies, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, props)

    def _show(self, ctx, copies, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        log = self.repo.changelog
        changes = log.read(changenode)
        date = util.datestr(changes[2])
        extra = changes[5]
        branch = extra.get("branch")

        hexfunc = self.ui.debugflag and hex or short

        parents = [(p, hexfunc(log.node(p)))
                   for p in self._meaningful_parentrevs(log, rev)]

        self.ui.write(_("changeset:   %d:%s\n") % (rev, hexfunc(changenode)))

        # don't show the default branch name
        if branch != 'default':
            branch = encoding.tolocal(branch)
            self.ui.write(_("branch:      %s\n") % branch)
        for tag in self.repo.nodetags(changenode):
            self.ui.write(_("tag:         %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent:      %d:%s\n") % parent)

        if self.ui.debugflag:
            self.ui.write(_("manifest:    %d:%s\n") %
                          (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.write(_("user:        %s\n") % changes[1])
        self.ui.write(_("date:        %s\n") % date)

        if self.ui.debugflag:
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)))
        elif changes[3] and self.ui.verbose:
            self.ui.write(_("files:       %s\n") % " ".join(changes[3]))
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(_("copies:      %s\n") % ' '.join(copies))

        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                self.ui.write(_("extra:       %s=%s\n")
                              % (key, value.encode('string_escape')))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"))
                self.ui.write(description)
                self.ui.write("\n\n")
            else:
                self.ui.write(_("summary:     %s\n") %
                              description.splitlines()[0])
        self.ui.write("\n")

        self.showpatch(changenode)

    def showpatch(self, node):
        if self.patch:
            prev = self.repo.changelog.parents(node)[0]
            chunks = patch.diff(self.repo, prev, node, match=self.patch,
                                opts=patch.diffopts(self.ui, self.diffopts))
            for chunk in chunks:
                self.ui.write(chunk)
            self.ui.write("\n")

    def _meaningful_parentrevs(self, log, rev):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = log.parentrevs(rev)
        if not self.ui.debugflag and parents[1] == nullrev:
            if parents[0] >= rev - 1:
                parents = []
            else:
                parents = [parents[0]]
        return parents


class changeset_templater(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
        formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
        self.t = templater.templater(mapfile, {'formatnode': formatnode},
                                     cache={
                                         'parent': '{rev}:{node|formatnode} ',
                                         'manifest': '{rev}:{node|formatnode}',
                                         'filecopy': '{name} ({source})'})

    def use_template(self, t):
        '''set template string to use'''
        self.t.cache['changeset'] = t

    def _meaningful_parentrevs(self, ctx):
        """Return list of meaningful (or all if debug) parentrevs for rev.
        """
        parents = ctx.parents()
        if len(parents) > 1:
            return parents
        if self.ui.debugflag:
            return [parents[0], self.repo['null']]
        if parents[0].rev() >= ctx.rev() - 1:
            return []
        return parents

    def _show(self, ctx, copies, props):
        '''show a single changeset or file revision'''

        def showlist(name, values, plural=None, **args):
            '''expand set of values.
            name is name of key in template map.
            values is list of strings or dicts.
            plural is plural of name, if not simply name + 's'.

            expansion works like this, given name 'foo'.

            if values is empty, expand 'no_foos'.

            if 'foo' not in template map, return values as a string,
            joined by space.

            expand 'start_foos'.

            for each value, expand 'foo'. if 'last_foo' in template
            map, expand it instead of 'foo' for last key.

            expand 'end_foos'.
            '''
            if plural: names = plural
            else: names = name + 's'
            if not values:
                noname = 'no_' + names
                if noname in self.t:
                    yield self.t(noname, **args)
                return
            if name not in self.t:
                if isinstance(values[0], str):
                    yield ' '.join(values)
                else:
                    for v in values:
                        yield dict(v, **args)
                return
            startname = 'start_' + names
            if startname in self.t:
                yield self.t(startname, **args)
            vargs = args.copy()
            def one(v, tag=name):
                try:
                    vargs.update(v)
                except (AttributeError, ValueError):
                    try:
                        for a, b in v:
                            vargs[a] = b
                    except ValueError:
                        vargs[name] = v
                return self.t(tag, **vargs)
            lastname = 'last_' + name
            if lastname in self.t:
                last = values.pop()
            else:
                last = None
            for v in values:
                yield one(v)
            if last is not None:
                yield one(last, tag=lastname)
            endname = 'end_' + names
            if endname in self.t:
                yield self.t(endname, **args)

        def showbranches(**args):
            branch = ctx.branch()
            if branch != 'default':
                branch = encoding.tolocal(branch)
                return showlist('branch', [branch], plural='branches', **args)

        def showparents(**args):
            parents = [[('rev', p.rev()), ('node', p.hex())]
                       for p in self._meaningful_parentrevs(ctx)]
            return showlist('parent', parents, **args)

        def showtags(**args):
            return showlist('tag', ctx.tags(), **args)

        def showextras(**args):
            for key, value in sorted(ctx.extra().items()):
                args = args.copy()
                args.update(dict(key=key, value=value))
                yield self.t('extra', **args)

        def showcopies(**args):
            c = [{'name': x[0], 'source': x[1]} for x in copies]
            return showlist('file_copy', c, plural='file_copies', **args)

        files = []
        def getfiles():
            if not files:
                files[:] = self.repo.status(ctx.parents()[0].node(),
                                            ctx.node())[:3]
            return files
        def showfiles(**args):
            return showlist('file', ctx.files(), **args)
        def showmods(**args):
            return showlist('file_mod', getfiles()[0], **args)
        def showadds(**args):
            return showlist('file_add', getfiles()[1], **args)
        def showdels(**args):
            return showlist('file_del', getfiles()[2], **args)
        def showmanifest(**args):
            args = args.copy()
            args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
                             node=hex(ctx.changeset()[0])))
            return self.t('manifest', **args)

        def showdiffstat(**args):
            diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
            files, adds, removes = 0, 0, 0
            for i in patch.diffstatdata(util.iterlines(diff)):
                files += 1
                adds += i[1]
                removes += i[2]
            return '%s: +%s/-%s' % (files, adds, removes)

        defprops = {
            'author': ctx.user(),
            'branches': showbranches,
            'date': ctx.date(),
            'desc': ctx.description().strip(),
            'file_adds': showadds,
            'file_dels': showdels,
            'file_mods': showmods,
            'files': showfiles,
            'file_copies': showcopies,
            'manifest': showmanifest,
            'node': ctx.hex(),
            'parents': showparents,
            'rev': ctx.rev(),
            'tags': showtags,
            'extras': showextras,
            'diffstat': showdiffstat,
            }
        props = props.copy()
        props.update(defprops)

        # find correct templates for current mode

        tmplmodes = [
            (True, None),
            (self.ui.verbose, 'verbose'),
            (self.ui.quiet, 'quiet'),
            (self.ui.debugflag, 'debug'),
        ]

        types = {'header': '', 'changeset': 'changeset'}
        for mode, postfix in tmplmodes:
            for type in types:
                cur = postfix and ('%s_%s' % (type, postfix)) or type
                if mode and cur in self.t:
                    types[type] = cur

        try:

            # write header
            if types['header']:
                h = templater.stringify(self.t(types['header'], **props))
                if self.buffered:
                    self.header[ctx.rev()] = h
                else:
                    self.ui.write(h)

            # write changeset metadata, then patch if requested
            key = types['changeset']
            self.ui.write(templater.stringify(self.t(key, **props)))
            self.showpatch(ctx.node())

        except KeyError, inst:
            msg = _("%s: no key named '%s'")
            raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
        except SyntaxError, inst:
            raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))

def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # options
    patch = False
    if opts.get('patch'):
        patch = matchfn or matchall(repo)

    tmpl = opts.get('template')
    style = None
    if tmpl:
        tmpl = templater.parsestring(tmpl, quoted=False)
    else:
        style = opts.get('style')

    # ui settings
    if not (tmpl or style):
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            tmpl = templater.parsestring(tmpl)
        else:
            style = ui.config('ui', 'style')

    if not (tmpl or style):
        return changeset_printer(ui, repo, patch, opts, buffered)

    mapfile = None
    if style and not tmpl:
        mapfile = style
        if not os.path.split(mapfile)[0]:
            mapname = (templater.templatepath('map-cmdline.' + mapfile)
                       or templater.templatepath(mapfile))
            if mapname: mapfile = mapname

    try:
        t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
    except SyntaxError, inst:
        raise util.Abort(inst.args[0])
    if tmpl: t.use_template(tmpl)
    return t

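# Sketch of the display-selection precedence implemented above (assuming no
# logtemplate/style is configured in [ui]; the option dicts are hypothetical).
# A bare style name is looked up as map-cmdline.<style> on the template path:
#
#   show_changeset(ui, repo, {'template': '{rev}\n'})  # templater, inline template
#   show_changeset(ui, repo, {'style': 'compact'})     # templater, map-cmdline.compact
#   show_changeset(ui, repo, {})                       # plain changeset_printer
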
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""
    df = util.matchdate(date)
    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
    results = {}
    for st, rev, fns in changeiter:
        if st == 'add':
            d = get(rev)[2]
            if df(d[0]):
                results[rev] = d
        elif st == 'iter':
            if rev in results:
                ui.status(_("Found revision %s from %s\n") %
                          (rev, util.datestr(results[rev])))
                return str(rev)

    raise util.Abort(_("revision matching date not found"))

def walkchangerevs(ui, repo, pats, change, opts):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, matchfn) tuple. The iterator
    yields 3-tuples. They will be of one of the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given filenames
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        if start < end:
            while start < end:
                yield start, min(windowsize, end-start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start-end-1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2

    m = match(repo, pats, opts)
    follow = opts.get('follow') or opts.get('follow_first')

    if not len(repo):
        return [], m

    if follow:
        defrange = '%s:0' % repo['.'].rev()
    else:
        defrange = '-1:0'
    revs = revrange(repo, opts['rev'] or [defrange])
    wanted = set()
    slowpath = m.anypats() or (m.files() and opts.get('removed'))
    fncache = {}

    if not slowpath and not m.files():
        # No files, no patterns. Display all revs.
        wanted = set(revs)
    copies = []
    if not slowpath:
        # Only files, no patterns. Check the history of each file.
        def filerevgen(filelog, node):
            cl_count = len(repo)
            if node is None:
                last = len(filelog) - 1
            else:
                last = filelog.rev(node)
            for i, window in increasing_windows(last, nullrev):
                revs = []
                for j in xrange(i - window, i + 1):
                    n = filelog.node(j)
                    revs.append((filelog.linkrev(j),
                                 follow and filelog.renamed(n)))
                for rev in reversed(revs):
                    # only yield rev for which we have the changelog, it can
                    # happen while doing "hg log" during a pull or commit
                    if rev[0] < cl_count:
                        yield rev
        def iterfiles():
            for filename in m.files():
                yield filename, None
            for filename_node in copies:
                yield filename_node
        minrev, maxrev = min(revs), max(revs)
        for file_, node in iterfiles():
            filelog = repo.file(file_)
            if not len(filelog):
                if node is None:
                    # A zero count may be a directory or deleted file, so
                    # try to find matching entries on the slow path.
                    if follow:
                        raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
                    slowpath = True
                    break
                else:
                    ui.warn(_('%s:%s copy source revision cannot be found!\n')
                            % (file_, short(node)))
                    continue
            for rev, copied in filerevgen(filelog, node):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted.add(rev)
                    if follow and copied:
                        copies.append(copied)
    if slowpath:
        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'filenames'))

        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i, window in increasing_windows(len(repo) - 1, nullrev):
                for j in xrange(i - window, i + 1):
                    yield j, change(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(m, changefiles)
            if matches:
                fncache[rev] = matches
                wanted.add(rev)

    class followfilter(object):
        def __init__(self, onlyfirst=False):
            self.startrev = nullrev
            self.roots = []
            self.onlyfirst = onlyfirst

        def match(self, rev):
            def realparents(rev):
                if self.onlyfirst:
                    return repo.changelog.parentrevs(rev)[0:1]
                else:
                    return filter(lambda x: x != nullrev,
                                  repo.changelog.parentrevs(rev))

            if self.startrev == nullrev:
                self.startrev = rev
                return True

            if rev > self.startrev:
                # forward: all descendants
                if not self.roots:
                    self.roots.append(self.startrev)
                for parent in realparents(rev):
                    if parent in self.roots:
                        self.roots.append(rev)
                        return True
            else:
                # backwards: all parents
                if not self.roots:
                    self.roots.extend(realparents(self.startrev))
                if rev in self.roots:
                    self.roots.remove(rev)
                    self.roots.extend(realparents(rev))
                    return True

            return False

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo.changelog.rev(repo.lookup(rev))
        ff = followfilter()
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop-1, -1):
            if ff.match(x):
                wanted.discard(x)

    def iterate():
        if follow and not m.files():
            ff = followfilter(onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        for i, window in increasing_windows(0, len(revs)):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:i+window] if want(rev)]
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                if not fns:
                    def fns_generator():
                        for f in change(rev)[3]:
                            if m(f):
                                yield f
                    fns = fns_generator()
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), m

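# Sketch of consuming the walkchangerevs() iterator, following the same
# pattern as finddate() above: gather per-revision data on 'add' events and
# emit output in the requested order on 'iter' events; 'window' events mark
# window boundaries and can be used to reset per-window state:
#
#   get = util.cachefunc(lambda r: repo[r].changeset())
#   changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev': None})
#   for st, rev, fns in changeiter:
#       if st == 'add':
#           pass   # prepare/cache data for rev (fns lists the matched files)
#       elif st == 'iter':
#           pass   # display rev, e.g. via a changeset_printer
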
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        addremove(repo, pats, opts)

    return commitfunc(ui, repo, message, match(repo, pats, opts), opts)

def commiteditor(repo, ctx, subs):
    if ctx.description():
        return ctx.description()
    return commitforceeditor(repo, ctx, subs)

def commitforceeditor(repo, ctx, subs):
    edittext = []
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    if ctx.description():
        edittext.append(ctx.description())
    edittext.append("")
    edittext.append("") # Empty line between message and comments.
    edittext.append(_("HG: Enter commit message."
                      " Lines beginning with 'HG:' are removed."))
    edittext.append(_("HG: Leave message empty to abort commit."))
    edittext.append("HG: --")
    edittext.append(_("HG: user: %s") % ctx.user())
    if ctx.p2():
        edittext.append(_("HG: branch merge"))
    if ctx.branch():
        edittext.append(_("HG: branch '%s'")
                        % encoding.tolocal(ctx.branch()))
    edittext.extend([_("HG: subrepo %s") % s for s in subs])
    edittext.extend([_("HG: added %s") % f for f in added])
    edittext.extend([_("HG: changed %s") % f for f in modified])
    edittext.extend([_("HG: removed %s") % f for f in removed])
    if not added and not modified and not removed:
        edittext.append(_("HG: no files changed"))
    edittext.append("")
    # run editor in the repository root
    olddir = os.getcwd()
    os.chdir(repo.root)
    text = repo.ui.edit("\n".join(edittext), ctx.user())
    text = re.sub("(?m)^HG:.*\n", "", text)
    os.chdir(olddir)

    if not text.strip():
        raise util.Abort(_("empty commit message"))

    return text
|