From patchwork Sun Mar 23 00:21:39 2014
Content-Type: text/plain; charset="utf-8"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [1,of,2] tags: Log events related to tags cache
From: Gregory Szorc
X-Patchwork-Id: 4040
Message-Id: <4041f83efd1de680c82b.1395534099@77.1.168.192.in-addr.arpa>
To: mercurial-devel@selenic.com
Date: Sat, 22 Mar 2014 17:21:39 -0700

# HG changeset patch
# User Gregory Szorc
# Date 1395532277 25200
#      Sat Mar 22 16:51:17 2014 -0700
# Node ID 4041f83efd1de680c82b65c776f85b642a59754a
# Parent  c7d543eebe1a703eeb8bb02ee3a2ae2e08746c4d
tags: Log events related to tags cache

We now log when .hg/cache/tags data is built from scratch by reading
manifests and when the file is written.

diff --git a/mercurial/tags.py b/mercurial/tags.py
--- a/mercurial/tags.py
+++ b/mercurial/tags.py
@@ -10,16 +10,17 @@
 # Eventually, it could take care of updating (adding/removing/moving)
 # tags too.
 
 from node import nullid, bin, hex, short
 from i18n import _
 import encoding
 import error
 import errno
+import time
 
 def findglobaltags(ui, repo, alltags, tagtypes):
     '''Find global tags in repo by reading .hgtags from every head that
     has a distinct version of it, using a cache to avoid excess work.
 
     Updates the dicts alltags, tagtypes in place: alltags maps tag name
     to (node, hist) pair (see _readtags() below), and tagtypes maps tag
     name to tag type ("global" in this case).'''
 
     # This is so we can be lazy and assume alltags contains only global
@@ -229,16 +230,18 @@ def _readtagcache(ui, repo):
 
     # N.B. in case 4 (nodes destroyed), "new head" really means "newly
     # exposed".
     if not len(repo.file('.hgtags')):
         # No tags have ever been committed, so we can avoid a
         # potentially expensive search.
         return (repoheads, cachefnode, None, True)
 
+    starttime = time.time()
+
     newheads = [head
                 for head in repoheads
                 if head not in set(cacheheads)]
 
     # Now we have to lookup the .hgtags filenode for every new head.
     # This is the most expensive part of finding tags, so performance
     # depends primarily on the size of newheads. Worst case: no cache
     # file, so newheads == repoheads.
@@ -246,27 +249,35 @@ def _readtagcache(ui, repo):
         cctx = repo[head]
         try:
             fnode = cctx.filenode('.hgtags')
             cachefnode[head] = fnode
         except error.LookupError:
             # no .hgtags file on this head
             pass
 
+    duration = time.time() - starttime
+    ui.log('cache',
+           'Resolved %d tags cache entries from %d manifests in %0.2f seconds\n',
+           len(cachefnode), len(newheads), duration)
+
     # Caller has to iterate over all heads, but can use the filenodes in
     # cachefnode to get to each .hgtags revision quickly.
     return (repoheads, cachefnode, None, True)
 
 def _writetagcache(ui, repo, heads, tagfnode, cachetags):
 
     try:
         cachefile = repo.opener('cache/tags', 'w', atomictemp=True)
     except (OSError, IOError):
         return
 
+    ui.log('cache', 'Writing tags cache file with %d heads and %d tags\n',
+           len(heads), len(cachetags))
+
     realheads = repo.heads()    # for sanity checks below
     for head in heads:
         # temporary sanity checks; these can probably be removed
         # once this code has been in crew for a few weeks
         assert head in repo.changelog.nodemap, \
                'trying to write non-existent node %s to tag cache' % short(head)
         assert head in realheads, \
                'trying to write non-head %s to tag cache' % short(head)
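
For reviewers who want to see these events in practice: ui.log() is a no-op
on a plain ui, so the messages above only surface through something that
hooks it (the bundled blackbox extension, when enabled, should record them).
Below is a minimal sketch, not part of this patch, of a throwaway extension
that echoes just the 'cache' service messages to stderr. The file name
cachelog.py and the output prefix are illustrative only; it assumes the
ui.log(service, *msg, **opts) hook signature that this patch calls into.

    # cachelog.py - throwaway sketch for observing the 'cache' ui.log()
    # events added by this patch; not part of the patch itself.

    def uisetup(ui):
        class cacheloggingui(ui.__class__):
            def log(self, service, *msg, **opts):
                # keep any other log handlers (e.g. blackbox) working
                super(cacheloggingui, self).log(service, *msg, **opts)
                if service == 'cache' and msg:
                    # msg is a printf-style format string plus its arguments
                    self.warn('cache: ' + (msg[0] % msg[1:]))

        ui.__class__ = cacheloggingui

Enabling it for a single run with something like
"hg --config extensions.cachelog=/path/to/cachelog.py tags" on a repository
whose .hg/cache/tags file is missing or stale should print both the
"Resolved ..." and "Writing tags cache file ..." lines.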