Commit 4d96cae6 authored by Qijiang Fan

Upstream version 1.3

parent 6e8f1b65
repo: f2636cfed11500fdc47d1e3822d8e4a2bd636bf7
node: 0cbf9fd89672e73165e1bb4db1ec8f7f65b95c94
branch: default
tag: 1.3
@@ -12,3 +12,8 @@ MANIFEST
dist
*.egg-info
hgsubversion/__version__.py
nbproject
.project
.pydevproject
.settings
*.orig
@@ -4,3 +4,4 @@
8e621dbb82d4363a85317638ad237e2817c56347 1.1.1
093ae2915b452539b44390ee4ea14987484e1eee 1.1.2
708234ad6c97fb52417e0b46a86c8373e25123a5 1.2
4bbc6bf947f56a92e95a04a27b94a9f72d5482d7 1.2.1
@@ -38,14 +38,14 @@ try:
# force demandimport to load templatekw
templatekw.keywords
except ImportError:
templatekw = None
templatekw = None
try:
from mercurial import revset
# force demandimport to load revset
revset.methods
except ImportError:
revset = None
revset = None
try:
from mercurial import subrepo
@@ -99,14 +99,21 @@ wrapcmds = { # cmd: generic, target, fixdoc, ppopts, opts
]),
}
# only need the discovery variant of this code when we drop hg < 1.6
try:
from mercurial import discovery
def findcommonoutgoing(orig, *args, **opts):
capable = getattr(args[1], 'capable', lambda x: False)
if capable('subversion'):
return wrappers.findcommonoutgoing(*args, **opts)
else:
return orig(*args, **opts)
extensions.wrapfunction(discovery, 'findcommonoutgoing', findcommonoutgoing)
except AttributeError:
# only need the discovery variant of this code when we drop hg < 1.6
def findoutgoing(orig, *args, **opts):
capable = getattr(args[1], 'capable', lambda x: False)
if capable('subversion'):
return wrappers.outgoing(*args, **opts)
return wrappers.findoutgoing(*args, **opts)
else:
return orig(*args, **opts)
extensions.wrapfunction(discovery, 'findoutgoing', findoutgoing)
@@ -169,7 +176,7 @@ def extsetup():
def reposetup(ui, repo):
if repo.local():
svnrepo.generate_repo_class(ui, repo)
svnrepo.generate_repo_class(ui, repo)
_old_local = hg.schemes['file']
def _lookup(url):
@@ -8,6 +8,7 @@ from mercurial import node
import svnwrap
import util
import svnexternals
class RevisionData(object):
@@ -118,7 +119,7 @@ class HgEditor(svnwrap.Editor):
# assuming it is a directory
self.current.externals[path] = None
map(self.current.delete, [pat for pat in self.current.files.iterkeys()
if pat.startswith(path+'/')])
if pat.startswith(path + '/')])
for f in ctx.walk(util.PrefixMatch(br_path2)):
f_p = '%s/%s' % (path, f[len(br_path2):])
if f_p not in self.current.files:
@@ -231,11 +232,11 @@ class HgEditor(svnwrap.Editor):
if tag:
changeid = self.meta.tags[tag]
source_rev, source_branch = self.meta.get_source_rev(changeid)[:2]
cp_f = ''
frompath = ''
else:
source_rev = copyfrom_revision
cp_f, source_branch = self.meta.split_branch_path(copyfrom_path)[:2]
if cp_f == '' and br_path == '':
frompath, source_branch = self.meta.split_branch_path(copyfrom_path)[:2]
if frompath == '' and br_path == '':
assert br_path is not None
tmp = source_branch, source_rev, self.current.rev.revnum
self.meta.branches[branch] = tmp
@@ -243,23 +244,22 @@ class HgEditor(svnwrap.Editor):
if new_hash == node.nullid:
self.current.missing.add('%s/' % path)
return path
cp_f_ctx = self.repo.changectx(new_hash)
if cp_f != '/' and cp_f != '':
cp_f = '%s/' % cp_f
fromctx = self.repo.changectx(new_hash)
if frompath != '/' and frompath != '':
frompath = '%s/' % frompath
else:
cp_f = ''
frompath = ''
copies = {}
for f in cp_f_ctx:
if not f.startswith(cp_f):
for f in fromctx:
if not f.startswith(frompath):
continue
f2 = f[len(cp_f):]
fctx = cp_f_ctx.filectx(f)
fp_c = path + '/' + f2
self.current.set(fp_c, fctx.data(), 'x' in fctx.flags(), 'l' in fctx.flags())
if fp_c in self.current.deleted:
del self.current.deleted[fp_c]
fctx = fromctx.filectx(f)
dest = path + '/' + f[len(frompath):]
self.current.set(dest, fctx.data(), 'x' in fctx.flags(), 'l' in fctx.flags())
if dest in self.current.deleted:
del self.current.deleted[dest]
if branch == source_branch:
copies[fp_c] = f
copies[dest] = f
if copies:
# Preserve the directory copy records if no file was changed between
# the source and destination revisions, or discard it completely.
@@ -267,8 +267,15 @@ class HgEditor(svnwrap.Editor):
if parentid != revlog.nullid:
parentctx = self.repo.changectx(parentid)
for k, v in copies.iteritems():
if util.issamefile(parentctx, cp_f_ctx, v):
if util.issamefile(parentctx, fromctx, v):
self.current.copies[k] = v
# Copy the externals definitions of copied directories
fromext = svnexternals.parse(self.ui, fromctx)
for p, v in fromext.iteritems():
pp = p and (p + '/') or ''
if pp.startswith(frompath):
dest = (path + '/' + pp[len(frompath):]).rstrip('/')
self.current.externals[dest] = v
return path
@svnwrap.ieditor
@@ -5,6 +5,7 @@ from mercurial import util as hgutil
from mercurial import node
import svncommands
import util
class AuthorMap(dict):
'''A mapping from Subversion-style authors to Mercurial-style
@@ -34,6 +35,8 @@ class AuthorMap(dict):
def load(self, path):
''' Load mappings from a file at the specified path. '''
path = os.path.expandvars(path)
if not os.path.exists(path):
return
@@ -43,12 +46,9 @@
self.ui.note('reading authormap from %s\n' % path)
f = open(path, 'r')
for number, line in enumerate(f):
if writing:
writing.write(line)
for number, line_org in enumerate(f):
line = line.split('#')[0]
line = line_org.split('#')[0]
if not line.strip():
continue
@@ -61,10 +61,15 @@
src = src.strip()
dst = dst.strip()
self.ui.debug('adding author %s to author map\n' % src)
if src in self and dst != self[src]:
msg = 'overriding author: "%s" to "%s" (%s)\n'
self.ui.status(msg % (self[src], dst, src))
if writing:
if not src in self:
self.ui.debug('adding author %s to author map\n' % src)
elif dst != self[src]:
msg = 'overriding author: "%s" to "%s" (%s)\n'
self.ui.status(msg % (self[src], dst, src))
writing.write(line_org)
self[src] = dst
f.close()
@@ -132,14 +137,14 @@ class Tags(dict):
print 'tagmap too new -- please upgrade'
raise NotImplementedError
for l in f:
hash, revision, tag = l.split(' ', 2)
ha, revision, tag = l.split(' ', 2)
revision = int(revision)
tag = tag[:-1]
if self.endrev is not None and revision > self.endrev:
break
if not tag:
continue
dict.__setitem__(self, tag, node.bin(hash))
dict.__setitem__(self, tag, node.bin(ha))
f.close()
def _write(self):
@@ -164,11 +169,11 @@
def __setitem__(self, tag, info):
if not tag:
raise hgutil.Abort('tag cannot be empty')
hash, revision = info
ha, revision = info
f = open(self.path, 'a')
f.write('%s %s %s\n' % (node.hex(hash), revision, tag))
f.write('%s %s %s\n' % (node.hex(ha), revision, tag))
f.close()
dict.__setitem__(self, tag, hash)
dict.__setitem__(self, tag, ha)
class RevMap(dict):
@@ -178,13 +183,28 @@ class RevMap(dict):
def __init__(self, repo):
dict.__init__(self)
self.path = os.path.join(repo.path, 'svn', 'rev_map')
self.youngest = 0
self.ypath = os.path.join(repo.path, 'svn', 'lastpulled')
# TODO(durin42): Consider moving management of the youngest
# file to svnmeta itself rather than leaving it here.
# must load youngest file first, or else self._load() can
# clobber the info
_youngest_str = util.load_string(self.ypath, '0')
self._youngest = int(_youngest_str.strip())
self.oldest = 0
if os.path.isfile(self.path):
self._load()
else:
self._write()
def _set_youngest(self, rev):
self._youngest = max(self._youngest, rev)
util.save_string(self.ypath, str(self._youngest) + '\n')
def _get_youngest(self):
return self._youngest
youngest = property(_get_youngest, _set_youngest)
def hashes(self):
return dict((v, k) for (k, v) in self.iteritems())
@@ -199,7 +219,7 @@ class RevMap(dict):
print 'revmap too new -- please upgrade'
raise NotImplementedError
for l in f:
revnum, hash, branch = l.split(' ', 2)
revnum, ha, branch = l.split(' ', 2)
if branch == '\n':
branch = None
else:
@@ -209,7 +229,7 @@ class RevMap(dict):
self.youngest = revnum
if revnum < self.oldest or not self.oldest:
self.oldest = revnum
dict.__setitem__(self, (revnum, branch), node.bin(hash))
dict.__setitem__(self, (revnum, branch), node.bin(ha))
f.close()
def _write(self):
@@ -217,17 +237,17 @@
f.write('%s\n' % self.VERSION)
f.close()
def __setitem__(self, key, hash):
def __setitem__(self, key, ha):
revnum, branch = key
f = open(self.path, 'a')
b = branch or ''
f.write(str(revnum) + ' ' + node.hex(hash) + ' ' + b + '\n')
f.write(str(revnum) + ' ' + node.hex(ha) + ' ' + b + '\n')
f.close()
if revnum > self.youngest or not self.youngest:
self.youngest = revnum
if revnum < self.oldest or not self.oldest:
self.oldest = revnum
dict.__setitem__(self, (revnum, branch), hash)
dict.__setitem__(self, (revnum, branch), ha)
class FileMap(object):
@@ -247,12 +267,12 @@ class FileMap(object):
yield name[:e], name[e+1:]
e = name.rfind('/', 0, e)
def check(self, map, path):
map = getattr(self, map)
for pre, suf in self._rpairs(path):
if pre not in map:
def check(self, m, path):
m = getattr(self, m)
for pre, _suf in self._rpairs(path):
if pre not in m:
continue
return map[pre]
return m[pre]
return None
def __contains__(self, path):
@@ -268,13 +288,17 @@ class FileMap(object):
return False
return True
def add(self, fn, map, path):
mapping = getattr(self, map)
# Needed so empty filemaps are false
def __len__(self):
return len(self.include) + len(self.exclude)
def add(self, fn, m, path):
mapping = getattr(self, m)
if path in mapping:
msg = 'duplicate %s entry in %s: "%s"\n'
self.ui.status(msg % (map, fn, path))
self.ui.status(msg % (m, fn, path))
return
bits = map.strip('e'), path
bits = m.strip('e'), path
self.ui.debug('%sing %s\n' % bits)
mapping[path] = path
@@ -365,8 +389,8 @@ class TagMap(dict):
oldname = newname
other =
The oldname tag from SVN will be represented as newname in the hg tags;
the other tag will not be reflected in the hg repository.
The oldname tag from SVN will be represented as newname in the hg tags;
the other tag will not be reflected in the hg repository.
'''
def __init__(self, ui, path):
@@ -122,6 +122,9 @@ def commit(ui, repo, rev_ctx, meta, base_revision, svn):
props.setdefault(file, {})['svn:executable'] = '*'
if 'l' in fctx.flags():
props.setdefault(file, {})['svn:special'] = '*'
isbinary = hgutil.binary(new_data)
if isbinary:
props.setdefault(file, {})['svn:mime-type'] = 'application/octet-stream'
if file not in parent:
renamed = fctx.renamed()
@@ -141,6 +144,8 @@ def commit(ui, repo, rev_ctx, meta, base_revision, svn):
if ('l' in parent.filectx(file).flags()
and 'l' not in rev_ctx.filectx(file).flags()):
props.setdefault(file, {})['svn:special'] = None
if hgutil.binary(base_data) and not isbinary:
props.setdefault(file, {})['svn:mime-type'] = None
action = 'modify'
else:
pos = file.rfind('/')
@@ -178,11 +183,7 @@ def commit(ui, repo, rev_ctx, meta, base_revision, svn):
if tf in file_data and tf != ntf:
file_data[ntf] = file_data[tf]
if tf in props:
props[ntf] = props[tf]
del props[tf]
if hgutil.binary(file_data[ntf][1]):
props.setdefault(ntf, {}).update(props.get(ntf, {}))
props.setdefault(ntf, {})['svn:mime-type'] = 'application/octet-stream'
props[ntf] = props.pop(tf)
del file_data[tf]
addeddirs = [svnpath(d) for d in addeddirs]
@@ -174,7 +174,7 @@ def convert_rev(ui, meta, svn, r, tbdelta, firstrun):
date,
extra)
new_hash = meta.repo.commitctx(current_ctx)
new_hash = meta.repo.svn_commitctx(current_ctx)
util.describe_commit(ui, new_hash, branch)
if (rev.revnum, branch) not in meta.revmap and not tag:
meta.revmap[rev.revnum, branch] = new_hash
@@ -209,7 +209,7 @@ def convert_rev(ui, meta, svn, r, tbdelta, firstrun):
meta.authors[rev.author],
date,
extra)
new_hash = meta.repo.commitctx(current_ctx)
new_hash = meta.repo.svn_commitctx(current_ctx)
util.describe_commit(ui, new_hash, branch)
if (rev.revnum, branch) not in meta.revmap:
meta.revmap[rev.revnum, branch] = new_hash
@@ -2,10 +2,11 @@ import cStringIO
import errno
import re
from mercurial import patch
from mercurial import node
from mercurial import context
from mercurial import node
from mercurial import patch
from mercurial import revlog
from mercurial import util as hgutil
import svnwrap
import svnexternals
@@ -49,7 +50,6 @@ def print_your_svn_is_old_message(ui): #pragma: no cover
ui.status("In light of that, I'll fall back and do diffs, but it won't do "
"as good a job. You should really upgrade your server.\n")
def mempatchproxy(parentctx, files):
# Avoid circular references patch.patchfile -> mempatch
patchfile = patch.patchfile
@@ -79,17 +79,32 @@ def mempatchproxy(parentctx, files):
def filteriterhunks(meta):
iterhunks = patch.iterhunks
def filterhunks(ui, fp, sourcefile=None, textmode=False):
def filterhunks(*args, **kwargs):
# ui, fp, sourcefile=None, textmode=False
applycurrent = False
# Passing False instead of textmode because we should never
# be ignoring EOL type.
if not iterhunks.func_defaults:
# Since 1.7 (cfedc529e4a1)
gen = iterhunks(ui, fp)
elif len(iterhunks.func_defaults) == 1:
gen = iterhunks(ui, fp, sourcefile)
if iterhunks.func_code.co_argcount == 1:
# Since 1.9 (28762bb767dc)
fp = args[0]
gen = iterhunks(fp)
else:
gen = iterhunks(ui, fp, sourcefile, textmode)
ui, fp = args[:2]
if len(args) > 2:
sourcefile = args[2]
else:
sourcefile = kwargs.get('sourcefile', None)
if len(args) > 3:
textmode = args[3]
else:
textmode = kwargs.get('textmode', False)
if not iterhunks.func_defaults:
# Since 1.7 (cfedc529e4a1)
gen = iterhunks(ui, fp)
elif len(iterhunks.func_defaults) == 1:
gen = iterhunks(ui, fp, sourcefile)
else:
gen = iterhunks(ui, fp, sourcefile, textmode)
for data in gen:
if data[0] == 'file':
if data[1][1] in meta.filemap:
@@ -101,6 +116,71 @@ def filteriterhunks(meta):
yield data
return filterhunks
def patchrepoold(ui, meta, parentctx, patchfp):
files = {}
try:
oldpatchfile = patch.patchfile
olditerhunks = patch.iterhunks
patch.patchfile = mempatchproxy(parentctx, files)
patch.iterhunks = filteriterhunks(meta)
try:
# We can safely ignore the changed list since we are
# handling non-git patches. Touched files are known
# by our memory patcher.
patch_st = patch.applydiff(ui, patchfp, {}, strip=0)
finally:
patch.patchfile = oldpatchfile
patch.iterhunks = olditerhunks
except patch.PatchError:
# TODO: this happens if the svn server has the wrong mime
# type stored and doesn't know a file is binary. It would
# be better to do one file at a time and only do a
# full fetch on files that had problems.
raise BadPatchApply('patching failed')
# if this patch didn't apply right, fall back to exporting the
# entire rev.
if patch_st == -1:
assert False, ('This should only happen on case-insensitive'
' volumes.')
elif patch_st == 1:
# When converting Django, I saw fuzz on .po files that was
# causing revisions to end up failing verification. If that
# can be fixed, maybe this won't ever be reached.
raise BadPatchApply('patching succeeded with fuzz')
return files
try:
class svnbackend(patch.repobackend):
def getfile(self, fname):
data, (islink, isexec) = super(svnbackend, self).getfile(fname)
if islink:
data = 'link ' + data
return data, (islink, isexec)
except AttributeError:
svnbackend = None
def patchrepo(ui, meta, parentctx, patchfp):
if not svnbackend:
return patchrepoold(ui, meta, parentctx, patchfp)
store = patch.filestore()
try:
touched = set()
backend = svnbackend(ui, meta.repo, parentctx, store)
ret = patch.patchbackend(ui, backend, patchfp, 0, touched)
if ret < 0:
raise BadPatchApply('patching failed')
if ret > 0:
raise BadPatchApply('patching succeeded with fuzz')
files = {}
for f in touched:
try:
data, mode, copied = store.getfile(f)
files[f] = data
except IOError:
files[f] = None
return files
finally:
store.close()
def diff_branchrev(ui, svn, meta, branch, branchpath, r, parentctx):
"""Extract all 'branch' content at a given revision.
@@ -146,38 +226,9 @@ def diff_branchrev(ui, svn, meta, branch, branchpath, r, parentctx):
# are marked as touched. Content is loaded on demand.
touched_files.update(any_file_re.findall(d))
if d2.strip() and len(re.findall('\n[-+]', d2.strip())) > 0:
try:
oldpatchfile = patch.patchfile
olditerhunks = patch.iterhunks
patch.patchfile = mempatchproxy(parentctx, files_data)
patch.iterhunks = filteriterhunks(meta)
try:
# We can safely ignore the changed list since we are
# handling non-git patches. Touched files are known
# by our memory patcher.
patch_st = patch.applydiff(ui, cStringIO.StringIO(d2),
{}, strip=0)
finally:
patch.patchfile = oldpatchfile
patch.iterhunks = olditerhunks
except patch.PatchError:
# TODO: this happens if the svn server has the wrong mime
# type stored and doesn't know a file is binary. It would
# be better to do one file at a time and only do a
# full fetch on files that had problems.
raise BadPatchApply('patching failed')
files_data = patchrepo(ui, meta, parentctx, cStringIO.StringIO(d2))
for x in files_data.iterkeys():
ui.note('M %s\n' % x)
# if this patch didn't apply right, fall back to exporting the
# entire rev.
if patch_st == -1:
assert False, ('This should only happen on case-insensitive'
' volumes.')
elif patch_st == 1:
# When converting Django, I saw fuzz on .po files that was
# causing revisions to end up failing verification. If that
# can be fixed, maybe this won't ever be reached.
raise BadPatchApply('patching succeeded with fuzz')
else:
ui.status('Not using patch for %s, diff had no hunks.\n' %
r.revnum)
@@ -357,7 +408,7 @@ def fetch_externals(ui, svn, branchpath, r, parentctx):
# revision in the common case.
dirs = set(externals)
if parentctx.node() == revlog.nullid:
dirs.update([p for p,k in svn.list_files(branchpath, r.revnum) if k == 'd'])
dirs.update([p for p, k in svn.list_files(branchpath, r.revnum) if k == 'd'])
dirs.add('')
else:
branchprefix = (branchpath and branchpath + '/') or branchpath
@@ -509,7 +560,7 @@ def branches_in_paths(meta, tbdelta, paths, revnum, checkpath, listdir):
# we need to detect those branches. It's a little thorny and slow, but
# seems to be the best option.
elif paths[p].copyfrom_path and not p.startswith('tags/'):
paths_need_discovery.extend(['%s/%s' % (p,x[0])
paths_need_discovery.extend(['%s/%s' % (p, x[0])
for x in listdir(p, revnum)
if x[1] == 'f'])
@@ -539,6 +590,9 @@ def branches_in_paths(meta, tbdelta, paths, revnum, checkpath, listdir):
def convert_rev(ui, meta, svn, r, tbdelta, firstrun):
# this server fails at replay
if meta.filemap:
raise hgutil.Abort('filemaps currently unsupported with stupid replay.')
branches = branches_in_paths(meta, tbdelta, r.paths, r.revnum,
svn.checkpath, svn.list_files)
brpaths = branches.values()
@@ -659,7 +713,7 @@ def convert_rev(ui, meta, svn, r, tbdelta, firstrun):
meta.authors[r.author],
date,
extra)
ha = meta.repo.commitctx(current_ctx)
ha = meta.repo.svn_commitctx(current_ctx)
if not tag:
if (not origbranch in meta.branches
@@ -55,7 +55,7 @@ def verify(ui, repo, args=None, **opts):
svnfiles.add(fn)
fp = fn
if branchpath:
fp = branchpath + '/' + fn
fp = branchpath + '/' + fn
data, mode = svn.get_file(posixpath.normpath(fp), srev)
fctx = ctx[fn]
dmatch = fctx.data() == data
@@ -95,6 +95,7 @@ def rebuildmeta(ui, repo, args, **opts):
if not os.path.exists(svnmetadir):
os.makedirs(svnmetadir)
lastpulled = open(os.path.join(svnmetadir, 'lastpulled'), 'wb')
revmap = open(os.path.join(svnmetadir, 'rev_map'), 'w')
revmap.write('1\n')
last_rev = -1
@@ -120,13 +121,18 @@ def rebuildmeta(ui, repo, args, **opts):
# it would make us use O(revisions^2) time, so we perform an extra traversal
# of the repository instead. During this traversal, we find all converted
# changesets that close a branch, and store their first parent
youngest = 0
for rev in repo:
util.progress(ui, 'prepare', rev, total=numrevs)
ctx = repo[rev]
extra = ctx.extra()
convinfo = extra.get('convert_revision', None)
if not convinfo:
continue
svnrevnum = int(convinfo.rsplit('@', 1)[1])
youngest = max(youngest, svnrevnum)
if not convinfo or not extra.get('close', None):
if extra.get('close', None) is None:
continue
droprev = lambda x: x.rsplit('@', 1)[0]
@@ -136,6 +142,7 @@ def rebuildmeta(ui, repo, args, **opts):
if droprev(parentinfo) == droprev(convinfo):
closed.add(parentctx.rev())
lastpulled.write(str(youngest) + '\n')
util.progress(ui, 'prepare', None, total=numrevs)
for rev in repo:
@@ -238,7 +245,8 @@ def rebuildmeta(ui, repo, args, **opts):
if parentpath.startswith('tags/') and parentextra.get('close'):
continue
elif parentpath.startswith('branches/'): branch = parentpath[len('branches/'):]
elif parentpath.startswith('branches/'):
branch = parentpath[len('branches/'):]
elif parentpath == 'trunk':
branch = None
else:
@@ -12,6 +12,14 @@ try:
except (ImportError, AttributeError), e:
subrepo = None
passpegrev = True # see svnsubrepo below
try:
canonpath = hgutil.canonpath
except (ImportError, AttributeError):
from mercurial import scmutil
canonpath = scmutil.canonpath
passpegrev = False
import util
class externalsfile(dict):
@@ -51,7 +59,6 @@ class externalsfile(dict):
def read(self, data):
self.clear()
fp = cStringIO.StringIO(data)
dirs = {}
target = None
for line in fp.readlines():
if not line.strip():
@@ -112,7 +119,7 @@ def parsedefinition(line):
revgroup = 2
path, rev, source = m.group(1, 2, 3)
try:
nrev = int(rev)
int(rev) # ensure revision is int()able, so we bail otherwise
norevline = line[:m.start(revgroup)] + '{REV}' + line[m.end(revgroup):]
except (TypeError, ValueError):
norevline = line
@@ -139,6 +146,9 @@ def parsedefinitions(ui, repo, svnroot, exts):