mirror of
https://github.com/mnauw/git-remote-hg.git
synced 2025-11-02 09:35:47 +01:00
1714 lines
51 KiB
Python
Executable File
1714 lines
51 KiB
Python
Executable File
#!/usr/bin/env python2
|
|
#
|
|
# Copyright (c) 2012 Felipe Contreras
|
|
#
|
|
|
|
# Inspired by Rocco Rutte's hg-fast-export
|
|
|
|
# Just copy to your ~/bin, or anywhere in your $PATH.
|
|
# Then you can clone with:
|
|
# git clone hg::/path/to/mercurial/repo/
|
|
#
|
|
# For remote repositories a local clone is stored in
|
|
# "$GIT_DIR/hg/origin/clone/.hg/".
|
|
|
|
from mercurial import hg, ui, bookmarks, context, encoding
|
|
from mercurial import node, error, extensions, discovery, util
|
|
from mercurial import changegroup
|
|
|
|
import re
|
|
import sys
|
|
import os
|
|
import json
|
|
import shutil
|
|
import subprocess
|
|
import urllib
|
|
import atexit
|
|
import urlparse
|
|
import hashlib
|
|
import time as ptime
|
|
|
|
#
|
|
# If you want to see Mercurial revisions as Git commit notes:
|
|
# git config core.notesRef refs/notes/hg
|
|
#
|
|
# If you are not in hg-git-compat mode and want to disable the tracking of
|
|
# named branches:
|
|
# git config --global remote-hg.track-branches false
|
|
#
|
|
# If you want the equivalent of hg's clone/pull--insecure option:
|
|
# git config --global remote-hg.insecure true
|
|
#
|
|
# If you want to switch to hg-git compatibility mode:
|
|
# git config --global remote-hg.hg-git-compat true
|
|
#
|
|
# git:
|
|
# Sensible defaults for git.
|
|
# hg bookmarks are exported as git branches, hg branches are prefixed
|
|
# with 'branches/', HEAD is a special case.
|
|
#
|
|
# hg:
|
|
# Emulate hg-git.
|
|
# Only hg bookmarks are exported as git branches.
|
|
# Commits are modified to preserve hg information and allow bidirectionality.
|
|
#
|
|
|
|
# Patterns for parsing author/committer identities, both git-style
# ("Name <mail>") and the raw fast-export ident lines.
NAME_RE = re.compile('^([^<>]+)')
AUTHOR_RE = re.compile('^([^<>]+?)? ?[<>]([^<>]*)(?:$|>)')
EMAIL_RE = re.compile(r'([^ \t<>]+@[^ \t<>]+)')
AUTHOR_HG_RE = re.compile('^(.*?) ?<(.*?)(?:>(.*))?$')
RAW_AUTHOR_RE = re.compile('^(\w+) (?:(.+)? )?<(.*)> (\d+) ([+-]\d+)')

# Current on-disk format version of the marks file (see Marks.load/upgrade_one).
VERSION = 2
|
|
|
|
def die(msg, *args):
    """Write a formatted ERROR line to stderr and abort with exit status 1."""
    formatted = msg % args
    sys.stderr.write('ERROR: %s\n' % formatted)
    sys.exit(1)
|
|
|
|
def warn(msg, *args):
    """Write a formatted WARNING line to stderr; execution continues."""
    sys.stderr.write('WARNING: ' + (msg % args) + '\n')
|
|
|
|
def gitmode(flags):
    """Map hg file flags to a git mode string: symlink wins over exec."""
    if 'l' in flags:
        return '120000'
    if 'x' in flags:
        return '100755'
    return '100644'
|
|
|
|
def gittz(tz):
    """Convert an hg UTC offset (seconds west of UTC) into git's +/-HHMM form.

    The arithmetic is kept exactly as before (floor division semantics),
    so the output is byte-identical to the original for all inputs.
    """
    hours = -tz / 3600
    minutes = -tz % 3600 / 60
    return '%+03d%02d' % (hours, minutes)
|
|
|
|
def hgmode(mode):
    """Map a git mode string back to hg file flags ('' for regular files)."""
    if mode == '100755':
        return 'x'
    if mode == '120000':
        return 'l'
    return ''
|
|
|
|
def hghex(n):
    """Render a binary hg node id as its 40-character hex string."""
    return node.hex(n)
|
|
|
|
def hgbin(n):
    """Parse a 40-character hex changeset id into a binary hg node id."""
    return node.bin(n)
|
|
|
|
def hgref(ref):
    """Inverse of gitref(): turn the '___' placeholders back into spaces."""
    return ' '.join(ref.split('___'))
|
|
|
|
def gitref(ref):
    """Encode spaces (invalid in git ref names) as '___'; see hgref()."""
    return '___'.join(ref.split(' '))
|
|
|
|
def check_version(*check):
    """Return True if the mercurial version is at least `check` (tuple compare).

    `hg_version` is a global set elsewhere in the file; when it is falsy
    (version could not be determined) we optimistically return True.
    """
    if not hg_version:
        return True
    return hg_version >= check
|
|
|
|
def get_config(config):
    """Return the raw stdout of `git config --get <config>` ('' when unset)."""
    proc = subprocess.Popen(['git', 'config', '--get', config],
                            stdout=subprocess.PIPE)
    out, _ = proc.communicate()
    return out
|
|
|
|
def get_config_bool(config, default=False):
    """Read a git config value as a boolean.

    Returns `default` when the key is unset or the value is neither the
    literal "true" nor "false".
    """
    value = get_config(config).rstrip('\n')
    if value == "true":
        return True
    if value == "false":
        return False
    return default
|
|
|
|
def rev_parse(rev):
    """Return the raw stdout of `git rev-parse --verify -q <rev>`."""
    proc = subprocess.Popen(['git', 'rev-parse', '--verify', '-q', rev],
                            stdout=subprocess.PIPE)
    out, _ = proc.communicate()
    return out
|
|
|
|
def get_rev_hg(commit):
    """Return the hg changeset id recorded for `commit` in the
    refs/notes/hg notes ref (empty output when the commit has no note;
    stderr is swallowed)."""
    cmd = ['git', 'notes', '--ref', 'refs/notes/hg', 'show', commit]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    hgrev, _ = proc.communicate()
    return hgrev
|
|
|
|
class Marks:
    """Persistent mapping between hg changesets and fast-import/export marks.

    Stored as JSON at `path`.  Tracks: the mark per hg hex revision
    ('marks') plus the inverse map, the last exported tip per ref
    ('tips'), the highest mark handed out ('last-mark'), the mark of the
    last notes commit ('last-note') and a format 'version' that allows
    upgrading old marks files.
    """

    def __init__(self, path, repo):
        self.path = path
        self.repo = repo
        self.clear()
        self.load()

        if self.version < VERSION:
            if self.version == 1:
                self.upgrade_one()

            # upgraded?
            if self.version < VERSION:
                # could not upgrade: discard the stale state and start fresh
                self.clear()
                self.version = VERSION

    def clear(self):
        # reset all state to an empty, unversioned marks set
        self.tips = {}
        self.marks = {}
        self.rev_marks = {}
        self.last_mark = 0
        self.version = 0
        self.last_note = 0

    def load(self):
        """Load state from the JSON marks file, if it exists."""
        if not os.path.exists(self.path):
            return

        tmp = json.load(open(self.path))

        self.tips = tmp['tips']
        self.marks = tmp['marks']
        self.last_mark = tmp['last-mark']
        # files written before versioning are implicitly version 1
        self.version = tmp.get('version', 1)
        self.last_note = tmp.get('last-note', 0)

        # rebuild the inverse mapping (mark -> revision)
        for rev, mark in self.marks.iteritems():
            self.rev_marks[mark] = rev

    def upgrade_one(self):
        """Upgrade a version-1 file (local revision numbers) to version 2
        (full hex changeset ids)."""
        def get_id(rev):
            return hghex(self.repo.changelog.node(int(rev)))
        self.tips = dict((name, get_id(rev)) for name, rev in self.tips.iteritems())
        self.marks = dict((get_id(rev), mark) for rev, mark in self.marks.iteritems())
        self.rev_marks = dict((mark, get_id(rev)) for mark, rev in self.rev_marks.iteritems())
        self.version = 2

    def dict(self):
        # serializable snapshot of the persistent state
        return { 'tips': self.tips, 'marks': self.marks,
                'last-mark': self.last_mark, 'version': self.version,
                'last-note': self.last_note }

    def store(self):
        """Write the current state back to the JSON marks file."""
        json.dump(self.dict(), open(self.path, 'w'))

    def __str__(self):
        return str(self.dict())

    def from_rev(self, rev):
        """Return the mark for hex revision `rev` (KeyError if unknown)."""
        return self.marks[rev]

    def to_rev(self, mark):
        """Return the hex revision for `mark` (KeyError if unknown)."""
        return str(self.rev_marks[mark])

    def next_mark(self):
        # allocate a fresh mark without binding it to a revision
        self.last_mark += 1
        return self.last_mark

    def get_mark(self, rev):
        # allocate a fresh mark and bind it to `rev`
        self.last_mark += 1
        self.marks[rev] = self.last_mark
        return self.last_mark

    def new_mark(self, rev, mark):
        # record an externally chosen mark for `rev`
        self.marks[rev] = mark
        self.rev_marks[mark] = rev
        self.last_mark = mark

    def is_marked(self, rev):
        return rev in self.marks

    def get_tip(self, branch):
        """Return the last exported tip for ref `branch` (KeyError if none)."""
        return str(self.tips[branch])

    def set_tip(self, branch, tip):
        self.tips[branch] = tip
|
|
|
|
class ParserContext:
    """State carried alongside a Parser across a push/export run."""

    def __init__(self):
        # known context attributes
        self.localref = None    # local ref being processed (set by callers)
        self.remoteref = None   # overrides the ref name in commit/reset commands
        self.gitmarks = None    # git-side marks, used to spot hg-originated commits
        self.hghelper = None    # helper used to locate the source hg repo of such commits
        self.revs = []          # hg revisions created during this run
|
|
|
|
class Parser:
    """Line-oriented reader for the git remote-helper / fast-export command
    stream, keeping one line of lookahead in `self.line`."""

    def __init__(self, repo, cmdstream=sys.stdin, ctx=None):
        # Fix: `ctx` previously defaulted to a single module-load-time
        # ParserContext(), i.e. a mutable default argument silently shared
        # by every Parser created without an explicit context.  Default to
        # a fresh context per instance; callers wanting shared state still
        # pass `ctx` explicitly.
        self.repo = repo
        self.cmdstream = cmdstream
        self.line = self.get_line()
        self.context = ctx if ctx is not None else ParserContext()

    def get_line(self):
        # next command line, stripped of surrounding whitespace
        return self.cmdstream.readline().strip()

    def __getitem__(self, i):
        # i-th whitespace-separated token of the current line
        return self.line.split()[i]

    def check(self, word):
        # does the current line start with `word`?
        return self.line.startswith(word)

    def each_block(self, separator):
        """Yield lines up to (not including) `separator`."""
        while self.line != separator:
            yield self.line
            self.line = self.get_line()

    def __iter__(self):
        return self.each_block('')

    def next(self):
        # advance one line; 'done' marks end-of-stream (line becomes None)
        self.line = self.get_line()
        if self.line == 'done':
            self.line = None

    def get_mark(self):
        """Parse the numeric mark from a line such as 'mark :42'."""
        i = self.line.index(':') + 1
        return int(self.line[i:])

    def get_data(self):
        """Parse a 'data <size>' line and read that many bytes of payload;
        returns None when the current line is not a data command."""
        if not self.check('data'):
            return None
        i = self.line.index(' ') + 1
        size = int(self.line[i:])
        return self.cmdstream.read(size)

    def get_author(self):
        """Parse a raw ident line ('<cmd> Name <mail> date tz') into
        (user, date, -tz) with tz converted from +/-HHMM to seconds,
        restoring extra ident data that fixup_user_hg() tucked into an
        'ext:(...)' suffix.  Returns None when the line does not match."""
        ex = None
        m = RAW_AUTHOR_RE.match(self.line)
        if not m:
            return None
        _, name, email, date, tz = m.groups()
        if name and 'ext:' in name:
            m = re.match('^(.+?) ext:\((.+)\)$', name)
            if m:
                name = m.group(1)
                ex = urllib.unquote(m.group(2))

        if email != bad_mail:
            if name:
                user = '%s <%s>' % (name, email)
            else:
                user = '<%s>' % (email)
        else:
            user = name

        if ex:
            user += ex

        tz = int(tz)
        tz = ((tz / 100) * 3600) + ((tz % 100) * 60)
        return (user, int(date), -tz)
|
|
|
|
def fix_file_path(path):
    """Normalize a path and make absolute paths repo-relative by stripping
    the leading root."""
    normalized = os.path.normpath(path)
    if os.path.isabs(normalized):
        return os.path.relpath(normalized, '/')
    return normalized
|
|
|
|
def export_files(files):
    """Emit a fast-import 'blob' command for each new file context and
    return a list of (gitmode, mark, path) tuples for filemodify lines.

    Blobs are deduplicated per hg filenode through the global `filenodes`
    cache, so identical content exported earlier just reuses its mark.
    """
    final = []
    for f in files:
        fid = node.hex(f.filenode())

        if fid in filenodes:
            # this exact file content was already exported: reuse its mark
            mark = filenodes[fid]
        else:
            mark = marks.next_mark()
            filenodes[fid] = mark
            d = f.data()

            print "blob"
            print "mark :%u" % mark
            print "data %d" % len(d)
            print d

        path = fix_file_path(f.path())
        final.append((gitmode(f.flags()), mark, path))

    return final
|
|
|
|
def get_filechanges(repo, ctx, parent):
    """Diff the manifests of `parent` and `ctx`.

    Returns (added_or_modified, removed) as sets of file names.
    """
    modified = set()
    added = set()
    removed = set()

    # load earliest manifest first for caching reasons
    prev = parent.manifest().copy()
    cur = ctx.manifest()

    for fn in cur:
        if fn in prev:
            # present in both: modified when flags or content hash differ
            if (cur.flags(fn) != prev.flags(fn) or cur[fn] != prev[fn]):
                modified.add(fn)
            del prev[fn]
        else:
            added.add(fn)
    # whatever is left of the parent manifest no longer exists
    removed |= set(prev.keys())

    return added | modified, removed
|
|
|
|
def fixup_user_git(user):
    """Split a git-style ident into (name, mail); either part may be None
    when it cannot be extracted."""
    cleaned = user.replace('"', '')

    m = AUTHOR_RE.match(cleaned)
    if m:
        return (m.group(1), m.group(2).strip())

    m = EMAIL_RE.match(cleaned)
    if m:
        return (None, m.group(1))

    m = NAME_RE.match(cleaned)
    if m:
        return (m.group(1).strip(), None)

    return (None, None)
|
|
|
|
def fixup_user_hg(user):
    """Split an hg ident into (name, mail) suitable for git, sanitizing
    characters git forbids; trailing data after the address is preserved
    url-quoted in an 'ext:(...)' suffix so it can be round-tripped."""
    def sanitize(name):
        # stole this from hg-git
        return re.sub('[<>\n]', '?', name.lstrip('< ').rstrip('> '))

    m = AUTHOR_HG_RE.match(user)
    if m is None:
        # no <mail> part at all: mail only if it looks like an address
        name = sanitize(user)
        mail = name if '@' in user else None
        return (name, mail)

    name = sanitize(m.group(1))
    mail = sanitize(m.group(2))
    ex = m.group(3)
    if ex:
        name += ' ext:(' + urllib.quote(ex) + ')'
    return (name, mail)
|
|
|
|
def fixup_user(user):
    """Produce a canonical 'Name <mail>' ident for the current mode,
    substituting the global bad_name/bad_mail placeholders for missing
    parts."""
    if mode == 'git':
        name, mail = fixup_user_git(user)
    else:
        name, mail = fixup_user_hg(user)

    return '%s <%s>' % (name or bad_name, mail or bad_mail)
|
|
|
|
def updatebookmarks(repo, peer):
    """Synchronize local bookmarks with the remote's.

    Bookmarks that disappeared on the remote are removed locally (along
    with their private refs under `prefix`); all remaining remote
    bookmarks are then copied over and persisted, using whichever write
    API the running mercurial version provides.
    """
    remotemarks = peer.listkeys('bookmarks')

    # delete bookmarks locally that disappeared on remote
    localmarks = bookmarks.listbookmarks(repo)
    remote = set(remotemarks.keys())
    local = set(localmarks.keys())
    for bmark in local - remote:
        bookmarks.pushbookmark(repo, bmark, localmarks[bmark], '')
        # also delete private ref
        pbookmark = '%s/bookmarks/%s' % (prefix, bmark)
        subprocess.call(['git', 'update-ref', '-d', pbookmark])

    # now add or update remote bookmarks to local, if any
    localmarks = repo._bookmarks
    if not remotemarks:
        return

    for k, v in remotemarks.iteritems():
        localmarks[k] = hgbin(v)

    # persisting the bookmark store differs across mercurial versions
    if check_version(3, 6):
        lock = tr = None
        try:
            lock = repo.lock()
            tr = repo.transaction('bookmark')
            localmarks.recordchange(tr)
            tr.close()
        finally:
            if tr is not None:
                tr.release()
            if lock is not None:
                lock.release()
    else:
        if hasattr(localmarks, 'write'):
            localmarks.write()
        else:
            bookmarks.write(repo)
|
|
|
|
def get_repo(url, alias):
    """Open (creating if necessary) the hg repository to work against.

    Local urls are used directly; for remote urls a shared local clone is
    kept under "$GIT_DIR/hg" (upgrading the older per-alias layout when
    found), the global `peer` is set, everything is pulled and bookmarks
    are synchronized.  Returns the repository object.
    """
    global peer

    myui = ui.ui()
    myui.setconfig('ui', 'interactive', 'off')
    myui.fout = sys.stderr

    if get_config_bool('remote-hg.insecure'):
        # equivalent of hg's --insecure: disable certificate checking
        myui.setconfig('web', 'cacerts', '')

    extensions.loadall(myui)

    if hg.islocal(url) and not os.environ.get('GIT_REMOTE_HG_TEST_REMOTE'):
        repo = hg.repository(myui, url)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
    else:
        shared_path = os.path.join(gitdir, 'hg')

        # check and upgrade old organization
        hg_path = os.path.join(shared_path, '.hg')
        if os.path.exists(shared_path) and not os.path.exists(hg_path):
            repos = os.listdir(shared_path)
            for x in repos:
                local_hg = os.path.join(shared_path, x, 'clone', '.hg')
                if not os.path.exists(local_hg):
                    continue
                if not os.path.exists(hg_path):
                    shutil.move(local_hg, hg_path)
                shutil.rmtree(os.path.join(shared_path, x, 'clone'))

        # setup shared repo (if not there)
        try:
            hg.peer(myui, {}, shared_path, create=True)
        except error.RepoError:
            # already exists
            pass

        if not os.path.exists(dirname):
            os.makedirs(dirname)

        local_path = os.path.join(dirname, 'clone')
        if not os.path.exists(local_path):
            hg.share(myui, shared_path, local_path, update=False)
        else:
            # make sure the shared path is always up-to-date
            util.writefile(os.path.join(local_path, '.hg', 'sharedpath'), hg_path)

        repo = hg.repository(myui, local_path)
        try:
            peer = hg.peer(repo.ui, {}, url)
        except:
            # NOTE(review): bare except hides the underlying cause;
            # consider reporting the original exception text
            die('Repository error')

        # the pull API moved to the exchange module in hg 3.0
        if check_version(3, 0):
            from mercurial import exchange
            exchange.pull(repo, peer, heads=None, force=True)
        else:
            repo.pull(peer, heads=None, force=True)

        updatebookmarks(repo, peer)

    return repo
|
|
|
|
def rev_to_mark(rev):
    """Return the export mark for a changectx (global `marks` table)."""
    return marks.from_rev(rev.hex())
|
|
|
|
def mark_to_rev(mark):
    """Return the hg hex revision recorded for `mark` (global `marks`)."""
    return marks.to_rev(mark)
|
|
|
|
# Get a range of revisions in the form of a..b (git committish)

def gitrange(repo, a, b):
    """Return ancestors of `b` whose revision number exceeds int(a),
    oldest first, by walking the changelog backwards from b."""
    positive = []
    pending = set([int(b)])
    # revisions at or below this number are treated as already known
    negative = int(a)
    for cur in xrange(b, -1, -1):
        if not pending:
            # all reachable revisions above the cutoff collected
            break

        parents = [p for p in repo.changelog.parentrevs(cur) if p >= 0]

        if cur in pending:
            if cur > negative:
                positive.append(cur)
            pending.remove(cur)
            for p in parents:
                if p > negative:
                    pending.add(p)

    positive.reverse()
    return positive
|
|
|
|
def export_ref(repo, name, kind, head):
    """Export hg history for one ref as a fast-import stream on stdout.

    Exports every not-yet-marked changeset between the last recorded tip
    for '<kind>/<name>' and `head`, then resets the private ref and emits
    (or extends) a refs/notes/hg commit mapping git commits to their hg
    changeset ids.  Updates the global `marks` and `notes` state.
    """
    ename = '%s/%s' % (kind, name)
    try:
        tip = marks.get_tip(ename)
        tip = repo[tip]
    except:
        # no (valid) recorded tip: fall back to repo[-1] so the whole
        # history of the ref gets exported
        tip = repo[-1]

    revs = gitrange(repo, tip, head)

    total = len(revs)
    tip = tip.rev()

    for rev in revs:

        c = repo[rev]
        node = c.node()

        if marks.is_marked(c.hex()):
            # already exported through another ref
            continue

        (manifest, user, (time, tz), files, desc, extra) = repo.changelog.read(node)
        rev_branch = extra['branch']

        author = "%s %d %s" % (fixup_user(user), time, gittz(tz))
        if 'committer' in extra:
            try:
                cuser, ctime, ctz = extra['committer'].rsplit(' ', 2)
                committer = "%s %s %s" % (fixup_user(cuser), ctime, gittz(int(ctz)))
            except ValueError:
                # committer extra without a date/tz: reuse the author's
                cuser = extra['committer']
                committer = "%s %d %s" % (fixup_user(cuser), time, gittz(tz))
        else:
            committer = author

        parents = [repo[p] for p in repo.changelog.parentrevs(rev) if p >= 0]

        if len(parents) == 0:
            # root changeset: every file is new
            modified = c.manifest().keys()
            removed = []
        else:
            modified, removed = get_filechanges(repo, c, parents[0])

        desc += '\n'

        if mode == 'hg':
            # hg-git compat: preserve hg metadata in a --HG-- trailer
            extra_msg = ''

            if rev_branch != 'default':
                extra_msg += 'branch : %s\n' % rev_branch

            renames = []
            for f in c.files():
                if f not in c.manifest():
                    continue
                rename = c.filectx(f).renamed()
                if rename:
                    renames.append((rename[0], f))

            for e in renames:
                extra_msg += "rename : %s => %s\n" % e

            for key, value in extra.iteritems():
                if key in ('author', 'committer', 'encoding', 'message', 'branch', 'hg-git'):
                    continue
                else:
                    extra_msg += "extra : %s : %s\n" % (key, urllib.quote(value))

            if extra_msg:
                desc += '\n--HG--\n' + extra_msg

        if len(parents) == 0 and rev:
            print 'reset %s/%s' % (prefix, ename)

        modified_final = export_files(c.filectx(f) for f in modified)

        print "commit %s/%s" % (prefix, ename)
        print "mark :%d" % (marks.get_mark(c.hex()))
        print "author %s" % (author)
        print "committer %s" % (committer)
        print "data %d" % (len(desc))
        print desc

        if len(parents) > 0:
            print "from :%s" % (rev_to_mark(parents[0]))
            if len(parents) > 1:
                print "merge :%s" % (rev_to_mark(parents[1]))

        for f in removed:
            print "D %s" % (fix_file_path(f))
        for f in modified_final:
            print "M %s :%u %s" % f
        print

        progress = (rev - tip)
        if (progress % 100 == 0):
            print "progress revision %d '%s' (%d/%d)" % (rev, name, progress, total)

    # make sure the ref is updated
    print "reset %s/%s" % (prefix, ename)
    print "from :%u" % rev_to_mark(head)
    print

    # attach the hg changeset id of every newly exported commit as a note
    pending_revs = set(revs) - notes
    if pending_revs:
        note_mark = marks.next_mark()
        ref = "refs/notes/hg"

        print "commit %s" % ref
        print "mark :%d" % (note_mark)
        print "committer remote-hg <> %d %s" % (ptime.time(), gittz(ptime.timezone))
        desc = "Notes for %s\n" % (name)
        print "data %d" % (len(desc))
        print desc
        # continue incrementally on current notes branch (whenever possible)
        # to avoid wiping out present content upon fetch of new repo
        current_note = rev_parse(ref)
        if current_note and not len(notes):
            print 'from %s^0' % (ref)
        # but track along with the previous ref as import goes along
        elif marks.last_note:
            print "from :%u" % (marks.last_note)

        for rev in pending_revs:
            notes.add(rev)
            c = repo[rev]
            print "N inline :%u" % rev_to_mark(c)
            msg = c.hex()
            print "data %d" % (len(msg))
            print msg
        print

        marks.last_note = note_mark

    marks.set_tip(ename, head.hex())
|
|
|
|
def export_tag(repo, tag):
    """Export the history reachable from tag `tag` under 'tags/'."""
    export_ref(repo, tag, 'tags', repo[hgref(tag)])
|
|
|
|
def export_bookmark(repo, bmark):
    """Export the history reachable from bookmark `bmark` under 'bookmarks/'."""
    head = bmarks[hgref(bmark)]
    export_ref(repo, bmark, 'bookmarks', head)
|
|
|
|
def export_branch(repo, branch):
    """Export the history of named branch `branch` under 'branches/'."""
    tip = get_branch_tip(repo, branch)
    head = repo[tip]
    export_ref(repo, branch, 'branches', head)
|
|
|
|
def export_head(repo):
    """Export the ref chosen as HEAD by list_head() (global `g_head`)."""
    export_ref(repo, g_head[0], 'bookmarks', g_head[1])
|
|
|
|
def do_capabilities(parser):
    """Answer the remote-helper 'capabilities' command on stdout,
    advertising import plus either push or export, the refspecs, the
    marks files (when present) and the no-private-update option."""
    print "import"
    if capability_push:
        print "push"
    else:
        print "export"
    print "refspec refs/heads/branches/*:%s/branches/*" % prefix
    print "refspec refs/heads/*:%s/bookmarks/*" % prefix
    print "refspec refs/tags/*:%s/tags/*" % prefix

    path = os.path.join(dirname, 'marks-git')

    if os.path.exists(path):
        print "*import-marks %s" % path
    print "*export-marks %s" % path
    print "option"
    # nothing really depends on the private refs being up to date
    # (export is limited anyway by the current git marks)
    # and they are not always updated correctly (dry-run, bookmark delete, ...)
    # (might resolve some dry-run breakage also)
    print "no-private-update"

    print
|
|
|
|
def branch_tip(branch):
    """Return the last (tip-most) head recorded for `branch` in the
    global `branches` map."""
    return branches[branch][-1]
|
|
|
|
def get_branch_tip(repo, branch):
    """Return the single open head of named branch `branch`, or None when
    the branch is unknown; with multiple heads, warn and use the tip."""
    heads = branches.get(hgref(branch), None)
    if not heads:
        return None

    # verify there's only one head
    if (len(heads) > 1):
        warn("Branch '%s' has more than one head, consider merging" % branch)
        return branch_tip(hgref(branch))

    return heads[0]
|
|
|
|
def list_head(repo, cur):
    """Announce HEAD for the 'list' command as a fake bookmark pointing at
    the tip of the 'default' branch; records it in the globals `g_head`,
    `fake_bmark` and `bmarks`.  No-op on an empty repo."""
    global g_head, fake_bmark

    if 'default' not in branches:
        # empty repo
        return

    node = repo[branch_tip('default')]
    # call it 'master' unless a real bookmark already uses that name
    head = 'master' if 'master' not in bmarks else 'default'
    fake_bmark = head
    bmarks[head] = node

    head = gitref(head)
    print "@refs/heads/%s HEAD" % head
    g_head = (head, node)
|
|
|
|
def do_list(parser):
    """Answer the remote-helper 'list' command: print one line per
    exportable ref (named branches, bookmarks, tags and HEAD).

    For push-capable 'list for-push' a fake old sha1 of all-'f' is
    reported so git does not show every ref as new (the proper status is
    reported later during the push itself).
    """
    repo = parser.repo
    for bmark, node in bookmarks.listbookmarks(repo).iteritems():
        bmarks[bmark] = repo[node]

    cur = repo.dirstate.branch()
    # prefer the remote's view of the branch map when we have a peer
    orig = peer if peer else repo

    for branch, heads in orig.branchmap().iteritems():
        # only open heads
        heads = [h for h in heads if 'close' not in repo.changelog.read(h)[5]]
        if heads:
            branches[branch] = heads

    list_head(repo, cur)

    # for export command a ref's old_sha1 is taken from private namespace ref
    # for push command a fake one is provided
    # that avoids having the ref status reported as a new branch/tag
    # (though it will be marked as FETCH_FIRST prior to push,
    # but that's ok as we will provide proper status)
    for_push = (parser.line.find('for-push') >= 0)
    sha1 = 'f' * 40 if (capability_push and for_push) else '?'

    if track_branches:
        for branch in branches:
            print "%s refs/heads/branches/%s" % (sha1, gitref(branch))

    for bmark in bmarks:
        if bmarks[bmark].hex() == '0' * 40:
            warn("Ignoring invalid bookmark '%s'", bmark)
        else:
            print "%s refs/heads/%s" % (sha1, gitref(bmark))

    for tag, node in repo.tagslist():
        if tag == 'tip':
            # 'tip' is hg-internal, not a real tag
            continue
        print "%s refs/tags/%s" % (sha1, gitref(tag))

    print
|
|
|
|
def do_import(parser):
    """Handle a batch of 'import <ref>' commands: announce the fast-import
    features (marks files, force), then export each requested ref as a
    stream, forcing utf-8 encoding for the duration."""
    repo = parser.repo

    path = os.path.join(dirname, 'marks-git')

    print "feature done"
    if os.path.exists(path):
        print "feature import-marks=%s" % path
    print "feature export-marks=%s" % path
    print "feature force"
    sys.stdout.flush()

    # force utf-8 while exporting, restore afterwards
    tmp = encoding.encoding
    encoding.encoding = 'utf-8'

    # lets get all the import lines
    while parser.check('import'):
        ref = parser[1]

        if (ref == 'HEAD'):
            export_head(repo)
        elif ref.startswith('refs/heads/branches/'):
            branch = ref[len('refs/heads/branches/'):]
            export_branch(repo, branch)
        elif ref.startswith('refs/heads/'):
            bmark = ref[len('refs/heads/'):]
            export_bookmark(repo, bmark)
        elif ref.startswith('refs/tags/'):
            tag = ref[len('refs/tags/'):]
            export_tag(repo, tag)

        parser.next()

    encoding.encoding = tmp

    print 'done'
|
|
|
|
def parse_blob(parser):
    """Consume a fast-export 'blob' command, stashing its data in the
    global `blob_marks` keyed by mark."""
    parser.next()
    mark = parser.get_mark()
    parser.next()
    data = parser.get_data()
    blob_marks[mark] = data
    parser.next()
|
|
|
|
def get_file_metadata(repo, p1, files):
    """Fill in 'mode' and 'data' for renamed/copied files that had no
    filemodify line, taking both from the old file in parent `p1`.

    Bug fix: previously the exec-bit result was unconditionally clobbered
    by the symlink check, and a raw boolean ended up in f['mode'], where
    the rest of the file expects an hg flag string ('', 'x' or 'l') —
    see parse_commit's getfilectx (`of['mode'] == 'x'`).  As a result
    copied exec files and symlinks lost their mode.
    """
    for e in files:
        f = files[e]
        if 'rename' in f and 'mode' not in f:
            old = f['rename']
            ctx = repo[p1][old]
            # .isexec(), .islink() only around in newer versions
            if hasattr(ctx, 'isexec'):
                is_exec = ctx.isexec()
            else:
                is_exec = ('x' in ctx.flags())
            if hasattr(ctx, 'islink'):
                is_link = ctx.islink()
            else:
                is_link = ('l' in ctx.flags())
            m = ''
            if is_exec:
                m = 'x'
            if is_link:
                m = 'l'
            f['mode'] = m
            f['data'] = ctx.data()
|
|
|
|
def get_merge_files(repo, p1, p2, files):
    """For a merge commit, add first-parent files the fast-export stream
    did not mention, so hg records them as taken from p1 (git only lists
    changes against the first parent)."""
    for e in repo[p1].files():
        if e not in files:
            if e not in repo[p1].manifest():
                continue
            f = { 'ctx': repo[p1][e] }
            files[e] = f
|
|
|
|
def split_line_pathnames(line):
    """Split a fast-export R/C file command into (type, old_path, new_path).

    The first path may be C-style quoted (possibly containing escaped
    quotes); in that case scan for its unescaped closing quote.  Aborts
    on a malformed line with no closing quote.
    """
    if line[2] != '"':
        # unquoted: a plain two-space split suffices
        return line.split(' ', 2)

    pos = 3
    while pos >= 0:
        closing = (line[pos] == '"' and line[pos - 1] != '\\')
        if closing:
            return line[0], line[2:pos + 1], line[pos + 2:]
        pos = line.find('"', pos + 1)

    # hm, should not happen
    die('Malformed file command: %s' % (line))
|
|
|
|
def c_style_unescape(string):
    """Undo fast-export C-style quoting: strip the surrounding quotes and
    decode escapes; unquoted strings pass through unchanged."""
    quoted = string[0] == string[-1] == '"'
    if not quoted:
        return string
    return string.decode('string-escape')[1:-1]
|
|
|
|
def parse_commit(parser):
    """Consume a fast-export 'commit' command and create the matching hg
    changeset in parser.repo.

    When the git commit is recognized (via git notes) as originating from
    hg, the existing hg changeset is reused/pushed from its source repo
    instead of committing anew.  Results land in the global `parsed_refs`
    and `marks`; newly created revisions are appended to the parser
    context.  Octopus merges are rejected.
    """
    from_mark = merge_mark = None

    remoteref = parser.context.remoteref
    ref = parser[1] if not remoteref else remoteref
    parser.next()

    # header: mark, author, committer, message, parents
    commit_mark = parser.get_mark()
    parser.next()
    author = parser.get_author()
    parser.next()
    committer = parser.get_author()
    parser.next()
    data = parser.get_data()
    parser.next()
    if parser.check('from'):
        from_mark = parser.get_mark()
        parser.next()
    if parser.check('merge'):
        merge_mark = parser.get_mark()
        parser.next()
        if parser.check('merge'):
            die('octopus merges are not supported yet')

    # fast-export adds an extra newline
    if data[-1] == '\n':
        data = data[:-1]

    # collect the file commands (M/D/R/C) keyed by path
    files = {}

    for line in parser:
        if parser.check('M'):
            t, m, mark_ref, path = line.split(' ', 3)
            mark = int(mark_ref[1:])
            f = { 'mode': hgmode(m), 'data': blob_marks[mark] }
        elif parser.check('D'):
            t, path = line.split(' ', 1)
            f = { 'deleted': True }
        elif parser.check('R'):
            t, old, path = split_line_pathnames(line)
            old = c_style_unescape(old)
            f = { 'rename': old }
            # also mark old deleted
            files[old] = { 'deleted': True }
        elif parser.check('C'):
            t, old, path = split_line_pathnames(line)
            f = { 'rename': c_style_unescape(old) }
        else:
            die('Unknown file command: %s' % line)
        path = c_style_unescape(path)
        files[path] = files.get(path, {})
        files[path].update(f)

    # only export the commits if we are on an internal proxy repo
    if dry_run and not peer:
        parsed_refs[ref] = None
        return

    # check if this is an hg commit we have in some other repo
    gitmarks = parser.context.gitmarks
    if gitmarks:
        gitcommit = gitmarks.to_rev(commit_mark)
        hgrev = get_rev_hg(gitcommit)
        if hgrev:
            hghelper = parser.context.hghelper
            if not hghelper:
                # pushing hg-based commits is disabled
                print "error %s rejected not pushing hg based commit %s" % (ref, gitcommit)
                raise UserWarning("check-hg-commits")
            # must be in some local repo
            # find it and push it to the target local repo
            # (rather than making a commit into it)
            # probably not already in target repo, but let's make sure
            if hgrev not in parser.repo:
                srepo = hghelper.githgrepo.find_hg_repo(hgrev)
                if not srepo:
                    # pretty bad, if identified as hg revision, we should have it somewhere
                    # but is possible if the originating repo has been removed now
                    # warn elaborately and fail given the current settings
                    description = "\n" \
                        "commit %s corresponds \nto hg revision %s,\n" \
                        "but could not find latter in any fetched hg repo.\n" \
                        "Please resolve the inconsistency or disable pushing hg commits" \
                        % (gitcommit, hgrev)
                    die(description)
                warn('Pushing hg changeset %s for %s' % (hgrev, gitcommit))
                # target is local repo so should have a root
                # force push since otherwise forcibly commit anyway
                # (and needed for multiple head case etc)
                push(srepo, hg.peer(srepo.ui, {}, parser.repo.root), [hgbin(hgrev)], True)
            else:
                # could already be present, particularly in shared proxy repo
                warn('Using hg changeset %s for %s' % (hgrev, gitcommit))
            # track mark and are done here
            parsed_refs[ref] = hgrev
            marks.new_mark(hgrev, commit_mark)
            return

    def getfilectx(repo, memctx, f):
        # build the memfilectx for one touched file; deletion is signalled
        # by returning None (hg >= 3.2) or raising IOError (older)
        of = files[f]
        if 'deleted' in of:
            if check_version(3, 2):
                return None
            else:
                raise IOError
        if 'ctx' in of:
            # unchanged first-parent file injected by get_merge_files()
            if mode == 'hg':
                ctx = of['ctx']
                is_exec = ctx.isexec()
                is_link = ctx.islink()
                if check_version(3, 1):
                    return context.memfilectx(repo, f, ctx.data(),
                            is_link, is_exec)
                else:
                    return context.memfilectx(f, ctx.data(),
                            is_link, is_exec)
            else:
                return of['ctx']
        is_exec = of['mode'] == 'x'
        is_link = of['mode'] == 'l'
        rename = of.get('rename', None)
        if check_version(3, 1):
            return context.memfilectx(repo, f, of['data'],
                    is_link, is_exec, rename)
        else:
            return context.memfilectx(f, of['data'],
                    is_link, is_exec, rename)

    repo = parser.repo

    user, date, tz = author
    extra = {}

    if committer != author:
        extra['committer'] = "%s %u %u" % committer

    if from_mark:
        p1 = mark_to_rev(from_mark)
    else:
        p1 = '0' * 40

    if merge_mark:
        p2 = mark_to_rev(merge_mark)
    else:
        p2 = '0' * 40

    #
    # If files changed from any of the parents, hg wants to know, but in git if
    # nothing changed from the first parent, nothing changed.
    #
    if merge_mark:
        get_merge_files(repo, p1, p2, files)

    # need to obtain file metadata for copied and renamed files that have
    # no filemodify line; let's get that from the old file in parent revision
    if from_mark:
        get_file_metadata(repo, p1, files)

    # Check if the ref is supposed to be a named branch
    if ref.startswith('refs/heads/branches/'):
        branch = ref[len('refs/heads/branches/'):]
        extra['branch'] = hgref(branch)

    if mode == 'hg':
        # recover hg metadata round-tripped through the --HG-- trailer
        i = data.find('\n--HG--\n')
        if i >= 0:
            tmp = data[i + len('\n--HG--\n'):].strip()
            for k, v in [e.split(' : ', 1) for e in tmp.split('\n')]:
                if k == 'rename':
                    old, new = v.split(' => ', 1)
                    files[new]['rename'] = old
                elif k == 'branch':
                    extra[k] = v
                elif k == 'extra':
                    ek, ev = v.split(' : ', 1)
                    extra[ek] = urllib.unquote(ev)
            data = data[:i]

    ctx = context.memctx(repo, (p1, p2), data,
            files.keys(), getfilectx,
            user, (date, tz), extra)

    # commit with utf-8 forced, restore the previous encoding afterwards
    tmp = encoding.encoding
    encoding.encoding = 'utf-8'

    node = hghex(repo.commitctx(ctx))

    encoding.encoding = tmp

    parsed_refs[ref] = node
    marks.new_mark(node, commit_mark)
    parser.context.revs.append(node)
|
|
|
|
def parse_reset(parser):
    """Consume a fast-export 'reset' command, recording the target hg
    revision for the ref in `parsed_refs` (None for an unknown mark)."""
    remoteref = parser.context.remoteref
    ref = parser[1] if not remoteref else remoteref
    parser.next()
    # ugh
    if parser.check('commit'):
        # the stream may follow a reset directly with its commit
        parse_commit(parser)
        return
    if not parser.check('from'):
        return
    from_mark = parser.get_mark()
    parser.next()

    try:
        rev = mark_to_rev(from_mark)
    except KeyError:
        rev = None
    parsed_refs[ref] = rev
|
|
|
|
def parse_tag(parser):
    """Consume a fast-export 'tag' command, recording (tagger, message)
    in the global `parsed_tags` for later processing."""
    name = parser[1]
    parser.next()
    from_mark = parser.get_mark()
    parser.next()
    tagger = parser.get_author()
    parser.next()
    data = parser.get_data()
    parser.next()

    parsed_tags[name] = (tagger, data)
|
|
|
|
def write_tag(repo, tag, node, msg, author):
    """Record a git tag on the hg side by committing an updated .hgtags on
    the tagged changeset's branch.

    `author` may be None, in which case the identity falls back to git's
    GIT_COMMITTER_IDENT, then to hg's configured username.  Returns
    (tagnode, branch).
    """
    branch = repo[node].branch()
    tip = branch_tip(branch)
    tip = repo[tip]

    def getfilectx(repo, memctx, f):
        # append the new tag line to the branch tip's .hgtags (if any)
        try:
            fctx = tip.filectx(f)
            data = fctx.data()
        except error.ManifestLookupError:
            data = ""
        content = data + "%s %s\n" % (node, tag)
        if check_version(3, 1):
            return context.memfilectx(repo, f, content, False, False, None)
        else:
            return context.memfilectx(f, content, False, False, None)

    p1 = tip.hex()
    p2 = '0' * 40
    if author:
        user, date, tz = author
        date_tz = (date, tz)
    else:
        cmd = ['git', 'var', 'GIT_COMMITTER_IDENT']
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        output, _ = process.communicate()
        m = re.match('^.* <.*>', output)
        if m:
            user = m.group(0)
        else:
            user = repo.ui.username()
        date_tz = None

    ctx = context.memctx(repo, (p1, p2), msg,
            ['.hgtags'], getfilectx,
            user, date_tz, {'branch': branch})

    # commit with utf-8 forced, restore the previous encoding afterwards
    tmp = encoding.encoding
    encoding.encoding = 'utf-8'

    tagnode = repo.commitctx(ctx)

    encoding.encoding = tmp

    return (tagnode, branch)
|
|
|
|
def checkheads_bmark(repo, ref, ctx, force):
    """Check that moving bookmark `ref` to `ctx` is a fast-forward (or
    forced).  Prints the remote-helper status line when it is not;
    returns True when the push may proceed."""
    bmark = ref[len('refs/heads/'):]
    if bmark not in bmarks:
        # new bmark
        return True

    ctx_old = bmarks[bmark]
    ctx_new = ctx

    if not ctx.rev():
        # NOTE(review): rev() is also falsy (0) for the root changeset,
        # which would be reported as unknown here — confirm intended
        print "error %s unknown" % ref
        return False

    if not repo.changelog.descendant(ctx_old.rev(), ctx_new.rev()):
        if force:
            print "ok %s forced update" % ref
        else:
            print "error %s non-fast forward" % ref
            return False

    return True
|
|
|
|
def checkheads(repo, remote, p_revs, force):
    """Verify the push would not create extra heads on remote branches and
    that bookmark moves are fast-forwards.

    Prints per-ref status lines for failures; returns True when every ref
    may be pushed (always True for an empty remote).
    """

    remotemap = remote.branchmap()
    if not remotemap:
        # empty repo
        return True

    new = {}
    ret = True

    for node, ref in p_revs.iteritems():
        ctx = repo[node]
        branch = ctx.branch()
        if branch not in remotemap:
            # new branch
            continue
        if not ref.startswith('refs/heads/branches'):
            if ref.startswith('refs/heads/'):
                # bookmarks get the dedicated fast-forward check
                if not checkheads_bmark(repo, ref, ctx, force):
                    ret = False

            # only check branches
            continue
        new.setdefault(branch, []).append(ctx.rev())

    for branch, heads in new.iteritems():
        old = [repo.changelog.rev(x) for x in remotemap[branch]]
        for rev in heads:
            # ancestors() signature changed in hg 2.3
            if check_version(2, 3):
                ancestors = repo.changelog.ancestors([rev], stoprev=min(old))
            else:
                ancestors = repo.changelog.ancestors(rev)
            found = False

            # the new head must descend from some existing remote head
            for x in old:
                if x in ancestors:
                    found = True
                    break

            if found:
                continue

            node = repo.changelog.node(rev)
            ref = p_revs[node]
            if force:
                print "ok %s forced update" % ref
            else:
                print "error %s non-fast forward" % ref
                ret = False

    return ret
|
|
|
|
def push_unsafe(repo, remote, p_revs, force):
    """Push the changesets reachable from the heads in `p_revs` to `remote`.

    "Unsafe" because the caller (push()) is responsible for holding the
    remote lock when the peer does not support 'unbundle'.  Returns the
    remote's push result, or None when there is nothing to push.
    """

    # figure out what the remote already has and what we need to send
    fci = discovery.findcommonincoming
    commoninc = fci(repo, remote, force=force)
    common, _, remoteheads = commoninc
    fco = discovery.findcommonoutgoing
    outgoing = fco(repo, remote, onlyheads=list(p_revs), commoninc=commoninc, force=force)

    # nice to know about this rather than assume a bogus error
    # also, some remote peertypes might otherwise be surprised further down
    if not outgoing.missing:
        return None

    # the changegroup-building API moved around across Mercurial releases
    if check_version(3, 2):
        cg = changegroup.getchangegroup(repo, 'push', heads=list(p_revs), common=common)
    elif check_version(3, 0):
        cg = changegroup.getbundle(repo, 'push', heads=list(p_revs), common=common)
    else:
        cg = repo.getbundle('push', heads=list(p_revs), common=common)

    unbundle = remote.capable('unbundle')
    if unbundle:
        if force:
            # tell the remote to skip its own head checks
            remoteheads = ['force']
        ret = remote.unbundle(cg, remoteheads, 'push')
    else:
        ret = remote.addchangegroup(cg, 'push', repo.url())

    # if the remote tracks phases, mark the pushed heads public
    phases = remote.listkeys('phases')
    if phases:
        for head in p_revs:
            # update to public
            remote.pushkey('phases', hghex(head), '1', '0')

    return ret
def push(repo, remote, p_revs, force):
    """Push the heads in `p_revs` to `remote`, locking the remote if needed.

    Returns the remote's push result, or None when there is nothing to
    push (or the peer cannot accept pushes at all).
    """
    if hasattr(remote, 'canpush') and not remote.canpush():
        print("error cannot push")
        # bail out: previously this fell through and attempted the push
        # anyway after reporting the error
        return

    if not p_revs:
        # nothing to push
        return

    lock = None
    unbundle = remote.capable('unbundle')
    if not unbundle:
        # peers without 'unbundle' need the remote locked for addchangegroup
        lock = remote.lock()
    try:
        ret = push_unsafe(repo, remote, p_revs, force)
    finally:
        if lock is not None:
            lock.release()

    return ret
def check_tip(ref, kind, name, heads):
    """Return True when the tip we last recorded for kind/name is in `heads`.

    A ref we never recorded a tip for is trivially considered up to date.
    """
    key = '%s/%s' % (kind, name)
    try:
        known_tip = marks.get_tip(key)
    except KeyError:
        # nothing recorded yet for this ref
        return True
    return known_tip in heads
def do_export(parser):
    """Handle the 'export' transport-helper command.

    All the work happens in do_push_hg(); afterwards the protocol
    requires a terminating blank line.
    """
    do_push_hg(parser)
    print("")
def do_push_hg(parser):
    """Consume a git-fast-export stream and turn it into hg commits,
    branches, bookmarks and tags; then (when a real peer is configured)
    push the result out and update bookmarks.

    Prints one transport-helper status line ("ok ..."/"error ...") per
    ref.  Returns False on hard failure, True/None otherwise.
    """
    global parsed_refs, parsed_tags
    # bookmark updates to apply after a successful push: (ref, bmark, old, new)
    p_bmarks = []
    # hg binary node -> git ref, for head checking and pushing
    p_revs = {}

    parsed_refs = {}
    parsed_tags = {}

    parser.next()

    remoteref = parser.context.remoteref
    if remoteref and not parser.line:
        # if remoteref is in past exported
        # git-fast-export might not produce anything at all
        # that is ok'ish, we will determine parsed_ref another way
        localref = parser.context.localref
        hgrev = get_rev_hg(localref)
        if not hgrev:
            # maybe the notes are not updated
            # happens only on fetch for now ... let's ask for that
            print "error %s fetch first" % remoteref
            return False
        parsed_refs[remoteref] = hgrev
        # now make parser happy
        parser.line = 'done'

    # dispatch every fast-export command until the 'done' marker
    for line in parser.each_block('done'):
        if parser.check('blob'):
            parse_blob(parser)
        elif parser.check('commit'):
            parse_commit(parser)
        elif parser.check('reset'):
            parse_reset(parser)
        elif parser.check('tag'):
            parse_tag(parser)
        elif parser.check('feature'):
            pass
        else:
            die('unhandled export command: %s' % line)

    need_fetch = False

    # classify each parsed ref: permanent branch, bookmark, or tag
    for ref, node in parsed_refs.iteritems():
        bnode = hgbin(node) if node else None
        if ref.startswith('refs/heads/branches'):
            branch = ref[len('refs/heads/branches/'):]
            if branch in branches and bnode in branches[branch]:
                # up to date
                print "ok %s up to date" % ref
                continue

            # refuse to push over remote history we have not fetched yet
            if peer:
                remotemap = peer.branchmap()
                if remotemap and branch in remotemap:
                    heads = [hghex(e) for e in remotemap[branch]]
                    if not check_tip(ref, 'branches', branch, heads):
                        print "error %s fetch first" % ref
                        need_fetch = True
                        continue

            p_revs[bnode] = ref
            print "ok %s" % ref
        elif ref.startswith('refs/heads/'):
            bmark = ref[len('refs/heads/'):]
            new = node
            old = bmarks[bmark].hex() if bmark in bmarks else ''

            if old == new:
                print "ok %s up to date" % ref
                continue

            print "ok %s" % ref
            # do not track the synthesized bookmark, nor 'master' when it
            # does not exist as a real hg bookmark
            if bmark != fake_bmark and \
                    not (bmark == 'master' and bmark not in parser.repo._bookmarks):
                p_bmarks.append((ref, bmark, old, new))

            if peer:
                remote_old = peer.listkeys('bookmarks').get(bmark)
                if remote_old:
                    if not check_tip(ref, 'bookmarks', bmark, remote_old):
                        print "error %s fetch first" % ref
                        need_fetch = True
                        continue

            p_revs[bnode] = ref
        elif ref.startswith('refs/tags/'):
            if dry_run:
                print "ok %s" % ref
                continue
            tag = ref[len('refs/tags/'):]
            tag = hgref(tag)
            author, msg = parsed_tags.get(tag, (None, None))
            if mode == 'git':
                # git mode: record the tag as a .hgtags commit
                if not msg:
                    msg = 'Added tag %s for changeset %s' % (tag, node[:12])
                tagnode, branch = write_tag(parser.repo, tag, node, msg, author)
                p_revs[tagnode] = 'refs/heads/branches/' + gitref(branch)
            else:
                # hg-git-compat mode: local tags only
                fp = parser.repo.opener('localtags', 'a')
                fp.write('%s %s\n' % (node, tag))
                fp.close()
                p_revs[bnode] = ref
            print "ok %s" % ref
        else:
            # transport-helper/fast-export bugs
            continue

    if need_fetch:
        return

    if dry_run:
        # only report what a real push would reject
        if peer and not force_push:
            checkheads(parser.repo, peer, p_revs, force_push)
        return

    success = True
    if peer:
        if not checkheads(parser.repo, peer, p_revs, force_push):
            return False
        ret = push(parser.repo, peer, p_revs, force_push)
        # None ok: nothing to push
        if ret != None and not ret:
            # do not update bookmarks
            return

        # update remote bookmarks
        remote_bmarks = peer.listkeys('bookmarks')
        for ref, bmark, old, new in p_bmarks:
            if force_push:
                # overwrite whatever the remote currently has
                old = remote_bmarks.get(bmark, '')
            if not peer.pushkey('bookmarks', bmark, old, new):
                success = False
                print "error %s" % ref
    else:
        # update local bookmarks
        for ref, bmark, old, new in p_bmarks:
            if not bookmarks.pushbookmark(parser.repo, bmark, old, new):
                success = False
                print "error %s" % ref

    return success
def delete_bookmark(parser, ref):
    """Delete the bookmark named by `ref` locally and, when a real peer is
    configured, remotely as well; finally drop the private tracking ref.

    Returns True on success, False when the bookmark is unknown or is the
    synthesized fake bookmark.
    """
    bmark = ref[len('refs/heads/'):]
    if bmark == fake_bmark:
        return False
    # delete local (proxy or target)
    old = bmarks[bmark].hex() if bmark in bmarks else ''
    if not old:
        # unknown bookmark: nothing to delete
        return False
    # 'old' is always truthy here, so the former 'if old:' guard around
    # this call was dead code and has been removed
    ok = bookmarks.pushbookmark(parser.repo, bmark, old, '')
    # propagate to peer if appropriate
    if ok and peer:
        remote_bmarks = peer.listkeys('bookmarks')
        old = remote_bmarks.get(bmark, '')
        ok = peer.pushkey('bookmarks', bmark, old, '')
    # delete private ref
    if ok:
        pbookmark = '%s/bookmarks/%s' % (prefix, bmark)
        subprocess.call(['git', 'update-ref', '-d', pbookmark])
    return ok
def do_push_refspec(parser, refspec, revs):
    """Push a single refspec ("[+]src:dst") by running git-fast-export on
    the source ref and feeding the stream to do_push_hg().

    An empty source means "delete dst" (bookmarks only).  On success the
    pushed hg revisions are copied into `revs` (mutated in place) so the
    caller can update notes.  Status lines are printed per ref.
    """
    global force_push

    # a leading '+' requests a forced push
    force = (refspec[0] == '+')
    refs = refspec.strip('+').split(':')
    # check for delete
    if (not refs[0]) and refs[1].startswith('refs/heads') and \
            not refs[1].startswith('refs/heads/branches'):
        if not delete_bookmark(parser, refs[1]):
            print "error %s could not delete"% (refs[1])
        else:
            print "ok %s" % (refs[1])
        return
    # sanity check on remote ref
    if not (refs[1].startswith('refs/heads') or refs[1].startswith('refs/tags')):
        print "error %s refspec not supported " % refs[1]
        return
    ctx = ParserContext()
    if refs[0] != refs[1]:
        # would work and tag as requested, but pushing to a hg permanent branch
        # based on a rename rather than a git branch is probably not a good idea
        if refs[1].startswith('refs/heads/branches'):
            print "error %s not allowed for permanent branch" % refs[1]
            return
        # remember the rename so do_push_hg can resolve the target ref
        ctx.remoteref = refs[1]
        ctx.localref = refs[0]
    # ok, fire up git-fast-export and process it
    cmd = ['git', 'fast-export', '--use-done-feature']
    fast_export_options = get_config('remote-hg.fast-export-options')
    if not fast_export_options:
        fast_export_options = '-M -C'
    cmd.extend(fast_export_options.strip().split())
    marks = os.path.join(dirname, 'marks-git')
    if os.path.exists(marks):
        cmd.append('--import-marks=%s' % marks)
    # optionally reuse existing hg commits in local repos
    check_hg_commits = get_config('remote-hg.check-hg-commits').strip()
    use_hg_commits = check_hg_commits in ('fail', 'push')
    # no commit of marks if dry-dry_run
    # and only commit if all went ok,
    # otherwise some commits may no longer be exported next time/try around
    tmpmarks = ''
    if use_hg_commits or not dry_run:
        # export marks to a pid-unique temp file; renamed over the real
        # marks file only on success (see the finally block below)
        tmpmarks = os.path.join(dirname, 'marks-git-%d' % (os.getpid()))
        cmd.append('--export-marks=%s' % tmpmarks)
    cmd.append(refs[0])
    # a parameter would obviously be nicer here ...
    force_push = force
    ok = False
    tmpfastexport = None
    try:
        if use_hg_commits:
            # we need the mapping from marks to commit
            # so store the output first to a file (and marks get saved also),
            # and then process that file
            tmpfastexport = open(os.path.join(dirname, 'git-fast-export-%d' % (os.getpid())), 'w+b')
            subprocess.check_call(cmd, stdin=None, stdout=tmpfastexport)
            try:
                import imp
                ctx.hghelper = imp.load_source('hghelper', \
                    os.path.join(os.path.dirname(__file__), 'git-hg-helper'))
                ctx.hghelper.init_git(gitdir)
                ctx.gitmarks = ctx.hghelper.GitMarks(tmpmarks)
                # let processing know it should not bother pushing if not requested
                if check_hg_commits != 'push':
                    ctx.hghelper = None
            except:
                die("check-hg-commits setup failed; is git-hg-helper also installed?")
            tmpfastexport.seek(0)
            try:
                nparser = Parser(parser.repo, tmpfastexport, ctx)
                ok = do_push_hg(nparser)
            except UserWarning:
                ok = False
        else:
            # simply feed fast-export directly to processing
            export = subprocess.Popen(cmd, stdin=None, stdout=subprocess.PIPE)
            nparser = Parser(parser.repo, export.stdout, ctx)
            ok = do_push_hg(nparser)
    finally:
        if tmpmarks and os.path.exists(tmpmarks):
            if ok and not dry_run:
                # the commits made it through, now we can commit
                os.rename(tmpmarks, marks)
                revs[:] = nparser.context.revs
            else:
                os.remove(tmpmarks)
        if tmpfastexport and os.path.exists(tmpfastexport.name):
            tmpfastexport.close()
            os.remove(tmpfastexport.name)
def update_notes(revs, desc):
    """Record the hg sha1 of each revision in `revs` as a git note.

    Feeds one commit on refs/notes/hg to 'git fast-import', attaching an
    inline note per revision.  `desc` is used as the notes commit
    message.  No-op when `revs` is empty.
    """
    if revs:
        # spin up fast-import
        gitmarks = os.path.join(dirname, 'marks-git')
        # marks should exist by now
        # no export of marks since notes commits are not relevant
        proc = subprocess.Popen(['git', 'fast-import', '--done', '--quiet',
            '--import-marks=%s' % gitmarks], stdin=subprocess.PIPE, stdout=sys.stderr)
        # now feed fast-import
        dest = proc.stdin

        note_mark = marks.next_mark()
        ref = "refs/notes/hg"
        dest.write("commit %s\n" % ref)
        dest.write("mark :%d\n" % (note_mark))
        dest.write("committer remote-hg <> %d %s\n" % (ptime.time(), gittz(ptime.timezone)))
        dest.write("data %d\n" % (len(desc)))
        dest.write(desc + '\n')
        # graft onto the existing notes history, if there is one
        current_note = rev_parse(ref)
        if current_note:
            dest.write('from %s^0\n' % (ref))
        for rev in revs:
            # 'N inline :<mark>' attaches the note blob that follows to the
            # commit identified by that mark
            dest.write("N inline :%u\n" % marks.from_rev(rev))
            dest.write("data %d\n" % (len(rev)))
            dest.write(rev + '\n')
        dest.write('\n')
        dest.write('done\n')
        dest.flush()
        proc.wait()
def do_push(parser):
    """Handle a batch of 'push' transport-helper commands."""
    if os.environ.get('GIT_REMOTE_HG_DEBUG_PUSH'):
        # debug aid: dump the raw command batch instead of pushing
        dump = ''
        for line in parser:
            dump += line + '\n'
        die('DEBUG push:\n%s' % (dump))
    revs = []
    for line in parser:
        if parser.check('push'):
            localrevs = []
            # strip the exact 'push ' prefix; lstrip('push ') would also eat
            # any leading 'p'/'u'/'s'/'h'/' ' characters from the refspec
            # itself (e.g. a source ref named 'stable' or 'hotfix')
            do_push_refspec(parser, line[len('push '):], localrevs)
            revs.extend(localrevs)
        else:
            die('unhandled push command: %s' % (line))
    # terminate the reply with the required blank line
    print("")
    # at this stage, all external processes are done, marks files written
    # so we can use those do update notes if so desired
    if get_config_bool('remote-hg.push-updates-notes') and revs:
        update_notes(revs, "Update notes on push")
def do_option(parser):
    """Handle the 'option' transport-helper command (dry-run / force)."""
    global dry_run, force_push
    _, key, value = parser.line.split(' ')
    enabled = (value == 'true')
    if key == 'dry-run':
        dry_run = enabled
        print("ok")
    elif key == 'force':
        force_push = enabled
        print("ok")
    else:
        print("unsupported")
def fix_path(alias, repo, orig_url):
    """Rewrite a relative file:// remote URL into an absolute one in the
    git config, so the remote keeps working from other directories."""
    parsed = urlparse.urlparse(orig_url, 'file')
    # only local, relative paths need fixing
    if parsed.scheme != 'file' or os.path.isabs(os.path.expanduser(parsed.path)):
        return
    abs_url = urlparse.urljoin("%s/" % os.getcwd(), orig_url)
    subprocess.call(['git', 'config', 'remote.%s.url' % alias, "hg::%s" % abs_url])
def select_private_refs(alias):
    """Pick where this remote's private refs live and clean the other
    location, which would otherwise hold stale refs.

    Returns the ref prefix to use for the private refs.
    """
    visible = get_config_bool('remote-hg.show-private-refs', False)
    # selection is easy, but let's also clean the refs of the alternative
    # in any case, will be recreated along the way as and when needed
    if not visible:
        # keep private implementation refs really private: wipe anything
        # previously exposed under refs/hg
        lister = subprocess.Popen(['git', 'for-each-ref', \
            '--format=delete %(refname)', 'refs/hg'], stdout=subprocess.PIPE)
        deleter = subprocess.Popen(['git', 'update-ref', '--stdin'], stdin=lister.stdout)
        lister.stdout.close() # helps with SIGPIPE
        deleter.communicate()
        return 'hg/%s/refs' % alias
    # in refs space: drop the on-disk private copy instead
    path = "%s/refs" % (dirname)
    if os.path.exists(path):
        shutil.rmtree(path, True)
    return 'refs/hg/%s' % alias
def main(args):
    """Remote-helper entry point.

    args: [helper-path, alias, url] as handed over by git.  Sets up the
    module-level state, then runs the transport-helper command loop on
    stdin until EOF.
    """
    global prefix, gitdir, dirname, branches, bmarks
    global marks, blob_marks, parsed_refs
    global peer, mode, bad_mail, bad_name
    global track_branches, force_push, is_tmp
    global parsed_tags
    global filenodes
    global fake_bmark, hg_version
    global dry_run
    global notes, alias
    global capability_push

    marks = None
    is_tmp = False
    gitdir = os.environ.get('GIT_DIR', None)

    if len(args) < 3:
        die('Not enough arguments.')

    if not gitdir:
        die('GIT_DIR not set')

    alias = args[1]
    url = args[2]
    peer = None

    hg_git_compat = get_config_bool('remote-hg.hg-git-compat')
    track_branches = get_config_bool('remote-hg.track-branches', True)
    capability_push = get_config_bool('remote-hg.capability-push', True)
    force_push = False

    if hg_git_compat:
        mode = 'hg'
        bad_mail = 'none@none'
        bad_name = ''
    else:
        mode = 'git'
        bad_mail = 'unknown'
        bad_name = 'Unknown'

    # alias equal to the url (minus prefix) means an anonymous remote:
    # use a hashed directory name and clean it up on exit (see bye())
    if alias[4:] == url:
        is_tmp = True
        alias = hashlib.sha1(alias).hexdigest()

    dirname = os.path.join(gitdir, 'hg', alias)
    branches = {}
    bmarks = {}
    blob_marks = {}
    filenodes = {}
    fake_bmark = None
    try:
        # parse "x.y(.z)[+extra]"; a '+extra' suffix counts as the next minor
        version, _, extra = util.version().partition('+')
        version = list(int(e) for e in version.split('.'))
        if extra:
            version[1] += 1
        hg_version = tuple(version)
    except:
        # unparseable version string: feature checks will be conservative
        hg_version = None
    dry_run = False
    notes = set()

    prefix = select_private_refs(alias)
    repo = get_repo(url, alias)

    if not is_tmp:
        fix_path(alias, peer or repo, url)

    marks_path = os.path.join(dirname, 'marks-hg')
    marks = Marks(marks_path, repo)

    if sys.platform == 'win32':
        # avoid CRLF translation corrupting the fast-export stream
        import msvcrt
        msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)

    # transport-helper command loop: one command per line on stdin
    parser = Parser(repo)
    for line in parser:
        if parser.check('capabilities'):
            do_capabilities(parser)
        elif parser.check('list'):
            do_list(parser)
        elif parser.check('import'):
            do_import(parser)
        elif parser.check('export'):
            do_export(parser)
        elif parser.check('push'):
            do_push(parser)
        elif parser.check('option'):
            do_option(parser)
        else:
            die('unhandled command: %s' % line)
        # git reads our replies over a pipe; flush after every command
        sys.stdout.flush()

    # persist the hg<->git mark mapping for the next invocation
    marks.store()
def bye():
    """atexit hook: remove the temporary clone directory, if one was made."""
    if not is_tmp:
        return
    shutil.rmtree(dirname)
if __name__ == '__main__':
    # register cleanup first so temporary clones are removed even when
    # main() exits via die()/sys.exit()
    atexit.register(bye)
    sys.exit(main(sys.argv))