2007-03-06 17:00:25 +00:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
|
|
|
|
|
# Copyright (c) 2007 Rocco Rutte <pdmef@gmx.net>
|
2007-03-14 10:29:24 +00:00
|
|
|
# License: MIT <http://www.opensource.org/licenses/mit-license.php>
|
2007-03-06 17:00:25 +00:00
|
|
|
|
2007-03-07 01:52:58 +00:00
|
|
|
from mercurial import repo,hg,cmdutil,util,ui,revlog,node
|
2007-03-19 08:45:42 +00:00
|
|
|
from hg2git import setup_repo,fixup_user,get_branch,get_changeset,load_cache,save_cache
|
2007-03-06 17:00:25 +00:00
|
|
|
from tempfile import mkstemp
|
2007-03-12 07:33:40 +00:00
|
|
|
from optparse import OptionParser
|
2007-03-06 17:00:25 +00:00
|
|
|
import re
|
|
|
|
|
import sys
|
|
|
|
|
import os
|
|
|
|
|
|
2007-03-09 12:07:08 +00:00
|
|
|
# regex matching a "Signed-off-by: <who>" trailer line in a log message;
# used by get_author() to recover authorship information
sob_re=re.compile('^Signed-[Oo]ff-[Bb]y: (.+)$')
# insert 'checkpoint' command after this many commits or none at all if 0
cfg_checkpoint_count=0
# write some progress message every this many file contents written
cfg_export_boundary=1000
|
2007-03-06 17:00:25 +00:00
|
|
|
|
|
|
|
|
def gitmode(x):
  """Map an hg executable flag to the matching git file mode string."""
  if x:
    return '100755'
  return '100644'
|
|
|
|
|
|
|
|
|
|
def wr(msg=''):
  """Emit one line of the git-fast-import stream on stdout.

  msg may also be an entire file blob or commit message; the exact
  byte output (including the trailing newline added by print) is part
  of the fast-import protocol, so all protocol output goes through
  here."""
  print msg
  #map(lambda x: sys.stderr.write('\t[%s]\n' % x),msg.split('\n'))
|
2007-03-06 17:00:25 +00:00
|
|
|
|
|
|
|
|
def checkpoint(count):
  """Advance the commit counter and emit a 'checkpoint' command every
  cfg_checkpoint_count commits (never when that option is 0).

  Returns the incremented counter."""
  count+=1
  if cfg_checkpoint_count>0 and count%cfg_checkpoint_count==0:
    sys.stderr.write("Checkpoint after %d commits\n" % count)
    wr('checkpoint')
    wr()
  return count
|
|
|
|
|
|
|
|
|
|
def get_parent_mark(parent,marks):
  """Return the fast-import reference for a parent revision.

  Looks the parent up in the marks cache under its 1-based key and
  falls back to the ':%d' mark syntax for revisions exported in the
  current session."""
  key=str(parent+1)
  if key in marks:
    return marks[key]
  return ':%d' % (parent+1)
|
|
|
|
|
|
2007-03-07 11:38:56 +00:00
|
|
|
def mismatch(f1,f2):
  """Return True when two file revision nodes have differing hashes."""
  h1=node.hex(f1)
  h2=node.hex(f2)
  return h1!=h2
|
|
|
|
|
|
2007-03-07 11:38:56 +00:00
|
|
|
def outer_set(dleft,dright,l,c,r):
  """Compare two manifest dicts and sort their files into three lists.

  Files only in dleft are appended to l (added), files in both whose
  checksums differ to c (changed), files only in dright to r (removed).
  Returns the (l,c,r) triple."""
  for name in dleft.keys():
    other=dright.get(name,None)
    if other==None:
      # we have the file but our parent hasn't: added
      l.append(name)
    elif mismatch(dleft[name],other):
      # both have it but checksums differ: changed
      c.append(name)
  for name in dright.keys():
    if dleft.get(name,None)==None:
      # parent has the file but we don't: removed
      r.append(name)
  # changes were already collected in the first pass above
  return l,c,r
|
2007-03-07 01:52:58 +00:00
|
|
|
|
|
|
|
|
def get_filechanges(repo,revision,parents,mleft):
  """Given some repository and revision, find all changed/deleted files.

  Compares the revision's manifest (mleft) against the manifest of every
  existing parent, accumulating added (l), changed (c) and removed (r)
  file lists via outer_set().  Returns (l,c,r).

  Note: the original also built sorted key lists of both manifests here,
  but they were never used (outer_set works on the dicts directly), so
  that dead code is gone."""
  l,c,r=[],[],[]
  for p in parents:
    # negative parent revisions do not exist: skip
    if p<0: continue
    mright=repo.changectx(p).manifest()
    l,c,r=outer_set(mleft,mright,l,c,r)
  return l,c,r
|
2007-03-06 17:00:25 +00:00
|
|
|
|
2007-03-09 12:07:08 +00:00
|
|
|
def get_author(logmessage,committer,authors):
  """Derive the git author of a patch from Signed-off-by trailers.

  git distinguishes between author and committer; hg does not.  Walk
  the log message from the bottom, skipping trailing empty lines, then
  follow the run of consecutive Signed-off-by lines upwards and take
  the topmost one of that run as the author (cleaned up through the
  authors table).  Only a trailer block at the very end of the message
  counts, so a stray "Signed-off-by: foo" in the middle of the text is
  ignored.  Falls back to the committer when no trailer is found."""
  lines=logmessage.split('\n')
  idx=len(lines)-1
  # skip trailing empty lines from the bottom up
  while idx>=0 and len(lines[idx].strip())==0:
    idx-=1
  # collect the run of consecutive sob lines ending the message,
  # remembering the first (topmost) one
  topmost=None
  while idx>=0:
    m=sob_re.match(lines[idx])
    if m==None:
      break
    topmost=m
    idx-=1
  if topmost!=None:
    return fixup_user(topmost.group(1),authors)
  return committer
|
|
|
|
|
|
2007-03-13 10:59:22 +00:00
|
|
|
def export_file_contents(ctx,manifest,files):
  """Emit an 'M ... inline' command plus data for every file in files,
  writing a progress message to stderr every cfg_export_boundary files."""
  done=0
  files.sort()
  total=len(files)
  for name in files:
    filectx=ctx.filectx(name)
    data=filectx.data()
    wr('M %s inline %s' % (gitmode(manifest.execf(name)),name))
    # measure the data directly; fctx.size() caused trouble
    wr('data %d' % len(data))
    wr(data)
    done+=1
    if done%cfg_export_boundary==0:
      sys.stderr.write('Exported %d/%d files\n' % (done,total))
  if total>cfg_export_boundary:
    sys.stderr.write('Exported %d/%d files\n' % (done,total))
|
|
|
|
|
|
2007-03-14 10:02:15 +00:00
|
|
|
def is_merge(parents):
  """Return True when more than one of the given parent revisions
  actually exists (nonexistent parents are encoded as negative)."""
  return sum(1 for parent in parents if parent>=0)>1
|
|
|
|
|
|
2007-03-12 08:00:18 +00:00
|
|
|
def export_commit(ui,repo,revision,marks,heads,last,max,count,authors,sob):
  """Emit one hg revision as a git-fast-import 'commit' command.

  Writes the commit header, links it to its parent(s) via 'from'/'merge'
  where needed, then streams the deleted files and file contents.
  Updates the marks/heads/last caches as a side effect and returns the
  command counter advanced through checkpoint()."""
  (revnode,_,user,(time,timezone),files,desc,branch,_)=get_changeset(ui,repo,revision,authors)
  parents=repo.changelog.parentrevs(revision)

  wr('commit refs/heads/%s' % branch)
  # marks are 1-based; revision r is always mark :r+1
  wr('mark :%d' % (revision+1))
  # only emit a separate author line when Signed-off-by parsing is on
  if sob:
    wr('author %s %d %s' % (get_author(desc,user,authors),time,timezone))
  wr('committer %s %d %s' % (user,time,timezone))
  wr('data %d' % (len(desc)+1)) # wtf?
  wr(desc)
  wr()

  src=heads.get(branch,'')
  link=''
  if src!='':
    # if we have a cached head, this is an incremental import: initialize it
    # and kill reference so we won't init it again
    wr('from %s' % src)
    heads[branch]=''
    sys.stderr.write('Initializing branch [%s] to parent [%s]\n' %
        (branch,src))
    link=src # avoid making a merge commit for incremental import
  elif link=='' and not heads.has_key(branch) and revision>0:
    # newly created branch and not the first one: connect to parent
    tmp=get_parent_mark(parents[0],marks)
    wr('from %s' % tmp)
    sys.stderr.write('Link new branch [%s] to parent [%s]\n' %
        (branch,tmp))
    link=tmp # avoid making a merge commit for branch fork

  if parents:
    l=last.get(branch,revision)
    for p in parents:
      # 1) as this commit implicitely is the child of the most recent
      #    commit of this branch, ignore this parent
      # 2) ignore nonexistent parents
      # 3) merge otherwise
      if p==l or p==revision or p<0:
        continue
      tmp=get_parent_mark(p,marks)
      # if we fork off a branch, don't merge with our parent via 'merge'
      # as we have 'from' already above
      if tmp==link:
        continue
      sys.stderr.write('Merging branch [%s] with parent [%s] from [r%d]\n' %
          (branch,tmp,p))
      wr('merge %s' % tmp)

  # remember where this branch now points
  last[branch]=revision
  heads[branch]=''
  # we need this later to write out tags
  marks[str(revision)]=':%d'%(revision+1)

  ctx=repo.changectx(str(revision))
  man=ctx.manifest()
  added,changed,removed,type=[],[],[],''

  if revision==0:
    # first revision: feed in full manifest
    added=man.keys()
    type='full'
  elif is_merge(parents):
    # later merge revision: feed in changed manifest
    # for many files comparing checksums is expensive so only do it for
    # merges where we really need it due to hg's revlog logic
    added,changed,removed=get_filechanges(repo,revision,parents,man)
    type='thorough delta'
  else:
    # later non-merge revision: feed in changed manifest
    # if we have exactly one parent, just take the changes from the
    # manifest without expensively comparing checksums
    f=repo.status(repo.lookup(parents[0]),revnode)[:3]
    # repo.status returns (modified,added,removed,...): reorder
    added,changed,removed=f[1],f[0],f[2]
    type='simple delta'

  sys.stderr.write('Exporting %s revision %d/%d with %d/%d/%d added/changed/removed files\n' %
      (type,revision+1,max,len(added),len(changed),len(removed)))

  # deletions first, then the new/changed file contents
  map(lambda r: wr('D %s' % r),removed)
  export_file_contents(ctx,man,added+changed)
  wr()

  return checkpoint(count)
|
|
|
|
|
|
2007-03-09 11:09:57 +00:00
|
|
|
def export_tags(ui,repo,marks_cache,start,end,count,authors):
  """Emit 'reset refs/tags/...' commands for every hg tag whose revision
  lies inside [start,end).  Returns the updated command counter."""
  for tag,node in repo.tagslist():
    # 'tip' is hg's alias for the latest revision, not a real tag
    if tag=='tip':
      continue
    rev=repo.changelog.rev(node)
    # skip tags outside of this run's import window
    if rev<start or rev>=end:
      continue
    ref=get_parent_mark(rev,marks_cache)
    if ref==None:
      sys.stderr.write('Failed to find reference for creating tag'
          ' %s at r%d\n' % (tag,rev))
      continue
    sys.stderr.write('Exporting tag [%s] at [hg r%d] [git %s]\n' % (tag,rev,ref))
    wr('reset refs/tags/%s' % tag)
    wr('from %s' % ref)
    wr()
    count=checkpoint(count)
  return count
|
|
|
|
|
|
2007-03-12 08:54:30 +00:00
|
|
|
def load_authors(filename):
  """Read an author map file of 'hg user = git user' lines.

  Returns a dict mapping the (stripped) hg user string to the
  (stripped) git author string; an empty dict when the file does not
  exist.  Malformed lines are reported on stderr and skipped.

  Fix: the summary line used to report the raw line count, which
  overstated the result when malformed lines were skipped; it now
  reports the number of mappings actually loaded."""
  cache={}
  if not os.path.exists(filename):
    return cache
  f=open(filename,'r')
  lineno=0
  lre=re.compile('^([^=]+)[ ]*=[ ]*(.+)$')
  for line in f.readlines():
    lineno+=1
    m=lre.match(line)
    if m==None:
      sys.stderr.write('Invalid file format in [%s], line %d\n' % (filename,lineno))
      continue
    # put key:value in cache, both stripped of surrounding whitespace
    cache[m.group(1).strip()]=m.group(2).strip()
  f.close()
  sys.stderr.write('Loaded %d authors\n' % len(cache))
  return cache
|
|
|
|
|
|
2007-03-13 16:43:20 +00:00
|
|
|
def verify_heads(ui,repo,cache,force):
  """Sanity-check branch heads before an incremental run.

  Compares each hg branch head's cached git SHA1 against the ref file
  under $GIT_DIR/refs/heads/, and checks that every hg head belongs to
  a named branch.  Returns False on the first problem unless force is
  set; True when everything matches."""
  def getsha1(branch):
    # read the git SHA1 this branch ref currently points at;
    # None when the ref file is missing/unreadable (fresh import)
    try:
      f=open(os.getenv('GIT_DIR','/dev/null')+'/refs/heads/'+branch)
      sha1=f.readlines()[0].split('\n')[0]
      f.close()
      return sha1
    except IOError:
      return None

  branches=repo.branchtags()
  # sort branches by descending revision (newest head first)
  l=[(-repo.changelog.rev(n), n, t) for t, n in branches.items()]
  l.sort()

  # get list of hg's branches to verify, don't take all git has
  for _,_,b in l:
    b=get_branch(b)
    sha1=getsha1(b)
    c=cache.get(b)
    if sha1!=None and c!=None:
      sys.stderr.write('Verifying branch [%s]\n' % b)
    if sha1!=c:
      sys.stderr.write('Error: Branch [%s] modified outside hg-fast-export:'
          '\n%s (repo) != %s (cache)\n' % (b,sha1,c))
      if not force: return False

  # verify that branch has exactly one head
  t={}
  for h in repo.heads():
    (_,_,_,_,_,_,branch,_)=get_changeset(ui,repo,h)
    if t.get(branch,False):
      sys.stderr.write('Error: repository has at least one unnamed head: hg r%s\n' %
          repo.changelog.rev(h))
      if not force: return False
    t[branch]=True

  return True
|
|
|
|
|
|
2007-03-13 16:43:20 +00:00
|
|
|
def hg2git(repourl,m,marksfile,headsfile,tipfile,authors=None,sob=False,force=False):
  """Drive the (possibly incremental) export of the hg repository at
  repourl as a git-fast-import stream on stdout.

  m: maximum hg revision to export, or negative for all of them.
  marksfile/headsfile/tipfile: cache files carrying state between runs.
  authors: optional author map (hg user -> git author).
  Returns 0 on success, 1 when head verification fails.

  Fixes: 'authors={}' was a shared mutable default argument (replaced
  by a None sentinel, backward compatible); the locals 'min'/'max'
  shadowed the builtins and were renamed."""
  if authors==None:
    authors={}
  _max=int(m)

  marks_cache=load_cache(marksfile)
  heads_cache=load_cache(headsfile)
  state_cache=load_cache(tipfile)

  ui,repo=setup_repo(repourl)

  if not verify_heads(ui,repo,heads_cache,force):
    return 1

  tip=repo.changelog.count()

  # resume where the previous run stopped ('tip' in the state cache)
  min_rev=int(state_cache.get('tip',0))
  max_rev=_max
  if _max<0:
    max_rev=tip

  c=0
  last={}
  for rev in range(min_rev,max_rev):
    c=export_commit(ui,repo,rev,marks_cache,heads_cache,last,max_rev,c,authors,sob)

  c=export_tags(ui,repo,marks_cache,min_rev,max_rev,c,authors)

  sys.stderr.write('Issued %d commands\n' % c)

  state_cache['tip']=max_rev
  state_cache['repo']=repourl
  save_cache(tipfile,state_cache)

  return 0
|
|
|
|
|
|
|
|
|
|
if __name__=='__main__':
  def bail(parser,opt):
    """Complain about a missing mandatory option and exit with status 2."""
    sys.stderr.write('Error: No %s option given\n' % opt)
    parser.print_help()
    sys.exit(2)

  parser=OptionParser()

  parser.add_option("-m","--max",type="int",dest="max",
      help="Maximum hg revision to import")
  parser.add_option("--marks",dest="marksfile",
      help="File to read git-fast-import's marks from")
  parser.add_option("--heads",dest="headsfile",
      help="File to read last run's git heads from")
  parser.add_option("--status",dest="statusfile",
      help="File to read status from")
  parser.add_option("-r","--repo",dest="repourl",
      help="URL of repo to import")
  parser.add_option("-s",action="store_true",dest="sob",
      default=False,help="Enable parsing Signed-off-by lines")
  parser.add_option("-A","--authors",dest="authorfile",
      help="Read authormap from AUTHORFILE")
  parser.add_option("-f","--force",action="store_true",dest="force",
      default=False,help="Ignore validation errors by force")

  (options,args)=parser.parse_args()

  m=-1
  if options.max!=None: m=options.max

  # bug fix: each mandatory option is now checked itself; previously
  # options.marksfile was tested four times, so a missing --heads,
  # --status or --repo slipped through to a crash later on
  if options.marksfile==None: bail(parser,'--marks')
  if options.headsfile==None: bail(parser,'--heads')
  if options.statusfile==None: bail(parser,'--status')
  if options.repourl==None: bail(parser,'--repo')

  a={}
  if options.authorfile!=None:
    a=load_authors(options.authorfile)

  sys.exit(hg2git(options.repourl,m,options.marksfile,options.headsfile,
    options.statusfile,authors=a,sob=options.sob,force=options.force))
|