#!/usr/bin/env python
"""
svn2svn.py

Replicate (replay) changesets from one SVN repository to another:
* Maintains full logical history (e.g. uses "svn copy" for renames).
* Maintains original commit messages.
* Cannot maintain original commit dates, but appends the original commit date
  to each commit message: "Date: %d".
* Optionally maintain source author info. (Only supported if accessing
  target SVN repo via file://)
* Optionally run an external shell script before each replayed commit
  to give the ability to dynamically exclude or modify files as part
  of the replay.

License: GPLv2, the same as hgsvn.
Author: Tony Duckles (https://github.com/tonyduckles/svn2svn)
(This is a forked and modified version of http://code.google.com/p/svn2svn/)
"""
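
# Example invocation (hypothetical repository URLs; the option flags are the
# ones registered with OptionParser in main() below):
#
#   python svn2svn.py -a file:///var/svn/source-repo/trunk file:///var/svn/target-repo/trunk
#
# "-a" keeps the original author info, which (per the notes above) is only
# supported when the target repository is accessed via file://.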

import os
import sys
import time
import locale
import shutil
import select
import calendar
import traceback

from optparse import OptionParser
from subprocess import Popen, PIPE
from datetime import datetime

# Use the fastest available ElementTree implementation
try:
    from xml.etree import cElementTree as ET
except ImportError:
    try:
        from xml.etree import ElementTree as ET
    except ImportError:
        try:
            import cElementTree as ET
        except ImportError:
            from elementtree import ElementTree as ET

svn_log_args = ['log', '--xml', '-v']
svn_info_args = ['info', '--xml']
svn_checkout_args = ['checkout', '-q']
svn_status_args = ['status', '--xml', '-v', '--ignore-externals']

debug_runsvn_timing = False  # Display how long each "svn" OS command took to run?
# Setup verbosity options
runsvn_showcmd = True        # Display every "svn" OS command we run?
runsvn_showout = False       # Display the stdout results from every "svn" OS command we run?
svnlog_verbose = True        # Display each action + changed-path as we walk the history?

# Define exception class
class ExternalCommandFailed(RuntimeError):
    """
    An external command failed.
    """

def display_error(message, raise_exception=True):
    """
    Display error message, then terminate.
    """
    print "Error:", message
    if raise_exception:
        raise ExternalCommandFailed
    else:
        sys.exit(1)

# Windows compatibility code by Bill Baxter
if os.name == "nt":
    def find_program(name):
        """
        Find the name of the program for Popen.
        Windows is finicky about having the complete file name. Popen
        won't search the %PATH% for you automatically.
        (Adapted from ctypes.find_library)
        """
        # See MSDN for the REAL search order.
        base, ext = os.path.splitext(name)
        if ext:
            exts = [ext]
        else:
            exts = ['.bat', '.exe']
        for directory in os.environ['PATH'].split(os.pathsep):
            for e in exts:
                fname = os.path.join(directory, base + e)
                if os.path.exists(fname):
                    return fname
        return None
else:
    def find_program(name):
        """
        Find the name of the program for Popen.
        On Unix, popen isn't picky about having absolute paths.
        """
        return name

def shell_quote(s):
    """
    Quote a string so it can be safely echoed/pasted as a shell argument.
    """
    if os.name == "nt":
        q = '"'
    else:
        q = "'"
    return q + s.replace('\\', '\\\\').replace("'", "'\"'\"'") + q
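
# Illustrative only: on a Unix host shell_quote() wraps the argument in single
# quotes and escapes embedded backslashes/quotes, so the command line echoed by
# run_svn() below can be pasted back into a shell, e.g.
#   shell_quote("it's")  ->  'it'"'"'s'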

locale_encoding = locale.getpreferredencoding()

def run_svn(args, fail_if_stderr=False, encoding="utf-8"):
    """
    Run an "svn" OS command with the given arguments; exit if the command failed.
    Returns the command's stdout output.
    """
    def _transform_arg(a):
        if isinstance(a, unicode):
            a = a.encode(encoding or locale_encoding)
        elif not isinstance(a, str):
            a = str(a)
        return a
    t_args = map(_transform_arg, args)

    cmd = find_program("svn")
    cmd_string = str(" ".join(map(shell_quote, [cmd] + t_args)))
    if runsvn_showcmd:
        print "$", "(" + os.getcwd() + ")", cmd_string
    if debug_runsvn_timing:
        time1 = time.time()
    pipe = Popen([cmd] + t_args, executable=cmd, stdout=PIPE, stderr=PIPE)
    out, err = pipe.communicate()
    if debug_runsvn_timing:
        time2 = time.time()
        print "(" + str(round(time2 - time1, 4)) + " elapsed)"
    if out and runsvn_showout:
        print out
    if pipe.returncode != 0 or (fail_if_stderr and err.strip()):
        display_error("External program failed (return code %d): %s\n%s"
                      % (pipe.returncode, cmd_string, err))
    return out
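
# Minimal usage sketch (assumes an "svn" binary on the PATH and a working copy
# in the current directory); every other helper in this script funnels its
# svn invocations through run_svn():
#
#   xml_out = run_svn(svn_info_args + ["."], fail_if_stderr=True)
#   info = parse_svn_info_xml(xml_out)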

def svn_date_to_timestamp(svn_date):
    """
    Parse an SVN date as read from the XML output and
    return the corresponding timestamp.
    """
    # Strip microseconds and timezone (always UTC, hopefully)
    # XXX there are various ISO datetime parsing routines out there,
    # cf. http://seehuhn.de/comp/pdate
    date = svn_date.split('.', 2)[0]
    time_tuple = time.strptime(date, "%Y-%m-%dT%H:%M:%S")
    return calendar.timegm(time_tuple)
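
# Example: SVN's XML output reports dates like "2011-02-09T18:30:00.123456Z";
# svn_date_to_timestamp() drops the fractional seconds and returns the UTC
# epoch value, e.g.
#   svn_date_to_timestamp("2011-02-09T18:30:00.123456Z")  ->  1297276200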

def parse_svn_info_xml(xml_string):
    """
    Parse the XML output from an "svn info" command and extract
    useful information as a dict.
    """
    d = {}
    tree = ET.fromstring(xml_string)
    entry = tree.find('.//entry')
    d['url'] = entry.find('url').text
    d['revision'] = int(entry.get('revision'))
    d['repos_url'] = tree.find('.//repository/root').text
    d['last_changed_rev'] = int(tree.find('.//commit').get('revision'))
    d['kind'] = entry.get('kind')
    return d

def parse_svn_log_xml(xml_string):
    """
    Parse the XML output from an "svn log" command and extract
    useful information as a list of dicts (one per log changeset).
    """
    l = []
    tree = ET.fromstring(xml_string)
    for entry in tree.findall('logentry'):
        d = {}
        d['revision'] = int(entry.get('revision'))
        # Some revisions don't have authors, most notably
        # the first revision in a repository.
        author = entry.find('author')
        d['author'] = author is not None and author.text or None
        d['date'] = svn_date_to_timestamp(entry.find('date').text)
        # Some revisions may have empty commit message
        message = entry.find('msg')
        message = message is not None and message.text is not None \
                        and message.text.strip() or ""
        # Replace DOS return '\r\n' and MacOS return '\r' with unix return '\n'
        d['message'] = message.replace('\r\n', '\n').replace('\n\r', '\n'). \
                        replace('\r', '\n')
        paths = d['changed_paths'] = []
        for path in entry.findall('.//path'):
            copyfrom_rev = path.get('copyfrom-rev')
            if copyfrom_rev:
                copyfrom_rev = int(copyfrom_rev)
            paths.append({
                'path': path.text,
                'kind': path.get('kind'),
                'action': path.get('action'),
                'copyfrom_path': path.get('copyfrom-path'),
                'copyfrom_revision': copyfrom_rev,
            })
        # Need to sort paths (i.e. into hierarchical order), so that process_svn_log_entry()
        # can process actions in depth-first order.
        paths.sort(key=lambda x: x['path'])
        l.append(d)
    return l
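
# Shape of each dict returned by parse_svn_log_xml() (values are illustrative):
#   {'revision': 123,
#    'author': 'alice',
#    'date': 1297276200,
#    'message': 'Fix bug',
#    'changed_paths': [{'path': '/trunk/projectA/file1.txt', 'kind': 'file',
#                       'action': 'M', 'copyfrom_path': None,
#                       'copyfrom_revision': None}]}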

def parse_svn_status_xml(xml_string, base_dir=None):
    """
    Parse the XML output from an "svn status" command and extract
    useful info as a list of dicts (one per status entry).
    """
    l = []
    tree = ET.fromstring(xml_string)
    for entry in tree.findall('.//entry'):
        d = {}
        path = entry.get('path')
        if base_dir is not None:
            assert path.startswith(base_dir)
            path = path[len(base_dir):].lstrip('/\\')
        d['path'] = path
        wc_status = entry.find('wc-status')
        if wc_status.get('item') == 'external':
            d['type'] = 'external'
        elif wc_status.get('revision') is not None:
            d['type'] = 'normal'
        else:
            d['type'] = 'unversioned'
        l.append(d)
    return l
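
# Each status dict carries the (optionally working-copy-relative) 'path' plus a
# 'type' of 'normal', 'external', or 'unversioned'; in_svn() below relies on
# the 'normal' case. Illustrative result:
#   parse_svn_status_xml(xml, "/tmp/wc")  ->  [{'path': 'projectA', 'type': 'normal'}]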

def get_svn_info(svn_url_or_wc, rev_number=None):
    """
    Get SVN information for the given URL or working copy,
    with an optionally specified revision number.
    Returns a dict as created by parse_svn_info_xml().
    """
    if rev_number is not None:
        args = [svn_url_or_wc + "@" + str(rev_number)]
    else:
        args = [svn_url_or_wc]
    xml_string = run_svn(svn_info_args + args, fail_if_stderr=True)
    return parse_svn_info_xml(xml_string)
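
# Illustrative result (keys come from parse_svn_info_xml() above; the URL and
# numbers are made up):
#   get_svn_info("file:///path/to/repos/trunk")
#   -> {'url': 'file:///path/to/repos/trunk', 'revision': 42,
#       'repos_url': 'file:///path/to/repos', 'last_changed_rev': 40, 'kind': 'dir'}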

def svn_checkout(svn_url, checkout_dir, rev_number=None):
    """
    Checkout the given URL at an optional revision number.
    """
    args = []
    if rev_number is not None:
        args += ['-r', rev_number]
    args += [svn_url, checkout_dir]
    return run_svn(svn_checkout_args + args)

def run_svn_log(svn_url_or_wc, rev_start, rev_end, limit, stop_on_copy=False):
    """
    Fetch up to 'limit' SVN log entries between the given revisions.
    """
    if stop_on_copy:
        args = ['--stop-on-copy']
    else:
        args = []
    url = str(svn_url_or_wc)
    if rev_start != 'HEAD' and rev_end != 'HEAD':
        args += ['-r', '%s:%s' % (rev_start, rev_end)]
        if not "@" in svn_url_or_wc:
            url += "@" + str(rev_end)
    args += ['--limit', str(limit), url]
    xml_string = run_svn(svn_log_args + args)
    return parse_svn_log_xml(xml_string)

def get_svn_status(svn_wc, flags=None):
    """
    Get SVN status information about the given working copy.
    """
    # Ensure proper stripping by canonicalizing the path
    svn_wc = os.path.abspath(svn_wc)
    args = []
    if flags:
        args += [flags]
    args += [svn_wc]
    xml_string = run_svn(svn_status_args + args)
    return parse_svn_status_xml(xml_string, svn_wc)

def get_one_svn_log_entry(svn_url, rev_start, rev_end, stop_on_copy=False):
    """
    Get the first SVN log entry in the requested revision range.
    """
    entries = run_svn_log(svn_url, rev_start, rev_end, 1, stop_on_copy)
    if not entries:
        display_error("No SVN log for %s between revisions %s and %s" %
                      (svn_url, rev_start, rev_end))
    return entries[0]

def get_first_svn_log_entry(svn_url, rev_start, rev_end):
    """
    Get the first log entry after/at the given revision number in an SVN branch.
    By default the revision number is set to 0, which will give you the log
    entry corresponding to the branch creation.

    NOTE: to know whether the branch creation corresponds to an SVN import or
    a copy from another branch, inspect elements of the 'changed_paths' entry
    in the returned dictionary.
    """
    return get_one_svn_log_entry(svn_url, rev_start, rev_end, stop_on_copy=True)

def get_last_svn_log_entry(svn_url, rev_start, rev_end):
    """
    Get the last log entry before/at the given revision number in an SVN branch.
    By default the revision number is set to HEAD, which will give you the log
    entry corresponding to the latest commit in the branch.
    """
    return get_one_svn_log_entry(svn_url, rev_end, rev_start, stop_on_copy=True)

log_duration_threshold = 10.0
log_min_chunk_length = 10

def iter_svn_log_entries(svn_url, first_rev, last_rev):
    """
    Iterate over SVN log entries between first_rev and last_rev.

    This function features chunked log fetching so that it isn't too nasty
    to the SVN server if many entries are requested.
    """
    cur_rev = first_rev
    chunk_length = log_min_chunk_length
    chunk_interval_factor = 1.0
    while last_rev == "HEAD" or cur_rev <= last_rev:
        start_t = time.time()
        stop_rev = min(last_rev, cur_rev + int(chunk_length * chunk_interval_factor))
        entries = run_svn_log(svn_url, cur_rev, stop_rev, chunk_length)
        duration = time.time() - start_t
        if not entries:
            if stop_rev == last_rev:
                break
            cur_rev = stop_rev + 1
            chunk_interval_factor *= 2.0
            continue
        for e in entries:
            yield e
            cur_rev = e['revision'] + 1
        # Adapt chunk length based on measured request duration
        if duration < log_duration_threshold:
            chunk_length = int(chunk_length * 2.0)
        elif duration > log_duration_threshold * 2:
            chunk_length = max(log_min_chunk_length, int(chunk_length / 2.0))
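
# Sketch of how the chunking behaves (numbers are illustrative): the first
# request asks for up to log_min_chunk_length (10) entries; if a request
# finishes in under log_duration_threshold seconds, the next chunk doubles to
# 20, then 40, and so on, while a request slower than twice the threshold
# halves the chunk back toward log_min_chunk_length.
#
#   for entry in iter_svn_log_entries("file:///path/to/repos/trunk", 1, "HEAD"):
#       print entry['revision'], entry['message']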

def commit_from_svn_log_entry(entry, files=None, keep_author=False):
    """
    Given an SVN log entry and an optional sequence of files, do an svn commit.
    """
    # TODO: Run optional external shell hook here, for doing pre-commit filtering
    # This will use the local timezone for displaying commit times
    timestamp = int(entry['date'])
    svn_date = str(datetime.fromtimestamp(timestamp))
    # Uncomment this one if you prefer UTC commit times
    #svn_date = "%d 0" % timestamp
    if keep_author:
        options = ["ci", "--force-log", "-m", entry['message'] + "\nDate: " + svn_date, "--username", entry['author']]
    else:
        options = ["ci", "--force-log", "-m", entry['message'] + "\nDate: " + svn_date + "\nAuthor: " + entry['author']]
    if files:
        options += list(files)
    run_svn(options)
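
# With keep_author=False, the replayed commit message ends up looking like
# (illustrative):
#
#   Original commit message text
#   Date: 2011-02-09 10:30:00
#   Author: alice
#
# whereas keep_author=True appends only the "Date:" line and passes the
# original author via "--username" instead.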

def in_svn(p):
    """
    Check if a given file/folder is being tracked by Subversion.
    Prior to SVN 1.6, we could "cheat" and look for the existence of ".svn" directories.
    With SVN 1.7 and beyond, WC-NG means only a single top-level ".svn" at the root of the working-copy.
    Use "svn status" to check the status of the file/folder.
    """
    # TODO: Is there a better way to do this?
    entries = get_svn_status(p)
    if not entries:
        return False
    d = entries[0]
    return (d['type'] == 'normal')

def find_svn_ancestors(source_repos_url, source_base, source_offset, copyfrom_path, copyfrom_rev):
    """
    Given a copy-from path (copyfrom_path), walk the SVN history backwards to inspect
    the ancestry of that path. Build a collection of copyfrom_path+revision pairs
    for each of the branch-copies since the initial branch-creation. If we find a
    copyfrom_path which source_base is a substring match of (e.g. we crawled back to
    the initial branch-copy from trunk), then return the collection of ancestor paths.
    Otherwise, copyfrom_path has no ancestry compared to source_base.

    This is useful when comparing "trunk" vs. "branch" paths, to handle cases where a
    file/folder was renamed in a branch and then that branch was merged back to trunk.

    Parameters:
    * source_repos_url = Full URL to root of repository, e.g. 'file:///path/to/repos'
    * source_base = e.g. '/trunk'
    * source_offset = e.g. 'projectA/file1.txt'
    * copyfrom_path = e.g. '/branches/bug123/projectA/file1.txt'
    * copyfrom_rev = revision that copyfrom_path was copied at
    """
    working_path = copyfrom_path
    working_base = copyfrom_path[:-len(source_offset)].rstrip('/')
    working_offset = source_offset.strip('/')
    working_rev = copyfrom_rev
    ancestors = [{'path': [working_base, working_offset], 'revision': working_rev}]
    done = False
    while not done:
        # Get the first "svn log" entry for this path (relative to @rev)
        #working_path = working_base + "/" + working_offset
        print ">> find_svn_ancestors: " + source_repos_url + working_path + "@" + str(working_rev) + \
              " (" + working_base + " " + working_offset + ")"
        log_entry = get_first_svn_log_entry(source_repos_url + working_path + "@" + str(working_rev), 1, str(working_rev))
        # Unless we find another copy-from hop to follow, stop after this pass.
        done = True
        # Find the action for our working_path in this revision
        for d in log_entry['changed_paths']:
            path = d['path']
            if not path in working_path:
                continue
            # Check action-type for this file
            action = d['action']
            if action not in 'MARD':
                display_error("In SVN rev. %d: action '%s' not supported. \
                               Please report a bug!" % (log_entry['revision'], action))
            debug_desc = ": " + action + " " + path
            if d['copyfrom_path']:
                debug_desc += " (from " + d['copyfrom_path'] + "@" + str(d['copyfrom_revision']) + ")"
            print ">> find_svn_ancestors" + debug_desc
            if action == 'R':
                # If file/folder was replaced, it has no ancestor
                return []
            if action == 'D':
                # If file/folder was deleted, it has no ancestor
                return []
            if action == 'A':
                # If file/folder was added but not a copy, it has no ancestor
                if not d['copyfrom_path']:
                    return []
                # Else, file/folder was added and is a copy, so check ancestors
                path_old = d['copyfrom_path']
                working_path = working_path.replace(path, path_old)
                if working_base in working_path:
                    # If the new and old working_path share the same working_base, just need to update working_offset.
                    working_offset = working_path[len(working_base)+1:]
                else:
                    # Else, assume that working_base has changed but working_offset is the same, e.g. a re-branch.
                    # TODO: Is this a safe assumption?!
                    working_base = working_path[:-len(working_offset)].rstrip('/')
                working_rev = d['copyfrom_revision']
                print ">> find_svn_ancestors: copy-from: " + working_base + " " + working_offset + "@" + str(working_rev)
                ancestors.append({'path': [working_base, working_offset], 'revision': working_rev})
                # If we found a copy-from case which matches our source_base, we're done
                if (path_old == source_base) or (path_old.startswith(source_base + "/")):
                    return ancestors
                # Else, follow the copy and keep on searching
                done = False
                break
    # No ancestry chain leading back to source_base was found
    return []

def replay_svn_ancestors(ancestors, source_repos_url, source_url, target_url):
    """
    Given an array of ancestor info (find_svn_ancestors), replay the history
    to correctly track renames ("svn copy/move") across branch-merges.

    For example, consider a sequence of events like this:
    1. svn copy /trunk /branches/fix1
    2. (Make some changes on /branches/fix1)
    3. svn copy /branches/fix1/Proj1 /branches/fix1/Proj2   # Rename folder
    4. svn copy /branches/fix1/Proj2/file1.txt /branches/fix1/Proj2/file2.txt   # Rename file inside renamed folder
    5. svn co /trunk && svn merge /branches/fix1
    After the merge and commit, "svn log -v" will show a delete of /trunk/Proj1
    and an add of /trunk/Proj2 copy-from /branches/fix1/Proj2. If we were just
    to do a straight "svn export+add" based on the /branches/fix1/Proj2 folder,
    we'd lose the logical history that Proj2/file2.txt is really a descendant
    of Proj1/file1.txt.

    'source_repos_url' is the full URL to the root of the source repository.
    'ancestors' is the array returned by find_svn_ancestors() with the final
    destination info appended to it by process_svn_log_entry().
    """
    # Ignore ancestors[0], which is the original (pre-branch-copy) trunk path
    # Ignore ancestors[1], which is the original branch-creation commit
    # Ignore ancestors[n], which is the final commit back to trunk
    for idx in range(1, len(ancestors)-1):
        ancestor = ancestors[idx]
        source_base = ancestor['path'][0]
        source_offset = ancestor['path'][1]
        source_path = source_base + "/" + source_offset
        source_rev = ancestor['revision']
        source_rev_next = ancestors[idx+1]['revision']
        # Do a "svn log" on the _parent_ directory of source_path, since trying to get log info
        # for the "old path" on the revision where the copy/move happened will fail.
        if "/" in source_path:
            p_source_path = source_path[:source_path.rindex('/')]
        else:
            p_source_path = ""
        print ">> replay_svn_ancestors: ["+str(idx)+"]" + source_path+"@"+str(source_rev) + " ["+p_source_path+"@"+str(source_rev)+":"+str(source_rev_next-1)+"]"
        it_log_entries = iter_svn_log_entries(source_repos_url+p_source_path, source_rev, source_rev_next-1)
        for log_entry in it_log_entries:
            #print ">> replay_svn_ancestors: log_entry: (" + source_repos_url+source_base + ")"
            process_svn_log_entry(log_entry, source_repos_url, source_repos_url+source_base, target_url)

def process_svn_log_entry(log_entry, source_repos_url, source_url, target_url, source_offset=""):
    """
    Process SVN changes from the given log entry.
    Returns array of all the paths in the working-copy that were changed,
    i.e. the paths which need to be "svn commit"'ed.

    'log_entry' is the array structure built by parse_svn_log_xml().
    'source_repos_url' is the full URL to the root of the source repository.
    'source_url' is the full URL to the source path in the source repository.
    'target_url' is the full URL to the target path in the target repository.
    """
    # Get the relative offset of source_url based on source_repos_url, e.g. u'/branches/bug123'
    source_base = source_url[len(source_repos_url):]
    print ">> process_svn_log_entry: " + source_url + " (" + source_base + ")"

    svn_rev = log_entry['revision']

    removed_paths = []
    modified_paths = []
    unrelated_paths = []
    commit_paths = []

    for d in log_entry['changed_paths']:
        if svnlog_verbose:
            msg = " " + d['action'] + " " + d['path']
            if d['copyfrom_path']:
                msg += " (from " + d['copyfrom_path'] + "@" + str(d['copyfrom_revision']) + ")"
            print msg
        # Get the full path for this changed_path
        # e.g. u'/branches/bug123/projectA/file1.txt'
        path = d['path']
        if not path.startswith(source_base + "/"):
            # Ignore changed files that are not part of this subdir
            if path != source_base:
                print ">> process_svn_log_entry: Unrelated path: " + path + " (" + source_base + ")"
                unrelated_paths.append(path)
            continue
        # Calculate the offset (based on source_base) for this changed_path
        # e.g. u'projectA/file1.txt'
        # (path = source_base + "/" + path_offset)
        path_offset = path[len(source_base):].strip("/")
        # Get the action for this path
        action = d['action']
        if action not in 'MARD':
            display_error("In SVN rev. %d: action '%s' not supported. \
                           Please report a bug!" % (svn_rev, action))

        # Try to be efficient and keep track of an explicit list of paths in the
        # working copy that changed. If we commit from the root of the working copy,
        # then SVN needs to crawl the entire working copy looking for pending changes.
        # But, if we gather too many paths to commit, then we wipe commit_paths below
        # and end-up doing a commit at the root of the working-copy.
        if len(commit_paths) < 100:
            commit_paths.append(path_offset)

        # Special-handling for replace's
        if action == 'R':
            # If file was "replaced" (deleted then re-added, all in same revision),
            # then we need to run the "svn rm" first, then change action='A'. This
            # lets the normal code below handle re-"svn add"'ing the files. This
            # should replicate the "replace".
            run_svn(["up", path_offset])
            run_svn(["remove", "--force", path_offset])
            action = 'A'

        # Handle all the various action-types
        # (Handle "add" first, for "svn copy/move" support)
        if action == 'A':
            # Determine where to export from
            copyfrom_rev = svn_rev
            copyfrom_path = path
            ancestors = []
            # Handle cases where this "add" was a copy from another URL in the source repos
            if d['copyfrom_revision']:
                copyfrom_rev = d['copyfrom_revision']
                copyfrom_path = d['copyfrom_path']
                print ">> process_svn_log_entry: copy-to: " + source_base + " " + source_offset + " " + path_offset
                if source_base in copyfrom_path:
                    # If the copy-from path is inside the current working-copy, no need to check ancestry.
                    copyfrom_path = copyfrom_path[len(source_base):].strip("/")
                    print ">> process_svn_log_entry: Found copy: " + copyfrom_path+"@"+str(copyfrom_rev)
                else:
                    # Check if the copy-from path has ancestry which traces back to our source_base
                    ancestors = find_svn_ancestors(source_repos_url, source_base, path_offset,
                                                   copyfrom_path, copyfrom_rev)
                if ancestors:
                    # Reverse the list, so that we loop in chronological order
                    ancestors.reverse()
                    # Append the current revision
                    ancestors.append({'path': [source_base, path_offset], 'revision': svn_rev})
                    # ancestors[0] is the original (pre-branch-copy) trunk path.
                    # ancestors[1] is the first commit on the new branch.
                    copyfrom_rev    = ancestors[0]['revision']
                    copyfrom_base   = ancestors[0]['path'][0]
                    copyfrom_offset = ancestors[0]['path'][1]
                    copyfrom_path   = copyfrom_base + copyfrom_offset
                    print ">> process_svn_log_entry: FOUND PARENT:"
                    for idx in range(0, len(ancestors)):
                        ancestor = ancestors[idx]
                        print "  ["+str(idx)+"] " + ancestor['path'][0]+" "+ancestor['path'][1]+"@"+str(ancestor['revision'])
                    #print ">> process_svn_log_entry: copyfrom_path (before): " + copyfrom_path + " source_base: " + source_base + " p: " + p
                    copyfrom_path = copyfrom_path[len(source_base):].strip("/")
                    #print ">> process_svn_log_entry: copyfrom_path (after): " + copyfrom_path

            # If this add was a copy-from, do a smart replay of the ancestors' history.
            # Else just copy/export the files from the source repo and "svn add" them.
            if d['copyfrom_revision'] and (ancestors or source_base in d['copyfrom_path']):
                print ">> process_svn_log_entry: svn_copy: copy-from: " + copyfrom_path+"@"+str(copyfrom_rev) + " source_base: "+source_base + " len(ancestors): " + str(len(ancestors))
                if not ancestors:
                    # If we don't have any ancestors, then this is just a straight "svn copy" in the current working-copy.
                    # ...but not if the target is already tracked, because this might run several times for the same path.
                    # TODO: Is there a better way to avoid recursion bugs? Maybe a collection of processed paths?
                    if not in_svn(path_offset):
                        run_svn(["copy", copyfrom_path, path_offset])
                else:
                    # Replay any actions which happened to this folder from the ancestor path(s).
                    replay_svn_ancestors(ancestors, source_repos_url, source_url, target_url)
            else:
                # Create (parent) directory if needed
                if d['kind'] == 'dir':
                    p_path = path_offset
                else:
                    p_path = os.path.dirname(path_offset).strip() or '.'
                if not os.path.exists(p_path):
                    os.makedirs(p_path)
                # Export the entire added tree.
                run_svn(["export", "--force", "-r", str(copyfrom_rev),
                         source_repos_url + copyfrom_path + "@" + str(copyfrom_rev), path_offset])
                run_svn(["add", "--parents", path_offset])
                # TODO: Need to copy SVN properties from source repos

        elif action == 'D':
            # Queue "svn remove" commands, to allow the action == 'A' handling the opportunity
            # to do smart "svn copy" handling on copy/move/renames.
            removed_paths.append(path_offset)

        elif action == 'R':
            display_error("Internal Error: Handling for action='R' not implemented yet.")

        elif action == 'M':
            modified_paths.append(path_offset)

        else:
            display_error("Internal Error: pull_svn_rev: Unhandled 'action' value: '" + action + "'")

    for r in removed_paths:
        # TODO: Is the "svn up" here needed?
        run_svn(["up", r])
        run_svn(["remove", "--force", r])

    for m in modified_paths:
        # TODO: Is the "svn up" here needed?
        run_svn(["up", m])
        m_url = source_url + "/" + m
        out = run_svn(["merge", "-c", str(svn_rev), "--non-recursive",
                       "--non-interactive", "--accept=theirs-full",
                       m_url+"@"+str(svn_rev), m])

    if unrelated_paths:
        print "Unrelated paths: (vs. '" + source_base + "')"
        print "*", unrelated_paths

    return commit_paths

def pull_svn_rev(log_entry, source_repos_url, source_url, target_url, keep_author=False):
    """
    Pull SVN changes from the given log entry.
    Returns the new SVN revision.
    If an exception occurs, it will rollback to revision 'svn_rev - 1'.
    """
    # Get the relative offset of source_url based on source_repos_url, e.g. u'/branches/bug123'
    source_base = source_url[len(source_repos_url):]

    svn_rev = log_entry['revision']
    commit_paths = process_svn_log_entry(log_entry, source_repos_url, source_url, target_url)

    # If we had too many individual paths to commit, wipe the list and just commit at
    # the root of the working copy.
    if len(commit_paths) > 99:
        commit_paths = []
    try:
        commit_from_svn_log_entry(log_entry, commit_paths, keep_author=keep_author)
    except ExternalCommandFailed:
        # try to ignore the Properties conflicts on files and dirs
        # use the copy from original_wc
        # TODO: Need to re-work this?
        #has_Conflict = False
        #for d in log_entry['changed_paths']:
        #    p = d['path']
        #    p = p[len(source_base):].strip("/")
        #    if os.path.isfile(p):
        #        if os.path.isfile(p + ".prej"):
        #            has_Conflict = True
        #            shutil.copy(original_wc + os.sep + p, p)
        #            p2=os.sep + p.replace('_', '__').replace('/', '_') \
        #               + ".prej-" + str(svn_rev)
        #            shutil.move(p + ".prej", os.path.dirname(original_wc) + p2)
        #            w="\n### Properties conflicts ignored:"
        #            print "%s %s, in revision: %s\n" % (w, p, svn_rev)
        #    elif os.path.isdir(p):
        #        if os.path.isfile(p + os.sep + "dir_conflicts.prej"):
        #            has_Conflict = True
        #            p2=os.sep + p.replace('_', '__').replace('/', '_') \
        #               + "_dir__conflicts.prej-" + str(svn_rev)
        #            shutil.move(p + os.sep + "dir_conflicts.prej",
        #                        os.path.dirname(original_wc) + p2)
        #            w="\n### Properties conflicts ignored:"
        #            print "%s %s, in revision: %s\n" % (w, p, svn_rev)
        #        out = run_svn(["propget", "svn:ignore",
        #                       original_wc + os.sep + p])
        #        if out:
        #            run_svn(["propset", "svn:ignore", out.strip(), p])
        #        out = run_svn(["propget", "svn:externel",
        #                       original_wc + os.sep + p])
        #        if out:
        #            run_svn(["propset", "svn:external", out.strip(), p])
        ## If we managed to ignore the conflicts, try the commit again:
        #if has_Conflict:
        #    commit_from_svn_log_entry(log_entry, commit_paths, keep_author=keep_author)
        raise ExternalCommandFailed
    return svn_rev

def main():
    usage = "Usage: %prog [-a] [-c] [-r SVN rev] <Source SVN URL> <Target SVN URL>"
    parser = OptionParser(usage)
    parser.add_option("-a", "--keep-author", action="store_true",
                      dest="keep_author", help="Keep revision author info")
    parser.add_option("-c", "--continue-from-break", action="store_true",
                      dest="cont_from_break",
                      help="Continue from previous break")
    parser.add_option("-r", "--svn-rev", type="int", dest="svn_rev",
                      help="SVN revision to checkout from")
    (options, args) = parser.parse_args()
    if len(args) != 2:
        display_error("incorrect number of arguments\n\nTry: svn2svn.py --help",
                      False)

    source_url = args.pop(0).rstrip("/")
    target_url = args.pop(0).rstrip("/")
    if options.keep_author:
        keep_author = True
    else:
        keep_author = False

    # Find the greatest_rev in the source repo
    svn_info = get_svn_info(source_url)
    greatest_rev = svn_info['revision']

    # Working-copy directory used for the replayed (duplicate) checkout of target_url
    dup_wc = "_dup_wc"

    # if old working copy does not exist, disable continue mode
    # TODO: Better continue support. Maybe include source repo's rev # in target commit info?
    if not os.path.exists(dup_wc):
        options.cont_from_break = False

    if not options.cont_from_break:
        # Warn if Target SVN URL existed
        cmd = find_program("svn")
        pipe = Popen([cmd] + ["list"] + [target_url], executable=cmd,
                     stdout=PIPE, stderr=PIPE)
        out, err = pipe.communicate()
        if pipe.returncode == 0:
            print "Target SVN URL: %s already exists!" % target_url
            print "Press 'Enter' to Continue, 'Ctrl + C' to Cancel..."
            print "(Timeout in 5 seconds)"
            rfds, wfds, efds = select.select([sys.stdin], [], [], 5)

        # Get log entry for the SVN revision we will check out
        if options.svn_rev:
            # If specify a rev, get log entry just before or at rev
            svn_start_log = get_last_svn_log_entry(source_url, 1, options.svn_rev)
        else:
            # Otherwise, get log entry of branch creation
            svn_start_log = get_first_svn_log_entry(source_url, 1, greatest_rev)

        # This is the revision we will start from for source_url
        svn_rev = svn_start_log['revision']

        # Check out a working copy of target_url
        dup_wc = os.path.abspath(dup_wc)
        if os.path.exists(dup_wc):
            shutil.rmtree(dup_wc)
        svn_checkout(target_url, dup_wc)
        os.chdir(dup_wc)

        # For the initial commit to the target URL, export all the contents from
        # the source URL at the start-revision.
        paths = run_svn(["list", "-r", str(svn_rev), source_url+"@"+str(svn_rev)])
        paths = paths.strip("\n").split("\n")
        for path in paths:
            if not path:
                continue
            # Directories have a trailing slash in the "svn list" output
            if path[-1] == "/":
                path = path.rstrip('/')
                if not os.path.exists(path):
                    os.makedirs(path)
            run_svn(["export", "--force", "-r", str(svn_rev), source_url+"/"+path+"@"+str(svn_rev), path])
            run_svn(["add", path])
        commit_from_svn_log_entry(svn_start_log, [], keep_author)
    else:
        dup_wc = os.path.abspath(dup_wc)
        os.chdir(dup_wc)

    svn_info = get_svn_info(source_url)
    # Get the base URL for the source repos, e.g. u'svn://svn.example.com/svn/repo'
    source_repos_url = svn_info['repos_url']

    if options.cont_from_break:
        svn_rev = svn_info['revision'] - 1

    # Load SVN log starting from svn_rev + 1
    it_log_entries = iter_svn_log_entries(source_url, svn_rev + 1, greatest_rev)

    try:
        for log_entry in it_log_entries:
            pull_svn_rev(log_entry, source_repos_url, source_url, target_url, keep_author)
    except KeyboardInterrupt:
        print "\nStopped by user."
        run_svn(["revert", "--recursive", "."])
    except:
        print "\nCommand failed with following error:\n"
        traceback.print_exc()
        run_svn(["revert", "--recursive", "."])


if __name__ == "__main__":
    main()