From dd4739d72393440b183bb926aa84aae3ffee285e Mon Sep 17 00:00:00 2001 From: Tony Duckles Date: Mon, 23 Jan 2012 23:31:02 -0600 Subject: [PATCH] * Use ui.status for all status messages (verbose and debug) * Show "Committed revision" after commits to target repo * Fixes to in_svn() * Show "svn status" results before cleanup when we catch an external-command exception --- svn2svn/run/svn2svn.py | 233 ++++++++++++++++++++--------------------- 1 file changed, 115 insertions(+), 118 deletions(-) diff --git a/svn2svn/run/svn2svn.py b/svn2svn/run/svn2svn.py index 9f2979a..a6ed957 100644 --- a/svn2svn/run/svn2svn.py +++ b/svn2svn/run/svn2svn.py @@ -41,32 +41,46 @@ def commit_from_svn_log_entry(entry, files=None, keep_author=False, revprops=[]) # Uncomment this one one if you prefer UTC commit times #svn_date = "%d 0" % timestamp if keep_author: - options = ["ci", "--force-log", "-m", entry['message'] + "\nDate: " + svn_date, "--username", entry['author']] + options = ["commit", "--force-log", "-m", entry['message'] + "\nDate: " + svn_date, "--username", entry['author']] else: - options = ["ci", "--force-log", "-m", entry['message'] + "\nDate: " + svn_date + "\nAuthor: " + entry['author']] + options = ["commit", "--force-log", "-m", entry['message'] + "\nDate: " + svn_date + "\nAuthor: " + entry['author']] if revprops: for r in revprops: options += ["--with-revprop", r['name']+"="+str(r['value'])] if files: options += list(files) - print "(Committing source rev #"+str(entry['revision'])+"...)" - run_svn(options) + output = run_svn(options) + if output: + output_lines = output.strip("\n").split("\n") + rev = "" + for line in output_lines: + if line[0:19] == 'Committed revision ': + rev = line[19:].rstrip('.') + break + if rev: + ui.status("Committed revision %s.", rev) -def in_svn(p, in_repo=False): +def in_svn(p, require_in_repo=False, prefix=""): """ Check if a given file/folder is being tracked by Subversion. Prior to SVN 1.6, we could "cheat" and look for the existence of ".svn" directories. With SVN 1.7 and beyond, WC-NG means only a single top-level ".svn" at the root of the working-copy. Use "svn status" to check the status of the file/folder. """ - entries = svnclient.get_svn_status(p) + entries = svnclient.get_svn_status(p, no_recursive=True) if not entries: - return False - d = entries[0] - # If caller requires this path to be in the SVN repo, prevent returning True for locally-added paths. - if in_repo and (d['status'] == 'added' or d['revision'] is None): return False - return True if (d['type'] == 'normal' or d['status'] == 'added') else False + d = entries[0] + if require_in_repo and (d['status'] == 'added' or d['revision'] is None): + # If caller requires this path to be in the SVN repo, prevent returning True + # for paths that are only locally-added. + ret = False + else: + # Don't consider files tracked as deleted in the WC as under source-control. + # Consider files which are locally added/copied as under source-control. 
+ ret = True if not (d['status'] == 'deleted') and (d['type'] == 'normal' or d['status'] == 'added' or d['copied'] == 'true') else False + ui.status(prefix + ">> in_svn('%s', require_in_repo=%s) --> %s", p, str(require_in_repo), str(ret), level=ui.DEBUG, color='GREEN') + return ret def find_svn_ancestors(svn_repos_url, base_path, source_path, source_rev, prefix = ""): """ @@ -89,8 +103,8 @@ def find_svn_ancestors(svn_repos_url, base_path, source_path, source_rev, prefix (full_path = svn_repos_url+base_path+"/"+path_offset) 'source_rev' is the revision to start walking the history of source_path backwards from. """ - if debug: - print prefix+"\x1b[33m" + ">> find_svn_ancestors: Start: ("+svn_repos_url+") source_path: "+source_path+"@"+str(source_rev)+" base_path: "+base_path + "\x1b[0m" + ui.status(prefix + ">> find_svn_ancestors: Start: (%s) source_path: %s base_path: %s", + svn_repos_url, source_path+"@"+str(source_rev), base_path, level=ui.DEBUG, color='YELLOW') done = False working_path = base_path+"/"+source_path working_rev = source_rev @@ -98,19 +112,16 @@ def find_svn_ancestors(svn_repos_url, base_path, source_path, source_rev, prefix ancestors_temp = [] while not done: # Get the first "svn log" entry for this path (relative to @rev) - if debug: - print prefix+"\x1b[33m" + ">> find_svn_ancestors: " + svn_repos_url + working_path+"@"+str(working_rev) + "\x1b[0m" - log_entry = svnclient.get_first_svn_log_entry(svn_repos_url + working_path+"@"+str(working_rev), 1, str(working_rev), True) + ui.status(prefix + ">> find_svn_ancestors: %s", svn_repos_url + working_path+"@"+str(working_rev), level=ui.DEBUG, color='YELLOW') + log_entry = svnclient.get_first_svn_log_entry(svn_repos_url + working_path+"@"+str(working_rev), 1, working_rev, True) if not log_entry: - if debug: - print prefix+"\x1b[33m" + ">> find_svn_ancestors: Done: no log_entry" + "\x1b[0m" + ui.status(prefix + ">> find_svn_ancestors: Done: no log_entry", level=ui.DEBUG, color='YELLOW') done = True break # If we found a copy-from case which matches our base_path, we're done. # ...but only if we've at least tried to search for the first copy-from path. if first_iter_done and working_path.startswith(base_path): - if debug: - print prefix+"\x1b[33m" + ">> find_svn_ancestors: Done: Found working_path.startswith(base_path) and first_iter_done=True" + "\x1b[0m" + ui.status(prefix + ">> find_svn_ancestors: Done: Found working_path.startswith(base_path) and first_iter_done=True", level=ui.DEBUG, color='YELLOW') done = True break first_iter_done = True @@ -122,8 +133,7 @@ def find_svn_ancestors(svn_repos_url, base_path, source_path, source_rev, prefix changed_paths_temp.append({'path': path, 'data': d}) if not changed_paths_temp: # If no matches, then we've hit the end of the chain and this path has no ancestry back to base_path. - if debug: - print prefix+"\x1b[33m" + ">> find_svn_ancestors: Done: No matching changed_paths" + "\x1b[0m" + ui.status(prefix + ">> find_svn_ancestors: Done: No matching changed_paths", level=ui.DEBUG, color='YELLOW') done = True continue # Reverse-sort any matches, so that we start with the most-granular (deepest in the tree) path. @@ -138,31 +148,29 @@ def find_svn_ancestors(svn_repos_url, base_path, source_path, source_rev, prefix if action not in 'MARD': raise UnsupportedSVNAction("In SVN rev. %d: action '%s' not supported. Please report a bug!" 
% (log_entry['revision'], action)) - if debug: - debug_desc = "> " + action + " " + path - if d['copyfrom_path']: - debug_desc += " (from " + d['copyfrom_path']+"@"+str(d['copyfrom_revision']) + ")" - print prefix+"\x1b[33m" + debug_desc + "\x1b[0m" + ui.status(prefix + "> %s %s%s", action, path, + (" (from %s)" % (d['copyfrom_path']+"@"+str(d['copyfrom_revision']))) if d['copyfrom_path'] else "", + level=ui.DEBUG, color='YELLOW') if action == 'D': # If file/folder was deleted, it has no ancestor ancestors_temp = [] - if debug: - print prefix+"\x1b[33m" + ">> find_svn_ancestors: Done: deleted" + "\x1b[0m" + ui.status(prefix + ">> find_svn_ancestors: Done: deleted", level=ui.DEBUG, color='YELLOW') done = True break if action in 'RA': # If file/folder was added/replaced but not a copy, it has no ancestor if not d['copyfrom_path']: ancestors_temp = [] - if debug: - print prefix+"\x1b[33m" + ">> find_svn_ancestors: Done: "+("Added" if action == "A" else "Replaced")+" with no copyfrom_path" + "\x1b[0m" + ui.status(prefix + ">> find_svn_ancestors: Done: %s with no copyfrom_path", + "Added" if action == "A" else "Replaced", + level=ui.DEBUG, color='YELLOW') done = True break # Else, file/folder was added/replaced and is a copy, so add an entry to our ancestors list # and keep checking for ancestors - if debug: - print prefix+"\x1b[33m" + ">> find_svn_ancestors: Found copy-from ("+action+"): " + \ - path + " --> " + d['copyfrom_path']+"@"+str(d['copyfrom_revision']) + "\x1b[0m" + ui.status(prefix + ">> find_svn_ancestors: Found copy-from (action=%s): %s --> %s", + action, path, d['copyfrom_path']+"@"+str(d['copyfrom_revision']), + level=ui.DEBUG, color='YELLOW') ancestors_temp.append({'path': path, 'revision': log_entry['revision'], 'copyfrom_path': d['copyfrom_path'], 'copyfrom_rev': d['copyfrom_revision']}) working_path = working_path.replace(d['path'], d['copyfrom_path']) @@ -178,32 +186,31 @@ def find_svn_ancestors(svn_repos_url, base_path, source_path, source_rev, prefix working_path = working_path.replace(d['path'], d['copyfrom_path']) working_rev = d['copyfrom_rev'] ancestors.append({'path': working_path, 'revision': working_rev}) - if debug: - max_len = 0 - for idx in range(len(ancestors)): - d = ancestors[idx] - max_len = max(max_len, len(d['path']+"@"+str(d['revision']))) - print prefix+"\x1b[93m" + ">> find_svn_ancestors: Found parent ancestors: " + "\x1b[0m" - for idx in range(len(ancestors)-1): - d = ancestors[idx] - d_next = ancestors[idx+1] - print prefix+"\x1b[33m" + " ["+str(idx)+"] " + str(d['path']+"@"+str(d['revision'])).ljust(max_len) + \ - " <-- " + str(d_next['path']+"@"+str(d_next['revision'])).ljust(max_len) + "\x1b[0m" + max_len = 0 + for idx in range(len(ancestors)): + d = ancestors[idx] + max_len = max(max_len, len(d['path']+"@"+str(d['revision']))) + ui.status(prefix + ">> find_svn_ancestors: Found parent ancestors:", level=ui.DEBUG, color='YELLOW_B') + for idx in range(len(ancestors)-1): + d = ancestors[idx] + d_next = ancestors[idx+1] + ui.status(prefix + " [%s] %s <-- %s", idx, + str(d['path']+"@"+str(d['revision'])).ljust(max_len), + str(d_next['path']+"@"+str(d_next['revision'])).ljust(max_len), + level=ui.DEBUG, color='YELLOW') else: - if debug: - print prefix+"\x1b[33m" + ">> find_svn_ancestors: No ancestor-chain found: " + svn_repos_url+base_path+"/"+source_path+"@"+(str(source_rev)) + "\x1b[0m" + ui.status(prefix + ">> find_svn_ancestors: No ancestor-chain found: %s", + svn_repos_url+base_path+"/"+source_path+"@"+str(source_rev), level=ui.DEBUG, 
color='YELLOW') return ancestors def get_rev_map(rev_map, src_rev, prefix): """ Find the equivalent rev # in the target repo for the given rev # from the source repo. """ - if debug: - print prefix + "\x1b[32m" + ">> get_rev_map("+str(src_rev)+")" + "\x1b[0m" + ui.status(prefix + ">> get_rev_map(%s)", src_rev, level=ui.DEBUG, color='GREEN') # Find the highest entry less-than-or-equal-to src_rev for rev in range(src_rev, 0, -1): - if debug: - print prefix + "\x1b[32m" + ">> get_rev_map: rev="+str(rev)+" in_rev_map="+str(rev in rev_map) + "\x1b[0m" + ui.status(prefix + ">> get_rev_map: rev=%s in_rev_map=%s", rev, str(rev in rev_map), level=ui.DEBUG, color='BLACK_B') if rev in rev_map: return rev_map[rev] # Else, we fell off the bottom of the rev_map. Ruh-roh... @@ -216,10 +223,10 @@ def get_svn_dirlist(svn_path, svn_rev = ""): args = ["list"] path = svn_path if svn_rev: - args += ["-r", str(svn_rev)] + args += ["-r", svn_rev] path += "@"+str(svn_rev) args += [path] - paths = run_svn(args, False, True) + paths = run_svn(args, no_fail=True) paths = paths.strip("\n").split("\n") if len(paths)>1 else [] return paths @@ -266,9 +273,9 @@ def do_svn_add(source_repos_url, source_url, path_offset, target_url, source_rev 'is_dir' is whether path_offset is a directory (rather than a file). """ source_base = source_url[len(source_repos_url):] - if debug: - print prefix + "\x1b[32m" + ">> do_svn_add: " + source_base+"/"+path_offset+"@"+str(source_rev) + \ - (" (parent-copyfrom: "+parent_copyfrom_path+"@"+str(parent_copyfrom_rev)+")" if parent_copyfrom_path else "") + "\x1b[0m" + ui.status(prefix + ">> do_svn_add: %s %s", source_base+"/"+path_offset+"@"+str(source_rev), + " (parent-copyfrom: "+parent_copyfrom_path+"@"+str(parent_copyfrom_rev)+")" if parent_copyfrom_path else "", + level=ui.DEBUG, color='GREEN') # Check if the given path has ancestors which chain back to the current source_base found_ancestor = False ancestors = find_svn_ancestors(source_repos_url, source_base, path_offset, source_rev, prefix+" ") @@ -278,57 +285,52 @@ def do_svn_add(source_repos_url, source_url, path_offset, target_url, source_rev copyfrom_rev = ancestors[len(ancestors)-1]['revision'] if ancestors else "" if ancestors: # The copy-from path has ancestory back to source_url. - if debug: - print prefix + "\x1b[32;1m" + ">> do_svn_add: Check copy-from: Found parent: " + copyfrom_path+"@"+str(copyfrom_rev) + "\x1b[0m" + ui.status(prefix + ">> do_svn_add: Check copy-from: Found parent: %s", copyfrom_path+"@"+str(copyfrom_rev), + level=ui.DEBUG, color='GREEN', bold=True) found_ancestor = True # Map the copyfrom_rev (source repo) to the equivalent target repo rev #. This can # return None in the case where copyfrom_rev is *before* our source_start_rev. tgt_rev = get_rev_map(rev_map, copyfrom_rev, prefix+" ") - if debug: - print prefix + "\x1b[32m" + ">> do_svn_add: get_rev_map: " + str(copyfrom_rev) + " (source) -> " + str(tgt_rev) + " (target)" + "\x1b[0m" + ui.status(prefix + ">> do_svn_add: get_rev_map: %s (source) -> %s (target)", copyfrom_rev, tgt_rev, level=ui.DEBUG, color='GREEN') else: - if debug: - print prefix + "\x1b[32;1m" + ">> do_svn_add: Check copy-from: No ancestor chain found." + "\x1b[0m" + ui.status(prefix + ">> do_svn_add: Check copy-from: No ancestor chain found.", level=ui.DEBUG, color='GREEN') found_ancestor = False if found_ancestor and tgt_rev: # Check if this path_offset in the target WC already has this ancestry, in which # case there's no need to run the "svn copy" (again). 
- path_in_svn = in_svn(path_offset) - log_entry = svnclient.get_last_svn_log_entry(path_offset, 1, 'HEAD', get_changed_paths=False) if in_svn(path_offset, True) else [] + path_in_svn = in_svn(path_offset, prefix=prefix+" ") + log_entry = svnclient.get_last_svn_log_entry(path_offset, 1, 'HEAD', get_changed_paths=False) if in_svn(path_offset, require_in_repo=True, prefix=prefix+" ") else [] if (not log_entry or (log_entry['revision'] != tgt_rev)): copyfrom_offset = copyfrom_path[len(source_base):].strip('/') - if debug: - print prefix + "\x1b[32m" + ">> do_svn_add: svn_copy: Copy-from: " + copyfrom_path+"@"+str(copyfrom_rev) + "\x1b[0m" - print prefix + "in_svn("+path_offset+") = " + str(path_in_svn) - print prefix + "copyfrom_path: "+copyfrom_path+" parent_copyfrom_path: "+parent_copyfrom_path - print prefix + "copyfrom_rev: "+str(copyfrom_rev)+" parent_copyfrom_rev: "+str(parent_copyfrom_rev) + ui.status(prefix + ">> do_svn_add: svn_copy: Copy-from: %s", copyfrom_path+"@"+str(copyfrom_rev), level=ui.DEBUG, color='GREEN') + ui.status(prefix + " copyfrom: %s", copyfrom_path+"@"+str(copyfrom_rev), level=ui.DEBUG, color='GREEN') + ui.status(prefix + " p_copyfrom: %s", parent_copyfrom_path+"@"+str(parent_copyfrom_rev) if parent_copyfrom_path else "", level=ui.DEBUG, color='GREEN') if path_in_svn and \ ((parent_copyfrom_path and copyfrom_path.startswith(parent_copyfrom_path)) and \ (parent_copyfrom_rev and copyfrom_rev == parent_copyfrom_rev)): # When being called recursively, if this child entry has the same ancestor as the # the parent, then no need to try to run another "svn copy". - if debug: - print prefix + "\x1b[32m" + ">> do_svn_add: svn_copy: Same ancestry as parent: " + parent_copyfrom_path+"@"+str(parent_copyfrom_rev) + "\x1b[0m" + ui.status(prefix + ">> do_svn_add: svn_copy: Same ancestry as parent: %s", + parent_copyfrom_path+"@"+str(parent_copyfrom_rev),level=ui.DEBUG, color='GREEN') pass else: # Copy this path from the equivalent path+rev in the target repo, to create the # equivalent history. - if parent_copyfrom_path and svnlog_verbose: + if parent_copyfrom_path: # If we have a parent copy-from path, we mis-match that so display a status # message describing the action we're mimic'ing. If path_in_svn, then this # is logically a "replace" rather than an "add". - print " "+('R' if path_in_svn else 'A')+" "+source_base+"/"+path_offset+" (from "+ancestors[1]['path']+"@"+str(copyfrom_rev)+")" + ui.status(" %s %s (from %s)", ('R' if path_in_svn else 'A'), source_base+"/"+path_offset, ancestors[1]['path']+"@"+str(copyfrom_rev), level=ui.VERBOSE) if path_in_svn: # If local file is already under version-control, then this is a replace. - if debug: - print prefix + "\x1b[32m" + ">> do_svn_add: pre-copy: local path already exists: " + path_offset + "\x1b[0m" + ui.status(prefix + ">> do_svn_add: pre-copy: local path already exists: %s", path_offset, level=ui.DEBUG, color='GREEN') run_svn(["remove", "--force", path_offset]) run_svn(["copy", "-r", tgt_rev, target_url+"/"+copyfrom_offset+"@"+str(tgt_rev), path_offset]) # Export the final version of this file/folder from the source repo, to make # sure we're up-to-date. 
export_paths = _add_export_path(export_paths, path_offset) else: - print prefix + "\x1b[32m" + ">> do_svn_add: Skipped 'svn copy': " + path_offset + "\x1b[0m" + ui.status(prefix + ">> do_svn_add: Skipped 'svn copy': %s", path_offset, level=ui.DEBUG, color='GREEN') else: # Else, either this copy-from path has no ancestry back to source_url OR copyfrom_rev comes # before our initial source_start_rev (i.e. tgt_rev == None), so can't do a "svn copy". @@ -338,14 +340,14 @@ def do_svn_add(source_repos_url, source_url, path_offset, target_url, source_rev p_path = path_offset if is_dir else os.path.dirname(path_offset).strip() or '.' if not os.path.exists(p_path): run_svn(["mkdir", p_path]) - if not in_svn(path_offset): + if not in_svn(path_offset, prefix=prefix+" "): if is_dir: # Export the final verison of all files in this folder. export_paths = _add_export_path(export_paths, path_offset) else: # Export the final verison of this file. We *need* to do this before running # the "svn add", even if we end-up re-exporting this file again via export_paths. - run_svn(["export", "--force", "-r", str(source_rev), + run_svn(["export", "--force", "-r", source_rev, source_repos_url+source_base+"/"+path_offset+"@"+str(source_rev), path_offset]) # If not already under version-control, then "svn add" this file/folder. run_svn(["add", "--parents", path_offset]) @@ -364,9 +366,8 @@ def do_svn_add_dir(source_repos_url, source_url, path_offset, source_rev, target # associated remote repo folder. (Is this a problem?) paths_local = get_svn_dirlist(path_offset) paths_remote = get_svn_dirlist(source_url+"/"+path_offset, source_rev) - if debug: - print prefix + "\x1b[32m" + ">> do_svn_add_dir: paths_local: " + str(paths_local) + "\x1b[0m" - print prefix + "\x1b[32m" + ">> do_svn_add_dir: paths_remote: " + str(paths_remote) + "\x1b[0m" + ui.status(prefix + ">> do_svn_add_dir: paths_local: %s", str(paths_local), level=ui.DEBUG, color='GREEN') + ui.status(prefix + ">> do_svn_add_dir: paths_remote: %s", str(paths_remote), level=ui.DEBUG, color='GREEN') # Update files/folders which exist in remote but not local for path in paths_remote: path_is_dir = True if path[-1] == "/" else False @@ -377,8 +378,7 @@ def do_svn_add_dir(source_repos_url, source_url, path_offset, source_rev, target # Remove files/folders which exist in local but not remote for path in paths_local: if not path in paths_remote: - if svnlog_verbose: - print " D " + source_base+"/"+path_offset+"/"+path + ui.status(" %s %s", 'D', source_base+"/"+path_offset+"/"+path, level=ui.VERBOSE) run_svn(["remove", "--force", path_offset+"/"+path]) # TODO: Does this handle deleted folders too? Wouldn't want to have a case # where we only delete all files from folder but leave orphaned folder around. @@ -405,8 +405,7 @@ def process_svn_log_entry(log_entry, source_repos_url, source_url, target_url, \ # e.g. '/branches/bug123' source_base = source_url[len(source_repos_url):] source_rev = log_entry['revision'] - if debug: - print prefix + "\x1b[32m" + ">> process_svn_log_entry: " + source_url+"@"+str(source_rev) + "\x1b[0m" + ui.status(prefix + ">> process_svn_log_entry: %s", source_url+"@"+str(source_rev), level=ui.DEBUG, color='GREEN') for d in log_entry['changed_paths']: # Get the full path for this changed_path # e.g. 
'/branches/bug123/projectA/file1.txt' @@ -414,8 +413,7 @@ def process_svn_log_entry(log_entry, source_repos_url, source_url, target_url, \ if not path.startswith(source_base + "/"): # Ignore changed files that are not part of this subdir if path != source_base: - if debug: - print prefix + "\x1b[90m" + ">> process_svn_log_entry: Unrelated path: " + path + " (" + source_base + ")" + "\x1b[0m" + ui.status(prefix + ">> process_svn_log_entry: Unrelated path: %s (base: %s)", path, source_base, level=ui.DEBUG, color='GREEN') continue # Calculate the offset (based on source_base) for this changed_path # e.g. 'projectA/file1.txt' @@ -426,13 +424,12 @@ def process_svn_log_entry(log_entry, source_repos_url, source_url, target_url, \ if action not in 'MARD': raise UnsupportedSVNAction("In SVN rev. %d: action '%s' not supported. Please report a bug!" % (source_rev, action)) - if svnlog_verbose and (action not in 'D'): + if action not in 'D': # (Note: Skip displaying action message for 'D' here since we'll display that # message when we process the deferred delete actions at the end.) - msg = " " + action + " " + d['path'] - if d['copyfrom_path']: - msg += " (from " + d['copyfrom_path']+"@"+str(d['copyfrom_revision']) + ")" - print prefix + msg + ui.status(" %s %s%s", action, d['path'], + (" (from %s)" % (d['copyfrom_path']+"@"+str(d['copyfrom_revision']))) if d['copyfrom_path'] else "", + level=ui.VERBOSE) # Try to be efficient and keep track of an explicit list of paths in the # working copy that changed. If we commit from the root of the working copy, @@ -478,11 +475,12 @@ def process_svn_log_entry(log_entry, source_repos_url, source_url, target_url, \ else: # Export the final verison of this file. We *need* to do this before running # the "svn add", even if we end-up re-exporting this file again via export_paths. - run_svn(["export", "--force", "-r", str(source_rev), + run_svn(["export", "--force", "-r", source_rev, source_repos_url+source_base+"/"+path_offset+"@"+str(source_rev), path_offset]) - # TODO: Do we need the in_svn check here? - #if not in_svn(path_offset): - run_svn(["add", "--parents", path_offset]) + if not in_svn(path_offset, prefix=prefix+" "): + # Need to use in_svn here to handle cases where client committed the parent + # folder and each indiv sub-folder. + run_svn(["add", "--parents", path_offset]) # TODO: Need to copy SVN properties from source repos elif action == 'D': @@ -494,7 +492,7 @@ def process_svn_log_entry(log_entry, source_repos_url, source_url, target_url, \ elif action == 'M': # TODO: Is "svn merge -c" correct here? Should this just be an "svn export" plus # proplist updating? 
- out = run_svn(["merge", "-c", str(source_rev), "--non-recursive", + out = run_svn(["merge", "-c", source_rev, "--non-recursive", "--non-interactive", "--accept=theirs-full", source_url+"/"+path_offset+"@"+str(source_rev), path_offset]) @@ -506,24 +504,24 @@ def process_svn_log_entry(log_entry, source_repos_url, source_url, target_url, \ if removed_paths: path_base = source_url[len(source_repos_url):] for path_offset in removed_paths: - if svnlog_verbose: - print " D " + path_base+"/"+path_offset + ui.status(" %s %s", 'D', path_base+"/"+path_offset, level=ui.VERBOSE) run_svn(["remove", "--force", path_offset]) # Export the final version of all add'd paths from source_url if export_paths: for path_offset in export_paths: - run_svn(["export", "--force", "-r", str(source_rev), + run_svn(["export", "--force", "-r", source_rev, source_repos_url+source_base+"/"+path_offset+"@"+str(source_rev), path_offset]) return commit_paths def disp_svn_log_summary(log_entry): - print "\n(Starting source rev #"+str(log_entry['revision'])+":)" - print "r"+str(log_entry['revision']) + " | " + \ - log_entry['author'] + " | " + \ - str(datetime.fromtimestamp(int(log_entry['date'])).isoformat(' ')) - print log_entry['message'] - print "------------------------------------------------------------------------" + ui.status("") + ui.status("r%s | %s | %s", + log_entry['revision'], + log_entry['author'], + str(datetime.fromtimestamp(int(log_entry['date'])).isoformat(' '))) + ui.status(log_entry['message']) + ui.status("------------------------------------------------------------------------") def pull_svn_rev(log_entry, source_repos_url, source_repos_uuid, source_url, target_url, rev_map, keep_author=False): """ @@ -548,7 +546,6 @@ def pull_svn_rev(log_entry, source_repos_url, source_repos_uuid, source_url, tar {'name':'source_url', 'value':source_url}, {'name':'source_rev', 'value':source_rev}] commit_from_svn_log_entry(log_entry, commit_paths, keep_author=keep_author, revprops=revprops) - print "(Finished source rev #"+str(source_rev)+")" def run_parser(parser): """ @@ -559,10 +556,10 @@ def run_parser(parser): parser.remove_option("--help") parser.add_option("-h", "--help", dest="show_help", action="store_true", help="show this help message and exit") - parser.add_option("-v", "--verbose", dest="verbosity", const=20, + parser.add_option("-v", "--verbose", dest="verbosity", const=ui.VERBOSE, default=10, action="store_const", help="enable additional output") - parser.add_option("--debug", dest="verbosity", const=30, + parser.add_option("--debug", dest="verbosity", const=ui.DEBUG, action="store_const", help="enable debugging output") options, args = parser.parse_args() @@ -633,10 +630,10 @@ def real_main(options, args): # For the initial commit to the target URL, export all the contents from # the source URL at the start-revision. - paths = run_svn(["list", "-r", str(source_start_rev), source_url+"@"+str(source_start_rev)]) + paths = run_svn(["list", "-r", source_start_rev, source_url+"@"+str(source_start_rev)]) if len(paths)>1: disp_svn_log_summary(svnclient.get_one_svn_log_entry(source_url, source_start_rev, source_start_rev)) - print "(Initial import)" + ui.status("(Initial import)", level=ui.VERBOSE) paths = paths.strip("\n").split("\n") for path in paths: # For each top-level file/folder... 
@@ -649,13 +646,12 @@ def real_main(options, args): path=path.rstrip('/') if not os.path.exists(path): os.makedirs(path) - run_svn(["export", "--force", "-r" , str(source_start_rev), source_url+"/"+path+"@"+str(source_start_rev), path]) + run_svn(["export", "--force", "-r" , source_start_rev, source_url+"/"+path+"@"+str(source_start_rev), path]) run_svn(["add", path]) revprops = [{'name':'source_uuid', 'value':source_repos_uuid}, {'name':'source_url', 'value':source_url}, {'name':'source_rev', 'value':source_start_rev}] commit_from_svn_log_entry(svn_start_log, [], keep_author=keep_author, revprops=revprops) - print "(Finished source rev #"+str(source_start_rev)+")" else: wc_target = os.path.abspath(wc_target) os.chdir(wc_target) @@ -673,13 +669,13 @@ def real_main(options, args): pull_svn_rev(log_entry, source_repos_url, source_repos_uuid, source_url, target_url, rev_map, keep_author) # Update our target working-copy, to ensure everything says it's at the new HEAD revision - run_svn(["up"]) + run_svn(["update"]) + # TODO: Run "svn cleanup" every 50 commits if SVN 1.7+, to clean-up orphaned ".svn/pristines/*" # Update rev_map, mapping table of source-repo rev # -> target-repo rev # - dup_info = get_svn_info(target_url) + dup_info = svnclient.get_svn_info(target_url) dup_rev = dup_info['revision'] source_rev = log_entry['revision'] - if debug: - print "\x1b[32m" + ">> main: rev_map.add: source_rev=%s target_rev=%s" % (source_rev, dup_rev) + "\x1b[0m" + ui.status(">> main: rev_map.add: source_rev=%s target_rev=%s", source_rev, dup_rev, level=ui.DEBUG, color='GREEN') rev_map[source_rev] = dup_rev except KeyboardInterrupt: @@ -691,10 +687,11 @@ def real_main(options, args): print "\nCommand failed with following error:\n" traceback.print_exc() run_svn(["cleanup"]) + print run_svn(["status"]) run_svn(["revert", "--recursive", "."]) # TODO: Run "svn status" and pro-actively delete any "?" orphaned entries, to clean-up the WC? finally: - run_svn(["up"]) + run_svn(["update"]) print "\nFinished!" def main(): -- 2.43.0
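
As a side note for readers of this patch: the sketch below illustrates the level-based logging idea behind the ui.status() calls that replace the old "if debug: print ..." blocks. The real svn2svn "ui" module is not shown in this diff, so the constants and the status() helper here are assumptions about the idea, not its actual implementation; color/bold handling is omitted.

    import sys

    VERBOSE = 20
    DEBUG   = 30
    _verbosity = VERBOSE            # e.g. raised to DEBUG by a --debug option

    def status(msg, *args, **kwargs):
        # Hypothetical stand-in for ui.status(); 'color'/'bold' are ignored here.
        level = kwargs.get('level', VERBOSE)
        if level <= _verbosity:
            sys.stdout.write((msg % args if args else msg) + "\n")

    # status(">> get_rev_map(%s)", 42, level=DEBUG) prints only in debug mode.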
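
The "Committed revision" message that the reworked commit_from_svn_log_entry() now reports is recovered by scanning the output of "svn commit". A minimal free-standing version of that parse follows; parse_committed_rev() is a made-up name for illustration, not a function in svn2svn.

    def parse_committed_rev(commit_output):
        """Return the new revision number from 'svn commit' output, or None."""
        for line in commit_output.strip("\n").split("\n"):
            if line.startswith("Committed revision "):
                return line[len("Committed revision "):].rstrip(".")
        return None

    # parse_committed_rev("Sending foo.txt\n"
    #                     "Transmitting file data .\n"
    #                     "Committed revision 42.\n")  -->  "42"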
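
The in_svn() fix amounts to a small decision over a single "svn status" entry. Assuming an entry dict carrying the fields the patch reads ('type', 'status', 'revision', 'copied'), the rule is roughly the following; entry_is_tracked() is a hypothetical restatement, not the function as it appears in svn2svn.

    def entry_is_tracked(d, require_in_repo=False):
        if require_in_repo and (d['status'] == 'added' or d['revision'] is None):
            # Caller wants the path to already exist in the repository, so a
            # locally-added (not-yet-committed) path does not count.
            return False
        # Paths scheduled for deletion are not tracked; normal, locally-added,
        # and copied paths are.
        return d['status'] != 'deleted' and \
               (d['type'] == 'normal' or d['status'] == 'added' or d['copied'] == 'true')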
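
The rev_map consulted by get_rev_map() maps source-repo revision numbers to their target-repo equivalents; the lookup walks downward from the requested revision and returns the first (i.e. highest) mapped entry. A compact sketch of that behaviour, written here as a free-standing function rather than the form it takes in the patch:

    def lookup_target_rev(rev_map, src_rev):
        # rev_map: {source_rev: target_rev}
        for rev in range(src_rev, 0, -1):
            if rev in rev_map:
                return rev_map[rev]
        return None    # src_rev predates the start of the conversion

    # lookup_target_rev({10: 3, 15: 4}, 14)  -->  3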