import md5
def md5_to_hex(md5sum):
    """Return the lowercase hexadecimal form of a binary MD5 digest.

    Accepts the digest either as bytes (Python 3) or as a raw byte
    string (legacy Python 2 callers); both yield the same hex string.

    Note: the per-character ``ord()`` loop this replaces raised
    TypeError on Python 3, where indexing bytes already yields int.
    """
    if isinstance(md5sum, str):
        # Legacy byte-string input: each character's code point is one byte.
        md5sum = bytes(ord(ch) for ch in md5sum)
    # bytes.hex() is already lowercase, matching the original .lower() result.
    return md5sum.hex()
args = sys.argv[2:]
if args and not isinstance(args[0], unicode):
- for i in xrange(len(args)):
+ for i in range(len(args)):
args[i] = portage._unicode_decode(args[i])
try:
# Sort ebuilds in ascending order for the KEYWORDS.dropped check.
pkgsplits = {}
- for i in xrange(len(ebuildlist)):
+ for i in range(len(ebuildlist)):
ebuild_split = portage.pkgsplit(ebuildlist[i])
pkgsplits[ebuild_split] = ebuildlist[i]
ebuildlist[i] = ebuild_split
ebuildlist.sort(key=cmp_sort_key(portage.pkgcmp))
- for i in xrange(len(ebuildlist)):
+ for i in range(len(ebuildlist)):
ebuildlist[i] = pkgsplits[ebuildlist[i]]
del pkgsplits
ignore_priority_range = [None]
ignore_priority_range.extend(
- xrange(UnmergeDepPriority.MIN, UnmergeDepPriority.MAX + 1))
+ range(UnmergeDepPriority.MIN, UnmergeDepPriority.MAX + 1))
while not graph.empty():
for ignore_priority in ignore_priority_range:
nodes = graph.root_nodes(ignore_priority=ignore_priority)
if secs:
print(">>> Waiting",secs,"seconds before starting...")
print(">>> (Control-C to abort)...\n"+doing+" in: ", end=' ')
- ticks=range(secs)
+ ticks=list(range(secs))
ticks.reverse()
for sec in ticks:
sys.stdout.write(colorize("UNMERGE_WARN", str(sec+1)+" "))
# If two packages conflict, discard the lower version.
discard_pkgs = set()
greedy_pkgs.sort(reverse=True)
- for i in xrange(len(greedy_pkgs) - 1):
+ for i in range(len(greedy_pkgs) - 1):
pkg1 = greedy_pkgs[i]
if pkg1 in discard_pkgs:
continue
- for j in xrange(i + 1, len(greedy_pkgs)):
+ for j in range(i + 1, len(greedy_pkgs)):
pkg2 = greedy_pkgs[j]
if pkg2 in discard_pkgs:
continue
break
if not selected_nodes and \
not (prefer_asap and asap_nodes):
- for i in xrange(priority_range.NONE,
+ for i in range(priority_range.NONE,
priority_range.MEDIUM_SOFT + 1):
ignore_priority = priority_range.ignore_priority[i]
nodes = get_nodes(ignore_priority=ignore_priority)
mergeable_nodes = set(nodes)
if prefer_asap and asap_nodes:
nodes = asap_nodes
- for i in xrange(priority_range.SOFT,
+ for i in range(priority_range.SOFT,
priority_range.MEDIUM_SOFT + 1):
ignore_priority = priority_range.ignore_priority[i]
for node in nodes:
mylist.append((x, 0, True))
last_merge_depth = 0
- for i in xrange(len(mylist)-1,-1,-1):
+ for i in range(len(mylist)-1,-1,-1):
graph_key, depth, ordered = mylist[i]
if not ordered and depth == 0 and i > 0 \
and graph_key == mylist[i-1][0] and \
# and disable the entire repo display in this case.
repoadd_set = set()
- for mylist_index in xrange(len(mylist)):
+ for mylist_index in range(len(mylist)):
x, depth, ordered = mylist[mylist_index]
pkg_type = x[0]
myroot = x[1]
if len(loadavg_split) < 3:
raise OSError('unknown')
loadavg_floats = []
- for i in xrange(3):
+ for i in range(3):
try:
loadavg_floats.append(float(loadavg_split[i]))
except ValueError:
myaction = 'deselect'
if myargs and not isinstance(myargs[0], unicode):
- for i in xrange(len(myargs)):
+ for i in range(len(myargs)):
myargs[i] = portage._unicode_decode(myargs[i])
myfiles += myargs
# listed in "world" as they would be remerged on the next update of "world" or the
# relevant package sets.
unknown_sets = set()
- for cp in xrange(len(pkgmap)):
+ for cp in range(len(pkgmap)):
for cpv in pkgmap[cp]["selected"].copy():
try:
pkg = _pkg(cpv)
cp_dict[k].update(v)
pkgmap = [unordered[cp] for cp in sorted(unordered)]
- for x in xrange(len(pkgmap)):
+ for x in range(len(pkgmap)):
selected = pkgmap[x]["selected"]
if not selected:
continue
if clean_delay and not autoclean:
countdown(int(settings["CLEAN_DELAY"]), ">>> Unmerging")
- for x in xrange(len(pkgmap)):
+ for x in range(len(pkgmap)):
for y in pkgmap[x]["selected"]:
writemsg_level(">>> Unmerging "+y+"...\n", noiselevel=-1)
emergelog(xterm_titles, "=== Unmerging... ("+y+")")
x_split = x.split('_')
if len(x_split) == 1:
continue
- for i in xrange(len(x_split) - 1):
+ for i in range(len(x_split) - 1):
k = '_'.join(x_split[:i+1])
if k in use_expand_split:
v = use_expand_iuses.get(k)
comment_valid = -1
for pmask in pmasklists:
pmask_filename = os.path.join(pmask[0], "package.mask")
- for i in xrange(len(pmask[1])):
+ for i in range(len(pmask[1])):
l = pmask[1][i].strip()
if l == "":
comment = ""
for k in self.auxdbkey_order:
new_content.append(values.get(k, ''))
new_content.append('\n')
- for i in xrange(magic_line_count - len(self.auxdbkey_order)):
+ for i in range(magic_line_count - len(self.auxdbkey_order)):
new_content.append('\n')
new_content = ''.join(new_content)
new_content = _unicode_encode(new_content,
d={}
try:
if eclasses[1].isdigit():
- for x in xrange(0, len(eclasses), 2):
+ for x in range(0, len(eclasses), 2):
d[eclasses[x]] = ("", long(eclasses[x + 1]))
else:
# The old format contains paths that will be discarded.
- for x in xrange(0, len(eclasses), 3):
+ for x in range(0, len(eclasses), 3):
d[eclasses[x]] = (eclasses[x + 1], long(eclasses[x + 2]))
except IndexError:
raise cache_errors.CacheCorruption(cpv,
# If found, remove package(s) with duplicate path.
path = d.get("PATH", "")
- for i in xrange(len(pkgindex.packages) - 1, -1, -1):
+ for i in range(len(pkgindex.packages) - 1, -1, -1):
d2 = pkgindex.packages[i]
if path and path == d2.get("PATH"):
# Handle path collisions in $PKGDIR/All
cache_stats.update()
metadata[baseurl]["modified"] = 1
myid = None
- for retry in xrange(3):
+ for retry in range(3):
try:
myid = file_get_metadata(
"/".join((baseurl.rstrip("/"), x.lstrip("/"))),
f.close()
if len(oldentries) == len(myentries):
update_manifest = False
- for i in xrange(len(oldentries)):
+ for i in range(len(oldentries)):
if oldentries[i] != myentries[i]:
update_manifest = True
break
# Build the ANSI SGR attribute list: for each foreground color code in
# 30-37, emit the normal ("<n>m") variant followed by the bold
# ("<n>;01m") variant, in that order.
ansi_codes = [template % color
	for color in range(30, 38)
	for template in ("%im", "%i;01m")]
'0x55FF55', '0xAA5500', '0xFFFF55', '0x0000AA', '0x5555FF', '0xAA00AA',
'0xFF55FF', '0x00AAAA', '0x55FFFF', '0xAAAAAA', '0xFFFFFF']
# Map each RGB hex color constant to its full terminal escape sequence
# (escape prefix + the positionally matching ANSI attribute code).
for x, _rgb_name in enumerate(rgb_ansi_colors):
	codes[_rgb_name] = esc_seq + ansi_codes[x]
del x, _rgb_name
if fd.isdigit())
else:
def get_open_fds():
- return xrange(max_fd_limit)
+ return range(max_fd_limit)
sandbox_capable = (os.path.isfile(SANDBOX_BINARY) and
os.access(SANDBOX_BINARY, os.X_OK))
components = name.split('.')
parent_scope = scope
- for i in xrange(len(components)):
+ for i in range(len(components)):
alias = components[i]
if i < len(components) - 1:
parent_name = ".".join(components[:i+1])
def testDecodeInt(self):
- for n in xrange(1000):
+ for n in range(1000):
self.assertEqual(decodeint(encodeint(n)), n)
for n in (2 ** 32 - 1,):