failed_pkg = self._failed_pkgs_all[-1]
build_dir = failed_pkg.build_dir
log_file = None
+ log_file_real = None
log_paths = [failed_pkg.build_log]
pass
else:
if log_path.endswith('.gz'):
+ log_file_real = log_file
log_file = gzip.GzipFile(filename='',
mode='rb', fileobj=log_file)
noiselevel=-1)
finally:
log_file.close()
+ if log_file_real is not None:
+ log_file_real.close()
failure_log_shown = True
# Dump mod_echo output now since it tends to flood the terminal.
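A note on the pattern above: gzip.GzipFile does not close a file object passed in through fileobj, so the Scheduler keeps the raw handle in log_file_real and closes both in the finally block. A minimal sketch of the same idea, with illustrative names:

    import gzip

    def open_log(log_path):
        # Open a build log, transparently unwrapping .gz files.  The raw
        # handle is returned separately because GzipFile.close() leaves
        # the fileobj it was given open.
        log_file = open(log_path, mode='rb')
        log_file_real = None
        if log_path.endswith('.gz'):
            log_file_real = log_file
            log_file = gzip.GzipFile(filename='', mode='rb',
                fileobj=log_file)
        return log_file, log_file_real

Callers close log_file first and then log_file_real, mirroring the finally block above.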
except EnvironmentError:
return
+ f_real = None
if logfile.endswith('.gz'):
+ f_real = f
f = gzip.GzipFile(filename='', mode='rb', fileobj=f)
am_maintainer_mode = []
msg.extend("\t" + line for line in make_jobserver)
_eqawarn(msg)
+ f.close()
+ if f_real is not None:
+ f_real.close()
+
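_check_build_log gets the same treatment: f_real keeps the raw handle when the log is gzipped, and both are closed once the QA scan is done. Here the close calls are appended after the scan rather than placed in a finally block; an equivalent try/finally arrangement would look roughly like this, with the line scan as a stand-in:

    import gzip

    def scan_build_log(logfile):
        # Close both handles even if the QA scan raises.
        f = open(logfile, mode='rb')
        f_real = None
        try:
            if logfile.endswith('.gz'):
                f_real = f
                f = gzip.GzipFile(filename='', mode='rb', fileobj=f)
            return sum(1 for _line in f)  # stand-in for the QA checks
        finally:
            f.close()
            if f_real is not None:
                f_real.close()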
def _post_src_install_chost_fix(settings):
"""
It's possible that the ebuild has changed the
CHOST variable, so revert it to the initial
setting.
"""
build_info_dir = os.path.join(mysettings['PORTAGE_BUILDDIR'],
'build-info')
- io.open(_unicode_encode(os.path.join(build_info_dir,
+ f = io.open(_unicode_encode(os.path.join(build_info_dir,
'SIZE'), encoding=_encodings['fs'], errors='strict'),
mode='w', encoding=_encodings['repo.content'],
- errors='strict').write(_unicode_decode(str(size) + '\n'))
+ errors='strict')
+ f.write(_unicode_decode(str(size) + '\n'))
+ f.close()
- io.open(_unicode_encode(os.path.join(build_info_dir,
+ f = io.open(_unicode_encode(os.path.join(build_info_dir,
'BUILD_TIME'), encoding=_encodings['fs'], errors='strict'),
mode='w', encoding=_encodings['repo.content'],
- errors='strict').write(_unicode_decode("%.0f\n" % (time.time(),)))
+ errors='strict')
+ f.write(_unicode_decode("%.0f\n" % (time.time(),)))
+ f.close()
use = frozenset(mysettings['PORTAGE_USE'].split())
for k in _vdb_use_conditional_keys:
except OSError:
pass
continue
- io.open(_unicode_encode(os.path.join(build_info_dir,
+ f = io.open(_unicode_encode(os.path.join(build_info_dir,
k), encoding=_encodings['fs'], errors='strict'),
mode='w', encoding=_encodings['repo.content'],
- errors='strict').write(_unicode_decode(v + '\n'))
+ errors='strict')
+ f.write(_unicode_decode(v + '\n'))
+ f.close()
_reapply_bsdflags_to_image(mysettings)
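Each of the SIZE, BUILD_TIME, and USE-conditional writes previously chained io.open(...).write(...), leaving the handle to be closed whenever the garbage collector got around to it. Splitting the call into open, write, close makes the release deterministic; a with-block is an equivalent alternative. A sketch with a generic encoding standing in for portage's _encodings table:

    import io
    import os

    def write_build_info(build_info_dir, name, value):
        # Hypothetical helper: the with-block closes the file even if
        # write() raises, matching the explicit f.close() in the patch.
        with io.open(os.path.join(build_info_dir, name), mode='w',
                encoding='utf_8', errors='strict') as f:
            f.write(value + '\n')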
logdir = normalize_path(mysettings["PORT_LOGDIR"])
logid_path = os.path.join(mysettings["PORTAGE_BUILDDIR"], ".logid")
if not os.path.exists(logid_path):
- open(_unicode_encode(logid_path), 'w')
+ open(_unicode_encode(logid_path), 'w').close()
logid_time = _unicode_decode(time.strftime("%Y%m%d-%H%M%S",
time.gmtime(os.stat(logid_path).st_mtime)),
encoding=_encodings['content'], errors='replace')
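The .logid marker is only touched, so the file object returned by open() used to be discarded without being closed; the added .close() releases the descriptor immediately. The same "touch" can be written as:

    import os

    def touch_logid(logid_path):
        # Create an empty marker file whose mtime is read later; the
        # with-block closes the descriptor as soon as the file exists.
        if not os.path.exists(logid_path):
            with open(logid_path, 'w'):
                pass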
if update_data or \
file_path not in prev_mtimes or \
long(prev_mtimes[file_path]) != mystat[stat.ST_MTIME]:
- content = io.open(_unicode_encode(file_path,
+ f = io.open(_unicode_encode(file_path,
encoding=_encodings['fs'], errors='strict'),
- mode='r', encoding=_encodings['repo.content'], errors='replace'
- ).read()
+ mode='r', encoding=_encodings['repo.content'], errors='replace')
+ content = f.read()
+ f.close()
update_data.append((file_path, mystat, content))
return update_data
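Here the file is re-read only when it has no recorded mtime or the recorded mtime no longer matches, and the chained io.open(...).read() is split so the handle is closed as soon as the content is in memory. A self-contained sketch of that check, with illustrative names:

    import io
    import os
    import stat

    def read_if_changed(file_path, prev_mtimes):
        # Return the file's content when its mtime differs from the
        # recorded value, closing the handle right after the read.
        mystat = os.stat(file_path)
        if prev_mtimes.get(file_path) != mystat[stat.ST_MTIME]:
            with io.open(file_path, mode='r', encoding='utf_8',
                    errors='replace') as f:
                return f.read()
        return None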
def load(self):
""" Reload the registry data from file """
self._data = None
+ f = None
try:
- self._data = pickle.load(
- open(_unicode_encode(self._filename,
- encoding=_encodings['fs'], errors='strict'), 'rb'))
+ f = open(_unicode_encode(self._filename,
+ encoding=_encodings['fs'], errors='strict'), 'rb')
+ self._data = pickle.load(f)
except (ValueError, pickle.UnpicklingError) as e:
writemsg_level(_("!!! Error loading '%s': %s\n") % \
(self._filename, e), level=logging.ERROR, noiselevel=-1)
raise PermissionDenied(self._filename)
else:
raise
+ finally:
+ if f is not None:
+ f.close()
if self._data is None:
self._data = {}
self._data_orig = self._data.copy()
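load() now binds the registry file to a name so the finally block can close it even when pickle.load() raises partway through. Under the same assumption, a with-block version looks roughly like this; the patch's error handling additionally distinguishes permission errors and logs unpickling failures:

    import errno
    import pickle

    def load_registry(filename):
        # Illustrative: the with-block closes the file whether or not
        # unpickling succeeds; a missing registry yields an empty dict.
        try:
            with open(filename, 'rb') as f:
                return pickle.load(f)
        except EnvironmentError as e:
            if e.errno != errno.ENOENT:
                raise
            return {}
        except (ValueError, pickle.UnpicklingError, EOFError):
            return {}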