# -*- coding: utf-8 -*-
# Copyright 2015-2016 Mir Calculate. http://www.calculate-linux.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import OrderedDict
from itertools import chain
import re
import sys
import time
import stat
import json
from calculate.core.server.gen_pid import search_worked_process2
from calculate.core.setup_package import ChainProgressTemplate
from calculate.lib.cl_template import templateFunction, SystemIni
from calculate.lib.datavars import DataVars, Variable, DataVarsError
from calculate.lib.utils.colortext import (TextState, get_color_print)
from calculate.lib.utils.common import CmdlineParams, getTupleVersion
from calculate.lib.utils.portage import (BinaryPackage, get_binary_file,
EixVersionParser,
clear_binhost_garbage, WorldFile)
from calculate.lib.utils.text import _u
from calculate.lib.utils.binhosts import Binhosts
from calculate.lib.utils.files import (
pathJoin, PercentProgress, getProgPath, process, STDOUT, removeDir,
makeDirectory, writeFile, readLinesFile, chmod, chown, FilePermission,
find, FindFileType, removeFileWithEmptyDirectory, check_rw,
tar_xz_directory, sha256sum, quite_unlink,
copyWithPath, countFiles, listDirectory, getRunCommands, readFile,
readFileEx, DirectoryRCS, RCSError, readLinesFile)
from calculate.lib.utils.mount import isMount
from calculate.lib.utils.git import Git
from .variables.action import Actions
from calculate.lib.utils.portage import (ReposConf, EmergeLog,
EmergeLogFiltered,
EmergeLogNamedTask,
InstalledPackageInfo, EbuildInfoError,
EbuildInfo, ChrootEix, getRequires,
get_packages_files_directory,
get_manifest_files_directory,
PackageList, VDB_PATH,
hide_packages, unhide_packages,
LibraryProviders,
PackageError, getInstalledAtom)
from calculate.lib.utils.tools import ReverseKey
from itertools import groupby
from calculate.update.emerge_parser import (EmergeParser,
EmergeError, EmergeCommand, Chroot,
Linux32, CommandExecutor,
RevdepPercentBlock, EmergeCache)
from calculate.lib.cl_log import log
from calculate.update.update_tasks import EmergeMark
from .build_storage import Build
from calculate.update.update import Update, variable_module
from calculate.install.distr import (Distributive, IsoDistributive,
DistributiveError, ContainerDistributive)
import shutil
from functools import partial
import os
from os import path
from .datavars import BuilderError
from .emerge_fetch import EmergeFetcher, EmergeFetcherError
from calculate.lib.utils.grub import GrubCommand
from functools import reduce
_ = lambda x: x
from calculate.lib.cl_lang import (setLocalTranslate, getLazyLocalTranslate)
setLocalTranslate('cl_builder3', sys.modules[__name__])
__ = getLazyLocalTranslate(_)
class Builder(Update):
"""Основной объект для выполнения действий связанных со сборкой системы
"""
class Method():
    """String identifiers of the builder actions exposed to the core."""
    Prepare = "builder_prepare"      # prepare a new build
    Break = "builder_break"          # abort the current build
    Update = "builder_update"        # update the build
    Restore = "builder_restore"      # restore an interrupted build
    Image = "builder_image"          # create an image from the build
    Profile = "builder_profile"      # change the build profile
    UpdateMenu = "update_livemenu"   # refresh the live menu
    Container = "builder_container"  # create a container image
    # the main builder actions (livemenu/container helpers excluded)
    All = (Prepare, Break, Update, Restore, Image, Profile)
def init(self):
    """Initialize per-run builder state (invoked by the core framework)."""
    # emerge bookkeeping
    self.pretend_package_list = {}
    self.update_map = {}
    self.pkgnum = None
    self.pkgnummax = None
    self.color_print = get_color_print()
    self.emerge_cache = EmergeCache()
    # world/binhost related state
    self.world_data = ""
    self.binhosts_data = None
    self.refresh_binhost = False
    # gpg key tracking
    self.gpgdata_md5 = []
    self.gpg_changed = False
    self.base = False
def mount_target(self, target):
    """Mount all standard system directories inside *target*.

    :param target: Distributive to mount into
    :return: True
    """
    target.convertToDirectory().mountSystemDirectories(skip=())
    return True
def umount_system(self, target):
    """Unmount the system directories mounted inside *target*.

    :param target: Distributive to unmount
    :return: True
    """
    target.convertToDirectory().umountSystemDirectories()
    return True
def detach_target(self, target):
    """Reserve the target distributive so it survives cleanup.

    @type target: Distributive
    """
    if not target:
        return True
    target.reserve()
    return True
def close_build(self, build, clear=False, clear_pkg=False):
    """
    Close the build: unmount its distributive, optionally clear its
    payload and the remote binary packages directory.

    :param build: the build to close (may be None)
    :param clear: also clear the distributive data
    :param clear_pkg: also drop the packages directory
    @type build: Build
    """
    if build:
        builder_path = self.clVars.Get('cl_builder_path')
        if builder_path:
            # unmount only when the build directory is still mounted
            if isMount(builder_path):
                build.close_distributive()
            build.remove()
        if builder_path:
            if clear:
                build.distributive.post_clear()
        pkgdir = self.clVars.Get('cl_builder_pkgdir')
        if clear_pkg:
            # only package trees kept under a /remote/ mount are deleted
            if '/remote/' in pkgdir:
                if path.exists(pkgdir):
                    removeDir(pkgdir)
                Distributive.clear_empty_directories(pkgdir)
    return True
def restore_build(self, build):
    """Restore a previously interrupted build from its storage."""
    if not build:
        return True
    build.restore()
    return True
def save_build(self, build, dv):
    """Persist build metadata: package directory and overlay list.

    :param build: build storage object
    :param dv: update datavars (overlay source); may be falsy
    :return: True
    """
    build.pkgdir = self.clVars.Get('cl_builder_pkgdir')
    if dv:
        build.set_overlays(dv.Get('cl_update_rep_name'))
    build.save()
    return True
def set_builder_action(self, action_name):
    """Force-set the cl_builder_action variable to *action_name*."""
    self.clVars.Set('cl_builder_action', action_name, force=True)
    return True
def prepare_iso(self, dn):
    """Apply the ISO templates to *dn* and start the packing task.

    :param dn: directory with the ISO payload
    """
    self.endTask()
    self.startTask(_("Prepare ISO data"))
    iso_root = path.relpath(dn, self.clVars.Get('cl_builder_path'))
    self.applyTemplates(self.clVars.Get('cl_builder_target'),
                        False, False, iso_root)
    self.endTask()
    self.startTask(_("Pack ISO image"))
    self.addProgress()
def prepare_container_data(self, dn):
    """Prepare the rootfs.tar.xz metadata for a container image.

    :param dn: directory with the container data
    :return: True
    """
    self.set_builder_action(Actions.ImageContainer)
    rel_root = path.relpath(dn, self.clVars.Get('cl_builder_path'))
    self.applyTemplates(self.clVars.Get('cl_builder_target'),
                        False, False, rel_root)
    self.endTask()
    return True
def pack_container_data(self, dn, image):
    """Pack the contents of *dn* next to the image root.tar.xz.

    Every directory is compressed to <name>.tar.xz in the image base
    directory; plain files are copied as-is.

    :param dn: directory with prepared container data
    :param image: image distributive (provides .basedirectory)
    :return: True
    """
    for fn in listDirectory(dn, fullPath=True):
        fn_basename = path.basename(fn)
        if path.isdir(fn):
            # format the archive name before joining; the old code
            # applied % to the result of path.join, which worked only
            # because join preserved the "%s" in the last component
            tar_xz_directory(fn, path.join(image.basedirectory,
                                           "%s.tar.xz" % fn_basename))
        else:
            # NOTE(review): plain files are joined against *image*
            # itself while archives use image.basedirectory — confirm
            # this asymmetry is intended (image must be os.PathLike
            # for this join to work).
            shutil.copy(fn, path.join(image, fn_basename))
    return True
def create_digest_container(self, image):
    """
    Create a SHA256SUMS file covering every file of the container image.

    :param image: image distributive (provides .basedirectory)
    :return: True
    """
    sumfn = "SHA256SUMS"
    with writeFile(path.join(image.basedirectory, sumfn)) as f_sum:
        for fn in listDirectory(image.basedirectory, fullPath=True):
            # never checksum the checksum file itself
            if fn.endswith(sumfn):
                continue
            digest = sha256sum(fn)
            # NOTE(review): coreutils sha256sum output separates digest
            # and filename with TWO spaces — confirm the single space
            # here is intended by the consumers of this file.
            f_sum.write("%s %s\n" % (digest, path.basename(fn)))
    return True
def update_http_meta(self, dn):
    """
    Update the lxc-download index files (meta/1.0/index-user,
    index-system) and the simplestreams data (streams/v1/index.json,
    images.json) for every container image found under *dn*.

    :param dn: directory containing the container images
    :return: True
    """
    data = []
    # raw string: the previous literal relied on the invalid escape
    # sequences "\/" and "\d" inside a plain string
    re_subbuild = re.compile(r"/[^-]+-[^-]+-(\d+)-[^-]+$", re.I)
    for container_dn in listDirectory(dn, fullPath=True):
        with ContainerDistributive(container_dn) as distro:
            info = distro.get_information()
            if not info['cl_profile_name']:
                continue
            match_subbuild = re_subbuild.search(container_dn)
            if match_subbuild:
                subbuild = int(match_subbuild.group(1))
            else:
                subbuild = 0
            data.append({
                'dist': info['os_linux_shortname'],
                'release': "live",
                'arch': info['os_arch_machine'],
                'variant': "default",
                'name': info['os_linux_name'],
                'build': info['os_linux_build'],
                'fn': path.join("container", path.basename(container_dn)),
                'subbuild': subbuild
            })

    def sort_key(info):
        # newest build/subbuild first within dist/release/arch
        return (ReverseKey(getTupleVersion(info['release'])),
                info['arch'],
                info['dist'], ReverseKey(info['build']),
                ReverseKey(info['subbuild']),
                ReverseKey(info['fn']))
    data.sort(key=sort_key)
    products_data = {}
    filesize = lambda x: os.stat(x).st_size
    with writeFile(path.join(dn, 'meta/1.0/index-system')) as indexsystem:
        with writeFile(path.join(dn, 'meta/1.0/index-user')) as indexuser:
            # only the first (newest, thanks to the sort above) entry
            # of every dist/release/arch/variant group is indexed
            for k, grps in groupby(data, lambda x: (x['dist'],
                                                    x['release'],
                                                    x['arch'],
                                                    x['variant'])):
                info = next(grps)
                dist_key = "{dist}:{release}:{arch}:{variant}".format(**info)
                try:
                    indexsystem.write("{dist};{release};{arch};"
                                      "{variant};{build};{fn}\n".format(**info))
                    indexuser.write("{dist};{release};{arch};"
                                    "{variant};{build};{fn}\n".format(**info))
                    aliases = ("{dist}/{release}/{variant},"
                               "{dist}/{release}".format(**info))
                    root_tar_xz_rel = path.join(info["fn"], "rootfs.tar.xz")
                    root_tar_xz = path.join(path.dirname(dn), root_tar_xz_rel)
                    lxd_tar_xz_rel = path.join(info["fn"], "lxd.tar.xz")
                    lxd_tar_xz = path.join(path.dirname(dn), lxd_tar_xz_rel)
                    combined_sha256 = sha256sum(root_tar_xz, lxd_tar_xz)
                    product = {
                        "aliases": aliases,
                        "versions": {
                            info["build"]: {
                                "items": {
                                    "root.tar.xz": {
                                        "ftype": "root.tar.xz",
                                        "sha256": sha256sum(root_tar_xz),
                                        "size": filesize(root_tar_xz),
                                        "path": root_tar_xz_rel,
                                    },
                                    "lxd.tar.xz": {
                                        "ftype": "lxd.tar.xz",
                                        "size": filesize(lxd_tar_xz),
                                        "sha256": sha256sum(lxd_tar_xz),
                                        "combined_sha256": combined_sha256,
                                        "combined_rootxz_sha256":
                                            combined_sha256,
                                        "path": lxd_tar_xz_rel,
                                    }
                                }
                            }
                        },
                        "release_title": info["release"],
                        "release": info["release"],
                        "os": info["name"],
                        "arch": info["arch"]
                    }
                    products_data[dist_key] = product
                except BaseException as e:
                    # keep indexing the remaining images, but never
                    # swallow a user interrupt
                    if isinstance(e, KeyboardInterrupt):
                        raise
                    self.printWARNING(_("Failed to index %s") % dist_key)
                    self.printWARNING(str(e))
    index_data = {
        "format": "index:1.0",
        "index": {
            "images": {
                "format": "products:1.0",
                "datatype": "image-downloads",
                "products": list(products_data.keys()),
                "path": "streams/v1/images.json"
            }
        }
    }
    with writeFile(path.join(dn, 'streams/v1/index.json')) as f:
        json.dump(index_data, f)
    images_data = {
        "content_id": "images",
        "format": "products:1.0",
        "datatype": "image-downloads",
        "products": products_data,
    }
    with writeFile(path.join(dn, 'streams/v1/images.json')) as f:
        json.dump(images_data, f)
    return True
def remove_container_data(self, dn):
    """Drop the temporary container data directory *dn*."""
    removeDir(dn)
    return True
def prepare_image(self, image):
    """Hook the ISO preparation step into the image build event."""
    image.eventPrepareIso.connect(self.prepare_iso)
    return True
def remove_repositories(self, repname):
    """
    Remove the *repname* overlay from the build: drop its repos.conf
    entry (never for the gentoo/portage trees) and delete its tree.

    :param repname: repository name to remove
    :return: True
    """
    dv = self.clVars.Get('cl_builder_linux_datavars')
    rpath = dv.Select("cl_update_rep_path",
                      where="cl_update_rep_name", eq=repname, limit=1)
    # dead code removed: chroot_path/rpath_orig were computed but
    # never used
    reposconf = ReposConf(dv.Get('cl_update_reposconf'),
                          dv.Get('cl_update_reposconf_dir'),
                          prefix=self.clVars.Get('cl_builder_path'))
    if repname not in ("gentoo", "portage"):
        reposconf.remove(repname)
    removeDir(rpath)
    return True
def set_profile(self, profile_shortname, chroot_path):
    """Point etc/portage/make.profile at the profile named
    *profile_shortname* inside *chroot_path*.

    :raises BuilderError: unknown profile or filesystem failure
    """
    profile = self.clVars.Select('cl_builder_profile_path',
                                 where='cl_builder_profile_shortname',
                                 eq=profile_shortname, limit=1)
    if not profile:
        raise BuilderError(_("Failed to determine profile %s") %
                           profile_shortname)

    def chroot_join(subpath):
        return path.join(chroot_path, subpath)

    profile_path = path.relpath(profile, chroot_join('etc/portage'))
    old_links = (chroot_join('etc/make.profile'),
                 chroot_join('etc/portage/make.profile'))
    try:
        # drop both legacy and current profile symlinks before linking
        for rm_fn in old_links:
            if path.lexists(rm_fn):
                os.unlink(rm_fn)
        os.symlink(profile_path, chroot_join('etc/portage/make.profile'))
    except (OSError, IOError) as e:
        raise BuilderError(_("Failed to set the profile: %s") % str(e))
    return True
def get_world_snapshot_filename(self, builder_path):
    """Path of the saved world snapshot inside the build."""
    return pathJoin(builder_path, "var/lib/calculate/world.snapshot")
def world_snapshot(self, builder_path):
    """Save a copy of the portage world file (at most once per build)."""
    worldfile = pathJoin(builder_path, "var/lib/portage/world")
    snapshot = self.get_world_snapshot_filename(builder_path)
    if path.exists(snapshot):
        return True
    with writeFile(snapshot) as f:
        # readFileEx(grab=True) returns bytes
        f.write(readFileEx(worldfile, grab=True).decode("UTF-8"))
    return True
def world_diff(self, builder_path):
    """Show the differences between the world snapshot and the
    current world file of the build."""
    snapshot = self.get_world_snapshot_filename(builder_path)
    current = pathJoin(builder_path, "var/lib/portage/world")
    self._world_diff(self._chroot_eix_versions(builder_path),
                     readFile(snapshot),
                     readFile(current),
                     premess=_("List of worlds differences"))
    return True
def _chroot_eix_versions(self, builder_path):
    """eix runner (exact match) fitted with a version-list parser."""
    eix = ChrootEix(builder_path, [], ChrootEix.Option.Exact)
    eix.parser = EixVersionParser()
    return eix
def _display_versions(self, versions):
    """
    Print package versions the way eix does, grouped by slot:
    green for stable, brown for ~arch, red for hard-masked or
    keywordless; installed versions are shown inverted.

    :param versions: iterable of version objects exposing slot,
        version, stable, hardmask, missing_keyword, unstable_keyword
        and installed attributes
    """
    _print = self.color_print
    Colors = TextState.Colors
    def _print_version(ver):
        __print = _print
        if ver.stable:
            __print = __print.foreground(Colors.GREEN)
            out = ver.version
        else:
            # red for hard-masked or keywordless, brown for ~arch
            if ver.hardmask or ver.missing_keyword:
                color = Colors.RED
            else:
                color = Colors.BROWN
            if ver.missing_keyword:
                out = "**%s" % ver.version
            elif ver.unstable_keyword:
                out = "~%s" % ver.version
            else:
                out = ver.version
            if ver.hardmask:
                out = "[M]%s" % out
            __print = __print.foreground(color)
        # installed versions are rendered inverted
        if out and ver.installed:
            return __print.invert(out)
        return __print(out)
    getslot = lambda x:x.slot
    # groupby requires the input sorted by the same key
    for slot, data in groupby(sorted(versions, key=getslot), getslot):
        self.printPre(" {slot} {versions}".format(
            slot=_print.foreground(Colors.LIGHT_RED).bold("(%s)"%slot),
            versions=" ".join(_print_version(ver) for ver in data)
        ))
def _world_diff(self, eix, snapshot_data, current_data, premess=None):
    """
    Print the differences between two world files, with eix-style
    version details for every changed package.

    :param eix: ChrootEix runner used to look up version details
    :param snapshot_data: world file content before the change
    :param current_data: world file content after the change
    :param premess: header printed before the first difference
    :return: True if any difference was displayed
    """
    _print = self.color_print
    Colors = TextState.Colors
    newsuffix = "[%s]" % _print.invert.foreground(Colors.GREEN)("N")
    removesuffix = "[%s]" % _print.invert.foreground(Colors.RED)("D")
    world_snapshot = WorldFile(snapshot_data)
    world_current = WorldFile(current_data)
    predisplay = False
    for pkg, added, removed, ommited in world_snapshot.category_diff(
            world_current):
        # print the header once, before the first difference
        if not predisplay and premess:
            self.printSUCCESS(premess)
            predisplay = True
        if added:
            self.printPre("%s %s" % (newsuffix,
                                     " ".join(str(x) for x in added)))
        if removed:
            self.printPre("%s %s" % (removesuffix,
                                     " ".join(str(x) for x in removed)))
        eix.package = [pkg]
        self._display_versions(eix.get_packages())
    return predisplay
def world_snapshot_clean(self, builder_path):
    """Remove the saved world snapshot, ignoring a missing file."""
    quite_unlink(self.get_world_snapshot_filename(builder_path))
    return True
def apply_templates(self, target=None, useClt=None, cltFilter=False,
                    root=None,
                    useDispatch=True, action="merge", distro_dv=None,
                    themes=False):
    """
    Apply templates.

    Args:
        target: distributive the templates are evaluated against
            (/ by default)
        useClt: use clt templates
        cltFilter: apply the filter to clt templates
        root: directory the templates are written to (cl_root_path)
        useDispatch: use dispatch-conf for config protection
        action: template action (cl_action)
        distro_dv: datavars supplying cl_template_path overrides
        themes: apply the themes setup templates
    """
    from calculate.lib.cl_template import (TemplatesError,
                                           ProgressTemplate,
                                           templateFunction)
    templateFunction.installProg = {}
    templateFunction.installCategory = []
    if target is None:
        chroot = '/'
    elif isinstance(target, Distributive):
        chroot = target.getDirectory()
    else:
        chroot = target
    if root is None:
        root = '/'
    elif isinstance(root, Distributive):
        root = root.getDirectory()
    clVars = DataVars()
    clTempl = None
    try:
        clVars.importData()
        clVars.Set('os_arch_machine',
                   self.clVars.Get('builder.os_builder_arch_machine'),
                   force=True)
        if distro_dv:
            clVars.Set('cl_template_path',
                       [pathJoin(chroot, x)
                        for x in distro_dv.Get('cl_template_path')],
                       force=True)
            clVars.Set('cl_template_path_use',
                       clVars.Get('cl_template_path'), force=True)
        clVars.Set('cl_env_path',
                   [pathJoin(chroot, x) for x in clVars.Get('cl_env_path')],
                   force=True)
        clVars.Set('cl_make_profile', path.join(chroot,
                                                'etc/portage/make.profile'),
                   force=True)
        clVars.Set('cl_action', action, force=True)
        clVars.Set('cl_chroot_status', 'on', force=True)
        for copyvar in ("cl_dispatch_conf", "cl_verbose_set",
                        "update.cl_update_world"):
            clVars.Set(copyvar, self.clVars.Get(copyvar), force=True)
        clVars.iniCache = {}
        clVars.flIniFile()
        cltFilter = True if cltFilter in (True, "on") else False
        clVars.Set("cl_chroot_path", chroot, True)
        clVars.Set("cl_root_path", root, True)
        useClt = useClt in ("on", True)
        self.addProgress()
        nullProgress = lambda *args, **kw: None
        dispatch = self.dispatchConf if useDispatch else None
        if themes:
            for k, v in {'cl_action': 'merge',
                         'cl_merge_pkg': [None],
                         'cl_merge_set': "on",
                         'install.cl_setup': 'themes'}.items():
                clVars.Set(k, v, force=True)
        clTempl = ProgressTemplate(nullProgress, clVars,
                                   cltObj=useClt,
                                   cltFilter=cltFilter,
                                   printSUCCESS=self.printSUCCESS,
                                   printWARNING=self.printWARNING,
                                   askConfirm=self.askConfirm,
                                   dispatchConf=dispatch,
                                   printERROR=self.printERROR)

        def execute_command(cmd, lang):
            chroot_path = self.clVars.Get('cl_builder_path')
            env = dict(os.environ)
            env['TERM'] = "linux"
            env['EINFO_QUIET'] = "yes"
            # BUG FIX: the prepared environment was previously thrown
            # away — a plain copy of os.environ was passed instead of
            # env, so TERM/EINFO_QUIET never reached the command
            return self.chroot_process(
                chroot_path, cmd, lang=lang, envdict=env)
        # run template commands through /usr/bin/chroot instead of
        # executing them directly
        clTempl.execute_command = execute_command
        clTempl.applyTemplates()
        if clTempl.hasError():
            if clTempl.getError():
                raise TemplatesError(clTempl.getError())
    finally:
        clVars.close()
        if clTempl:
            if clTempl.cltObj:
                clTempl.cltObj.closeFiles()
            clTempl.closeFiles()
    return True
def get_prog_path(self, progname):
    """Locate *progname* inside the build chroot (None if missing)."""
    return getProgPath(progname,
                       self.clVars.Get('builder.cl_builder_path'))
def _eixUpdateCommand(self, eix_cmd, countRep):
    """Build the chrooted eix-update command with percent progress."""
    chroot = self.clVars.Get('cl_builder_path')
    return PercentProgress("/usr/bin/chroot", chroot, eix_cmd,
                           "-F", part=countRep or 1, atty=True)
def regenCache(self, repname):
    """Regenerate the metadata cache for *repname* with the "update"
    module variables active."""
    with self.clVars.useDefaultModule("update"):
        return super().regenCache(repname)
def prepare_gpg(self):
    """Prepare the GPG data with the "update" module variables active."""
    with self.clVars.useDefaultModule("update"):
        return super().prepare_gpg()
def getGit(self):
    """Git helper; uses the update ssh key when it exists in the build."""
    chroot_path = self.clVars.Get('builder.cl_builder_path')
    sshkey = pathJoin(chroot_path,
                      self.clVars.Get('update.cl_update_sshkey_path'))
    return Git(sshkey) if path.exists(sshkey) else Git()
def syncRepositories(self, repname, fallback_sync=False,
                     clean_on_error=True):
    """Sync repository *repname* with the "update" module variables
    active.

    :param fallback_sync: allow the fallback synchronisation method
    :param clean_on_error: re-clone/clean the repository on failure
    """
    with self.clVars.useDefaultModule("update"):
        return super().syncRepositories(
            repname, fallback_sync=fallback_sync,
            clean_on_error=clean_on_error)
def _regenCache_process(self, progname, repname, cpu_num):
    """Run the cache regeneration tool for *repname* in the chroot."""
    chroot = self.clVars.Get('builder.cl_builder_path')
    return self.chroot_process(chroot, progname,
                               "--repo=%s" % repname, "--update",
                               "--jobs=%s" % cpu_num, stderr=STDOUT)
def clear_log(self, builder_id_path):
    """Recreate an empty log directory for this build id."""
    logpath = path.join(self.clVars.Get('core.cl_log_path'),
                        "build-%s" % builder_id_path)
    if path.exists(logpath):
        removeDir(logpath)
    makeDirectory(logpath)
    return True
@variable_module("builder")
def _get_log_file(self):
    """Full path of the log file for the current build task."""
    logname = "build-%s/%s" % (self.clVars.Get('cl_builder_id_path'),
                               self.clVars.Get('cl_task_name'))
    return path.join(self.clVars.Get('core.cl_log_path'), logname)
def chrootize(self, chroot_path, cmd):
    """Wrap *cmd* for execution inside *chroot_path*, adding a
    linux32 personality when the build arch differs from the host.

    :param chroot_path: build root
    :param cmd: command object to wrap
    :return: Chroot (possibly wrapped in Linux32)
    """
    build_arch = self.clVars.Get('os_builder_arch_machine')
    host_arch = self.clVars.Get('os_arch_machine')
    chrooted = Chroot(chroot_path, cmd)
    # simplified check: only a 64-bit host can build a 32-bit system
    if build_arch == host_arch:
        return chrooted
    return Linux32(chrooted)
def emerge_ask(self, pretend, *params):
    """Show the update information; ask for confirmation unless
    *pretend* is set."""
    mode = self.ListAction.OnlyShow if pretend else self.ListAction.Ask
    return self.emerge_list(mode, *params)
class ListAction():
    """How emerge_list() treats the computed package list."""
    Ask = "ask"                  # show the list and ask to continue
    OnlyShow = "only_show"       # only display the list
    ShowAndRun = "show_and_run"  # display the list, then emerge
def emerge_list(self, action, *params):
    """
    Build packages, displaying the package list first.

    :param action: ListAction member: Ask - confirm before merging,
        OnlyShow - only display, ShowAndRun - display and merge
    :param params: emerge options (leading "-") and package atoms
    :return: True on success, False on emerge failure
    """
    deo = self.get_default_emerge_opts()
    param = [x for x in params if x.startswith("-")]
    packages = [x for x in params if not x.startswith("-")]
    chroot_path = self.clVars.Get('cl_builder_path')
    logfile = self._get_log_file()
    with EmergeParser(self.chrootize(chroot_path, EmergeCommand(
            list(packages),
            emerge_default_opts=deo,
            extra_params=param,
            logfile=logfile))) as emerge:
        try:
            emerge.question.action = lambda x: False
            emerge.run()
            if emerge.install_packages.list:
                emergelike = self.clVars.Get(
                    'update.cl_update_emergelist_set') == 'on'
                self._display_install_package(emerge, emergelike)
                if emerge.skipped_packages:
                    self._display_error(emerge.skipped_packages)
                # use equality, not identity: the actions are plain
                # strings and "is" would break for equal copies
                if action == self.ListAction.Ask:
                    answer = self.askConfirm(
                        _("Would you like to merge these packages?"),
                        "yes")
                    if answer == "no":
                        emerge.command.send("no\n")
                        raise KeyboardInterrupt
                elif action == self.ListAction.OnlyShow:
                    return True
            else:
                self.printSUCCESS(_("Nothing to merge"))
        except EmergeError:
            self._display_install_package(emerge, emergelike=True)
            self._display_error(emerge.prepare_error)
            return False
        try:
            self._startEmerging(emerge, _("Fetching..."))
        except Exception as e:
            self.printERROR(
                _("Emerge error was occurred while fetching packages"))
            return False
    return True
def depclean(self, without_bdeps=False):
    """
    Remove packages that are no longer required by the world set.

    :param without_bdeps: also drop build-time dependencies
        (adds --with-bdeps=n)
    :return: True; re-raises EmergeError on failure
    """
    deo = self.get_default_emerge_opts(depclean=True)
    chroot_path = self.clVars.Get('cl_builder_path')
    logfile = self._get_log_file()
    with EmergeParser(self.chrootize(chroot_path, EmergeCommand(
            (["--depclean", "--dynamic-deps=n",
              "--with-bdeps=n", "--ask=y"]
             if without_bdeps else ["--depclean", "--dynamic-deps=n",
                                    "--ask=y"]),
            logfile=logfile,
            emerge_default_opts=deo))) as emerge:
        try:
            emerge.question.action = lambda x: False
            emerge.run()
            if emerge.uninstall_packages.list:
                self._display_remove_list(emerge)
                # user may decline; declining is not an error
                if (self.askConfirm(
                        _("Would you like to unmerge these unused packages "
                          "(recommended)?")) != 'yes'):
                    return True
                self._startEmerging(emerge)
            else:
                self.printSUCCESS(_("Nothing to unmerge"))
        except EmergeError:
            self._display_error(emerge.prepare_error)
            raise
    return True
def chroot_command(self, builder_path, command, *params):
    """
    Run *command* with *params* inside the build chroot and log its
    output.

    The previous docstring described eix-update specifics that do not
    apply to this generic helper.

    :param builder_path: build root
    :param command: program name looked up inside the build
    :return: "skip" when the command does not exist in the build,
        True on success
    :raises BuilderError: when the command exits with failure
    """
    cmdpath = self.get_prog_path(command)
    if not cmdpath:
        return "skip"
    p = self.chroot_process(builder_path, cmdpath, *params, stderr=STDOUT)
    with writeFile(self._get_log_file()) as f:
        f.write(p.read())
    if p.failed():
        raise BuilderError(_("Failed to execute %s") % command)
    return True
def update_task(self, task_name):
    """
    Decorator adding begin/end marks for *task_name* to the emerge log.

    The end mark is written only when the wrapped function returns a
    truthy result.
    """
    from functools import wraps

    def decor(f):
        @wraps(f)  # preserve the wrapped function's metadata
        def wrapper(*args, **kwargs):
            logger = EmergeLog(EmergeLogNamedTask(task_name),
                               prefix=self.clVars.Get('cl_builder_path'))
            logger.mark_begin_task()
            ret = f(*args, **kwargs)
            if ret:
                logger.mark_end_task()
            return ret
        return wrapper
    return decor
def repair_gentoo_repo(self, dct, chroot):
    """
    Ensure the 'gentoo' repository has a path entry in *dct* when
    portage knows about it, falling back to the standard locations.

    :param dct: repository name -> path mapping (modified in place)
    :param chroot: build root the candidate paths live under
    :return: dct (always, also after a lookup failure)
    """
    from portage.package.ebuild.config import config
    gentoo_paths = [f'{chroot}/var/db/repos/gentoo', f'{chroot}/usr/portage']
    try:
        c = config()
        if 'gentoo' in c.repositories and 'gentoo' not in dct:
            for dir_path in gentoo_paths:
                if path.exists(dir_path):
                    dct['gentoo'] = dir_path
                    break
    except Exception:
        # best effort: a broken portage config must not abort the
        # build. Previously a bare except plus "return" inside a
        # finally block also swallowed KeyboardInterrupt/SystemExit;
        # now only ordinary errors are ignored.
        pass
    return dct
def get_rebuild_changed_packages(self, builder_path, repository_data):
    """
    Get the packages whose ebuilds have changed.

    Walks every package in var/db/pkg (reading the fields from
    environment.bz2) and compares those fields against the package's
    metadata/md5-cache entry.  Packages whose RDEPEND/DEPEND were only
    changed dynamically (e.g. "automagic" fixes) are therefore not
    listed.

    :param builder_path: build root
    :param repository_data: iterable of (repo name, repo path) pairs
    :return: list of "=CAT/PN-PVR" atoms to rebuild
    """
    var_db_path = path.join(builder_path, 'var/db/pkg')
    map_rep = {k: pathJoin(builder_path, v)
               for k, v in repository_data}
    map_rep = self.repair_gentoo_repo(map_rep, builder_path)
    def rebuild_generator():
        for pkg in InstalledPackageInfo.get_install_packages(var_db_path):
            try:
                # unknown repository -> rebuild; differing ebuild
                # metadata -> rebuild
                if pkg['repository'] not in map_rep:
                    yield pkg.atom
                elif pkg != EbuildInfo(pkg.atom,
                                       map_rep[pkg['repository']]):
                    yield pkg.atom
            except EbuildInfoError:
                pass
    return ["=%s" % x for x in rebuild_generator()]
def rebuild_changed_packages(self, builder_path, repository_data):
    """Rebuild the packages whose ebuilds changed since installation."""
    atoms = self.get_rebuild_changed_packages(builder_path,
                                              repository_data)
    if not atoms:
        return True
    return self.emerge_list(self.ListAction.ShowAndRun, "-1", *atoms)
class Driver():
    """Base class describing a video driver handled by the build.

    Iterating yields (driver id, mask string, package atom) tuples:
    first the driver package itself, then everything produced by
    generate().
    """
    Package = None  # package atom of the driver
    Id = None       # short driver name

    def __init__(self, builder_path="/", system_ini=None):
        self.builder_path = builder_path
        self.system_ini = system_ini

    def generate(self, system_ini):
        """Yield extra (id, mask, atom) tuples; none by default.

        Previously declared as generate(self) and raising
        StopIteration, which made the base class unusable: __iter__
        calls it with system_ini (TypeError), and raising
        StopIteration inside a generator is a RuntimeError since
        PEP 479.
        """
        return iter(())

    def __iter__(self):
        yield (self.Id, '', self.Package)
        for x in self.generate(self.system_ini):
            yield x
class NvidiaDriver(Driver):
    """Masks and atoms for every supported nvidia-drivers branch."""
    Id = 'nvidia-drivers'
    Package = 'x11-drivers/nvidia-drivers'
    # legacy branch mask versions that must not be built (eclass data)
    SkipVers = ('72.0.0', '97.0.0', '177.0.0', '305.0.0', '341.0.0')
    # legacy series skipped when the ini provides a [nvidia] section
    SkipVersNew = ('71', '96', '173', '304', '340', '367')

    def generate_legacy(self):
        """Yield branch masks parsed from nvidia-driver.eclass (used
        when the system ini has no [nvidia] section)."""
        if os.path.isdir(path.join(self.builder_path, 'var/db/repos/gentoo')):
            Eclass = 'var/db/repos/gentoo/eclass/nvidia-driver.eclass'
        else:
            Eclass = 'usr/portage/eclass/nvidia-driver.eclass'
        nvidia_eclass = path.join(self.builder_path, Eclass)
        eclassdata = readFile(nvidia_eclass)
        reBlock = re.compile(
            r"if has \$\{nvidia_gpu\}\s+\\([^;]+);\s*then(.*?)fi", re.S)
        reMask = re.compile('>=x11-drivers/nvidia-drivers[^"]+')
        for block in reBlock.findall(eclassdata):
            nvidia_ids, mask_data = block
            m = reMask.search(mask_data)
            if m:
                mask_str = m.group()
                maskver = mask_str.rpartition("-")[2].strip()
                # skip building for the 71, 96, 304, 340 series
                if maskver in self.SkipVers:
                    continue
                yield (self.Id, mask_str, mask_str.replace('>=', '\\<'))

    def generate_new(self, system_ini):
        """Yield branch masks built from the [nvidia] ini section."""
        for nvidia_serie in system_ini.getKeys('nvidia'):
            if _u(nvidia_serie) in self.SkipVersNew:
                continue
            mask_str = ">=x11-drivers/nvidia-drivers-{}".format(int(nvidia_serie)+1)
            yield (self.Id, mask_str, mask_str.replace('>=', '\\<'))

    def generate(self, system_ini):
        """Prefer ini data; fall back to eclass parsing."""
        if system_ini.getKeys('nvidia'):
            for x in self.generate_new(system_ini):
                yield x
        else:
            for x in self.generate_legacy():
                yield x
def pretend_package_install(self, atom, chroot_path, logfile=None):
    """
    Get (and cache) the list of packages an emerge of *atom* would
    install.

    :return: list of packages
    """
    deo = self.get_default_emerge_opts()
    if atom not in self.pretend_package_list:
        pretend_cmd = EmergeCommand([atom], extra_params=['-pv', '--ask=n'],
                                    emerge_default_opts=deo,
                                    logfile=logfile)
        with EmergeParser(self.chrootize(chroot_path,
                                         pretend_cmd)) as emerge:
            emerge.question.default_answer = "n"
            emerge.run()
            self.pretend_package_list[atom] = list(
                emerge.install_packages.list)
    return self.pretend_package_list[atom]
def _display_video_install_package(self, package_list, drv_name):
    """
    Display the packages pulled in by a video driver, excluding the
    driver package itself: binary packages in purple, source ebuilds
    in green.  (Commented-out legacy asterisk-marking code removed.)

    :param package_list: packages to display
    :param drv_name: driver package name (PN), filtered out
    """
    ebuild_color = TextState.Colors.GREEN
    binary_color = TextState.Colors.PURPLE
    colorize = self.color_print.foreground
    output_package_list = ", ".join(
        colorize(binary_color)(str(x))
        if x['binary'] else
        colorize(ebuild_color)(str(x))
        for x in package_list if x['PN'] != drv_name
    )
    self.printSUCCESS(_("Depends %s") % output_package_list)
def fetch_drivers(self, builder_path, builder_distdir, builder_pkgdir):
    """
    Fetch the distfiles and binary packages needed to install the
    video drivers, then remove everything else from the driver
    distdir/pkgdir trees and flatten the pkgdir categories.

    :param builder_path: build root
    :param builder_distdir: distfiles already kept by the build
    :param builder_pkgdir: binary packages already kept by the build
    :return: True
    """
    distrdir_perm = (FilePermission.SetGid |
                     FilePermission.UserAll |
                     FilePermission.GroupRead |
                     FilePermission.GroupExecute |
                     FilePermission.OtherRead |
                     FilePermission.OtherExecute)
    portage_group = 250
    root_user = 0
    logfile = self._get_log_file()
    deo = self.get_default_emerge_opts()
    system_ini = SystemIni(self.clVars.Get('cl_builder_linux_datavars'))
    driver_list = list(self.NvidiaDriver(builder_path, system_ini))
    if os.path.isdir(path.join(self.clVars.Get('cl_chroot_path'),
                               'var/db/repos/gentoo')):
        distrdir = path.join(builder_path, 'var/calculate/distfiles')
        pkgdir = path.join(builder_path, 'var/calculate/packages')
    else:
        distrdir = path.join(builder_path, 'usr/portage/distfiles')
        pkgdir = path.join(builder_path, 'usr/portage/packages')
    for target_dn in (distrdir, pkgdir):
        makeDirectory(target_dn)
        chmod(target_dn, distrdir_perm)
        chown(target_dn, root_user, portage_group)
    pkgdir_files = []
    distdir_files = []
    repeat_driver_list = []
    while driver_list or repeat_driver_list:
        drv_name, drv_mask, drv_atom = driver_list.pop(0)
        self.startTask(_("Calculating dependencies for %s") %
                       drv_atom.strip('"').replace("\\", "").replace(
                           "<", "&lt;"))
        package_list = self.pretend_package_install(drv_atom, builder_path,
                                                    logfile=logfile)
        binary_map = {str(x): x['binary'] for x in package_list}
        self._display_video_install_package(package_list, drv_name)
        # NOTE(review): the source string has no %s placeholder, so
        # this % formatting only works if the active translation
        # provides one (and the list lookup can raise IndexError when
        # the driver package is absent) — confirm.
        self.startTask(_("Fetching binary packages and sources tarballs") %
                       [x for x in package_list if x['PN'] == drv_name][0])
        ef = EmergeFetcher(self.chrootize(builder_path, EmergeCommand(
            ["=%s" % x for x in package_list], emerge_default_opts=deo,
            extra_params=["-Of", "--ask=n"], logfile="%s.2" % logfile,
            env={'DISTDIR': 'var/calculate/distfiles',
                 'PKGDIR': 'var/calculate/packages'})))
        try:
            for package in ef:
                pkg_name = str(package)
                if binary_map.get(pkg_name, False):
                    for fn in package.files:
                        pkgdir_files.append("%s/%s" % (package['CATEGORY'],
                                                       fn))
                else:
                    for fn in package.files:
                        distdir_files.append(fn)
            if ef.failed():
                raise BuilderError(_("Failed to get %s") % drv_name)
        except EmergeFetcherError as e:
            if e.extension:
                self.printPre("\n%s\n" % e.extension)
            if e.errno == EmergeFetcherError.FetchErrno.NeedManually:
                raise BuilderError(
                    _("Failed to fetch files for %s") % drv_name)
            # files locked by another process: retry this driver later
            repeat_driver_list.append([drv_name, drv_mask, drv_atom])
        if not driver_list and repeat_driver_list:
            driver_list = repeat_driver_list
            repeat_driver_list = []
            self.printWARNING(_("Waiting for unlock %s")
                              % driver_list[0][0])
            time.sleep(10)
    self.startTask(_("Cleaning driver files"))
    for source_dn, source, target_dn, target in [
            (builder_distdir, distdir_files,
             distrdir, find(distrdir,
                            filetype=FindFileType.RegularFile,
                            fullpath=False)),
            (builder_pkgdir, pkgdir_files,
             pkgdir, find(pkgdir,
                          filetype=FindFileType.RegularFile,
                          fullpath=False))]:
        # remove every file that was not fetched on this run
        for fn in target:
            if fn not in source:
                removeFileWithEmptyDirectory(path.join(target_dn, fn),
                                             stopDirectory=target_dn)
    # flatten pkgdir category subdirectories into pkgdir itself
    for dir in find(pkgdir, filetype='d'):
        for fn in os.listdir(dir):
            try:
                shutil.move(path.join(dir, fn), pkgdir)
            except (shutil.Error, FileNotFoundError):
                continue
        removeDir(dir)
    return True
def create_video_data(self, builder_path, repository_data):
    """
    Write the video-driver cache file: one line per dependency of
    every supported nvidia driver branch (binary packages marked
    "binary", source packages carrying their repository path), and
    remember the ebuild directories in cl_builder_video_ebuilds.

    :param builder_path: build root
    :param repository_data: iterable of (repo name, repo path) pairs
    :return: True
    :raises BuilderError: a source package comes from an unknown repo
    """
    system_ini = SystemIni(self.clVars.Get('cl_builder_linux_datavars'))
    driver_list = list(chain(self.NvidiaDriver(builder_path, system_ini)))
    logfile = self._get_log_file()
    cache_file = self.clVars.Get('builder.cl_builder_video_driver_path')
    map_rep = dict(repository_data)
    video_ebuilds = set()
    with writeFile(cache_file) as f:
        for drv_name, drv_mask, drv_atom in driver_list:
            package_list = self.pretend_package_install(
                drv_atom, builder_path, logfile=logfile)
            for package in package_list:
                if package['binary']:
                    s = "{category} {pn} {pv} binary {drv} {mask}\n".format(
                        category=package['CATEGORY'],
                        pn=package['PN'],
                        pv=package['PVR'],
                        drv=drv_name,
                        mask=drv_mask.strip('"'))
                    f.write(s)
                else:
                    # a source package must come from a known repository
                    if package['REPO'] not in map_rep:
                        raise BuilderError(
                            _("Failed to determine path "
                              "for %s repository") % package['REPO'])
                    s = "{category} {pn} {pv} {dn} {drv} {mask}\n".format(
                        category=package['CATEGORY'],
                        pn=package['PN'],
                        pv=package['PVR'],
                        dn=map_rep[package['REPO']],
                        drv=drv_name,
                        mask=drv_mask.strip('"'))
                    video_ebuilds.add(
                        "%s/%s/%s"%(map_rep[package['REPO']],
                                    package['CATEGORY'],
                                    package['PN']))
                    f.write(s)
    self.clVars.Set('cl_builder_video_ebuilds', list(video_ebuilds))
    return True
def remove_video_drivers(self, builder_path):
"""
Удалить данные и архивы для установки видео драйверов
:param builder_path: путь до сборки
:return:
"""
cache_file = self.clVars.Get('builder.cl_builder_video_driver_path')
if path.exists(cache_file):
removeFileWithEmptyDirectory(cache_file)
if os.path.isdir(path.join(self.clVars.Get('cl_chroot_path'), 'var/db/repos/gentoo')):
distrdir = path.join(builder_path, 'var/calculate/distfiles')
pkgdir = path.join(builder_path, 'var/calculate/packages')
else:
distrdir = path.join(builder_path, 'usr/portage/distfiles')
pkgdir = path.join(builder_path, 'usr/portage/packages')
for target_dn, target in [
(distrdir, find(distrdir,
fullpath=False)),
(pkgdir, find(pkgdir,
fullpath=False))]:
# удаляем все найденные файлы
for fn in target:
removeFileWithEmptyDirectory(path.join(target_dn, fn),
stopDirectory=target_dn)
return True
def reading_news(self, builder_path):
"""
'Прочитать' новости
:param builder_path: путь до сборки
:return:
"""
eselect_command = "/usr/bin/eselect"
p = self.chroot_process(builder_path, eselect_command, "--colour=no",
"news", "list", stderr=STDOUT)
re_new = re.compile(r"^\s+\[\d+\]\s+N?\s+(\S+)\s+(.*)$", re.M)
for date, title in reversed(re_new.findall(p.read())[-3:]):
self.printSUCCESS("{date} {title}".format(date=date, title=title))
return True
def check_obsolete(self, builder_path):
"""
Проверка на устаревшие установленные пакеты
:param builder_path:
:return:
"""
chroot_eix = ChrootEix(builder_path, [], ChrootEix.Option.TestObsolete)
l = chroot_eix.get_packages()
if l:
self.printERROR(_("Obsolete packages list:"))
mult = self.color_print.bold("*")
for pkg in l:
self.printDefault(
"&nbsp;{mult} {package}".format(
mult=mult, package=pkg['CATEGORY/PN']))
return True
    def chroot_emergelike(self, builder_path, cmd, *params):
        """
        Run a command whose execution implies an emerge (its output is
        parsed by EmergeParser) inside the chroot.

        :param builder_path: chroot path
        :param cmd: command to locate and run
        :param params: extra command-line arguments
        :return: True
        :raises BuilderError: when the command cannot be found
        """
        cmd_path = self.get_prog_path(cmd)
        logfile = self._get_log_file()
        if not cmd_path:
            raise BuilderError(_("Failed to find the %s command") % cmd)
        with EmergeParser(self.chrootize(builder_path, CommandExecutor(
                cmd_path, params,
                logfile=logfile))) as emerge:
            self._startEmerging(emerge)
        return True
    def chroot_revdep_rebuild(self, builder_path, cmd, *params):
        """
        Run revdep-rebuild inside the chroot with progress reporting.

        :param builder_path: chroot path
        :param cmd: revdep-rebuild command to locate and run
        :param params: extra command-line arguments
        :return: True
        :raises BuilderError: when the command cannot be found
        """
        cmd_path = self.get_prog_path(cmd)
        logfile = self._get_log_file()
        if not cmd_path:
            raise BuilderError(_("Failed to find the %s command") % cmd)
        with EmergeParser(self.chrootize(builder_path, CommandExecutor(
                cmd_path, params, logfile=logfile))) as emerge:
            # RevdepPercentBlock turns revdep output into percent values
            revdep = RevdepPercentBlock(emerge)
            self.addProgress()
            revdep.add_observer(self.setProgress)
            # once revdep enters its "Assign" stage, switch the task title
            revdep.action = lambda x: (
                self.endTask(), self.startTask(_("Assigning files to packages"))
                if "Assign" in revdep else None)
            self._startEmerging(emerge)
        return True
def update_rep_list(self):
"""
Обновить список доступных репозиториев
:param builder_path:
:return:
"""
builder_path = self.clVars.Get("cl_builder_path")
cmd = "/usr/bin/eselect"
cmd_path = self.get_prog_path(cmd)
logfile = self._get_log_file()
if not cmd_path:
raise BuilderError(_("Failed to find the %s command") % cmd)
repsync = self.chrootize(builder_path, CommandExecutor(cmd_path, ["repository", "list"],
logfile=logfile))
repsync.execute()
return repsync.success()
    def syncOtherRepository(self, repname):
        """
        Sync a repository via `emerge --sync`; the repository cache is
        stashed for the duration of the sync and restored afterwards.

        :param repname: repository name
        :return: True
        :raises BuilderError: when emerge is missing or the sync fails
        """
        chroot_path = self.clVars.Get('cl_builder_path')
        emerge = self.get_prog_path('/usr/bin/emerge')
        if not emerge:
            raise BuilderError(_("The Emerge tool is not found"))
        rpath = self.clVars.Select('cl_builder_other_rep_path',
                                   where='cl_builder_other_rep_name',
                                   eq=repname, limit=1)
        repdirname = path.basename(rpath)
        self.stash_cache(rpath, repdirname)
        try:
            if Git.is_git(rpath):
                # git repositories provide a progress stream we can show
                self.addProgress()
                p = PercentProgress(
                    "/usr/bin/chroot", chroot_path,
                    emerge, "--sync", repname, part=1, atty=True)
                for perc in p.progress():
                    self.setProgress(perc)
            else:
                p = self.chroot_process(
                    chroot_path, emerge, "--sync", repname, stderr=STDOUT)
            if p.failed():
                raise BuilderError(
                    _("Failed to update the {rname} repository").format(
                        rname=repname),
                    addon=p.read())
        finally:
            # always restore the stashed cache, even on failure
            self.unstash_cache(rpath, repdirname)
        return True
cap_file = "/var/lib/calculate/filecaps"
    def save_file_capabilities(self):
        """
        Save file-capabilities information with the filecap tool into
        self.cap_file (paths are stored relative to the chroot).

        :return: True
        """
        chroot_path = self.clVars.Get('cl_builder_path')
        filecap = getProgPath("/usr/bin/filecap")
        p = process(filecap, chroot_path)
        with writeFile(pathJoin(chroot_path, self.cap_file)) as f:
            column_shift = 0
            for line in (x.strip() for x in p):
                # skip blank lines and the header ("file ...")
                if not line or line.startswith("file"):
                    continue
                if line.startswith("set"):
                    # after a "set" marker the useful columns shift right;
                    # we don't need the first column at all
                    column_shift = 1
                    continue
                line_split = line.split()
                fn = line_split[0 + column_shift]
                caps = line_split[1 + column_shift:]
                # strip the chroot prefix so paths are chroot-relative
                fn = fn[len(chroot_path):]
                f.write("%s %s\n"% (fn, " ".join(x.rstrip(",") for x in caps)))
        return True
def clear_file_capabilities(self):
chroot_path = self.clVars.Get('cl_builder_path')
filecap_fn = pathJoin(chroot_path, self.cap_file)
if path.exists(filecap_fn):
os.unlink(filecap_fn)
return True
def restore_file_capabilities(self):
"""
Восстановить информацию о file capabilities при помощи команды filecap
"""
chroot_path = self.clVars.Get('cl_builder_path')
filecap = getProgPath("/usr/bin/filecap")
p = process(filecap, chroot_path)
filecap_fn = pathJoin(chroot_path, self.cap_file)
if path.exists(filecap_fn):
self.startTask(_("Restoring file capabilities"))
for line in readLinesFile(filecap_fn):
line = line.strip()
if not line:
continue
fn, caps = line.partition(" ")[::2]
fn = pathJoin(chroot_path, fn)
caps = [x.strip() for x in caps.split()]
process(filecap, fn, *caps).success()
self.endTask()
return True
    def _update_binhost_packages(self):
        """
        Rebuild the binhost index file (Packages.gz) by running
        `emaint binhost -f` inside the chroot; all output is discarded.

        :return: None
        """
        chroot_path = self.clVars.Get('cl_builder_path')
        pkgdir = self.clVars.Get('cl_builder_pkgdir_full')
        os.system(
            '/usr/bin/chroot %s /bin/bash -c '
            '"PKGDIR=%s /usr/sbin/emaint binhost -f" &>/dev/null' %
            (chroot_path, pkgdir))
    def cleanpkg(self):
        """
        Clean the system of obsolete distfiles and binary packages.

        :return: result of the underlying _cleanpkg() call
        """
        builder_path = self.clVars.Get('cl_builder_path')
        portdirs = [
            pathJoin(builder_path, x)
            for x in self.clVars.Get("builder.cl_builder_repository_location")]
        # files still referenced by the ebuild repositories must be kept
        pkgfiles = get_packages_files_directory(*portdirs)
        distdirfiles = get_manifest_files_directory(*portdirs)
        distdir = self.clVars.Get('builder.cl_builder_linux_distdir')
        pkgdir = self.clVars.Get('builder.cl_builder_linux_pkgdir')
        logfile = self._get_log_file()
        logger = log("update_cleanpkg.log", filename=logfile,
                     formatter="%(asctime)s - %(clean)s - %(message)s")
        return self._cleanpkg(
            distdir, pkgdir, distdirfiles, pkgfiles, logger)
def regenPackages(self, chrootPath, pkgDirPath):
"""Regenerate packages and clean SYNC param"""
pathPackages = pathJoin(chrootPath, pkgDirPath, "Packages")
# remove Packages if it recreated
if path.exists(pathPackages):
os.unlink(pathPackages)
self._update_binhost_packages()
if path.exists(pathPackages):
re_keywords = re.compile(
'^(KEYWORDS|SYNC):.*$\n', re.M)
data = readFile(pathPackages)
data_blocks = data.split('\n\n')
modified_blocks = [
"%s\nKEYWORDS: amd64 x86" % re_keywords.sub('', x)
for x in data_blocks[1:] if x.strip()]
with writeFile(pathPackages) as f:
9 years ago
f.write("\n\n".join(data_blocks[:1] + modified_blocks))
def binaryCleaning(self):
"""Clean binary repository"""
# imported from calculate_assemble
chrootPath = self.clVars.Get('cl_builder_path')
pkgDir = pathJoin(chrootPath,
self.clVars.Get('cl_builder_pkgdir_full'))
dbPkg = pathJoin(chrootPath, 'var/db/pkg')
logfile = self._get_log_file()
logger = log("binary_cleanpkg.log", filename=logfile,
formatter="%(asctime)s - %(message)s")
try:
if not path.exists(dbPkg):
os.makedirs(dbPkg)
if not path.exists(pkgDir):
os.makedirs(pkgDir)
if path.exists(dbPkg) and path.exists(pkgDir):
# get pkg list from distro
pkgList = reduce(lambda x, y: x + y,
3 years ago
[[path.join(x, "%s.tbz2" % z) for z
in os.listdir(path.join(dbPkg, x))] for x
in os.listdir(dbPkg)],
[])
# get binary packages
binList = reduce(lambda x, y: x + y,
3 years ago
[[path.join(x, y)[len(pkgDir) + 1:] for y
in os.listdir(path.join(x))] for x
in (z for z in (path.join(pkgDir, o) for o
in os.listdir(pkgDir))
3 years ago
if path.isdir(z))],
[])
# remove files which in binary and not in db/pkg
removeList = list(set(binList) - set(pkgList))
if removeList:
removelist_str = ",".join(
path.basename(x) for x in removeList)
logger.info(removelist_str)
[os.unlink(y) for y in (pathJoin(pkgDir, x) for x in removeList)]
[os.rmdir(y) for y
in (path.join(pkgDir, x) for x
in os.listdir(pkgDir))
if path.isdir(y) and not os.listdir(y)]
9 years ago
self.regenPackages(chrootPath, pkgDir[len(chrootPath):])
except OSError as e:
raise BuilderError(str(e))
return True
def prepare_update_vars(self):
"""
Заместить значения update переменных одноимёнными из builder
"""
vars_map = {
#Применить значение переменной для выбора веток репозиториев
#при обновлении
'update.cl_update_branch_name': 'builder.cl_builder_branch_name',
'update.cl_update_gpg_force': 'builder.cl_builder_gpg_force',
'update.cl_update_gpg_keys': 'builder.cl_builder_gpg_keys',
'update.cl_update_binhost_revision_path':
'builder.cl_builder_binhost_revision_path',
'update.cl_update_binhost_timestamp_path':
'builder.cl_builder_binhost_timestamp_path',
'update.cl_update_last_timestamp':
'builder.cl_builder_last_timestamp',
'update.cl_update_binhost_stable_set':
'builder.cl_builder_binhost_stable_set',
'update.cl_update_binhost_stable_opt_set':
'builder.cl_builder_binhost_stable_opt_set',
'update.cl_update_binhost':
'builder.cl_builder_binhost',
'update.cl_update_sync_rep': 'builder.cl_builder_sync_rep',
'update.cl_update_other_rep_name': 'builder.cl_builder_other_rep_name',
'update.cl_update_usetag_set': 'builder.cl_builder_usetag_set',
'update.cl_update_sync_overlay_rep': 'builder.cl_builder_sync_overlay_rep',
'update.cl_repository_name': 'builder.cl_builder_repository_name'
}
try:
for k,v in vars_map.items():
self.clVars.Set(k, self.clVars.Get(v), force=True)
except DataVarsError as e:
error = BuilderError(_("Failed to prepare variables for synchronization"))
error.addon = e
raise error
return True
    @variable_module("builder")
    def _get_binhost_logger(self):
        # per-build log file: <core log path>/build-<id>/binhost-scan.log
        logname = "build-%s/%s" % (self.clVars.Get('cl_builder_id_path'),
                                   "binhost-scan.log")
        mainlog = self.clVars.Get('core.cl_log_path')
        return log("binhost-scan.log",
                   filename=path.join(mainlog, logname),
                   formatter="%(message)s")
def isohybrid(self, image_file):
"""
Преобразовать ISO образ в гибридный
:param image_file: iso образ
:return:
"""
isohybrid = getProgPath("/usr/bin/isohybrid")
if not isohybrid:
raise BuilderError(_("Isohybrid utility not found"))
if not path.exists(image_file):
raise BuilderError(_("Image not found"))
if self.clVars.Get('os_builder_arch_machine') == 'x86_64':
cmd = [isohybrid, "--uefi", image_file]
else:
cmd = [isohybrid, image_file]
isohybrid_process = process(*cmd)
return isohybrid_process.success()
def _list_file(self, iso_file):
"""
.list файл по iso файлу
:param iso_file:
:return:
"""
if iso_file.endswith(".iso"):
return "%s.list" % iso_file[:-4]
else:
return "%s.list" % iso_file
def _digest_file(self, iso_file):
return "%s.DIGESTS" % iso_file
    def create_package_list(self, chroot, iso_file):
        """
        Write the sorted list of packages installed in the chroot to the
        .list file derived from *iso_file*.

        :param chroot: chroot directory to inspect
        :param iso_file: iso image the list belongs to
        :return: True
        """
        pkgdir = path.join(chroot, 'var/db/pkg')
        list_file = self._list_file(iso_file)
        with writeFile(list_file) as f:
            f.write("\n".join(sorted(
                x.atom for x in InstalledPackageInfo.get_install_packages(
                    pkg_dir=pkgdir))))
        return True
def create_digest(self, isofile):
"""
Создать контрольную сумму для файла
"""
template = """# %(alg)s HASH\n%(digest)s %(filename)s\n"""
digestfile = self._digest_file(isofile)
try:
with writeFile(digestfile) as f:
f.writelines((template % {
'alg': x.upper(),
'digest':
process("%ssum" % x, isofile).read().partition(' ')[0],
'filename': path.basename(isofile)} for x in ["md5", "sha1"]))
except (IOError, OSError):
return False
return True
def get_arch_machine(self):
return self.clVars.Get("builder.os_builder_arch_machine")
    @property
    def chroot_process(self):
        """
        Return a process factory pre-bound to /usr/bin/chroot; when the
        target architecture differs from the host's, the command is also
        wrapped in /usr/bin/linux32.

        :raises BuilderError: when the chroot binary is missing
        """
        chroot_cmd = getProgPath("/usr/bin/chroot")
        if not chroot_cmd:
            raise BuilderError(_("Chroot command not found"))
        arch = self.clVars.Get('builder.os_builder_arch_machine')
        local_arch = self.clVars.Get('os_arch_machine')
        process_func = process
        if arch != local_arch:
            # run through linux32 so the chrooted system sees a 32-bit
            # personality
            process_func = partial(process_func, "/usr/bin/linux32")
        return partial(process_func, chroot_cmd)
def get_default_emerge_opts(self, depclean=False):
dv = self.clVars.Get('builder.cl_builder_linux_datavars')
if dv:
deo = dv.Get('cl_emerge_default_opts')
bdeps_val = self.clVars.Get('cl_builder_with_bdeps_opt_set')
if bdeps_val == "auto":
bdeps = " --with-bdeps-auto=y"
elif bdeps_val == "on":
bdeps = " --with-bdeps=y"
else:
bdeps = " --with-bdeps=n"
return deo + bdeps
else:
return super().get_default_emerge_opts()
def emerge(self, builder_path, use, *params):
extra_params = [x for x in params if x.startswith("-")]
packages = [x for x in params if not x.startswith("-")]
return self._emerge(builder_path, packages, extra_params, use=use)
    def _emerge(self, builder_path, packages, params, use="",
                env_update=None):
        """
        Build packages.

        :param builder_path: chroot path
        :param packages: list of atoms to build
        :param params: list of emerge parameters
        :param use: USE flags as a single string
        :param env_update: environment overrides for the emerge call
        :return: True
        :raises EmergeError: when the pretend phase fails
        """
        logfile = self._get_log_file()
        deo = self.get_default_emerge_opts()
        with EmergeParser(self.chrootize(builder_path, EmergeCommand(
                packages, emerge_default_opts=deo,
                extra_params=params, use=use,
                env_update=env_update,
                logfile=logfile))) as emerge:
            try:
                # refuse any interactive emerge question
                emerge.question.action = lambda x: False
                emerge.run()
                if not emerge.install_packages.list:
                    # nothing to install
                    return True
            except EmergeError:
                self.emerge_cache.drop_cache("Emerge error")
                self._display_error(emerge.prepare_error)
                raise
            self._startEmerging(emerge)
        return True
def remove_list_digest(self, isofile):
for fn in (self._digest_file(isofile), self._list_file(isofile)):
if path.exists(fn):
os.unlink(fn)
return True
    def recount_files(self, builder_path, exclude_files):
        """
        Count the files of the assembled system minus the excluded
        subtrees and store the result in os_builder_linux_files.

        :param builder_path: root of the assembled system
        :param exclude_files: paths relative to builder_path to exclude
        :return: True
        """
        all_count = countFiles(builder_path)
        exclude_sum = sum(countFiles(path.join(builder_path, x))
                          for x in exclude_files)
        self.clVars.Set('os_builder_linux_files', str(all_count - exclude_sum),
                        force=True)
        return True
def create_dev_nodes(self, directory):
"""Create nodes for dev http://dev.gentoo.org/~a3li/openrc.txt"""
devPath = path.join(directory, "dev")
# clean dev
for pathname, dirs, files in os.walk(devPath, topdown=False):
[os.unlink(path.join(pathname, x)) for x in files]
[os.unlink(x) if path.islink(x) else os.rmdir(x) for
x in (path.join(pathname, x) for x in dirs)]
for node, mode, dmode, major, minor in [
3 years ago
("console", 0o600, stat.S_IFCHR, 5, 1),
("tty1", 0o600, stat.S_IFCHR, 4, 1),
("null", 0o666, stat.S_IFCHR, 1, 3),
("zero", 0o666, stat.S_IFCHR, 1, 5)]:
nodePath = path.join(devPath, node)
os.mknod(nodePath, mode | dmode, os.makedev(major, minor))
os.chmod(nodePath, mode)
return True
def check_build_run(self):
"""
Проверить повторный запуск
"""
build_id = self.clVars.Get('cl_builder_id')
names = self.Method.All
pid = os.getpid()
filter_func = lambda x: ('id' in x and x['id'] == build_id
and x['name'] in names
and x['os_pid'] != pid)
dv = self.clVars
if any(search_worked_process2(dv, filter_func=filter_func)):
raise BuilderError(_("Builder action for %s is already running. "
9 years ago
"Try to run later.") % build_id)
return True
def check_chroot_run(self):
"""
Проверить наличие chroot процессов
:return:
"""
builder_path = self.clVars.Get('cl_builder_path')
programs = getRunCommands(chroot=builder_path, withpid=True)
if programs:
pid, cmd = programs[0]
raise BuilderError(
_("Chrooted {cmd} has already run into {id}").format(
cmd="{}[{}]".format(cmd.split('\x00')[0], pid),
9 years ago
id=self.clVars.Get('cl_builder_id')
))
return True
def update_menu(self, dn):
with IsoDistributive(dn) as iso:
d = iso.getDirectory()
root_path = path.relpath(dn, d)
self.clVars.Set('cl_builder_iso_path', dn, force=True)
self.clVars.Set('cl_builder_target', iso, force=True)
self.clVars.Set('cl_builder_path', d, force=True)
self.clVars.getInfo('cl_builder_videodrv_set').autodetect = True
self.clVars.Invalidate('cl_builder_videodrv_set', force=True)
from calculate.lib.cl_template import templateFunction
9 years ago
templateFunction.installProg = {}
templateFunction.installCategory = []
self.applyTemplates(d, False, False, root_path)
process("sync").success()
return True
def remove_flash_tmp(self):
try:
image = self.clVars.Get('cl_builder_image')
if image:
image.close()
except DistributiveError:
pass
flashTmp = path.join(self.clVars.Get('cl_builder_flash_path'), "tmp")
if path.exists(flashTmp) and not listDirectory(flashTmp):
try:
os.rmdir(flashTmp)
9 years ago
except (OSError, IOError) as e:
self.printWARNING(str(e))
return True
def remount_rw(self, dn):
"""
Перемонтировать каталог для чтения/записи (используется для flash)
:param dn: каталог
:return:
"""
if not dn:
raise BuilderError(_("Failed to remount Flash drive"))
p = process('/bin/mount', '-o', 'remount,rw', dn, stderr=STDOUT)
if p.failed():
self.printERROR(p.read().strip())
return False
if not check_rw(dn):
raise BuilderError("Selected device is read-only")
return True
def sync_vmlinuz(self, flash_dn):
"""
Извлечение ядер из iso образов
:return:
"""
boot_dn = path.join(flash_dn, "boot")
for fn in listDirectory(boot_dn, fullPath=True):
if fn not in ("vmlinuz", "initrd"):
try:
if not path.isdir(fn):
os.unlink(fn)
except (IOError, OSError) as e:
self.printERROR(str(e))
raise BuilderError(
_("Failed to clean /boot directory on Flash drive"))
for data in self.clVars.Get('cl_builder_image_data'):
isofn = data[2]
vmlinuz_orig = data[3]
vmlinuz = data[4]
initrd_orig = data[5]
initrd = data[6]
with IsoDistributive(isofn) as iso:
dn = iso.getDirectory()
vmlinuz_orig = path.join(dn, "boot", vmlinuz_orig)
initrd_orig = path.join(dn, "boot", initrd_orig)
vmlinuz = path.join(boot_dn, vmlinuz)
initrd = path.join(boot_dn, initrd)
try:
with open(vmlinuz_orig, 'rb') as r_fd:
with open(vmlinuz, 'wb') as w_fd:
w_fd.write(r_fd.read())
with open(initrd_orig, 'rb') as r_fd:
with open(initrd, 'wb') as w_fd:
w_fd.write(r_fd.read())
except (IOError, OSError) as e:
self.printERROR(str(e))
raise BuilderError(_("Failed to extract kernel from %s")
% isofn)
return True
def iso_migrate(self, flash_path):
"""
Миграция образов из flash:/iso в flash:/linux
:param flash_path:
:return:
"""
try:
old_path = path.join(flash_path, "iso")
new_path = path.join(flash_path, "linux")
if path.exists(old_path):
if listDirectory(old_path):
if path.exists(new_path):
for fn in listDirectory(old_path):
9 years ago
old_fn = path.join(old_path, fn)
new_fn = path.join(new_path, fn)
if path.exists(new_fn):
os.unlink(new_fn)
os.rename(old_fn, new_fn)
else:
os.rename(old_path, new_path)
else:
os.rmdir(old_path)
except OSError as e:
self.printWARNING(
_("Failed to move the ISO images directory on the Flash"))
self.printWARNING(str(e))
return True
    def _get_default_params(self, drvs):
        """
        Build the `calculate=` boot parameter string for a LiveHDD system
        (locale, timezone, resolution, video, composite, audio, clock).

        :param drvs: whether proprietary video drivers are bundled
        :return: comma-joined parameters, with " docache" appended when
            the docache option is enabled
        """
        proprietary = ["nvidia", "fglrx"]
        video = self.clVars.Get('cl_builder_x11_video_drv')
        audio = self.clVars.Get('cl_builder_audio')
        locale = self.clVars.Get('cl_builder_locale_lang')
        timezone = self.clVars.Get('cl_builder_timezone')
        clock = self.clVars.Get('install.os_install_clock_type')
        params = ["%s:%s" % (CmdlineParams.Locale, locale),
                  "%s:%s" % (CmdlineParams.Timezone, timezone)]
        resolution = self.clVars.Get('cl_builder_x11_resolution')
        if resolution and resolution != "auto":
            params.append("%s:%s" % (CmdlineParams.Resolution, resolution))
        # pass the video driver only when it is free or the proprietary
        # drivers are actually bundled on the image
        if video != "default" and (
                video not in proprietary or Variable.isTrue(drvs)):
            params.append("%s:%s" % (CmdlineParams.Video, video))
        current_video = self.clVars.Get('os_x11_video_drv')
        # composite is only meaningful for the driver currently in use
        if current_video == video and video != "default":
            composite = self.clVars.Get('cl_builder_x11_composite')
            params.append("%s:%s" % (CmdlineParams.Composite, composite))
        if audio == "alsa":
            params.append("audio:alsa")
        if clock:
            params.append("clock:%s"%clock)
        calculate_param = ",".join(params)
        if self.clVars.GetBool('cl_builder_docache_set'):
            return "%s docache" % calculate_param
        else:
            return calculate_param
    def create_iso_grub_cfg(self, dn):
        """
        Write grub.cfg entries (loopback ISO boot) for every configured
        image into directory *dn*; when no entries result, any stale
        grub.cfg is removed instead.

        :param dn: target directory for grub.cfg
        :return: True
        :raises BuilderError: when grub.cfg cannot be written
        """
        gc = GrubCommand()
        content = []
        for label, iso, splash, drvs in self.clVars.ZipVars(
                'cl_builder_image_label',
                'cl_builder_image_iso',
                'cl_builder_image_splash',
                'cl_builder_image_drivers'):
            default_params = self._get_default_params(drvs)
            entry = (
                "menuentry '%(label)s' {\n"
                "\tset isofile=%(iso)s\n"
                "\tloopback loop $isofile\n"
                "\tlinux (loop)/boot/vmlinuz root=live "
                "iso-scan/filename=$isofile quiet %(splash)s "
                "calculate=%(defs)s\n"
                "\tinitrd (loop)/boot/initrd\n}\n\n" % {
                    'label': label,
                    'iso': gc.get_relpath(iso),
                    'splash': templateFunction.splash_cmd(splash),
                    'defs': default_params
                })
            content.append(entry)
        fn_grubcfg = path.join(dn, 'grub.cfg')
        try:
            write_content = ("\n".join(content)).strip() + "\n"
            if write_content.strip():
                with writeFile(fn_grubcfg) as f:
                    f.write(write_content)
            else:
                # no menu entries: remove any stale grub.cfg instead
                self.clear_iso_grub_cfg(dn)
        except IOError:
            raise BuilderError(_("Failed to write %s") % fn_grubcfg)
        return True
def clear_iso_grub_cfg(self, dn):
if dn:
cfg_fn = path.join(dn, 'grub.cfg')
if path.exists(cfg_fn):
try:
os.unlink(cfg_fn)
except OSError:
raise BuilderError(_("Failed to remove %s") % cfg_fn)
return True
    def setup_package(self, package):
        """
        Update the system configuration files for *package* by applying
        templates in 'merge' mode.

        :param package: package atom being configured
        :return: True
        """
        clVars = DataVars()
        try:
            clVars.importData()
            clVars.flIniFile()
            clVars.Set("cl_root_path", "/", True)
            clVars.Set("cl_merge_pkg", [package], True)
            clVars.Set("cl_action", 'merge', True)
            # carry the relevant settings over from the builder variables
            for copyvar in ("cl_dispatch_conf", "cl_verbose_set",
                            "cl_template_path_use",
                            "builder.cl_livemenu_path",
                            "builder.cl_builder_livemenu_path"):
                clVars.Set(copyvar, self.clVars.Get(copyvar), force=True)
            useClt = self.clVars.Get('cl_template_clt_set') in (True, "on")
            # use the template object with clt templates and the clt
            # filter, without postDispatchConf
            clTempl = ChainProgressTemplate(
                self.startTask,
                self.endTask,
                self.setProgress,
                clVars, cltObj=useClt,
                cltFilter=True,
                printSUCCESS=self.printSUCCESS,
                printERROR=self.printERROR,
                printWARNING=self.printWARNING,
                askConfirm=self.askConfirm,
                dispatchConf=self.dispatchConf,
                printWarning=False)
            clTempl.applyTemplates()
        finally:
            clVars.close()
        self.endTask()
        return True
    def pretend_emerge_list(self, builder_path, *packages):
        """
        Yield the packages that an emerge of *packages* plus @system
        would install (pretend mode, -pKve).

        :param builder_path: chroot path
        :param packages: exact package versions (each prefixed with '=')
        """
        deo = self.get_default_emerge_opts()
        logfile = self._get_log_file()
        # when the base binhost is used, disable fetching of binaries
        env_update = {'PKGDIR': self.clVars.Get('cl_builder_pkgdir_full')}
        if self.clVars.GetBool('cl_builder_binhost_base_set'):
            env_update["FEATURES"] = "-getbinpkg"
            deo += " --with-bdeps=n"
        with EmergeParser(self.chrootize(builder_path, EmergeCommand(
                ["=%s" % x for x in packages] + ["@system"],
                emerge_default_opts=deo,
                env_update=env_update,
                extra_params=["-pKve"], logfile=logfile))) as emerge:
            try:
                # refuse any interactive emerge question
                emerge.question.action = lambda x: False
                emerge.run()
                for pkg in emerge.install_packages.list:
                    yield pkg
            except EmergeError:
                # report and yield nothing further
                self._display_error(emerge.prepare_error)
def check_vardbpkg(self, builder_path):
"""
Проверка на то, что при проверке на автомагические зависимости
/var/db/pkg корректно восстановлен
:param builder_path:
:return:
"""
vdb_path = "var/db/.pkg"
real_vdb_path = path.join(builder_path, VDB_PATH)
hide_vdb_path = path.join(builder_path, vdb_path)
real_vdb_path_with_hide = path.join(builder_path, VDB_PATH, ".pkg")
if path.exists(hide_vdb_path):
raise BuilderError("Wrong build state: /var/db/.pkg found")
if path.exists(real_vdb_path_with_hide):
raise BuilderError("Wrong build state: /var/db/pkg/.pkg found")
if not path.exists(real_vdb_path):
raise BuilderError(_("Wrong build state: /var/db/pkg not found"))
return True
    def check_automagic(self, builder_path):
        """
        Detect undeclared "automagic" dependencies.

        Hides /var/db/pkg, then for each freshly built package compares
        the packages providing the libraries it links against with the
        set a pretend emerge would install; leftovers are collected as
        automagic dependencies and later reported/rebuilt.

        :param builder_path: chroot path
        :return: True
        :raises BuilderError: when the package database cannot be hidden
            or restored
        """
        task = EmergeLogFiltered(EmergeLogNamedTask(EmergeMark.Automagic),
                                 prefix=builder_path)
        task.emerge_type = EmergeLogFiltered.EmergeType.Source
        cache_list = ("/var/calculate/tmp/%s.checkdep" %
                      self.clVars.Get("cl_builder_id_path"))
        task_list = list(chain(*[list(getInstalledAtom(x, prefix=builder_path))
                                 for x in readLinesFile(cache_list,
                                                        grab=True)]))
        task_list = list(task.list) + task_list
        vdb_path = "var/db/.pkg"
        real_vdb_path = path.join(builder_path, VDB_PATH)
        hide_vdb_path = path.join(builder_path, vdb_path)
        try:
            shutil.move(real_vdb_path, hide_vdb_path)
        except (IOError, OSError):
            raise BuilderError(_("Failed to hide package database"))
        automagic = OrderedDict()
        automagic_waste = {}
        automagic_clear = {}
        try:
            system_ini = SystemIni(self.clVars.Get('cl_builder_linux_datavars'))
            lp = LibraryProviders(vdb_path=vdb_path, prefix=builder_path)
            def get_check_data():
                # map each built package to the provider sets of the
                # libraries it links against
                for pkg in PackageList(task_list):
                    def get_all_reqs(pkg):
                        for arch, lib in getRequires(pkg, vdb_path=vdb_path,
                                                     prefix=builder_path):
                            if arch in lp and lib in lp[arch]:
                                yield tuple(lp[arch][lib])
                    yield pkg, list(set(get_all_reqs(pkg)))
            check_data = {x: y for x, y in get_check_data()
                          if x["CATEGORY"] != "virtual"}
            if self.clVars.GetBool('cl_builder_binhost_base_set'):
                pkgdir = self.clVars.Get('cl_builder_pkgdir_full')
                # remove the Packages file so it gets regenerated on the
                # next emerge call
                index_fn = path.join(pkgdir, "Packages")
                if path.exists(index_fn):
                    os.unlink(index_fn)
            for i, data in enumerate(sorted(check_data.items(),
                                            key=lambda x:x[0])):
                package, required_pkgs = data
                self.startTask(_("Check ({cur} of {maxval}) {package}").format(
                    cur=i + 1, maxval=len(check_data), package=package))
                if not required_pkgs:
                    self.endTask(True)
                    continue
                pretend = list(self.pretend_emerge_list(builder_path, package))
                if not pretend:
                    self.endTask(False)
                    self.printERROR(_("Failed to receive the package list"))
                else:
                    # keep only provider sets that pretend emerge would
                    # NOT install — these are the automagic dependencies
                    required_pkgs = [x for x in required_pkgs
                                     if all(y not in pretend for y in x)]
                    required_pkgs = list(set(chain(*required_pkgs)))
                    # dependencies explicitly marked for clearing in the
                    # system ini ("automagic-clear")
                    clear_req_pkgs = [x for x
                                      in [x.strip() for x
                                          in system_ini.getVar("automagic-clear",
                                              package["CATEGORY/PN"]).split(",")]
                                      if x]
                    # declared-for-clearing entries no longer required
                    waste_pkgs = [x for x in clear_req_pkgs if
                                  all(y["CATEGORY/PN"] != x
                                      for y in required_pkgs)]
                    if waste_pkgs:
                        automagic_waste[package] = waste_pkgs
                    automagic[package] = []
                    automagic_clear[package] = []
                    for pkg in required_pkgs:
                        if any(x == pkg["CATEGORY/PN"] for x in clear_req_pkgs):
                            automagic_clear[package].append(pkg)
                        else:
                            automagic[package].append(pkg)
                    self.endTask(True)
        finally:
            try:
                if path.exists(real_vdb_path):
                    os.rmdir(real_vdb_path)
                shutil.move(hide_vdb_path, real_vdb_path)
            except (IOError, OSError):
                raise BuilderError(_("Failed to unhide package database"))
        self._report_automagic(automagic, automagic_clear, automagic_waste)
        self._rebuild_automagic(automagic, automagic_clear, builder_path)
        return True
def _report_automagic(self, automagic, automagic_clear, automagic_waste):
"""
Сообщить информацию о вычисленных автоматических зависимостей
:param automagic:
:param automagic_clear:
:param automagic_waste:
:return:
"""
for pkg, reqs in automagic.items():
if reqs:
self.printWARNING(
_("Auto depends for package {pkg} from {pkgs} "
"will be used").format(
pkg=pkg, pkgs=",".join(str(x) for x in reqs)))
clear_req_pkgs = automagic_clear.get(pkg, [])
if clear_req_pkgs:
self.printWARNING(
_("Auto depends for package {pkg} from {pkgs} "
"will be cleared").format(
pkg=pkg, pkgs=",".join(str(x) for x in clear_req_pkgs)))
waste_pkgs = automagic_waste.get(pkg, [])
if waste_pkgs:
self.printWARNING(
_("Specified depends for package {pkg} from "
"{pkgs} are obsolete").format(
pkg=pkg, pkgs=",".join(waste_pkgs)))
def _rebuild_automagic(self, automagic, automagic_clear, builder_path):
"""
Пересобрать пакеты с автоматическими зависимостями
:param automagic:
:param automagic_clear:
:param builder_path:
:return:
"""
automagic_log_dn = "/var/log/calculate/automagic"
makeDirectory(automagic_log_dn)
self.pkgnum = 0
self.pkgnummax = sum(1 for k, v in automagic.items()
if v or automagic_clear[k])
work_dn = '/var/calculate/tmp/xpak-%s' % self.clVars.Get(
'cl_builder_id_path')
try:
for pkg, reqs in automagic.items():
clear_req_pkgs = automagic_clear.get(pkg, [])
if not reqs and not clear_req_pkgs:
continue
self.pkgnum += 1
if clear_req_pkgs:
try:
hide_packages(*clear_req_pkgs, prefix=builder_path)
# собрать пакет из исходников
env_update = {
'PKGDIR': self.clVars.Get('cl_builder_pkgdir_full'),
'FEATURES': "-getbinpkg"
}
self._emerge(builder_path, ["=%s" % pkg], ["-O"],
env_update=env_update)
except EmergeError:
old_logfile = self._get_log_file()
pkg_path = str(pkg).replace("/", "_")
new_logfile = '%s/%s-%s.log' % (
automagic_log_dn,
self.clVars.Get('cl_builder_id_path'), pkg_path)
try:
os.rename(old_logfile, new_logfile)
except (OSError, IOError) as e:
self.printERROR("{message}: {error}".format(
message=_("Failed to save build log"),
error=str(e)))
self.printERROR(
_("Failed to merge {package} without "
"{hidden_pkgs}").format(
package=str(pkg),
hidden_pkgs=",".join(str(x) for x in reqs)))
raise
except PackageError as e:
raise BuilderError(str(e))
finally:
try:
unhide_packages(prefix=builder_path, force=True)
except PackageError as e:
raise BuilderError(str(e))
if reqs:
self.printWARNING(
_("({num} of {nummax}) Inject dependences for "
"{package} package").format(num=self.pkgnum,
7 years ago
nummax=self.pkgnummax,
package=str(pkg)))
pkg_fn = get_binary_file(
pkg, self.clVars.Get('cl_builder_pkgdir_full'))
bp = BinaryPackage(pkg_fn, work_dn)
try:
bp["RDEPEND"] = "{oldpkgs} {newpkgs}".format(
oldpkgs=bp["RDEPEND"],
newpkgs=" ".join("%s:%s" % (
x["CATEGORY/PN"], x["SLOT"]) for x in reqs))
bp["autodeps"] = "\n".join("%s:%s" % (
x["CATEGORY/PN"], x["SLOT"]) for x in reqs)
bp.save()
finally:
bp.clear()
finally:
self.pkgnummax = None
self.pkgnum = None
return True
def update_dracut(self, builder_path):
"""
Обновить initramfs
:param builder_path:
:return:
"""
cmd = "/usr/bin/dracut"
cmd_path = self.get_prog_path(cmd)
logfile = self._get_log_file()
if not cmd_path:
raise BuilderError(_("Failed to find the %s command") % cmd)
kver = self.clVars.Get("builder.cl_builder_kernel_ver")
dracut = self.chrootize(
builder_path,
CommandExecutor(cmd_path, ["--xz", "-f", "--kver", kver],
logfile=logfile))
dracut.execute()
return dracut.success()
def set_base_binhost(self, binhost):
"""
Использовать базовый бинарный хост
:param binhost:
:return:
"""
self.clVars.Write('cl_update_binhost', binhost)
self.clVars.Set('cl_update_package_cache_set', 'on')
self.clVars.Set('cl_update_binhost_list', [binhost], force=True)
self.clVars.Set('cl_update_binhost_unstable_list', [binhost], force=True)
self.base = True
return True
    def create_binhost_data(self):
        """
        Create the Binhosts helper used to query binary hosts, picking
        the stable or unstable host list depending on configuration.

        :return: True
        """
        dv = self.clVars
        last_ts = dv.Get('update.cl_update_last_timestamp')
        if dv.GetBool('update.cl_update_binhost_stable_opt_set'):
            binhost_list = dv.Get('update.cl_update_binhost_list')
        else:
            binhost_list = dv.Get('update.cl_update_binhost_unstable_list')
        self.binhosts_data = Binhosts(
            # the value matters little, so it is taken from the build host
            dv.GetInteger('update.cl_update_binhost_timeout'),
            dv.Get('update.cl_update_binhost_revision_path'),
            dv.Get('update.cl_update_binhost_timestamp_path'),
            last_ts, binhost_list,
            self.get_arch_machine(),
            gpg=dv.Get('update.cl_update_gpg'),
            base=self.base)
        return True
def index_pkgdir(self, pkgdir, trunkdir, stabledir):
    """
    Index pkgdir when a base binhost is used, so that dependency
    calculation works on a consistent package tree.

    :param pkgdir: binary package directory
    :param trunkdir: trunk copy used by the directory RCS
    :param stabledir: stable copy used by the directory RCS
    :return: True
    :raises BuilderError: when cleanup or any RCS operation fails
    """
    try:
        clear_binhost_garbage(pkgdir)
    except OSError as error:
        raise BuilderError(
            _("Failed to clear binary directory: %s") % str(error))
    rcs = DirectoryRCS(pkgdir, trunkdir, stabledir)
    needs_fixing = rcs.not_prepared()
    try:
        if needs_fixing:
            # bring an unprepared tree up to date first
            rcs.fixing()
        elif rcs.is_worked():
            rcs.indexing()
    except RCSError as error:
        if needs_fixing:
            message = _("Failed to update trunk binaries: %s")
        else:
            message = _("Failed to prepare stable binaries: %s")
        raise BuilderError(message % str(error))
    return True
def fix_pkgdir(self, pkgdir, trunkdir, stabledir):
    """
    Commit (merge) the freshly built binary packages into the tree.

    :param pkgdir: binary package directory
    :param trunkdir: trunk copy used by the directory RCS
    :param stabledir: stable copy used by the directory RCS
    :return: True
    :raises BuilderError: when cleanup or the merge fails
    """
    try:
        clear_binhost_garbage(pkgdir)
    except OSError as error:
        raise BuilderError(
            _("Failed to clear binary directory: %s") % str(error))
    try:
        # construction stays inside the try so RCS errors raised while
        # setting up the directory RCS are reported the same way
        DirectoryRCS(pkgdir, trunkdir, stabledir).fixing()
    except RCSError as error:
        raise BuilderError(
            _("Failed to merge prepared binaries: %s") % str(error))
    return True
@variable_module("builder")
def update_binhost_list(self):
    """
    Refresh the binhost list using the linux datavars of the build,
    re-reading them first.

    :return: result of the parent implementation
    """
    self.invalidateVariables('cl_builder_linux_datavars')
    self.prepare_update_vars()
    linux_datavars = self.clVars.Get('cl_builder_linux_datavars')
    return super().update_binhost_list(linux_datavars)
def is_update_action(self, action):
    """Return True when *action* is the update action."""
    return Actions.Update == action
def remove_machine_id(self, builder_path):
    """
    Best-effort removal of etc/machine-id inside the build root.

    :param builder_path: path to the build chroot
    :return: True if the file was removed, False on any OSError
        (e.g. the file is absent)
    """
    machine_id = path.join(builder_path, 'etc/machine-id')
    try:
        os.unlink(machine_id)
    except OSError:
        # missing or inaccessible file is a non-fatal condition
        return False
    return True
3 years ago
def set_current_saved_tag(self):
    """
    Persist the current numeric tag of the "calculate" repository as
    cl_update_saved_tag in the system configuration.

    Returns True on success (even when no tag qualified for saving);
    False when the lookup raised ValueError.
    """
    # TODO(review): consider moving the repository name into a variable
    repname = "calculate"
    dv = self.clVars.Get('cl_builder_linux_datavars')
    git = self.getGit()
    # NOTE(review): `revision` is fetched but never used below — presumably
    # kept for the Select tuple shape; confirm before removing
    rpath, revision = (
        dv.Select(["cl_update_rep_path",
                   "cl_update_rep_rev"],
                  where="cl_update_rep_name",
                  eq=repname, limit=1))
    try:
        branch_name = self.clVars.Select(["cl_builder_branch_name"],
                                         where="cl_builder_branch_rep",
                                         eq=repname, limit=1)[0]
        tag = git.getCurrentTag(rpath)
        # only save purely numeric tags, and only when the branch name
        # equals the tag reference kind — assumes cl_builder_branch_name
        # may hold a Git.Reference value; TODO confirm
        if branch_name == Git.Reference.Tag and tag.isdigit():
            self.clVars.Write("cl_update_saved_tag", str(tag), location="system")
        return True
    except ValueError as e:
        # Select/unpacking failed — treat as "nothing to save"
        return False