Добавлено создание streams/v1 мета файлов при создании контейнера

3.5.4.2
parent f511841212
commit 08aa6db522

@@ -21,6 +21,7 @@ import re
import sys
import time
import stat
import json
from calculate.core.server.gen_pid import search_worked_process2
from calculate.core.setup_package import ChainProgressTemplate
from calculate.lib.cl_template import templateFunction, SystemIni
@@ -32,11 +33,10 @@ from calculate.lib.utils.files import (
pathJoin, PercentProgress, getProgPath, process, STDOUT, removeDir,
makeDirectory, writeFile, readLinesFile, chmod, chown, FilePermission,
find, FindFileType, removeFileWithEmptyDirectory, check_rw,
tar_xz_directory,
tar_xz_directory, sha256sum,
copyWithPath, countFiles, listDirectory, getRunCommands, isMount, readFile)
from calculate.lib.utils.git import Git
from calculate.builder.variables.action import Actions
from hashlib import sha256
from calculate.lib.utils.portage import (Layman, EmergeLog, EmergeLogNamedTask,
InstalledPackageInfo, EbuildInfoError,
EbuildInfo, ChrootEix, getRequires,
@@ -197,7 +197,7 @@ class Builder(Update):
def create_digest_container(self, image):
"""
Создать контрольную сумму для файлов контейнера
:param dn:
:param image:
:return:
"""
sumfn = "SHA256SUMS"
@@ -205,14 +205,7 @@ class Builder(Update):
for fn in listDirectory(image.basedirectory, fullPath=True):
if fn.endswith(sumfn):
continue
kb = 1024
hasher = sha256()
with open(fn) as f:
b = True
while b:
b = f.read(512 * kb)
hasher.update(b)
digest = hasher.hexdigest()
digest = sha256sum(fn)
f_sum.write("%s %s\n" % (digest, path.basename(fn)))
return True
@@ -239,6 +232,7 @@ class Builder(Update):
'release': info['os_linux_ver'],
'arch': info['os_arch_machine'],
'variant': "default",
'name': info['os_linux_name'],
'build': info['os_linux_build'],
'fn': path.join("container", path.basename(container_dn)),
'subbuild': subbuild
@@ -251,13 +245,82 @@ class Builder(Update):
ReverseKey(info['subbuild']),
ReverseKey(info['fn']))
data.sort(key=sort_key)
products_data = {}
filesize = lambda x: os.stat(x).st_size
with writeFile(path.join(dn, 'meta/1.0/index-system')) as f:
for k, grps in groupby(data, lambda x: (x['dist'],
x['release'],
x['arch'],
x['variant'])):
f.write("{dist};{release};{arch};"
"{variant};{build};{fn}\n".format(**grps.next()))
info = grps.next()
dist_key = "{dist}:{release}:{arch}:{variant}".format(**info)
try:
f.write("{dist};{release};{arch};"
"{variant};{build};{fn}\n".format(**info))
aliases = ("{dist}/{release}/{variant},"
"{dist}/{release}".format(**info))
root_tar_xz_rel = path.join(info["fn"], "rootfs.tar.xz")
root_tar_xz = path.join(path.dirname(dn), root_tar_xz_rel)
lxd_tar_xz_rel = path.join(info["fn"], "lxd.tar.xz")
lxd_tar_xz = path.join(path.dirname(dn), lxd_tar_xz_rel)
combined_sha256 = sha256sum(root_tar_xz, lxd_tar_xz)
product = {
"aliases": aliases,
"versions": {
info["build"]: {
"items": {
"root.tar.xz": {
"ftype": "root.tar.xz",
"sha256": sha256sum(root_tar_xz),
"size": filesize(root_tar_xz),
"path": root_tar_xz_rel,
},
"lxd.tar.xz": {
"ftype": "lxd.tar.xz",
"size": filesize(lxd_tar_xz),
"sha256": sha256sum(lxd_tar_xz),
"combined_sha256": combined_sha256,
"combined_rootxz_sha256":
combined_sha256,
"path": lxd_tar_xz_rel,
}
}
}
},
"release_title": info["release"],
"release": info["release"],
"os": info["name"],
"arch": info["arch"]
}
products_data[dist_key] = product
except BaseException as e:
if isinstance(e, KeyboardInterrupt):
raise
self.printWARNING(_("Failed to index %s") % dist_key)
self.printWARNING(str(e))
index_data = {
"format": "index:1.0",
"index": {
"images": {
"format": "products:1.0",
"datatype": "image-downloads",
"products": list(products_data.keys()),
"path": "streams/v1/images.json"
}
}
}
with writeFile(path.join(dn, 'streams/v1/index.json')) as f:
json.dump(index_data, f)
images_data = {
"content_id": "images",
"format": "products:1.0",
"datatype": "image-downloads",
"products": products_data,
}
with writeFile(path.join(dn, 'streams/v1/images.json')) as f:
json.dump(images_data, f)
return True
def remove_container_data(self, dn):

Loading…
Cancel
Save