root/build-tools/stx/build-pkgs
STX Builder a90bc3ccd3 debrepack: Handle the KERNEL_TYPE placeholder issue
After reverting the below commit, use this commit to handle
the KERNEL_TYPE issue in debian meta:
commit f810774a7726d9fa0e4d41a3438b30aa2d44e1b6
Author: hbai <haiqing.bai@windriver.com>
Date:   Tue Mar 29 20:25:35 2022 +0800

    build-pkgs: pass build type to debrepack

    Pass build type to debrepack to support to
    build rt packages

Story: 2008846
Task: 45006

Test Plan:
Pass: revert the below commits:
    f810774a7726d9fa0e4d41a3438b30aa2d44e1b6
    35cd03b80aea8b39f20724f18f81c66ad67051af
    64189c22c468ee37f5e9d65ce228c2ed63332c17
Then apply this commit and run 'build all' tests

Change-Id: I0b318fcb75672dbb19bd834b41539072ea627bbb
2022-04-09 12:10:24 +08:00

867 lines
31 KiB
Python
Executable File

#!/usr/bin/python3
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright (C) 2021 Wind River Systems,Inc
import argparse
import debrepack
import debsentry
import dsc_depend
import dsccache
import logging
import os
import repo_manage
import requests
import shutil
import signal
import subprocess
import sys
import time
import utils
import yaml
# Service endpoints and workspace locations injected by the stx tool env.
BUILDER_URL = os.environ.get('BUILDER_URL')
REPOMGR_URL = os.environ.get('REPOMGR_URL')
BUILD_ROOT = os.environ.get('MY_BUILD_PKG_DIR')
STX_ROOT = os.environ.get('MY_REPO_ROOT_DIR')
# Shared volume root inside the 'pkgbuilder' container.
PKGBUILDER_ROOT = "/localdisk/pkgbuilder"
USER = os.environ.get('MYUNAME')
PROJECT = os.environ.get('PROJECT')
# Local aptly repositories: locally built binaries and uploaded sources.
REPO_BUILD = 'deb-local-build'
REPO_SOURCE = 'deb-local-source'
# Listed all stx source layers which contains 'debian_pkg_dirs'
STX_SOURCE_REPOS = [
    'SDO-rv-service',
    'ansible-playbooks',
    'audit-armada-app',
    'cert-manager-armada-app',
    'clients',
    'compile',
    'config',
    'config-files',
    'containers',
    'distributedcloud',
    'distributedcloud-client',
    'fault',
    'gui',
    'ha',
    'helm-charts',
    'integ',
    'kernel',
    'metal',
    'metrics-server-armada-app',
    'monitor-armada-app',
    'monitoring',
    'nfv',
    'nginx-ingress-controller-armada-app',
    'oidc-auth-armada-app',
    'openstack-armada-app',
    'platform-armada-app',
    'portieris-armada-app',
    'ptp-notification-armada-app',
    'rook-ceph',
    'snmp-armada-app',
    'stx-puppet',
    'update',
    'upstream',
    'utilities',
    'vault-armada-app',
]
# Valid values for the -l/--layers option.
STX_LAYERS = ['distro', 'flock']
# Module-wide logger, configured by the project's utils helper.
logger = logging.getLogger('debcontroller')
utils.set_logger(logger)
def get_pkgname_with_dsc(dscs, dsc_path):
    """Return the package whose dsc entry is contained in dsc_path.

    dscs maps package name -> dsc file name; returns None when no
    entry matches.
    """
    return next(
        (name for name, dsc in dscs.items() if dsc.strip() in dsc_path),
        None,
    )
def get_pkgname_ver_with_deb(deb_name):
    """Split a Debian binary file name into (package, version).

    Returns None for names that do not end in '.deb' or do not follow
    the 'name_version_arch.deb' underscore layout.
    """
    if deb_name.endswith('.deb'):
        parts = deb_name.split('_')
        if len(parts) >= 2:
            return parts[0], parts[1]
    return None
def req_chroots_action(action, extra_params):
    """
    Base function called by each require on chroot with Restful API

    Param:
        action: one of 'addchroot', 'loadchroot', 'savechroot'
        extra_params: optional dict merged into the request payload
    Return:
        'success'  - chroot ready (newly created or already exists)
        'creating' - builder is still setting the chroot up
        'fail'     - request failed or builder reported an error
    """
    req_params = {}
    req_params['project'] = PROJECT
    req_params['user'] = USER
    if extra_params:
        req_params.update(extra_params)
    try:
        resp = requests.get(BUILDER_URL + action, data=req_params)
        resp.raise_for_status()
    except requests.RequestException as e:
        print(e)
    else:
        logger.debug(resp.text)
        # The builder reports its state in the response body; both a
        # fresh 'success' and an already-'exists' chroot are usable.
        if 'success' in resp.text:
            return 'success'
        if 'exists' in resp.text:
            return 'success'
        if 'creating' in resp.text:
            return 'creating'
    return 'fail'
def show_task_log(log_file, wait_time, success_str, exit_str):
    """
    Display the log file on the current console

    Param:
        log_file: path of the log file to follow
        wait_time: seconds to wait before the log file can be read
        success_str: substring marking a successful finish (may be None)
        exit_str: substring marking a failed finish (may be None)
    Return:
        'success' or 'fail'
    """
    status = 'fail'
    time.sleep(wait_time)
    logger.debug(' '.join(['Waiting for log file', log_file]))
    # Poll up to ~8 seconds for the remote task to create the log file.
    timeout = 8
    time_counter = 0
    while not os.path.exists(log_file):
        time.sleep(1)
        time_counter += 1
        if time_counter > timeout:
            break
    if os.path.exists(log_file):
        p = subprocess.Popen("tail -f " + log_file, stdout=subprocess.PIPE,
                             shell=True, universal_newlines=True, bufsize=0)
        try:
            while p.poll() is None:
                line = p.stdout.readline()
                line = line.strip()
                if line:
                    print(line)
                    if success_str and success_str in line:
                        status = 'success'
                        break
                    if exit_str and exit_str in line:
                        logger.error(' '.join(['Task failed. For details please',
                                               'consult log', log_file]))
                        status = 'fail'
                        break
        finally:
            # Bug fix: 'tail -f' never exits on its own, so breaking out of
            # the loop used to leak one child process per call. Reap it here.
            if p.poll() is None:
                p.terminate()
                try:
                    p.wait(timeout=5)
                except subprocess.TimeoutExpired:
                    p.kill()
                    p.wait()
    return status
def bc_safe_fetch(dst_file, entry_handler=None):
    """Read the non-empty, non-comment lines of dst_file.

    Each surviving line is passed through entry_handler when one is
    supplied; read errors are logged and yield an empty result.
    """
    entries = []
    try:
        with open(dst_file, 'r') as flist:
            lines = [stripped for stripped in
                     (raw.strip() for raw in flist) if stripped]
    except IOError as e:
        logger.error(str(e))
    except Exception as e:
        logger.error(str(e))
    else:
        for entry in lines:
            # '#' starts a comment line in every stx list file.
            if entry.startswith('#'):
                continue
            entries.append(entry_handler(entry) if entry_handler else entry)
    return entries
def pkgdirs_entry_handler(entry):
    """Reduce a debian_pkg_dirs entry to its package directory name."""
    # NOTE: the falsy branch preserves the original's empty-list return,
    # although callers only ever pass non-empty entries.
    return os.path.basename(entry) if entry else []
def get_pkgs_of_layer(layer):
    """
    Scan all STX source layers to get all buildable packages of layer

    debian_build_layer.cfg defines whether the STX source layer belongs
    to 'distro', 'flock' or another layer.
    Params:
        layer: layer name to match
    Return:
        List of all STX buildable packages of the layer
    """
    pkgs = []
    stx_src_root = os.path.join(os.environ.get('MY_REPO_ROOT_DIR'),
                                'cgcs-root/stx')
    for root, _dirs, files in os.walk(stx_src_root):
        if 'debian_build_layer.cfg' not in files:
            continue
        layer_file = os.path.join(root, 'debian_build_layer.cfg')
        if layer in bc_safe_fetch(layer_file, None):
            # The current STX src layer belongs to 'layer'
            pkgs_f = os.path.join(root, 'debian_pkg_dirs')
            logger.debug(' '.join(['Pkgdirs', pkgs_f, 'for layer', layer]))
            pkgs.extend(bc_safe_fetch(pkgs_f, pkgdirs_entry_handler))
    return pkgs
def get_all_packages():
    """
    Scan all STX source layers to get all buildable packages

    Return:
        Deduplicated list of all STX buildable packages
    """
    pkgs = set()
    projects_root = os.environ.get('MY_REPO')
    for root, _dirs, files in os.walk(projects_root):
        if 'debian_pkg_dirs' in files:
            pkgs_file = os.path.join(root, 'debian_pkg_dirs')
            # The set removes packages listed by more than one repo.
            pkgs.update(bc_safe_fetch(pkgs_file, pkgdirs_entry_handler))
    return list(pkgs)
def fetch_debian_folder(package):
    """Locate the debian meta directory of a package.

    Walks each source layer's debian_pkg_dirs index and returns the
    absolute path of the first entry whose basename equals package,
    or None when no layer provides it.
    """
    for layer in STX_SOURCE_REPOS:
        pkg_dir_file = os.path.join(STX_ROOT, 'cgcs-root/stx', layer,
                                    'debian_pkg_dirs')
        if not os.path.exists(pkg_dir_file):
            logger.warning('debian_pkg_dirs does not exist for layer %s, please check', layer)
            continue
        logger.debug(' '.join(['Fetching debian meta in', pkg_dir_file]))
        try:
            with open(pkg_dir_file, 'r') as fdir:
                entries = [d.strip() for d in fdir if d.strip()]
        except IOError as e:
            logger.error(str(e))
        except Exception as e:
            logger.error(str(e))
        else:
            for entry in entries:
                # Comment lines are allowed in the index files.
                if entry.startswith('#'):
                    continue
                if os.path.basename(entry) == package:
                    logger.debug(' '.join(['Meta of', package, 'in', entry]))
                    return os.path.join(STX_ROOT, 'cgcs-root/stx', layer,
                                        entry)
    return None
def get_package_jobs(package):
    '''
    Returns the number of parallel jobs of the package

    If the serial build is not enabled by the meta file,
    the default number of jobs is equal to the value of
    environment variable MAX_CPUS.
    '''
    jobs = os.environ.get('MAX_CPUS', 1)
    meta_dir = fetch_debian_folder(package)
    if meta_dir:
        meta_yaml = os.path.join(meta_dir, 'debian/meta_data.yaml')
        try:
            with open(meta_yaml) as meta_file:
                meta_doc = yaml.safe_load(meta_file)
        except Exception as e:
            logger.error(str(e))
        else:
            # 'serial: true' disables the parallel build; a missing key or
            # 'serial: false' keeps the MAX_CPUS default.
            if meta_doc.get('serial'):
                jobs = 1
    logger.debug('Requires the number of jobs %s for %s', jobs, package)
    return jobs
class BuildController():
    """
    builderClient helps to create or refresh the debian build recipes
    (.dsc, *.tar) based on the stx source, then it offloads the build
    task to the container 'pkgbuilder' with customer's build options
    The build log will be displayed on console until getting the result
    'Status: success': build ok
    'Status: fail': build fail
    'Status: give-back': try again later
    """
    def __init__(self):
        # Build options; 'avoid' enables build avoidance based on the
        # checksum of the package's debian meta folder.
        self.attrs = {
            'mode': 'private',
            'type': 'std',
            'avoid': True,
            'parallel': False,
            'exit_on_fail': False,
            'run_tests': False
        }
        # Helper components created lazily in start().
        self.kits = {
            'dsc_cache': None,
            'repo_mgr': None,
            'dsc_maker': None
        }
        # Per-run bookkeeping consumed by show_build_stats().
        self.lists = {
            'success': [],
            'fail': [],
            'build-needed': [],
            'uploaded': []
        }
        # package name -> checksum of its debian meta folder
        self.pkgs_digests = {}
        if not self.kits['repo_mgr']:
            rlogger = logging.getLogger('repo_manager')
            utils.set_logger(rlogger)
            self.kits['repo_mgr'] = repo_manage.RepoMgr('aptly', REPOMGR_URL,
                                                        '/tmp', rlogger)
            logger.debug("Successful created repo manager")

    @property
    def build_avoid(self):
        return self.attrs['avoid']

    @build_avoid.setter
    def build_avoid(self, avoid):
        self.attrs['avoid'] = avoid

    def start(self):
        """Prepare caches, repos and the dsc maker; return True on success."""
        if not self.kits['dsc_cache']:
            pkl_file = os.path.join(BUILD_ROOT, self.attrs['type'], 'dsc.pkl')
            self.kits['dsc_cache'] = dsccache.DscCache(logger, pkl_file)
            if not self.kits['dsc_cache']:
                logger.warning(' '.join(['Failed to create dsc cache',
                                         pkl_file]))
        if not self.kits['repo_mgr']:
            logger.critical("Failed to create repo manager")
            return False
        # upload_pkg(repo, None) just makes sure the repo exists.
        self.kits['repo_mgr'].upload_pkg(REPO_BUILD, None)
        self.kits['repo_mgr'].upload_pkg(REPO_SOURCE, None)
        build_dir = os.path.join(BUILD_ROOT, self.attrs['type'])
        os.makedirs(build_dir, exist_ok=True)
        recipes_dir = os.path.join(BUILD_ROOT, 'recipes')
        os.makedirs(recipes_dir, exist_ok=True)
        if not self.kits['dsc_maker']:
            try:
                self.kits['dsc_maker'] = debrepack.Parser(build_dir,
                                                          recipes_dir, 'debug')
            except Exception as e:
                logger.error(str(e))
                logger.error("Failed to create dsc maker")
                return False
            else:
                logger.info("Successfully created dsc maker")
        # load the persistent chroot on shared volume
        req_chroots_action('loadchroot', None)
        return True

    def stop(self):
        """Finish the run and return the exit code from the build stats."""
        return self.show_build_stats()

    def clean(self):
        """
        Clean the build env includes cleaning all these build artifacts under
        <path to>/std or <path to>/rt and empty the local build repo
        """
        # clean build artifacts
        build_dir = os.path.join(BUILD_ROOT, self.attrs['type'])
        if os.path.exists(build_dir):
            logger.debug(' '.join(['Cleaning the build directory', build_dir]))
            try:
                shutil.rmtree(build_dir)
            except Exception as e:
                logger.error(str(e))
                logger.error("Failed to clean of the build directory")
            else:
                logger.info("Finished cleaning of the build directory")
        # clean build repo
        if self.kits['repo_mgr']:
            if not self.kits['repo_mgr'].remove_repo(REPO_BUILD):
                logger.debug(' '.join(['Failed to clean', REPO_BUILD]))
            else:
                logger.debug(' '.join(['Successfully cleaned', REPO_BUILD]))

    def add_chroot(self, mirror):
        """Ask pkgbuilder for a build chroot; returns 'success' or 'fail'."""
        extra_req = {}
        if mirror:
            # Extra required data can be extended here, for example:
            # req_param['mirror'] = "http://ftp.de.debian.org/debian"
            # when 'addchroot'
            extra_req['mirror'] = mirror
        ret = req_chroots_action('addchroot', extra_req)
        if 'success' in ret:
            logger.debug('Chroot exists, ready to build')
            return 'success'
        if 'creating' in ret:
            # The chroot is being built remotely; follow its log until the
            # sbuild success marker shows up, then persist the chroot.
            key_string = "Successfully set up bullseye chroot"
            state = show_task_log(os.path.join(PKGBUILDER_ROOT, USER, PROJECT,
                                               'chroot.log'),
                                  10, key_string, None)
            if 'success' in state:
                req_chroots_action('savechroot', None)
                ret = 'success'
            else:
                logger.error('Failed to add chroot, please consult the log')
                ret = 'fail'
            self.req_kill_task('chroot')
        return ret

    def upload_with_deb(self, package, debs_dir):
        """
        upload the local build debian binaries to repo manager
        Params:
            package: target package name
            debs_dir: the directory to debian binaries
        """
        logger.debug(' '.join(['Remove all old version of debs for', package]))
        debs_clue = os.path.join(os.environ.get('MY_BUILD_PKG_DIR'),
                                 'debs_entry.pkl')
        # Delete the previously uploaded sub-debs of this package first so
        # the repo never holds two versions of the same binary.
        subdebs = debsentry.get_subdebs(debs_clue, package, logger)
        if subdebs:
            for deb in subdebs:
                pkg_item = deb.split('_')
                msg = ''.join(['package ', pkg_item[0], '(', pkg_item[1], ')'])
                logger.info(' '.join(['Searching for binary', msg, 'in repository', REPO_BUILD]))
                if self.kits['repo_mgr'].search_pkg(REPO_BUILD, pkg_item[0], None, True):
                    logger.info('Found binary %s in repository %s', msg, REPO_BUILD)
                    if self.kits['repo_mgr'].delete_pkg(REPO_BUILD, pkg_item[0], 'binary', None):
                        logger.info('Successfully deleted binary %s from repository %s',
                                    msg, REPO_BUILD)
                    else:
                        logger.info('Failed to delete binary %s from repository %s', msg,
                                    REPO_BUILD)
        sdebs = []
        if not os.path.exists(debs_dir):
            # Typo fix: was 'Noneexistent'.
            logger.error(' '.join(['Nonexistent directory', debs_dir]))
            return False
        for root, dirs, files in os.walk(debs_dir):
            if dirs:
                pass
            for r in files:
                if r.endswith('.deb'):
                    deb_file = os.path.join(root, r)
                    if self.kits['repo_mgr'].upload_pkg(REPO_BUILD, deb_file):
                        logger.info(' '.join(['Successfully uploaded',
                                              deb_file, 'to repository', REPO_BUILD]))
                        pkg_item = r.split('_')
                        sdebs.append(''.join([pkg_item[0], '_', pkg_item[1]]))
                        msg = ''.join([pkg_item[0], '_', pkg_item[1],
                                       ' is saved to debs_entry for ',
                                       package])
                        logger.debug(msg)
                    else:
                        logger.error(' '.join(['Failed to upload', deb_file,
                                               'to repository', REPO_BUILD]))
                        return False
        debsentry.set_subdebs(debs_clue, package, sdebs, logger)
        return True

    def upload_with_dsc(self, deb, dsc, repo_name):
        """Replace the source package described by dsc in repo_name."""
        if not os.path.exists(dsc):
            logger.error(' '.join(['Dsc file', dsc, 'does not exist']))
            return False
        dsc_pkg = os.path.basename(dsc).split('_')[0]
        if deb != dsc_pkg:
            logger.warning(''.join(['Package name passed in is ', deb,
                                    ', from dsc is ', dsc_pkg, ' ,did not match.']))
        logger.info(' '.join(['Existing source for', dsc_pkg,
                              'will be deleted from repository', repo_name, 'before new source is uploaded']))
        logger.info("Searching for %s in repository %s", dsc_pkg, repo_name)
        if self.kits['repo_mgr'].search_pkg(repo_name, dsc_pkg, None, False):
            logger.info("Found %s in repository %s, attempting to delete", dsc_pkg, repo_name)
            if not self.kits['repo_mgr'].delete_pkg(repo_name, dsc_pkg, 'source'):
                logger.error("Failed to delete source %s from repository %s", dsc_pkg, repo_name)
                return False
            logger.info("Successfully deleted source %s from repository %s", dsc_pkg, repo_name)
        else:
            logger.info("can't find %s in repository %s", dsc_pkg, repo_name)
        logger.info(' '.join(['Start to upload source', dsc, 'to repository', repo_name]))
        if not self.kits['repo_mgr'].upload_pkg(repo_name, dsc):
            logger.error("Failed to upload source %s to repository %s", dsc, repo_name)
            return False
        logger.info("Successfully uploaded source %s to repository %s", dsc, repo_name)
        return True

    def req_add_task(self, package, dsc_path):
        """Submit one build task to pkgbuilder and follow its build log."""
        status = 'fail'
        dsc = os.path.basename(dsc_path)
        req_params = {}
        req_params['mode'] = self.attrs['mode']
        req_params['type'] = self.attrs['type']
        req_params['project'] = PROJECT
        req_params['user'] = USER
        req_params['name'] = package
        req_params['dsc'] = dsc
        req_params['jobs'] = get_package_jobs(package)
        req_params['run_tests'] = self.attrs['run_tests']
        try:
            resp = requests.get(BUILDER_URL + 'addtask', data=req_params)
            resp.raise_for_status()
        except requests.RequestException as e:
            print(e)
        else:
            logger.debug(resp.text)
            if 'success' in resp.text:
                # sbuild writes <name>_amd64.build under the package dir.
                log = os.path.join(BUILD_ROOT, self.attrs['type'], package,
                                   dsc.replace('.dsc', '_amd64.build'))
                ret = show_task_log(log, 3, 'Status: successful',
                                    'Finished at')
                if 'success' in ret:
                    self.upload_with_deb(package, os.path.join(BUILD_ROOT,
                                         self.attrs['type'], package))
                    self.req_kill_task('sbuild')
                    status = 'success'
        return status

    def req_kill_task(self, owner):
        """Ask pkgbuilder to kill the task owned by 'owner'."""
        req_params = {}
        req_params['owner'] = owner
        req_params['user'] = USER
        req_params['mode'] = self.attrs['mode']
        try:
            resp = requests.get(BUILDER_URL + 'killtask', data=req_params)
            resp.raise_for_status()
        except requests.RequestException as e:
            print(e)
        else:
            logger.debug(resp.text)

    def req_stop_task(self):
        """Ask pkgbuilder to stop all tasks of this user/mode."""
        req_params = {}
        req_params['user'] = USER
        req_params['mode'] = self.attrs['mode']
        try:
            resp = requests.get(BUILDER_URL + 'stoptask', data=req_params)
            resp.raise_for_status()
        except requests.RequestException as e:
            print(e)
        else:
            logger.debug(resp.text)

    def create_dsc(self, package, pkg_meta):
        """
        Call dsc maker(debrepack) to generate the new dsc for package
        Params:
            package: package name
            pkg_meta: path to the package's debian folder
        Return: result list like:
            ['dhcp-2.10.1.tis.dsc' 'dhcp-2.10.tar.xz' 'dhcp-2.10.tar.xz.orig']
        """
        skip_build = False
        # Check whether there are changes on package's debian folder
        new_checksum = self.kits['dsc_maker'].checksum(pkg_meta)
        self.pkgs_digests[package] = new_checksum
        if self.kits['dsc_cache']:
            old_checksum = self.kits['dsc_cache'].get_package_digest(package)
            if old_checksum and old_checksum == new_checksum:
                logger.info(' '.join(['No source meta changes of', package]))
                skip_build = True
        if self.attrs['avoid'] and skip_build:
            # Build avoidance: count the untouched package as a success.
            self.lists['success'].append(package)
            logger.info(' '.join(['Skip build', package, 'again']))
            logger.info(' '.join(['Force to build, please use -c/--clean']))
            return None
        logger.debug(' '.join([pkg_meta, 'is ready to create dsc']))
        pkgdir = os.path.join(BUILD_ROOT, self.attrs['type'], package)
        if os.path.exists(pkgdir):
            try:
                shutil.rmtree(pkgdir)
            except Exception as e:
                logger.error(str(e))
            else:
                logger.debug(' '.join(['Successfully removed old', pkgdir]))
        os.makedirs(pkgdir)
        try:
            src_mirror_dir = os.path.join(os.environ.get('STX_MIRROR'), 'sources')
            dsc_recipes = self.kits['dsc_maker'].package(pkg_meta, src_mirror_dir)
        except Exception as e:
            logger.error(str(e))
            return None
        else:
            if not dsc_recipes:
                logger.error(' '.join(['Failed to create dsc for', package]))
                return None
            logger.debug(' '.join(['Successfully created dsc for', package]))
            return dsc_recipes

    def run_build_loop(self, pkgs_dsc):
        """Build packages in dependency order until done or stopped."""
        build_dir = os.path.join(BUILD_ROOT, self.attrs['type'])
        dsc_list_file = os.path.join(build_dir, 'dsc.lst')
        deps_resolver = dsc_depend.Dsc_build_order(dsc_list_file, logger)
        # At most one package per iteration, so len(pkgs_dsc) rounds suffice.
        for p in range(len(pkgs_dsc)):
            pkgs_can_build = deps_resolver.get_build_able_pkg(1)
            if not pkgs_can_build:
                logger.warning("Depends resolver returns none package")
                return
            for dsc in pkgs_can_build:
                logger.info(' '.join(['Depends resolver tells to build',
                                      os.path.basename(dsc)]))
                package = get_pkgname_with_dsc(pkgs_dsc, dsc)
                status = self.req_add_task(package, dsc)
                if 'success' in status:
                    logger.info(' '.join(['Successfully built',
                                          package]))
                    deps_resolver.pkg_accomplish(dsc)
                    self.lists['success'].append(package)
                    # Persist the meta checksum only after a real success,
                    # so the next run can skip this package.
                    pkg_md5 = self.pkgs_digests[package]
                    self.kits['dsc_cache'].set_package_digest(package, pkg_md5)
                else:
                    logger.info(' '.join(['Failed to build', package, str(p)]))
                    self.lists['fail'].append(package)
                    self.req_stop_task()
                    if self.attrs['exit_on_fail']:
                        return
        logger.info("Build loop done, please check the stats")

    def build_route(self, port, data):
        """Dispatch the build by scope: 'package', 'layer' or 'all'."""
        if port == 'package':
            self.build_packages(data)
        if port == 'layer':
            self.build_layers(data)
        # Bug fix: this used to test the module-level 'build_port' variable
        # instead of the 'port' parameter, which only worked by accident
        # when run as a script and broke any other caller.
        if port == 'all':
            self.build_all()

    def build_all(self):
        """Build every buildable package found in the source tree."""
        packages = get_all_packages()
        if packages:
            total_pkgs = len(packages)
            logger.debug(''.join(['All packages(', str(total_pkgs), '):',
                                  ','.join(packages)]))
            self.build_packages(packages)
        else:
            logger.error('Failed to get all buildable packages')

    def build_layers(self, layers):
        """Build all packages of each valid layer in 'layers'."""
        if not layers:
            logger.error('Failed to get layers')
            return
        # remove duplication
        layers = list(set(layers))
        for layer in layers:
            if layer not in STX_LAYERS:
                logger.error(' '.join([layer, 'is not a valid layer']))
            else:
                logger.info(' '.join(['Start to build all packages in layer',
                                      layer]))
                packages = get_pkgs_of_layer(layer)
                if packages:
                    logger.debug(''.join([layer, ' need packages:',
                                          ','.join(packages)]))
                    self.build_packages(packages)
                else:
                    logger.error(' '.join(['Failed to get packages for layer',
                                           layer]))
                logger.info(' '.join(['Finished building packages in layer',
                                      layer]))
        return

    def build_packages(self, packages):
        """Create dscs for 'packages', upload them and run the build loop."""
        # remove duplication
        packages = list(set(packages))
        fdsc_file = None
        packages_dscs = {}
        self.lists['build-needed'] = packages
        build_dir = os.path.join(BUILD_ROOT, self.attrs['type'])
        os.makedirs(build_dir, exist_ok=True)
        dscs_list_file = os.path.join(build_dir, 'dsc.lst')
        logger.debug(' '.join(['Prepare', dscs_list_file, 'to deps_resolver']))
        fdsc_file = open(dscs_list_file, 'w+')
        fdsc_file.seek(0)
        fdsc_file.truncate()
        # Now check and create the debian meta one by one
        for deb in packages:
            dsc_file = ""
            deb = deb.strip()
            deb_meta_path = fetch_debian_folder(deb)
            if not deb_meta_path:
                logger.error(' '.join(['No debian meta found, skip', deb]))
                continue
            deb_recipes = self.create_dsc(deb, deb_meta_path)
            if deb_recipes:
                dsc_file = os.path.join(build_dir, deb, deb_recipes[0])
                packages_dscs[deb.strip()] = dsc_file
                fdsc_file.write(dsc_file + '\n')
                if self.kits['repo_mgr']:
                    self.upload_with_dsc(deb, dsc_file, REPO_SOURCE)
            else:
                if self.attrs['exit_on_fail']:
                    if fdsc_file:
                        fdsc_file.close()
                    return
        if fdsc_file:
            fdsc_file.close()
        # Start to build
        if packages_dscs:
            self.run_build_loop(packages_dscs)
        else:
            logger.info("No debian dsc files found")

    def show_build_stats(self):
        """
        Since all packages are put into self.lists['build-needed']
        at the begining of building, we know how many
        packages want to build
        """
        ret_val = 0
        logger.info("Total packages needing to be built: %d", len(self.lists['build-needed']))
        success_number = len(self.lists['success'])
        if success_number > 0:
            logger.info("Successfully built: %d", success_number)
            for deb in sorted(self.lists['success']):
                logger.info(deb)
        # failed_pkgs is the universal set of failed packages for various reasons
        failed_pkgs = list(set(self.lists['build-needed']) - set(self.lists['success']))
        failed_number = len(failed_pkgs)
        if failed_number > 0:
            ret_val = 1
            logger.error("Failed to build: %d", failed_number)
            for deb in sorted(failed_pkgs):
                logger.error(deb)
            # self.lists['fail'] is the subset of failed_pkgs
            # particularly refer to those failed packages reported by pkgbuilder
            if len(self.lists['fail']) > 0:
                logger.info("List of failed packages:")
                for deb in sorted(self.lists['fail']):
                    logger.error(deb)
            logger.info("For the failure reason, you can check with:")
            logger.info("\'cat /localdisk/builder.log | grep ERROR\' or")
            logger.info("\'cat ${MY_WORKSPACE}/<std or rt>/<Failed package>/*.build\'")
        return ret_val
def bc_signal_handler(signum, frame):
    """Handle SIGINT/SIGHUP/SIGTERM: stop remote tasks, report stats, exit."""
    if not build_controller:
        sys.exit(1)
    if frame:
        logger.debug(' '.join(['Signal', str(signum), 'got']))
    logger.debug('Request to stop building tasks')
    build_controller.req_stop_task()
    exit_code = build_controller.show_build_stats()
    logger.debug('Exit for user interruption')
    sys.exit(exit_code)
def bc_reg_signal_handler():
    """Route the common termination signals to bc_signal_handler."""
    for sig in (signal.SIGINT, signal.SIGHUP, signal.SIGTERM):
        signal.signal(sig, bc_signal_handler)
if __name__ == "__main__":
    default_layer = 'distro'
    build_port = 'all'
    build_data = None
    parser = argparse.ArgumentParser(description="build-pkgs helper")
    parser.add_argument('-c', '--clean', help="Start a fresh building",
                        action='store_true')
    parser.add_argument('-e', '--exit_on_fail', help="Exit for any fail",
                        action='store_true')
    parser.add_argument('-t', '--test', help="Run package tests during build",
                        action='store_true')
    # set mutually options pair for package build and layer build
    build_group = parser.add_mutually_exclusive_group()
    # Bug fix: '-a' help text was a copy-paste of the '-p' help
    # ("Packages with comma"); it builds everything.
    build_group.add_argument('-a', '--all', help="Build all packages",
                             action='store_true')
    build_group.add_argument('-l', '--layers', help="Layers with comma",
                             type=str)
    build_group.add_argument('-p', '--packages', help="Packages with comma",
                             type=str)
    args = parser.parse_args()
    if args.packages:
        build_port = 'package'
        build_data = args.packages.strip().split(',')
    elif args.layers:
        build_port = 'layer'
        build_data = args.layers.strip().split(',')
    elif args.all:
        build_port = 'all'
        build_data = None
    else:
        logger.error("Please consult: build-pkgs --help")
        sys.exit(1)
    build_controller = BuildController()
    if args.clean:
        build_controller.build_avoid = False
        # A full clean of the local build repo only makes sense when
        # everything is being rebuilt.
        if build_port == 'all':
            build_controller.clean()
    if args.exit_on_fail:
        build_controller.attrs['exit_on_fail'] = True
    if args.test:
        build_controller.attrs['run_tests'] = True
    if not build_controller.start():
        logger.critical("Fail to initialize build controller, exit ......")
        sys.exit(1)
    bc_reg_signal_handler()
    # mirror can be set to add_chroot as the main package repo
    # e.g http://ftp.de.debian.org/debian
    if build_controller.add_chroot(None) != 'success':
        pkgbuilder_log = '/localdisk/pkgbuilder/pkgbuilder.log'
        logger.error(' '.join(['Chroot is not ready, please check',
                               pkgbuilder_log]))
        sys.exit(1)
    build_controller.build_route(build_port, build_data)
    ret_value = build_controller.stop()
    logger.info("Build controller done")
    sys.exit(ret_value)