add almalinux build scripts

Trevor Batley 2024-07-05 09:44:42 +10:00
parent 60c8168468
commit 5a55ba4fec
6 changed files with 2179 additions and 0 deletions

build_scripts/build.py Executable file

@@ -0,0 +1,170 @@
#!/usr/bin/env python3
# coding=utf-8
import argparse
import logging
import os
import subprocess
from pathlib import Path
from shutil import rmtree
from typing import (
Optional,
List,
)
PUNGI_RESULTS = 'pungi-results'
logging.basicConfig(level=logging.INFO)
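# A hypothetical invocation (paths and label are illustrative only):
#   ./build.py --env-path /srv/alma9-env \
#       --local-mirror-path /srv/alma-9-x86_64 \
#       --pungi-label Update-9.4 \
#       --result-directory last_compose_dir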
def prepare_koji_env(
env_path: str,
local_mirror: str,
local_repos: List[str],
koji_excluded_packages: List[str],
):
koji_env_path = os.path.join(
env_path,
'koji',
)
logging.info(
'Update koji env in "%s"',
koji_env_path,
)
if os.path.exists(koji_env_path):
rmtree(koji_env_path)
os.makedirs(koji_env_path, exist_ok=True)
command = (
f'pungi-gather-rpms -p {local_mirror} -t {koji_env_path} '
f'-e={" ".join(koji_excluded_packages)}'
)
logging.info(command)
subprocess.check_call(
command,
shell=True,
)
if local_repos:
local_repos_paths = ' '.join(
path for local_repo in local_repos for
path in map(str, Path(local_mirror).glob(local_repo))
)
part_of_command = f'-rd {local_repos_paths}'
else:
part_of_command = f'-rp {local_mirror}'
command = f'pungi-gather-modules {part_of_command} -t {koji_env_path}'
logging.info(command)
subprocess.check_call(
command,
shell=True,
)
def run_build(
env_path: str,
pungi_label: str,
result_directory: Optional[str] = None,
):
logging.info('Run building of distribution')
pungi_config_name = 'pungi-build.conf'
command = f'pungi-koji --config {pungi_config_name} --label {pungi_label}'
if 'Beta-' in pungi_label:
command += ' --test'
else:
command += ' --production'
if result_directory is not None:
pungi_results_dir_full_path = os.path.join(
env_path,
PUNGI_RESULTS,
)
os.makedirs(pungi_results_dir_full_path, exist_ok=True)
result_dir_full_path = os.path.join(
pungi_results_dir_full_path,
result_directory,
)
command += f' --compose-dir {result_dir_full_path}'
else:
command += f' --target-dir {PUNGI_RESULTS} --no-latest-link'
logging.info(command)
subprocess.check_call(
command,
shell=True,
cwd=env_path,
)
def create_parser():
parser = argparse.ArgumentParser()
parser.add_argument(
'--env-path',
action='store',
help='A path to the folder that will be used '
'for building the new distribution',
required=True,
)
parser.add_argument(
'--local-mirror-path',
action='store',
help='A path to the local mirror of repos',
required=True,
)
parser.add_argument(
'--local-repos',
action='store',
nargs='*',
default=[],
type=str,
help='List of the local repos in `--local-mirror-path`'
)
parser.add_argument(
'--pungi-label',
action='store',
help='A label of the distribution build',
required=True,
)
parser.add_argument(
'--result-directory',
action='store',
help='A path to store the result of building',
required=False,
default=None,
)
parser.add_argument(
'--koji-excluded-packages',
required=False,
nargs='*',
type=str,
default=[],
)
return parser
def cli_main():
args = create_parser().parse_args()
os.makedirs(
os.path.join(
args.env_path,
PUNGI_RESULTS,
),
exist_ok=True,
)
prepare_koji_env(
env_path=args.env_path,
local_mirror=args.local_mirror_path,
local_repos=args.local_repos,
koji_excluded_packages=args.koji_excluded_packages,
)
# remove an old compose dir under <env-path>/pungi-results
if args.result_directory is not None:
result_dir = os.path.join(
args.env_path, PUNGI_RESULTS, args.result_directory)
if os.path.exists(result_dir):
rmtree(result_dir)
run_build(
env_path=args.env_path,
pungi_label=args.pungi_label,
result_directory=args.result_directory,
)
if __name__ == '__main__':
cli_main()

build_scripts/cleanup.py Executable file

@@ -0,0 +1,79 @@
#!/usr/bin/env python3
# coding=utf-8
import argparse
import logging
import os
from pathlib import Path
from shutil import rmtree
PUNGI_RESULTS = 'pungi-results'
logging.basicConfig(level=logging.INFO)
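# A hypothetical invocation: keep the two newest builds and never touch the
# compose dir that is still referenced (values are illustrative only):
#   ./cleanup.py --env-path /srv/alma9-env --keep-builds 2 \
#       --excluded-dirs last_compose_dir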
def create_parser():
parser = argparse.ArgumentParser()
parser.add_argument(
'--env-path',
action='store',
help='A path to the folder that will be used '
'for building the new distribution',
required=True,
)
parser.add_argument(
'--keep-builds',
action='store',
help='The number of old builds to keep',
required=True,
type=int,
)
parser.add_argument(
'--excluded-dirs',
help='The list of dirs excluded from deletion',
required=False,
nargs='+',
type=str,
default=[],
)
return parser
def cli_main():
args = create_parser().parse_args()
pungi_results_path = os.path.join(
args.env_path,
PUNGI_RESULTS,
)
dirs_prefixes = [
'latest-',
'minimal_iso',
]
dirs_prefixes.extend(
args.excluded_dirs,
)
old_pungi_results = sorted(
filter(
lambda i: not any(
i.name.startswith(dir_prefix) for dir_prefix in dirs_prefixes
),
filter(
lambda i: i.is_dir(),
Path(pungi_results_path).iterdir()
)
),
key=os.path.getmtime,
)
if args.keep_builds:
old_pungi_results = old_pungi_results[:-args.keep_builds]
for old_pungi_result in old_pungi_results:
logging.info(
'Remove old build by path "%s"',
old_pungi_result,
)
rmtree(old_pungi_result)
if __name__ == '__main__':
cli_main()

build_scripts/jenkins.py Executable file

@@ -0,0 +1,595 @@
#!/usr/bin/env python3
# coding=utf-8
"""
This script is intended to be run from a Jenkins job
"""
import argparse
import logging
import os
import signal
import subprocess
from distutils.util import strtobool
from pathlib import Path
from typing import (
List,
Union,
Optional,
)
import requests
logging.basicConfig(level=logging.INFO)
def signal_handler(signum, frame, process: Optional[subprocess.Popen] = None):
logging.info(
'Processing signal "%s" in frame "%s"',
signal.Signals(signum),
frame,
)
if process is not None and process.poll() is None:
process.send_signal(signum)
class Runner:
def __init__(
self,
working_root_directory: Path,
product_name: str,
distribution_major_version: int,
distribution_minor_version: int,
arch: str,
branch: str,
keep_builds: int,
use_products_repos: bool,
env_files: List[str],
beta_suffix: str,
sigkeys_fingerprints: List[str],
skip_mirroring: bool,
local_repos: List[str],
not_needed_variant: str,
pgp_sign_keyid: str,
git_url: str,
git_project: str,
git_type: str,
git_auth_token: str,
git_auth_username: str,
sign_service_username: str,
sign_service_password: str,
sign_service_endpoint: str,
koji_excluded_packages: List[str],
):
self.sign_service_username = sign_service_username
self.sign_service_password = sign_service_password
self.sign_service_endpoint = sign_service_endpoint
self.git_url = git_url
self.git_project = git_project
self.git_type = git_type
self.git_auth_username = git_auth_username
self.git_auth_token = git_auth_token
self.working_root_directory = working_root_directory
self.product_name = product_name
self.distribution_major_version = distribution_major_version
self.distribution_minor_version = distribution_minor_version
self.arch = arch
self.branch = branch
self.keep_builds = keep_builds
self.compose_dir = 'last_compose_dir'
self.use_products_repos = use_products_repos
self.env_files = env_files
self.beta_suffix = beta_suffix
self.sigkeys_fingerprints = sigkeys_fingerprints
self.skip_mirroring = skip_mirroring
self.local_repos = local_repos
self.not_needed_variant = not_needed_variant
self.pgp_sign_keyid = pgp_sign_keyid
self.repos_folder = working_root_directory.joinpath(
f'alma-{distribution_major_version}-{arch}'
)
self.koji_excluded_packages = koji_excluded_packages
self.build_scripts_path = working_root_directory.joinpath(
'pungi-scripts-public',
'build_scripts',
)
self.env_path = working_root_directory.joinpath(
f'{self.product_name}{self.distribution_major_version}{self.arch}'
)
self.koji_profile_name = (
f'{self.product_name.lower()}_{self.distribution_major_version}'
)
if self.beta_suffix:
self.pungi_label = (
f'Beta-{self.distribution_major_version}.'
f'{self.distribution_minor_version}'
)
else:
self.pungi_label = (
f'Update-{self.distribution_major_version}.'
f'{self.distribution_minor_version}'
)
self.final_repo_folders = ' '.join(self.get_variants(
arch=self.arch,
distribution_major_version=self.distribution_major_version,
distribution_minor_version=self.distribution_minor_version,
beta_suffix=self.beta_suffix,
))
self.pungi_configs_git_repo = (
'https://github.com/AlmaLinux/pungi-scripts-public.git'
)
self.compose_dir_full_path = self.env_path.joinpath(
'pungi-results',
self.compose_dir,
)
@staticmethod
def run_command(
command: str,
exit_or_not: bool = True,
raise_exception: bool = True,
use_sudo: bool = True
) -> None:
sudo_suffix = 'sudo' if use_sudo else ''
cmd_line = f"{sudo_suffix} bash -c \"{command}\""
logging.info(cmd_line)
process = subprocess.Popen(
cmd_line,
shell=True,
executable='/bin/bash',
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=True,
)
for _signum in (signal.SIGTERM, signal.SIGINT):
signal.signal(
_signum,
lambda signum, frame: signal_handler(
signum=signum,
frame=frame,
process=process,
),
)
while process.poll() is None:
if process.stdout is not None:
realtime_stdout_line = process.stdout.readline().strip()
if realtime_stdout_line:
print(realtime_stdout_line, flush=True)
else:
if process.poll():
if exit_or_not:
exit(process.poll())
elif raise_exception:
raise subprocess.SubprocessError()
def cleanup(self):
command = f'cd {self.build_scripts_path} && /usr/bin/env ' \
f'python3 cleanup.py ' \
f'--env-path {self.env_path} ' \
f'--keep-builds {self.keep_builds} '
if self.compose_dir is not None:
command = f'{command} --excluded-dirs {self.compose_dir}'
self.run_command(command=command)
def enable_products_repos(self):
suffix = 'products_repos'
target = Path(f'{self.repos_folder}').joinpath(suffix)
source = Path(f'{self.repos_folder}-{suffix}')
if self.use_products_repos:
logging.info('Enable products repos')
if not target.is_symlink():
target.symlink_to(source)
elif target.exists() and target.is_symlink():
logging.info('Disable products repos')
target.unlink(missing_ok=True)
def prepare(self):
if not self.skip_mirroring:
command = (
f'cd {self.build_scripts_path} && /usr/bin/env '
f'python3 prepare.py '
f'--env-path {self.env_path} '
f'dnf_reposync_synchronize '
f'{"--use-products-repos " if self.use_products_repos else ""}'
f'--mirroring-target {self.repos_folder} '
f'--product-name {self.product_name} '
f'--arch {self.arch} '
'--distribution-major-version '
f'{self.distribution_major_version}'
)
self.run_command(command=command)
self.enable_products_repos()
command = (
f'cd {self.build_scripts_path} && /usr/bin/env '
f'python3 prepare.py '
f'--env-path {self.env_path} '
f'add_env_files '
f'--env-files {" ".join(self.env_files)}'
)
self.run_command(command=command)
command = (
f'cd {self.build_scripts_path} && /usr/bin/env '
f'python3 prepare.py '
f'--env-path {self.env_path} '
f'add_koji_profile '
f'--koji-profile-name {self.koji_profile_name}'
)
self.run_command(command=command)
command = (
f'cd {self.build_scripts_path} && /usr/bin/env '
f'python3 prepare.py '
f'--env-path {self.env_path} '
f'prepare_build_conf '
f'--product-name {self.product_name} '
f'--arch {self.arch} '
f'--distribution-major-version {self.distribution_major_version} '
f'--distribution-minor-version {self.distribution_minor_version} '
f'--beta-suffix={self.beta_suffix} '
f'--sigkeys-fingerprints {" ".join(self.sigkeys_fingerprints)} '
f'--git-url {self.git_url} '
f'--git-project {self.git_project} '
f'--git-type {self.git_type} '
f'--git-auth-token {self.git_auth_token} '
f'--git-auth-username {self.git_auth_username}'
)
self.run_command(command=command)
def build(self):
# remove an old compose dir
command = (
f'[[ -d "{self.compose_dir_full_path}" ]] && '
f'rm -rf {self.compose_dir_full_path}'
)
self.run_command(command, exit_or_not=False, raise_exception=False)
command = (
f'cd {self.build_scripts_path} && /usr/bin/env '
f'python3 build.py '
f'--env-path {self.env_path} '
f'--local-mirror-path {self.repos_folder} '
f'--pungi-label {self.pungi_label} '
f'--result-directory {self.compose_dir} '
f'--local-repos {" ".join(self.local_repos)} '
f'--koji-excluded-packages {" ".join(self.koji_excluded_packages)}'
)
self.run_command(command=command)
def post(self):
command = (
f'cd {self.build_scripts_path} && /usr/bin/env '
f'python3 post_actions.py '
f'--env-path {self.env_path} '
f'--arch {self.arch} '
f'--source-repos-folder {self.repos_folder} '
f'--repos {self.final_repo_folders} '
f'--not-needed-repos {self.not_needed_variant} '
)
if self.pgp_sign_keyid:
command += f'--pgp-sign-keyid={self.pgp_sign_keyid} '
if self.sign_service_username:
command += f'--sign-service-username={self.sign_service_username} '
if self.sign_service_password:
command += f'--sign-service-password={self.sign_service_password} '
if self.sign_service_endpoint:
command += f'--sign-service-endpoint={self.sign_service_endpoint} '
self.run_command(command=command)
@staticmethod
def get_variants(
arch: str,
distribution_major_version: int,
distribution_minor_version: int,
beta_suffix: str,
) -> List[str]:
url = (
'https://git.almalinux.org/almalinux/pungi-almalinux/raw/branch/'
f'a{distribution_major_version}.{distribution_minor_version}'
f'{beta_suffix}/{arch}/variants_options.json'
)
response = requests.get(url)
response.raise_for_status()
variants_data = response.json() # type: dict
return list(variants_data.keys())
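# For example (values are illustrative), with distribution_major_version=9,
# distribution_minor_version=4, beta_suffix='' and arch='x86_64' the URL above
# resolves to:
#   https://git.almalinux.org/almalinux/pungi-almalinux/raw/branch/a9.4/x86_64/variants_options.json
# and the variants are the top-level keys of that JSON document.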
def checkout_scripts(self):
pungi_configs_folder = self.working_root_directory.joinpath(
'pungi-scripts-public'
)
try:
command = f'ls {pungi_configs_folder}'
self.run_command(command=command, exit_or_not=False)
command = (
f'cd {pungi_configs_folder} && '
'rm -rf ./* && git checkout -- . && '
'git fetch && git clean -f && '
f'git checkout -B {self.branch} origin/{self.branch}'
)
self.run_command(command=command)
except subprocess.SubprocessError:
command = (
f'cd {self.working_root_directory} && '
f'git clone "{self.pungi_configs_git_repo}" '
f'&& cd {pungi_configs_folder} '
f'&& git checkout -B {self.branch} origin/{self.branch}'
)
self.run_command(command=command)
class StoreAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
if values is None or not values:
raise argparse.ArgumentError(
self,
f'Invalid value: {values}',
)
setattr(namespace, self.dest, values)
def get_env_var(
key: str,
default: Optional[Union[str, int, List]] = None,
is_bool: bool = False,
is_multiline: bool = False,
) -> Union[str, bool, int, List[str]]:
result = os.environ.get(key.lower()) or os.environ.get(key.upper())
result = result or default
if is_bool:
result = strtobool(result)
if is_multiline:
result = list(filter(None, result.split('\n')))
return result
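# The lookup is case-insensitive: a hypothetical call
#   get_env_var(key='keep_builds', default=1)
# returns os.environ['keep_builds'] or os.environ['KEEP_BUILDS'] if either is
# set and non-empty, falling back to 1; with is_multiline=True the resulting
# string is split on newlines and empty items are dropped.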
def create_parser() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser()
parser.add_argument(
'--working-root-directory',
action=StoreAction,
default=get_env_var(key='remote_home_dir'),
type=Path,
)
parser.add_argument(
'--pgp-sign-keyid',
action=StoreAction,
default=get_env_var(
key='pgp_sign_keyid',
default='',
),
type=str,
)
parser.add_argument(
'--arch',
action=StoreAction,
default=get_env_var(key='arch'),
type=str,
)
parser.add_argument(
'--product-name',
action=StoreAction,
default=get_env_var(key='product_name'),
type=str,
)
parser.add_argument(
'--distribution-major-version',
action=StoreAction,
default=get_env_var(key='distribution_major_version'),
type=int,
)
parser.add_argument(
'--distribution-minor-version',
action=StoreAction,
default=get_env_var(key='distribution_minor_version'),
type=int,
)
parser.add_argument(
'--beta-suffix',
action='store',
default=get_env_var(key='beta_suffix', default=''),
type=str,
)
parser.add_argument(
'--not-needed-variant',
action=StoreAction,
default=get_env_var(key='not_needed_variant', default='Minimal'),
type=str,
)
parser.add_argument(
'--keep-builds',
action=StoreAction,
type=int,
default=get_env_var(key='keep_builds', default=1),
)
parser.add_argument(
'--env-files',
action='store',
type=str,
default=get_env_var(
key='add_env_files',
default='',
is_multiline=True,
),
nargs='+',
)
parser.add_argument(
'--local-repos',
action='store',
type=str,
default=get_env_var(
key='local_repos',
default='',
is_multiline=True,
),
nargs='+',
)
parser.add_argument(
'--sigkeys-fingerprints',
action=StoreAction,
default=get_env_var(
key='sigkeys_fingerprints',
default='',
is_multiline=True,
),
nargs='+',
)
parser.add_argument(
'--skip-mirroring',
action='store_true',
default=get_env_var(
key='skip_mirroring',
default='False',
is_bool=True,
),
)
parser.add_argument(
'--use-products-repos',
action='store_true',
default=get_env_var(
key='use_products_repos',
default='False',
is_bool=True,
),
)
parser.add_argument(
'--git-auth-token',
action='store',
type=str,
default=get_env_var(key='git_auth_token'),
)
parser.add_argument(
'--git-auth-username',
action='store',
type=str,
default=get_env_var(key='git_auth_username'),
)
parser.add_argument(
'--git-url',
action='store',
type=str,
default=get_env_var(
key='git_storage_url',
default='git.almalinux.org',
),
)
parser.add_argument(
'--git-project',
action='store',
type=str,
default=get_env_var(
key='git_project',
default='almalinux/pungi-almalinux',
),
)
parser.add_argument(
'--git-type',
action='store',
type=str,
default=get_env_var(key='git_type', default='gitea'),
)
parser.add_argument(
'--sign-service-username',
action='store',
help='A username for the sign service',
default=get_env_var(
key='sign_service_username',
default='',
),
)
parser.add_argument(
'--sign-service-password',
action='store',
help='A password of a sign service',
default=get_env_var(
key='sign_service_password',
default='',
),
)
parser.add_argument(
'--sign-service-endpoint',
action='store',
help='An endpoint of a sign service',
default=get_env_var(
key='sign_service_endpoint',
default='',
),
)
parser.add_argument(
'-e',
'--koji-excluded-packages',
required=False,
nargs='+',
type=str,
default=get_env_var(
key='koji_excluded_packages',
default='',
is_multiline=True,
),
)
parser.add_argument(
'--branch',
action=StoreAction,
type=str,
default=get_env_var(key='branch', default='master'),
)
return parser
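# A minimal sketch of a manual run outside Jenkins (all values hypothetical;
# normally they come from the job's environment variables):
#   python3 jenkins.py --working-root-directory /srv/build \
#       --product-name AlmaLinux --distribution-major-version 9 \
#       --distribution-minor-version 4 --arch x86_64 --branch master \
#       --git-auth-username bot --git-auth-token <token>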
def main():
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
sensitive_arguments = (
'git_auth_token',
'sign_service_password',
)
args = create_parser().parse_args()
logging.info('All CLI arguments:')
for arg_name, arg_value in vars(args).items():
if arg_name in sensitive_arguments:
continue
logging.info('%s: %s', arg_name, arg_value)
runner = Runner(
working_root_directory=args.working_root_directory,
product_name=args.product_name,
distribution_major_version=args.distribution_major_version,
distribution_minor_version=args.distribution_minor_version,
arch=args.arch,
branch=args.branch,
keep_builds=args.keep_builds,
use_products_repos=args.use_products_repos,
env_files=args.env_files,
beta_suffix=args.beta_suffix,
sigkeys_fingerprints=args.sigkeys_fingerprints,
skip_mirroring=args.skip_mirroring,
local_repos=args.local_repos,
not_needed_variant=args.not_needed_variant,
pgp_sign_keyid=args.pgp_sign_keyid,
git_url=args.git_url,
git_project=args.git_project,
git_auth_token=args.git_auth_token,
git_auth_username=args.git_auth_username,
git_type=args.git_type,
sign_service_username=args.sign_service_username,
sign_service_password=args.sign_service_password,
sign_service_endpoint=args.sign_service_endpoint,
koji_excluded_packages=args.koji_excluded_packages,
)
try:
runner.checkout_scripts()
runner.prepare()
runner.build()
runner.post()
finally:
runner.cleanup()
if __name__ == '__main__':
main()

build_scripts/post_actions.py Executable file

@@ -0,0 +1,798 @@
#!/usr/bin/env python3
# coding=utf-8
import json
import os
import logging
from collections import defaultdict
from shutil import (
rmtree,
copytree,
copy,
)
import argparse
import requests
from subprocess import check_call
from configparser import ConfigParser
from time import time
from typing import (
List,
Optional,
Dict,
)
from pathlib import Path
from urllib.parse import urljoin
from requests.auth import AuthBase
PUNGI_RESULTS = 'pungi-results'
KICKSTART_REPO = 'BaseOS'
INCLUDED_TO_KICKSTART_REPO = 'AppStream'
PATH_DICTIONARY = defaultdict(lambda: None, **{
'debug': 'debug/{arch}',
'source': 'Source',
'source_packages': 'Source/Packages',
'source_repository': 'Source',
'source_tree': 'Source',
'debug_packages': 'debug/{arch}/Packages',
'debug_repository': 'debug/{arch}',
'debug_tree': 'debug/{arch}',
}) # type: Dict[str, Optional[str]]
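# PATH_DICTIONARY maps pungi artifact categories to path templates inside a
# variant directory; categories missing from the mapping resolve to None and
# are skipped by the post-processing below. For example:
#   PATH_DICTIONARY['debug_packages'].format(arch='x86_64') -> 'debug/x86_64/Packages'
#   PATH_DICTIONARY['binary'] -> None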
logging.basicConfig(level=logging.INFO)
class BearerAuth(AuthBase):
def __init__(self, token):
self.token = token
def __call__(self, request):
request.headers["Authorization"] = f'Bearer {self.token}'
return request
class Signer:
__session__ = None # type: Optional[requests.Session]
def get_token(self, username, password) -> str:
data = {
'email': username,
'password': password
}
response = requests.post(
url=urljoin(self.endpoint + '/', 'token'),
json=data,
)
response.raise_for_status()
return response.json()['token']
def __init__(self, username, password, endpoint):
self.endpoint = endpoint
self.token = self.get_token(
username=username,
password=password,
)
def sign(
self,
file_path: Path,
keyid: str,
sign_type: str = 'detach-sign',
) -> Path:
auth = BearerAuth(token=self.token)
params = {
'keyid': keyid,
'sign_type': sign_type,
}
files = {
'file': file_path.open('rb'),
}
response = requests.post(
url=urljoin(self.endpoint + '/', 'sign'),
params=params,
files=files,
auth=auth,
)
response.raise_for_status()
if sign_type == 'detach-sign':
file_path = file_path.with_suffix(file_path.suffix + '.asc')
with file_path.open('w') as fd:
fd.write(response.text)
return file_path
@staticmethod
def verify(file_path: Path):
if file_path.suffix == '.asc':
command = f'gpg --verify {file_path} {file_path.with_suffix("")}'
else:
command = f'gpg --verify {file_path}'
check_call(
command,
shell=True,
universal_newlines=True,
)
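# A minimal usage sketch (endpoint and credentials are hypothetical):
#   signer = Signer(username='builder@example.com', password='...',
#                   endpoint='https://sign.example.com')
#   asc_path = signer.sign(file_path=Path('repodata/repomd.xml'), keyid='...')
#   Signer.verify(file_path=asc_path)  # runs `gpg --verify` on the .asc file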
def move_sources_folder_to_right_place(
latest_path: Path,
repo_name: str,
):
src_of_sources = latest_path.joinpath(repo_name, 'source')
dst_of_sources = latest_path.joinpath(repo_name, PATH_DICTIONARY['source'])
if src_of_sources.exists():
logging.info(
'Move sources to the right place for result '
'dir "%s" and repo "%s"',
latest_path,
repo_name,
)
src_of_sources.joinpath('tree').rename(dst_of_sources)
rmtree(src_of_sources)
def move_debug_folder_to_right_place(
latest_path: Path,
repo_name: str,
arch: str,
):
src_of_debug = latest_path.joinpath(repo_name, arch, 'debug')
dst_of_debug = latest_path.joinpath(
repo_name,
PATH_DICTIONARY['debug'].format(arch=arch),
)
if src_of_debug.exists():
logging.info(
'Move a folder with debug rpms to the right place for '
'result dir "%s", arch "%s" and repo "%s"',
latest_path,
arch,
repo_name,
)
os.makedirs(dst_of_debug.parent, exist_ok=True)
src_of_debug.joinpath('tree').rename(dst_of_debug)
rmtree(src_of_debug)
def copy_updateinfo_from_platform_repos(
src_repos_path: Path,
latest_path: Path,
repo_name: str,
arch: str,
):
dst_path = latest_path.joinpath(repo_name, arch, 'os')
if not dst_path.exists():
return
for path in src_repos_path.glob(
f'platform-almalinux-[0-9]-{repo_name.lower()}-'
f'{arch}/repodata/*updateinfo*'
):
dst_file = dst_path.joinpath('repodata', path.name)
if dst_file.exists():
return
logging.info('Copy updateinfo.xml for repo "%s"', repo_name)
copy(
path,
dst_file,
)
dst_repodata_path = dst_path.joinpath('repodata')
logging.info('Modify repo "%s" with updateinfo.xml', repo_name)
check_call(
f'modifyrepo_c --mdtype=updateinfo {dst_file} {dst_repodata_path}',
shell=True,
)
return
logging.warning(
'Updateinfo.xml for repo "%s" does not exist',
repo_name,
)
def sign_repomd_xml(
latest_path: Path,
repo_name: str,
arch: str,
pgp_keyid: str,
username: str,
password: str,
endpoint: str,
):
repomd_xml_path_suffix = 'repodata/repomd.xml'
os_repomd_xml_path = latest_path.joinpath(
repo_name,
arch,
'os',
repomd_xml_path_suffix,
)
kickstart_repomd_xml_path = latest_path.joinpath(
repo_name,
arch,
'kickstart',
repomd_xml_path_suffix,
)
source_repomd_xml_path = latest_path.joinpath(
repo_name,
PATH_DICTIONARY['source'],
repomd_xml_path_suffix,
)
debug_repomd_xml_path = latest_path.joinpath(
repo_name,
PATH_DICTIONARY['debug'].format(arch=arch),
repomd_xml_path_suffix
)
logging.info(
'Sign repomd.xml files for "%s" and verify signatures',
repo_name,
)
for repomd_xml_path in (
os_repomd_xml_path,
kickstart_repomd_xml_path,
source_repomd_xml_path,
debug_repomd_xml_path,
):
if not repomd_xml_path.exists():
continue
signer = Signer(
username=username,
password=password,
endpoint=endpoint,
)
file_path = signer.sign(
file_path=repomd_xml_path,
keyid=pgp_keyid,
)
signer.verify(file_path=file_path)
def create_kickstart_folder(
latest_path: Path,
repo_name: str,
arch: str,
):
src_kickstart = latest_path.joinpath(repo_name, arch, 'os')
dst_kickstart = latest_path.joinpath(repo_name, arch, 'kickstart')
if src_kickstart.exists() and not dst_kickstart.exists():
logging.info(
'Make kickstart repo for result dir "%s", '
'repo "%s" and arch "%s"',
latest_path,
repo_name,
arch,
)
copytree(
src_kickstart,
dst_kickstart,
copy_function=os.link,
)
logging.info(
'Copy repodata for a kickstart without using hardlinks'
)
repodata_dst_path = dst_kickstart.joinpath('repodata')
repodata_src_path = src_kickstart.joinpath('repodata')
rmtree(repodata_dst_path)
copytree(
repodata_src_path,
repodata_dst_path,
)
def update_timestamp_in_treeinfo(
tree_info_path: Path,
timestamp: int,
):
replaced_values = {
'general': 'timestamp',
'tree': 'build_timestamp',
}
if not tree_info_path.exists():
return
tree_info_config = ConfigParser()
tree_info_config.read(tree_info_path)
for section, key in replaced_values.items():
tree_info_config.set(
section=section,
option=key,
value=str(timestamp),
)
logging.info(
'Update timestamp "%s" in .treeinfo for "%s"',
timestamp,
tree_info_path,
)
with open(tree_info_path, 'w') as tree_info_fp:
tree_info_config.write(tree_info_fp)
def update_kickstart_treeinfo_file(
tree_info_path: str,
):
replaced_values = {
'packages': None,
'repository': None,
}
tree_info_config = ConfigParser()
tree_info_config.read(tree_info_path)
logging.info(
'Update .treeinfo file "%s": replace path suffix `os` with `kickstart`',
tree_info_path,
)
section_name = f'variant-{INCLUDED_TO_KICKSTART_REPO}'
for key in replaced_values:
if section_name not in tree_info_config.sections():
continue
replaced_values[key] = tree_info_config.get(
section=section_name,
option=key
).replace('os', 'kickstart')
for key, value in replaced_values.items():
if section_name not in tree_info_config.sections():
continue
tree_info_config.set(
section=section_name,
option=key,
value=value,
)
# because it's a hardlink, modifying it in place would change
# both files (in dirs `os` and `kickstart`)
os.remove(tree_info_path)
with open(tree_info_path, 'w') as tree_info_fp:
tree_info_config.write(tree_info_fp)
def sign_isos_checksum(
latest_path: Path,
arch: str,
pgp_keyid: str,
username: str,
password: str,
endpoint: str,
):
logging.info('Sign ISOs CHECKSUM and verify signature')
checksum_path = latest_path.joinpath('isos', arch, 'CHECKSUM')
if not checksum_path.exists():
logging.warning('File CHECKSUM is absent')
return
signer = Signer(
username=username,
password=password,
endpoint=endpoint,
)
file_path = signer.sign(
file_path=checksum_path,
keyid=pgp_keyid,
sign_type='clear-sign',
)
signer.verify(file_path=file_path)
def post_processing_images_json_metadata(
latest_path: Path,
arch: str,
):
images_json_path = latest_path.joinpath(
'metadata', arch, 'images.json'
)
logging.info('Post-processing images.json')
if not images_json_path.exists():
logging.warning('images.json is absent')
return
with open(images_json_path, 'r') as images_metadata_fd:
content = json.load(images_metadata_fd)
images = content['payload']['images']
for variant in images:
variant_data = images[variant] # type: Dict[str, List[Dict]]
for arch, images_list in variant_data.items():
variant_data[arch] = [
dict(
image,
**{
'path': str(Path('isos').joinpath(
arch,
Path(image['path']).name,
))
}
) for image in images_list
]
with open(images_json_path, 'w') as images_metadata_fd:
json.dump(
content,
images_metadata_fd,
indent=4,
)
def post_processing_rpms_json_metadata(
latest_path: Path,
arch: str,
):
rpms_json_path = latest_path.joinpath(
'metadata', arch, 'rpms.json'
)
logging.info('Post-processing rpms.json')
if not rpms_json_path.exists():
logging.warning('rpms.json is absent')
return
with open(rpms_json_path, 'r') as rpms_metadata_fd:
content = json.load(rpms_metadata_fd)
rpms = content['payload']['rpms']
for variant in rpms:
variant_data = rpms[variant]
if variant == 'Minimal':
continue
for arch in variant_data:
arch_data = variant_data[arch]
for srpm in arch_data:
srpm_data = arch_data[srpm]
for artifact in srpm_data:
artifact_data = srpm_data[artifact]
path_suffix = PATH_DICTIONARY[
artifact_data['category']
] # type: Optional[str]
if path_suffix is None:
continue
else:
path_suffix = path_suffix.format(arch=arch)
artifact_path = Path(artifact_data['path'])
artifact_data['path'] = str(Path(variant).joinpath(
path_suffix,
artifact_path.parent.name,
artifact_path.name,
))
with open(rpms_json_path, 'w') as rpms_metadata_fd:
if 'Minimal' in content['payload']['rpms']:
del content['payload']['rpms']['Minimal']
json.dump(
content,
rpms_metadata_fd,
indent=4,
)
def post_processing_compose_info_json_metadata(
latest_path: Path,
arch: str,
):
composeinfo_json_path = latest_path.joinpath(
'metadata', arch, 'composeinfo.json'
)
logging.info('Post-processing composeinfo.json')
if not composeinfo_json_path.exists():
logging.warning('composeinfo.json is absent')
return
with open(composeinfo_json_path, 'r') as composeinfo_metadata_fd:
content = json.load(composeinfo_metadata_fd)
variants = content['payload']['variants']
for variant in variants:
variant_paths = variants[variant]['paths']
if variant == 'Minimal':
continue
for path_type in variant_paths:
path_data = variant_paths[path_type] # type: Dict
for arch, path in path_data.items():
path_suffix = PATH_DICTIONARY[
path_type] # type: Optional[str]
if path_suffix is None:
continue
else:
path_suffix = path_suffix.format(arch=arch)
path = Path(variant).joinpath(path_suffix)
path_data[arch] = str(path)
with open(composeinfo_json_path, 'w') as composeinfo_metadata_fd:
if 'Minimal' in content['payload']['variants']:
del content['payload']['variants']['Minimal']
json.dump(
content,
composeinfo_metadata_fd,
indent=4,
)
def move_json_metadata_to_arch_folder(
latest_path: Path,
arch: str,
):
extension = '*.json'
metadata_path = latest_path.joinpath('metadata')
metadata_arch_path = metadata_path.joinpath(arch)
os.makedirs(metadata_arch_path, exist_ok=True)
for json_metadata in Path(metadata_path).glob(extension):
logging.info(
'Copy "%s" to arch directory "%s"',
json_metadata,
metadata_arch_path,
)
json_metadata.rename(metadata_arch_path.joinpath(json_metadata.name))
def move_iso_and_its_artifacts_to_isos_arch_folder(
src_latest_path: Path,
repo_name: str,
arch: str,
exts_of_files: List[str],
dst_latest_path: Optional[Path] = None,
):
src_iso_folder_path = src_latest_path.joinpath(repo_name, arch, 'iso')
isos_arch_folder = (dst_latest_path or src_latest_path).joinpath(
'isos',
arch,
)
os.makedirs(isos_arch_folder, exist_ok=True)
if not src_iso_folder_path.exists():
return
for ext_of_file in exts_of_files:
for src_file_path in src_iso_folder_path.glob(ext_of_file):
dst_file_path = isos_arch_folder.joinpath(src_file_path.name)
if not src_file_path.exists():
continue
if dst_file_path.exists():
continue
logging.info(
'Move iso or iso\'s artifacts from "%s" to "%s"',
src_file_path,
dst_file_path,
)
src_file_path.rename(dst_file_path)
src_checksum_path = src_latest_path.joinpath(
repo_name,
arch,
'iso',
'CHECKSUM',
)
dst_checksum_path = isos_arch_folder.joinpath('CHECKSUM')
if src_checksum_path.exists():
logging.info(
'Write CHECKSUM from "%s" to "%s"',
src_checksum_path,
dst_checksum_path,
)
with open(src_checksum_path, 'r') as src_checksum_file, \
open(dst_checksum_path, 'a+') as dst_checksum_file:
src_checksum_content = src_checksum_file.read()
dst_checksum_file.write(src_checksum_content)
rmtree(src_iso_folder_path)
def rename_latest_dir(
latest_path: Path,
):
old_real_name = Path(os.readlink(latest_path.parent)).name
if not old_real_name.startswith('last_compose_dir'):
return
old_real_name_path = latest_path.parent.parent.joinpath(old_real_name)
new_real_name = f'{int(time())}-{old_real_name}'
new_real_name_path = latest_path.parent.parent.joinpath(new_real_name)
logging.info('New real name path %s', new_real_name_path)
logging.info('Old real name path %s', old_real_name_path)
if not old_real_name_path.exists():
return
logging.info(
'Add the timestamp to the name of the '
'real directory with the latest result: "%s"',
new_real_name_path,
)
old_real_name_path.rename(new_real_name_path)
os.unlink(latest_path.parent)
os.symlink(new_real_name_path, latest_path.parent)
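# For example (names hypothetical): a compose produced into
# pungi-results/last_compose_dir and symlinked as pungi-results/latest-AlmaLinux-9
# is renamed to pungi-results/<unix timestamp>-last_compose_dir, and the
# latest-* symlink is re-pointed at the new name.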
def create_parser():
parser = argparse.ArgumentParser()
parser.add_argument(
'--env-path',
action='store',
help='A path to the folder that will be used '
'for building the new distribution',
required=True,
)
parser.add_argument(
'--sign-service-username',
action='store',
help='A username for the sign service',
default=None,
required=False,
)
parser.add_argument(
'--sign-service-password',
action='store',
help='A password of a sign service',
default=None,
required=False,
)
parser.add_argument(
'--sign-service-endpoint',
action='store',
help='An endpoint of a sign service',
default=None,
required=False,
)
parser.add_argument(
'--pgp-sign-keyid',
action='store',
help='PGP sign key ID. Used for signing building artifacts',
required=False,
default=None,
)
parser.add_argument(
'--middle-result-directory',
action='store',
help='A directory with a middle result, '
'e.g. a directory containing the Minimal ISO',
default=None,
required=False,
)
parser.add_argument(
'--arch',
type=str,
help='Architecture of a distribution',
required=True,
)
parser.add_argument(
'--source-repos-folder',
type=str,
help='Path to the folder where source repos are stored',
required=True,
)
parser.add_argument(
'--repos',
nargs='+',
type=str,
help='A list of repositories contained in the distribution',
required=True,
)
parser.add_argument(
'--middle-repos',
nargs='+',
type=str,
help='A list of repositories from a middle result '
'which will be used for getting ISOs',
required=False,
default=[],
)
parser.add_argument(
'--not-needed-repos',
nargs='*',
type=str,
help='A list of repositories which are not needed, e.g. Minimal',
required=False,
default=[],
)
return parser
def cli_main():
args = create_parser().parse_args()
pungi_results = Path(args.env_path).joinpath(PUNGI_RESULTS)
latest_result_paths = list(pungi_results.glob('latest-*'))
logging.info(
'We have the following latest results "%s"',
latest_result_paths,
)
extensions_of_files = ['*.iso', '*.manifest']
for latest_path in latest_result_paths:
latest_path = latest_path.joinpath('compose')
build_timestamp = int(time())
for repo in args.repos:
if not latest_path.joinpath(repo).exists():
continue
if repo in args.middle_repos:
not_needed_repo_path = latest_path.joinpath(repo)
if not_needed_repo_path.exists():
rmtree(not_needed_repo_path)
continue
move_sources_folder_to_right_place(
latest_path=latest_path,
repo_name=repo,
)
move_debug_folder_to_right_place(
latest_path=latest_path,
repo_name=repo,
arch=args.arch,
)
copy_updateinfo_from_platform_repos(
src_repos_path=Path(args.source_repos_folder),
latest_path=latest_path,
repo_name=repo,
arch=args.arch,
)
create_kickstart_folder(
latest_path=latest_path,
repo_name=repo,
arch=args.arch,
)
if all(opt is not None for opt in (
args.sign_service_username,
args.sign_service_password,
args.sign_service_endpoint
)):
sign_repomd_xml(
latest_path=latest_path,
repo_name=repo,
arch=args.arch,
username=args.sign_service_username,
password=args.sign_service_password,
endpoint=args.sign_service_endpoint,
pgp_keyid=args.pgp_sign_keyid,
)
move_iso_and_its_artifacts_to_isos_arch_folder(
src_latest_path=latest_path,
repo_name=repo,
arch=args.arch,
exts_of_files=extensions_of_files,
)
repo_path = latest_path.joinpath(repo)
for path in Path(repo_path).rglob('.treeinfo'):
update_timestamp_in_treeinfo(
tree_info_path=path,
timestamp=build_timestamp,
)
if path.parent.name == 'kickstart':
update_kickstart_treeinfo_file(
tree_info_path=str(path),
)
move_json_metadata_to_arch_folder(
latest_path=latest_path,
arch=args.arch,
)
post_processing_compose_info_json_metadata(
latest_path=latest_path,
arch=args.arch,
)
post_processing_rpms_json_metadata(
latest_path=latest_path,
arch=args.arch,
)
post_processing_images_json_metadata(
latest_path=latest_path,
arch=args.arch,
)
if all(opt is not None for opt in (
args.sign_service_username,
args.sign_service_password,
args.sign_service_endpoint
)):
sign_isos_checksum(
latest_path=latest_path,
arch=args.arch,
username=args.sign_service_username,
password=args.sign_service_password,
endpoint=args.sign_service_endpoint,
pgp_keyid=args.pgp_sign_keyid,
)
for repo in args.middle_repos:
if args.middle_result_directory is not None:
move_iso_and_its_artifacts_to_isos_arch_folder(
src_latest_path=Path(
args.middle_result_directory,
).joinpath('compose'),
repo_name=repo,
arch=args.arch,
exts_of_files=extensions_of_files,
dst_latest_path=latest_path,
)
for repo in args.not_needed_repos:
not_needed_repo_path = latest_path.joinpath(repo)
if not_needed_repo_path.exists():
logging.info(
'Remove not needed variant "%s" by path "%s"',
repo,
not_needed_repo_path,
)
rmtree(not_needed_repo_path)
rename_latest_dir(latest_path=latest_path)
if args.middle_result_directory is not None and \
Path(args.middle_result_directory).exists():
rmtree(args.middle_result_directory)
if __name__ == '__main__':
cli_main()

build_scripts/prepare.py Executable file

@@ -0,0 +1,537 @@
#!/usr/bin/env python3
# coding=utf-8
import argparse
import base64
import json
import logging
import os
import subprocess
from configparser import ConfigParser
from pathlib import Path
from typing import (
List,
Dict, Optional,
)
import jinja2
import requests
from urllib.parse import quote, quote_plus
from requests.auth import HTTPBasicAuth
VARIANTS_GENERATOR_FOLDER = 'variants-xml-generator'
COMPS_GENERATOR_FOLDER = 'almacomps'
KOJI_CONF_PATH = '/etc/koji.conf'
VARIANTS_OPTIONS_FILENAME = 'variants_options.json'
PUNGI_BUILD_CONF_TEMPLATE_FILENAME = 'pungi-build.conf.j2'
PUNGI_BUILD_CONF_FILENAME = 'pungi-build.conf'
INCLUDE_EXCLUDE_CONF_FILENAME = 'include_exclude.conf'
MULTILIB_CONF_FILENAME = 'multilib.conf'
EXTRA_OPTIONS_CONF_FILENAME = 'extra_options.conf'
logging.basicConfig(level=logging.INFO)
def dnf_reposync_mirroring(
use_products_repos: bool,
product_name: str,
distribution_major_version: str,
arch: str,
mirroring_target: str
) -> None:
base_repos_dir = Path('/etc/yum.repos.d/').joinpath(
product_name.lower(),
distribution_major_version,
arch,
)
specific_repos = base_repos_dir.joinpath('specific_repos')
products_repos = base_repos_dir.joinpath('products_repos')
platform_repos = base_repos_dir.joinpath('platform_repos')
if use_products_repos and any(products_repos.iterdir()):
command = f'dnf reposync -p {mirroring_target}-products_repos ' \
f'--setopt=reposdir="{products_repos}" ' \
'--enablerepo=* --download-metadata ' \
'--delete --downloadcomps --remote-time '
subprocess.check_call(
command,
shell=True,
)
if any(specific_repos.iterdir()):
command = f'dnf reposync -p {mirroring_target} ' \
f'--setopt=reposdir="{specific_repos}" ' \
'--enablerepo=* --download-metadata ' \
'--delete --downloadcomps --remote-time '
subprocess.check_call(
command,
shell=True,
)
if any(platform_repos.iterdir()):
command = f'dnf reposync -p {mirroring_target} ' \
f'--setopt=reposdir="{platform_repos}" ' \
'--enablerepo=* --download-metadata ' \
'--delete --downloadcomps --remote-time '
subprocess.check_call(
command,
shell=True,
)
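# The function above assumes the repo definitions were pre-installed under
# /etc/yum.repos.d/<product>/<major version>/<arch>/, e.g. (hypothetical paths):
#   /etc/yum.repos.d/almalinux/9/x86_64/platform_repos/*.repo
#   /etc/yum.repos.d/almalinux/9/x86_64/specific_repos/*.repo
#   /etc/yum.repos.d/almalinux/9/x86_64/products_repos/*.repo  (with --use-products-repos)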
def load_remote_file_content(
name: str,
distribution_major_version: int,
distribution_minor_version: int,
beta_suffix: str,
product_name: str,
git_auth_token: str,
git_auth_username: str,
git_url: str,
git_project: str,
git_type: str = 'gitea',
) -> Optional[str]:
beta_suffix = beta_suffix if beta_suffix else ''
decoding_func = {
'gerrit': lambda content: base64.b64decode(content).decode('utf-8'),
'gitea': lambda content: base64.b64decode(json.loads(content.decode())
['content']).decode('utf-8'),
'github': lambda content: base64.b64decode(content).decode('utf-8'),
}
if 'gerrit' == git_type:
name = quote(name, safe='')
url = f'https://{git_url}/a/projects/{git_project}/branches/' \
f'{product_name[0].lower()}{distribution_major_version}.' \
f'{distribution_minor_version}' \
f'{beta_suffix}/files/{name}/content'
response = requests.get(url, auth=HTTPBasicAuth(
username=git_auth_username,
password=git_auth_token,
))
elif 'gitea' == git_type:
headers = {
'accept': 'application/json',
}
params = {
'access_token': git_auth_token,
'ref': (
f'{product_name[0].lower()}{distribution_major_version}.'
f'{distribution_minor_version}{beta_suffix}'
)
}
name = quote(name, safe='')
url = f'https://{git_url}/api/v1/repos/{git_project}/contents/{name}'
response = requests.get(url, params=params, headers=headers)
elif 'github' == git_type:
name = quote(name, safe='')
headers = {
'Authorization': f'Bearer {git_auth_token}',
'Accept': 'application/vnd.github+json',
}
params = {
'ref': (
f'{product_name[0].lower()}{distribution_major_version}.'
f'{distribution_minor_version}{beta_suffix}'
)
}
url = f'https://api.github.com/repos/{git_project}/contents/{name}'
response = requests.get(url, params=params, headers=headers)
else:
raise NotImplementedError(f'{git_type} is not supported yet')
try:
response.raise_for_status()
except requests.RequestException:
return
return decoding_func[git_type](response.content)
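# For the default gitea type, a hypothetical call with
# git_url='git.almalinux.org', git_project='almalinux/pungi-almalinux',
# product_name='AlmaLinux', version 9.4 and name='x86_64/multilib.conf'
# requests ref 'a9.4' from
#   https://git.almalinux.org/api/v1/repos/almalinux/pungi-almalinux/contents/x86_64%2Fmultilib.conf
# and base64-decodes the 'content' field of the JSON response.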
def render_variants_options(
env: jinja2.Environment,
variables: Dict,
) -> dict:
variants_options_template = env.get_template(os.path.join(
variables['arch'],
VARIANTS_OPTIONS_FILENAME,
))
variants_options = variants_options_template.render(**variables)
return json.loads(variants_options)
def render_pungi_build_conf(
env: jinja2.Environment,
variables: Dict,
) -> str:
pungi_build_conf_template = env.get_template(
PUNGI_BUILD_CONF_TEMPLATE_FILENAME,
)
return pungi_build_conf_template.render(**variables)
def prepare_build_conf(
product_name: str,
arch: str,
distribution_major_version: int,
distribution_minor_version: int,
env_path: str,
sigkeys_fingerprints: List[str],
git_url: str,
git_project: str,
git_auth_token: str,
git_auth_username: str,
git_type: str = 'gitea',
beta_suffix: str = '',
):
logging.info(
'Prepare build conf'
)
variables = {
'product_name': product_name,
'arch': arch,
'distribution_major_version': distribution_major_version,
'distribution_minor_version': distribution_minor_version,
'beta_suffix': beta_suffix,
'sigkeys_fingerprints': [
f'"{sigkey}"' for sigkey in sigkeys_fingerprints if sigkey
],
'git_auth_username': quote_plus(git_auth_username),
'git_auth_token': quote_plus(git_auth_token),
}
env = jinja2.Environment(
loader=jinja2.FunctionLoader(lambda name: load_remote_file_content(
name=name,
distribution_major_version=distribution_major_version,
distribution_minor_version=distribution_minor_version,
beta_suffix=beta_suffix,
git_url=git_url,
git_project=git_project,
git_auth_token=git_auth_token,
git_auth_username=git_auth_username,
product_name=product_name,
git_type=git_type,
)),
autoescape=jinja2.select_autoescape(),
trim_blocks=True,
lstrip_blocks=True,
)
variables['variants'] = render_variants_options(
env=env,
variables=variables,
)
extra_options_conf_lines = load_remote_file_content(
name=f'{arch}/{EXTRA_OPTIONS_CONF_FILENAME}',
distribution_minor_version=distribution_minor_version,
distribution_major_version=distribution_major_version,
beta_suffix=beta_suffix,
git_url=git_url,
git_project=git_project,
git_auth_token=git_auth_token,
git_auth_username=git_auth_username,
product_name=product_name,
git_type=git_type,
)
variables['extra_options'] = extra_options_conf_lines is not None
pungi_build_conf_lines = render_pungi_build_conf(
env=env,
variables=variables,
)
include_exclude_conf_lines = load_remote_file_content(
name=f'{arch}/{INCLUDE_EXCLUDE_CONF_FILENAME}',
distribution_minor_version=distribution_minor_version,
distribution_major_version=distribution_major_version,
beta_suffix=beta_suffix,
git_url=git_url,
git_project=git_project,
git_auth_token=git_auth_token,
git_auth_username=git_auth_username,
product_name=product_name,
git_type=git_type,
)
multilib_conf_lines = load_remote_file_content(
name=f'{arch}/{MULTILIB_CONF_FILENAME}',
distribution_minor_version=distribution_minor_version,
distribution_major_version=distribution_major_version,
beta_suffix=beta_suffix,
git_url=git_url,
git_project=git_project,
git_auth_token=git_auth_token,
git_auth_username=git_auth_username,
product_name=product_name,
git_type=git_type,
)
with open(os.path.join(
env_path,
PUNGI_BUILD_CONF_FILENAME,
), 'w') as fd:
fd.write(pungi_build_conf_lines)
with open(os.path.join(
env_path,
INCLUDE_EXCLUDE_CONF_FILENAME,
), 'w') as fd:
fd.write(include_exclude_conf_lines)
with open(os.path.join(
env_path,
MULTILIB_CONF_FILENAME,
), 'w') as fd:
fd.write(multilib_conf_lines)
if variables['extra_options']:
with open(os.path.join(
env_path,
EXTRA_OPTIONS_CONF_FILENAME,
), 'w') as fd:
fd.write(extra_options_conf_lines)
def add_koji_profile(
env_path: str,
profile_name: str,
):
logging.info(
'Add koji profile "%s" to "%s"',
profile_name,
KOJI_CONF_PATH,
)
koji_env_path = os.path.join(
env_path,
'koji'
)
with open(KOJI_CONF_PATH, 'r') as koji_conf_file:
koji_conf_obj = ConfigParser()
koji_conf_obj.read_file(koji_conf_file)
if profile_name not in koji_conf_obj.sections():
koji_conf_obj.add_section(profile_name)
koji_conf_obj.set(profile_name, 'topdir', koji_env_path)
with open(KOJI_CONF_PATH, 'w') as koji_conf_file:
koji_conf_obj.write(koji_conf_file)
def save_additional_env_files(
env_path: str,
add_env_files: List[str],
) -> None:
for add_env_file in filter(None, add_env_files):
env_file_name, file_content_in_base64 = add_env_file.split(',')
env_file_content = base64.b64decode(
file_content_in_base64,
).decode('utf-8')
env_file_path = os.path.join(
env_path,
env_file_name,
)
with open(env_file_path, 'w') as env_file:
env_file.write(env_file_content)
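# Each --env-files item is '<file name>,<base64-encoded content>'; a
# hypothetical element could be built as:
#   'add-comps.xml,' + base64.b64encode(b'<comps/>').decode()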
def create_parser():
parser = argparse.ArgumentParser()
parser.add_argument(
'--env-path',
action='store',
help='A path to the folder that will be used '
'for building the new distribution',
required=False,
default=None,
)
subparsers = parser.add_subparsers(
dest='command',
)
parser_synchronize_using_dnf_reposync = subparsers.add_parser(
'dnf_reposync_synchronize',
help='Run synchronize using DNF reposync',
)
parser_synchronize_using_dnf_reposync.add_argument(
'--mirroring-dnf-repos',
nargs='*',
type=str,
help='A list of repos which will be mirrored',
required=False,
)
parser_synchronize_using_dnf_reposync.add_argument(
'--mirroring-target',
action='store',
help='A folder which will contain a local mirror',
required=False,
)
parser_synchronize_using_dnf_reposync.add_argument(
'--use-products-repos',
action='store_true',
default=False,
)
parser_synchronize_using_dnf_reposync.add_argument(
'--product-name',
action='store',
help='A name of the product being built',
required=True,
)
parser_synchronize_using_dnf_reposync.add_argument(
'--arch',
help='Architecture of a product',
action='store',
required=True,
)
parser_synchronize_using_dnf_reposync.add_argument(
'--distribution-major-version',
help='Major version of a product',
action='store',
required=True,
)
parser_add_env_files = subparsers.add_parser(
'add_env_files',
help='Save environment files which are passed as base64 strings'
)
parser_add_env_files.add_argument(
'--env-files',
nargs='*',
type=str,
help='A list of files which should be stored in the env '
'directory. E.g. `add-comps.xml,<content_in_base64>`',
required=False,
default=[],
)
parser_koji_profile = subparsers.add_parser(
'add_koji_profile',
help=f'Add new koji profile to {KOJI_CONF_PATH}',
)
parser_koji_profile.add_argument(
'--koji-profile-name',
action='store',
help='A name of koji profile',
required=True,
)
parser_build_conf = subparsers.add_parser(
'prepare_build_conf',
help='Prepare a Pungi build conf',
)
parser_build_conf.add_argument(
'--product-name',
action='store',
help='A name of the product being built',
required=True,
)
parser_build_conf.add_argument(
'--arch',
help='Architecture of a product',
action='store',
required=True,
)
parser_build_conf.add_argument(
'--distribution-major-version',
help='Major version of a product',
action='store',
required=True,
)
parser_build_conf.add_argument(
'--distribution-minor-version',
help='Minor version of a product',
action='store',
required=True,
)
parser_build_conf.add_argument(
'--beta-suffix',
help='Suffix of the ISO & Volume ID names. E.g. `-beta-1`',
action='store',
default='',
type=str,
required=False,
)
parser_build_conf.add_argument(
'--sigkeys-fingerprints',
nargs='*',
type=str,
help='A list of fingerprints of AlmaLinux sign keys. '
'They are used for checking that all packages are signed',
)
parser_build_conf.add_argument(
'--git-auth-token',
action='store',
type=str,
help='Auth token for access to a Git repository which '
'contains a build config and related stuff'
)
parser_build_conf.add_argument(
'--git-auth-username',
action='store',
type=str,
help='Auth username for access to a Git repository which '
'contains a build config and related stuff'
)
parser_build_conf.add_argument(
'--git-url',
action='store',
type=str,
help='Git URL for a Git repository which '
'contains a build config and related stuff'
)
parser_build_conf.add_argument(
'--git-project',
action='store',
type=str,
help='Name of a Git repository which '
'contains a build config and related stuff'
)
parser_build_conf.add_argument(
'--git-type',
action='store',
type=str,
default='gitea',
help='Type of a Git repository which '
'contains a build config and related stuff'
)
return parser
def check_is_root():
if os.geteuid():
logging.error('The script should be run as root or via a sudo user')
exit(1)
def cli_main():
# check_is_root()
parser = create_parser()
args = parser.parse_args()
if args.command == 'dnf_reposync_synchronize':
dnf_reposync_mirroring(
use_products_repos=args.use_products_repos,
product_name=args.product_name,
distribution_major_version=args.distribution_major_version,
arch=args.arch,
mirroring_target=args.mirroring_target,
)
else:
# other commands that require an env path
os.makedirs(args.env_path, exist_ok=True)
if args.command == 'add_env_files':
save_additional_env_files(
env_path=args.env_path,
add_env_files=args.env_files,
)
if args.command == 'add_koji_profile':
add_koji_profile(
env_path=args.env_path,
profile_name=args.koji_profile_name,
)
if args.command == 'prepare_build_conf':
prepare_build_conf(
beta_suffix=args.beta_suffix,
product_name=args.product_name,
arch=args.arch,
distribution_major_version=args.distribution_major_version,
distribution_minor_version=args.distribution_minor_version,
env_path=args.env_path,
sigkeys_fingerprints=args.sigkeys_fingerprints,
git_url=args.git_url,
git_project=args.git_project,
git_auth_token=args.git_auth_token,
git_auth_username=args.git_auth_username,
git_type=args.git_type,
)
if __name__ == '__main__':
cli_main()