diff options
| author | Viacheslav Hletenko <v.gletenko@vyos.io> | 2024-08-26 15:21:14 +0000 |
|---|---|---|
| committer | Viacheslav Hletenko <v.gletenko@vyos.io> | 2024-08-29 09:30:52 +0000 |
| commit | cc7d0993b420c3245e628a818f887411d72530ff (patch) | |
| tree | 53fd8ab2b5dea74c1f7969b8ff853179d81f8fec /scripts/package-build/netfilter | |
| parent | 70bb3c5baacb6e0c72b9532f6dda417d395a6bc0 (diff) | |
| download | vyos-build-cc7d0993b420c3245e628a818f887411d72530ff.tar.gz vyos-build-cc7d0993b420c3245e628a818f887411d72530ff.zip | |
T6674: Add build-scripts for packages without Jenkins
Add build scripts for .deb packages without Jenkins.
To exclude Jenkins we need some place where we can put new build-scripts
to run in parallel (old/new) in the meantime
We will deprecate old Jenkins package builds in the future.
Diffstat (limited to 'scripts/package-build/netfilter')
4 files changed, 321 insertions, 0 deletions
diff --git a/scripts/package-build/netfilter/.gitignore b/scripts/package-build/netfilter/.gitignore new file mode 100644 index 00000000..8518afb9 --- /dev/null +++ b/scripts/package-build/netfilter/.gitignore @@ -0,0 +1,3 @@ +/pkg-libnftnl/ +/pkg-nftables/ + diff --git a/scripts/package-build/netfilter/build.py b/scripts/package-build/netfilter/build.py new file mode 100755 index 00000000..9737b7d3 --- /dev/null +++ b/scripts/package-build/netfilter/build.py @@ -0,0 +1,189 @@ +#!/usr/bin/env python3 +# +# Copyright (C) 2024 VyOS maintainers and contributors +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 or later as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. 
+# + +import glob +import shutil +import toml +import os + +from argparse import ArgumentParser +from pathlib import Path +from subprocess import run, CalledProcessError + + +def ensure_dependencies(dependencies: list) -> None: + """Ensure Debian build dependencies are met""" + if not dependencies: + print("I: No additional dependencies to install") + return + + print("I: Ensure Debian build dependencies are met") + run(['sudo', 'apt-get', 'update'], check=True) + run(['sudo', 'apt-get', 'install', '-y'] + dependencies, check=True) + + +def apply_patches(repo_dir: Path, patch_dir: Path, package_name: str) -> None: + """Apply patches from the patch directory to the repository""" + package_patch_dir = patch_dir / package_name + if package_patch_dir.exists() and package_patch_dir.is_dir(): + patches = list(package_patch_dir.glob('*')) + else: + print(f"I: No patch directory found for {package_name} in {patch_dir}") + return + + # Filter out directories from patches list + patches = [patch for patch in patches if patch.is_file()] + + if not patches: + print(f"I: No patches found in {package_patch_dir}") + return + + debian_patches_dir = repo_dir / 'debian/patches' + debian_patches_dir.mkdir(parents=True, exist_ok=True) + + series_file = debian_patches_dir / 'series' + with series_file.open('a') as series: + for patch in patches: + patch_dest = debian_patches_dir / patch.name + try: + # Ensure the patch file exists before copying + if patch.exists(): + shutil.copy(patch, patch_dest) + series.write(patch.name + '\n') + print(f"I: Applied patch: {patch.name}") + else: + print(f"W: Patch file {patch} not found, skipping") + except FileNotFoundError: + print(f"W: Patch file {patch} not found, skipping") + + +def prepare_package(repo_dir: Path, install_data: str) -> None: + """Prepare a package""" + if not install_data: + print("I: No install data provided, skipping package preparation") + return + + try: + install_file = repo_dir / 'debian/install' + 
install_file.parent.mkdir(parents=True, exist_ok=True) + install_file.write_text(install_data) + print("I: Prepared package") + except Exception as e: + print(f"Failed to prepare package: {e}") + raise + + +def build_package(package: dict, dependencies: list, patch_dir: Path) -> None: + """Build a package from the repository + + Args: + package (dict): Package information + dependencies (list): List of additional dependencies + patch_dir (Path): Directory containing patches + """ + repo_name = package['name'] + repo_dir = Path(repo_name) + + try: + # Clone the repository if it does not exist + if not repo_dir.exists(): + run(['git', 'clone', package['scm_url'], str(repo_dir)], check=True) + + # Check out the specific commit + run(['git', 'checkout', package['commit_id']], cwd=repo_dir, check=True) + + # Ensure dependencies + ensure_dependencies(dependencies) + + # Apply patches if any + apply_patches(repo_dir, patch_dir, repo_name) + + # Prepare the package if required + if package.get('prepare_package', False): + prepare_package(repo_dir, package.get('install_data', '')) + + # Build dependency package and install it + if (repo_dir / 'debian/control').exists(): + try: + run('sudo mk-build-deps --install --tool "apt-get --yes --no-install-recommends"', cwd=repo_dir, check=True, shell=True) + run('sudo dpkg -i *build-deps*.deb', cwd=repo_dir, check=True, shell=True) + except CalledProcessError as e: + print(f"Failed to build package {repo_name}: {e}") + + # Build the package, check if we have build_cmd in the package.toml + build_cmd = package.get('build_cmd', 'dpkg-buildpackage -uc -us -tc -b') + run(build_cmd, cwd=repo_dir, check=True, shell=True) + + except CalledProcessError as e: + print(f"Failed to build package {repo_name}: {e}") + finally: + # Clean up repository directory + # shutil.rmtree(repo_dir, ignore_errors=True) + pass + + +def cleanup_build_deps(repo_dir: Path) -> None: + """Clean up build dependency packages""" + try: + if repo_dir.exists(): + for 
file in glob.glob(str(repo_dir / '*build-deps*.deb')): + os.remove(file) + print("Cleaned up build dependency packages") + except Exception as e: + print(f"Error cleaning up build dependencies: {e}") + + +def copy_packages(repo_dir: Path) -> None: + """Copy generated .deb packages to the parent directory""" + try: + deb_files = glob.glob(str(repo_dir / '*.deb')) + for deb_file in deb_files: + shutil.copy(deb_file, repo_dir.parent) + print(f'I: copy generated "{deb_file}" package') + except Exception as e: + print(f"Error copying packages: {e}") + + +if __name__ == '__main__': + # Prepare argument parser + arg_parser = ArgumentParser() + arg_parser.add_argument('--config', + default='package.toml', + help='Path to the package configuration file') + arg_parser.add_argument('--patch-dir', + default='patches', + help='Path to the directory containing patches') + args = arg_parser.parse_args() + + # Load package configuration + with open(args.config, 'r') as file: + config = toml.load(file) + + packages = config['packages'] + patch_dir = Path(args.patch_dir) + + for package in packages: + dependencies = package.get('dependencies', {}).get('packages', []) + + # Build the package + build_package(package, dependencies, patch_dir) + + # Clean up build dependency packages after build + cleanup_build_deps(Path(package['name'])) + + # Copy generated .deb packages to parent directory + copy_packages(Path(package['name'])) diff --git a/scripts/package-build/netfilter/package.toml b/scripts/package-build/netfilter/package.toml new file mode 100644 index 00000000..45752d08 --- /dev/null +++ b/scripts/package-build/netfilter/package.toml @@ -0,0 +1,11 @@ +[[packages]] +name = "pkg-libnftnl" +commit_id = "debian/1.2.6-2" +scm_url = "https://salsa.debian.org/pkg-netfilter-team/pkg-libnftnl.git" +build_cmd = "sudo mk-build-deps --install --tool 'apt-get --yes --no-install-recommends'; dpkg-buildpackage -uc -us -tc -b" + +[[packages]] +name = "pkg-nftables" +commit_id = 
"debian/1.0.9-1" +scm_url = "https://salsa.debian.org/pkg-netfilter-team/pkg-nftables.git" +build_cmd = "sudo dpkg -i ../libnftnl*.deb; dpkg-buildpackage -uc -us -tc -b" diff --git a/scripts/package-build/netfilter/patches/pkg-nftables/0001-meta-fix-hour-decoding.patch b/scripts/package-build/netfilter/patches/pkg-nftables/0001-meta-fix-hour-decoding.patch new file mode 100644 index 00000000..dd466f1a --- /dev/null +++ b/scripts/package-build/netfilter/patches/pkg-nftables/0001-meta-fix-hour-decoding.patch @@ -0,0 +1,118 @@ +From d392ddf243dcbf8a34726c777d2c669b1e8bfa85 Mon Sep 17 00:00:00 2001 +From: Florian Westphal <fw@strlen.de> +Date: Thu, 2 Nov 2023 15:34:13 +0100 +Subject: meta: fix hour decoding when timezone offset is negative + +Brian Davidson says: + + meta hour rules don't display properly after being created when the + hour is on or after 00:00 UTC. The netlink debug looks correct for + seconds past midnight UTC, but displaying the rules looks like an + overflow or a byte order problem. I am in UTC-0400, so today, 20:00 + and later exhibits the problem, while 19:00 and earlier hours are + fine. + +meta.c only ever worked when the delta to UTC is positive. +We need to add in case the second counter turns negative after +offset adjustment. + +Also add a test case for this. 
+ +Fixes: f8f32deda31d ("meta: Introduce new conditions 'time', 'day' and 'hour'") +Reported-by: Brian Davidson <davidson.brian@gmail.com> +Signed-off-by: Florian Westphal <fw@strlen.de> +--- + src/meta.c | 11 ++++- + .../shell/testcases/listing/dumps/meta_time.nodump | 0 + tests/shell/testcases/listing/meta_time | 52 ++++++++++++++++++++++ + 3 files changed, 61 insertions(+), 2 deletions(-) + create mode 100644 tests/shell/testcases/listing/dumps/meta_time.nodump + create mode 100755 tests/shell/testcases/listing/meta_time + +diff --git a/src/meta.c b/src/meta.c +index b578d5e2..7846aefe 100644 +--- a/src/meta.c ++++ b/src/meta.c +@@ -495,9 +495,16 @@ static void hour_type_print(const struct expr *expr, struct output_ctx *octx) + + /* Obtain current tm, so that we can add tm_gmtoff */ + ts = time(NULL); +- if (ts != ((time_t) -1) && localtime_r(&ts, &cur_tm)) +- seconds = (seconds + cur_tm.tm_gmtoff) % SECONDS_PER_DAY; ++ if (ts != ((time_t) -1) && localtime_r(&ts, &cur_tm)) { ++ int32_t adj = seconds + cur_tm.tm_gmtoff; + ++ if (adj < 0) ++ adj += SECONDS_PER_DAY; ++ else if (adj >= SECONDS_PER_DAY) ++ adj -= SECONDS_PER_DAY; ++ ++ seconds = adj; ++ } + minutes = seconds / 60; + seconds %= 60; + hours = minutes / 60; +diff --git a/tests/shell/testcases/listing/dumps/meta_time.nodump b/tests/shell/testcases/listing/dumps/meta_time.nodump +new file mode 100644 +index 00000000..e69de29b +diff --git a/tests/shell/testcases/listing/meta_time b/tests/shell/testcases/listing/meta_time +new file mode 100755 +index 00000000..a9761998 +--- /dev/null ++++ b/tests/shell/testcases/listing/meta_time +@@ -0,0 +1,52 @@ ++#!/bin/bash ++ ++set -e ++ ++TMP1=$(mktemp) ++TMP2=$(mktemp) ++ ++cleanup() ++{ ++ rm -f "$TMP1" ++ rm -f "$TMP2" ++} ++ ++check_decode() ++{ ++ TZ=$1 $NFT list chain t c | grep meta > "$TMP2" ++ diff -u "$TMP1" "$TMP2" ++} ++ ++trap cleanup EXIT ++ ++$NFT -f - <<EOF ++table t { ++ chain c { ++ } ++} ++EOF ++ ++for i in $(seq -w 0 23); do ++ TZ=UTC $NFT add rule 
t c meta hour "$i:00"-"$i:59" ++done ++ ++# Check decoding in UTC, this mirrors 1:1 what should have been added. ++for i in $(seq 0 23); do ++ printf "\t\tmeta hour \"%02d:%02d\"-\"%02d:%02d\"\n" $i 0 $i 59 >> "$TMP1" ++done ++ ++check_decode UTC ++ ++printf "\t\tmeta hour \"%02d:%02d\"-\"%02d:%02d\"\n" 23 0 23 59 > "$TMP1" ++for i in $(seq 0 22); do ++ printf "\t\tmeta hour \"%02d:%02d\"-\"%02d:%02d\"\n" $i 0 $i 59 >> "$TMP1" ++done ++check_decode UTC+1 ++ ++printf "\t\tmeta hour \"%02d:%02d\"-\"%02d:%02d\"\n" 1 0 1 59 > "$TMP1" ++for i in $(seq 2 23); do ++ printf "\t\tmeta hour \"%02d:%02d\"-\"%02d:%02d\"\n" $i 0 $i 59 >> "$TMP1" ++done ++printf "\t\tmeta hour \"%02d:%02d\"-\"%02d:%02d\"\n" 0 0 0 59 >> "$TMP1" ++ ++check_decode UTC-1 +-- +cgit v1.2.3 + |
