initial commit
imported from https://salsa.debian.org/kernel-team/linux.git commit 9d5cc9d9d6501d7f1dd7e194d4b245bd0b6c6a22 version 6.11.4-1
77
debian/bin/buildcheck.py
vendored
Executable file
@@ -0,0 +1,77 @@
#!/usr/bin/python3

import itertools
import os
import pathlib
import sys

from debian_linux.config_v2 import Config
from debian_linux.kconfig import KconfigFile


class CheckSecureBootConfig:
    def __init__(self, config, dir, *_):
        self.config = config
        self.dir = pathlib.Path(dir)

    def __call__(self, out):
        fail = 0

        if self.config.build.enable_signed \
           and not os.getenv('DEBIAN_KERNEL_DISABLE_SIGNED'):
            kconfig = KconfigFile()
            with (self.dir / '.config').open() as fh:
                kconfig.read(fh)

            for name, value in [('EFI_STUB', True),
                                ('LOCK_DOWN_IN_EFI_SECURE_BOOT', True),
                                ('SYSTEM_TRUSTED_KEYS', '""')]:
                if name not in kconfig:
                    out.write(f'Secure Boot: CONFIG_{name} is not defined\n')
                    fail = 1
                elif kconfig[name].value != value:
                    out.write(f'Secure Boot: CONFIG_{name} has wrong value:'
                              f' {kconfig[name].value}\n')
                    fail = 1

        return fail


class Main(object):

    checks = {
        'setup': [CheckSecureBootConfig],
        'build': [],
    }

    def __init__(self, dir, arch, featureset, flavour, phase):
        self.args = dir, arch, featureset, flavour
        self.phase = phase

        config_dirs = [
            pathlib.Path('debian/config'),
            pathlib.Path('debian/config.local'),
        ]
        top_config = Config.read_orig(config_dirs).merged
        arch_config = next(
            ac
            for ac in itertools.chain.from_iterable(
                kac.debianarchs for kac in top_config.kernelarchs)
            if ac.name == arch
        )
        fs_config = next(fsc for fsc in arch_config.featuresets
                         if fsc.name == featureset)
        self.config = next(fc for fc in fs_config.flavours
                           if fc.name == flavour)

    def __call__(self):
        fail = 0

        for c in self.checks[self.phase]:
            fail |= c(self.config, *self.args)(sys.stdout)

        return fail


if __name__ == '__main__':
    sys.exit(Main(*sys.argv[1:])())
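A hedged sketch of a manual invocation: the positional arguments map onto Main(dir, arch, featureset, flavour, phase) above; the build directory and flavour names are illustrative, not taken from this commit.

    # Run the Secure Boot config check for one flavour during the 'setup' phase
    debian/bin/buildcheck.py debian/build/build_amd64_none_amd64 amd64 none amd64 setup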
28
debian/bin/check-patches.sh
vendored
Executable file
@@ -0,0 +1,28 @@
#!/bin/sh -e

TMPDIR=$(mktemp -d)
trap "rm -rf $TMPDIR" EXIT
for patchdir in debian/patches*; do
    sed '/^#/d; /^[[:space:]]*$/d; /^X /d; s/^+ //; s,^,'"$patchdir"'/,' "$patchdir"/series
done | sort -u > $TMPDIR/used
find debian/patches* ! -path '*/series' -type f -name "*.diff" -o -name "*.patch" -printf "%p\n" | sort > $TMPDIR/avail
echo "Used patches"
echo "=============="
cat $TMPDIR/used
echo
echo "Unused patches"
echo "=============="
grep -F -v -f $TMPDIR/used $TMPDIR/avail || test $? = 1
echo
echo "Patches without required headers"
echo "================================"
xargs grep -E -l '^(Subject|Description):' < $TMPDIR/used | xargs grep -E -l '^(From|Author|Origin):' > $TMPDIR/goodheaders || test $? = 1
grep -F -v -f $TMPDIR/goodheaders $TMPDIR/used || test $? = 1
echo
echo "Patches without Origin or Forwarded header"
echo "=========================================="
xargs grep -E -L '^(Origin:|Forwarded: (no\b|not-needed|http))' < $TMPDIR/used || test $? = 1
echo
echo "Patches to be forwarded"
echo "======================="
xargs grep -E -l '^Forwarded: no\b' < $TMPDIR/used || test $? = 1
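A hedged sketch of what the series-normalising sed above does; the patch name is made up.

    # Worked example (hypothetical series entry): the sed turns
    #   bugfix/all/fix-foo.patch
    # from debian/patches/series into
    #   debian/patches/bugfix/all/fix-foo.patch
    # so $TMPDIR/used can be compared path-for-path against $TMPDIR/avail.
    debian/bin/check-patches.sh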
1
debian/bin/debian_linux
vendored
Symbolic link
@@ -0,0 +1 @@
../lib/python/debian_linux/
32
debian/bin/diff-gencontrol
vendored
Executable file
@@ -0,0 +1,32 @@
#!/bin/bash
set -euE

REF_BASE=${1:-master}

REPO=$(git rev-parse --show-toplevel)
COMMIT_BASE=$(git merge-base --fork-point "$REF_BASE")
COMMIT_NEW=$(git stash create)

TMP=$(mktemp -d)
trap "rm -rf '$TMP'" EXIT

function git {
    command git -c advice.detachedHead=false -c init.defaultBranch=main -C "$TMP" "$@"
}

git init -q
git remote add origin "$REPO"
git fetch -q --depth 1 origin $COMMIT_BASE:base $COMMIT_NEW:new

git checkout -q base
echo "Running gencontrol on ${COMMIT_BASE}"
( cd "$TMP"; ./debian/bin/gencontrol.py )
git stash push -q --all

git checkout -q new
echo "Running gencontrol on uncommitted changes"
( cd "$TMP"; ./debian/bin/gencontrol.py )
git stash push -q --all

# ^3 is the commit with untracked files
git diff stash@{1}^3 stash@{0}^3
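A hedged usage sketch: diff the gencontrol output of the uncommitted working tree against its fork point from master (the default REF_BASE), or name another base ref explicitly; the ref in the second line is illustrative.

    debian/bin/diff-gencontrol
    debian/bin/diff-gencontrol origin/master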
12
debian/bin/fix-shebang
vendored
Executable file
@@ -0,0 +1,12 @@
#!/usr/bin/perl -pi

# Change "#!/usr/bin/env perl" to "#!/usr/bin/perl" (policy §10.4).
# Other uses of /usr/bin/env should probably be converted as well, but
# policy doesn't specify what to do.
if ($. == 1 && m|^\#!\s*/usr/bin/env\s+(.+)|) {
    if ($1 eq "perl") {
        $_ = "#!/usr/bin/perl\n";
    } else {
        print STDERR "W: Found #!/usr/bin/env $1 and don't know what to substitute\n";
    }
}
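The -pi flags in the shebang make this an in-place filter over the files named on the command line; a hedged example with illustrative paths.

    find . -name '*.pl' -print0 | xargs -0 debian/bin/fix-shebang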
635
debian/bin/gencontrol.py
vendored
Executable file
@@ -0,0 +1,635 @@
#!/usr/bin/python3

from __future__ import annotations

import dataclasses
import json
import locale
import os
import os.path
import pathlib
import subprocess
import re
import tempfile
from typing import cast

from debian_linux.config_v2 import (
    Config,
    ConfigMerged,
    ConfigMergedDebianarch,
    ConfigMergedFeatureset,
    ConfigMergedFlavour,
)
from debian_linux.dataclasses_deb822 import read_deb822, write_deb822
from debian_linux.debian import \
    PackageBuildprofile, \
    PackageRelationEntry, PackageRelationGroup, \
    VersionLinux, BinaryPackage
from debian_linux.gencontrol import Gencontrol as Base, PackagesBundle, \
    MakeFlags
from debian_linux.utils import Templates

locale.setlocale(locale.LC_CTYPE, "C.UTF-8")


class Gencontrol(Base):
    disable_installer: bool
    disable_signed: bool

    env_flags = [
        ('DEBIAN_KERNEL_DISABLE_INSTALLER', 'disable_installer', 'installer modules'),
        ('DEBIAN_KERNEL_DISABLE_SIGNED', 'disable_signed', 'signed code'),
    ]

    def __init__(
            self,
            config_dirs=[
                pathlib.Path('debian/config'),
                pathlib.Path('debian/config.local'),
            ],
            template_dirs=["debian/templates"],
    ) -> None:
        super().__init__(
            Config.read_orig(config_dirs).merged,
            Templates(template_dirs),
            VersionLinux)
        self.config_dirs = config_dirs
        self.process_changelog()

        for env, attr, desc in self.env_flags:
            setattr(self, attr, False)
            if os.getenv(env):
                if self.changelog[0].distribution == 'UNRELEASED':
                    import warnings
                    warnings.warn(f'Disable {desc} on request ({env} set)')
                    setattr(self, attr, True)
                else:
                    raise RuntimeError(
                        f'Unable to disable {desc} in release build ({env} set)')

    def _setup_makeflags(self, names, makeflags, data) -> None:
        for src, dst, optional in names:
            if src in data or not optional:
                makeflags[dst] = data[src]

    def do_main_setup(
            self,
            config: ConfigMerged,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        super().do_main_setup(config, vars, makeflags)
        makeflags.update({
            'VERSION': self.version.linux_version,
            'UPSTREAMVERSION': self.version.linux_upstream,
            'ABINAME': self.abiname,
            'SOURCEVERSION': self.version.complete,
        })
        makeflags['SOURCE_BASENAME'] = vars['source_basename']
        makeflags['SOURCE_SUFFIX'] = vars['source_suffix']

        # Prepare to generate debian/tests/control
        self.tests_control = list(self.templates.get_tests_control('main.tests-control', vars))

    def do_main_makefile(
            self,
            config: ConfigMerged,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        for featureset in self.config.root_featuresets:
            makeflags_featureset = makeflags.copy()
            makeflags_featureset['FEATURESET'] = featureset.name

            self.bundle.makefile.add_rules(f'source_{featureset.name}',
                                           'source', makeflags_featureset)
            self.bundle.makefile.add_deps('source', [f'source_{featureset.name}'])

        makeflags = makeflags.copy()
        makeflags['ALL_FEATURESETS'] = ' '.join(i.name for i in self.config.root_featuresets)
        super().do_main_makefile(config, vars, makeflags)

    def do_main_packages(
            self,
            config: ConfigMerged,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        self.bundle.add('main', (), makeflags, vars)

        # Only build the metapackages if their names won't exactly match
        # the packages they depend on
        do_meta = config.packages.meta \
            and vars['source_suffix'] != '-' + vars['version']

        if config.packages.docs:
            self.bundle.add('docs', (), makeflags, vars)
            if do_meta:
                self.bundle.add('docs.meta', (), makeflags, vars)
        if config.packages.source:
            self.bundle.add('sourcebin', (), makeflags, vars)
            if do_meta:
                self.bundle.add('sourcebin.meta', (), makeflags, vars)

        if config.packages.libc_dev:
            libcdev_kernelarches = set()
            libcdev_multiarches = set()
            for kernelarch in self.config.kernelarchs:
                libcdev_kernelarches.add(kernelarch.name)
                for debianarch in kernelarch.debianarchs:
                    libcdev_multiarches.add(
                        f'{debianarch.defs_debianarch.multiarch}:{kernelarch.name}'
                    )

            libcdev_makeflags = makeflags.copy()
            libcdev_makeflags['ALL_LIBCDEV_KERNELARCHES'] = ' '.join(sorted(libcdev_kernelarches))
            libcdev_makeflags['ALL_LIBCDEV_MULTIARCHES'] = ' '.join(sorted(libcdev_multiarches))

            self.bundle.add('libc-dev', (), libcdev_makeflags, vars)

    def do_indep_featureset_setup(
            self,
            config: ConfigMergedFeatureset,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        makeflags['LOCALVERSION'] = vars['localversion']
        kernel_arches = set()
        for kernelarch in self.config.kernelarchs:
            for debianarch in kernelarch.debianarchs:
                for featureset in debianarch.featuresets:
                    if config.name_featureset in featureset.name:
                        kernel_arches.add(kernelarch.name)
        makeflags['ALL_KERNEL_ARCHES'] = ' '.join(sorted(list(kernel_arches)))

        vars['featureset_desc'] = ''
        if config.name_featureset != 'none':
            desc = config.description
            vars['featureset_desc'] = (' with the %s featureset' %
                                       desc.short[desc.parts[0]])

    def do_indep_featureset_packages(
            self,
            config: ConfigMergedFeatureset,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        self.bundle.add('headers.featureset', (config.name_featureset, ), makeflags, vars)

    def do_arch_setup(
            self,
            config: ConfigMergedDebianarch,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        makeflags['KERNEL_ARCH'] = config.name_kernelarch

    def do_arch_packages(
            self,
            config: ConfigMergedDebianarch,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        arch = config.name

        if not self.disable_signed:
            build_signed = config.build.enable_signed
        else:
            build_signed = False

        if build_signed:
            # Make sure variables remain
            vars['signedtemplate_binaryversion'] = '@signedtemplate_binaryversion@'
            vars['signedtemplate_sourceversion'] = '@signedtemplate_sourceversion@'

            self.bundle.add('signed-template', (arch,), makeflags, vars, arch=arch)

            bundle_signed = self.bundles[f'signed-{arch}'] = \
                PackagesBundle(f'signed-{arch}', 'signed.source.control', vars, self.templates)

            with bundle_signed.open('source/lintian-overrides', 'w') as f:
                f.write(self.substitute(
                    self.templates.get('signed.source.lintian-overrides'), vars))

            with bundle_signed.open('changelog.head', 'w') as f:
                dist = self.changelog[0].distribution
                urgency = self.changelog[0].urgency
                f.write(f'''\
linux-signed-{vars['arch']} (@signedtemplate_sourceversion@) {dist}; urgency={urgency}

  * Sign kernel from {self.changelog[0].source} @signedtemplate_binaryversion@
''')

        if config.packages.source and list(config.featuresets):
            self.bundle.add('config', (arch, ), makeflags, vars)

        if config.packages.tools_unversioned:
            self.bundle.add('tools-unversioned', (arch, ), makeflags, vars)

        if config.packages.tools_versioned:
            self.bundle.add('tools-versioned', (arch, ), makeflags, vars)

    def do_featureset_setup(
            self,
            featureset: ConfigMergedFeatureset,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        vars['localversion_headers'] = vars['localversion']
        makeflags['LOCALVERSION_HEADERS'] = vars['localversion_headers']

    def do_flavour_setup(
            self,
            config: ConfigMergedFlavour,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        vars['flavour'] = vars['localversion'][1:]
        vars['class'] = config.description.hardware or ''
        vars['longclass'] = config.description.hardware_long or vars['class']

        vars['localversion-image'] = vars['localversion']

        vars['image-stem'] = cast(str, config.build.kernel_stem)

        if t := config.build.cflags:
            makeflags['KCFLAGS'] = t
        makeflags['COMPILER'] = config.build.compiler
        if t := config.build.compiler_gnutype:
            makeflags['KERNEL_GNU_TYPE'] = t
        if t := config.build.compiler_gnutype_compat:
            makeflags['COMPAT_GNU_TYPE'] = t
        makeflags['IMAGE_FILE'] = config.build.kernel_file
        makeflags['IMAGE_INSTALL_STEM'] = config.build.kernel_stem

        makeflags['LOCALVERSION'] = vars['localversion']
        makeflags['LOCALVERSION_IMAGE'] = vars['localversion-image']

    def do_flavour_packages(
            self,
            config: ConfigMergedFlavour,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        arch = config.name_debianarch
        ruleid = (arch, config.name_featureset, config.name_flavour)

        packages_headers = (
            self.bundle.add('headers', ruleid, makeflags, vars, arch=arch)
        )
        assert len(packages_headers) == 1

        do_meta = config.packages.meta

        relation_compiler = PackageRelationEntry(cast(str, config.build.compiler))

        relation_compiler_header = PackageRelationGroup([relation_compiler])

        # Generate compiler build-depends for native:
        # gcc-N [arm64] <!cross !pkg.linux.nokernel>
        self.bundle.source.build_depends_arch.merge([
            PackageRelationEntry(
                relation_compiler,
                arches={arch},
                restrictions='<!cross !pkg.linux.nokernel>',
            )
        ])

        # Generate compiler build-depends for cross:
        # gcc-N-aarch64-linux-gnu [arm64] <cross !pkg.linux.nokernel>
        self.bundle.source.build_depends_arch.merge([
            PackageRelationEntry(
                relation_compiler,
                name=f'{relation_compiler.name}-{config.defs_debianarch.gnutype_package}',
                arches={arch},
                restrictions='<cross !pkg.linux.nokernel>',
            )
        ])

        # Generate compiler build-depends for kernel:
        # gcc-N-hppa64-linux-gnu [hppa] <!pkg.linux.nokernel>
        if gnutype := config.build.compiler_gnutype:
            if gnutype != config.defs_debianarch.gnutype:
                self.bundle.source.build_depends_arch.merge([
                    PackageRelationEntry(
                        relation_compiler,
                        name=f'{relation_compiler.name}-{gnutype.replace("_", "-")}',
                        arches={arch},
                        restrictions='<!pkg.linux.nokernel>',
                    )
                ])

        # Generate compiler build-depends for compat:
        # gcc-arm-linux-gnueabihf [arm64] <!pkg.linux.nokernel>
        # XXX: Linux uses various definitions for this, all ending with "gcc", not $CC
        if gnutype := config.build.compiler_gnutype_compat:
            if gnutype != config.defs_debianarch.gnutype:
                self.bundle.source.build_depends_arch.merge([
                    PackageRelationEntry(
                        f'gcc-{gnutype.replace("_", "-")}',
                        arches={arch},
                        restrictions='<!pkg.linux.nokernel>',
                    )
                ])

        packages_own = []

        if not self.disable_signed:
            build_signed = config.build.enable_signed
        else:
            build_signed = False

        if build_signed:
            bundle_signed = self.bundles[f'signed-{arch}']
        else:
            bundle_signed = self.bundle

        vars.setdefault('desc', '')

        if build_signed:
            packages_image_unsigned = (
                self.bundle.add('image-unsigned', ruleid, makeflags, vars, arch=arch)
            )
            packages_image = packages_image_unsigned[:]
            packages_image.extend(
                bundle_signed.add('signed.image', ruleid, makeflags, vars, arch=arch)
            )

        else:
            packages_image = packages_image_unsigned = (
                bundle_signed.add('image', ruleid, makeflags, vars, arch=arch)
            )

        for field in ('Depends', 'Provides', 'Suggests', 'Recommends',
                      'Conflicts', 'Breaks'):
            for i in getattr(config.relations.image, field.lower(), []):
                for package_image in packages_image:
                    getattr(package_image, field.lower()).merge(
                        PackageRelationGroup(i, arches={arch})
                    )

        for field in ('Depends', 'Suggests', 'Recommends'):
            for i in getattr(config.relations.image, field.lower(), []):
                group = PackageRelationGroup(i, arches={arch})
                for entry in group:
                    if entry.operator is not None:
                        entry.operator = -entry.operator
                        for package_image in packages_image:
                            package_image.breaks.append(PackageRelationGroup([entry]))

        if desc_parts := config.description.parts:
            # XXX: Workaround, we need to support multiple entries of the same
            # name
            parts = list(set(desc_parts))
            parts.sort()
            for package_image in packages_image:
                desc = package_image.description
                for part in parts:
                    desc.append(config.description.long[part])
                    desc.append_short(config.description.short[part])

        packages_headers[0].depends.merge(relation_compiler_header)
        packages_own.extend(packages_image)
        packages_own.extend(packages_headers)

        # The image meta-packages will depend on signed linux-image
        # packages where applicable, so should be built from the
        # signed source packages The header meta-packages will also be
        # built along with the signed packages, to create a dependency
        # relationship that ensures src:linux and src:linux-signed-*
        # transition to testing together.
        if do_meta:
            packages_meta = (
                bundle_signed.add('image.meta', ruleid, makeflags, vars, arch=arch)
            )
            assert len(packages_meta) == 1
            packages_meta += (
                bundle_signed.add(build_signed and 'signed.headers.meta' or 'headers.meta',
                                  ruleid, makeflags, vars, arch=arch)
            )
            assert len(packages_meta) == 2

            if (
                config.defs_flavour.is_default
                and not self.vars['source_suffix']
            ):
                packages_meta[0].provides.append('linux-image-generic')
                packages_meta[1].provides.append('linux-headers-generic')

            packages_own.extend(packages_meta)

        if config.build.enable_vdso:
            makeflags['VDSO'] = True

        packages_own.extend(
            self.bundle.add('image-dbg', ruleid, makeflags, vars, arch=arch)
        )
        if do_meta:
            packages_own.extend(
                self.bundle.add('image-dbg.meta', ruleid, makeflags, vars, arch=arch)
            )

        if (
            config.defs_flavour.is_default
            # XXX
            and not self.vars['source_suffix']
        ):
            packages_own.extend(
                self.bundle.add('image-extra-dev', ruleid, makeflags, vars, arch=arch)
            )

        # In a quick build, only build the quick flavour (if any).
        if not config.defs_flavour.is_quick:
            for package in packages_own:
                package.build_profiles[0].neg.add('pkg.linux.quick')

        tests_control_image = self.templates.get_tests_control('image.tests-control', vars)
        for c in tests_control_image:
            c.depends.extend(
                [i.name for i in packages_image_unsigned]
            )

        tests_control_headers = self.templates.get_tests_control('headers.tests-control', vars)
        for c in tests_control_headers:
            c.depends.extend(
                [i.name for i in packages_headers] +
                [i.name for i in packages_image_unsigned]
            )

        self.tests_control.extend(tests_control_image)
        self.tests_control.extend(tests_control_headers)

        kconfig = []
        for c in config.config:
            for d in self.config_dirs:
                if (f := d / c).exists():
                    kconfig.append(str(f))
        makeflags['KCONFIG'] = ' '.join(kconfig)
        makeflags['KCONFIG_OPTIONS'] = ''
        # Add "salt" to fix #872263
        makeflags['KCONFIG_OPTIONS'] += \
            ' -o "BUILD_SALT=\\"%(abiname)s%(localversion)s\\""' % vars

        merged_config = ('debian/build/config.%s_%s_%s' %
                         (config.name_debianarch, config.name_featureset, config.name_flavour))
        self.bundle.makefile.add_cmds(merged_config,
                                      ["$(MAKE) -f debian/rules.real %s %s" %
                                       (merged_config, makeflags)])

        if (
            config.name_featureset == 'none'
            and not self.disable_installer
            and config.packages.installer
        ):
            with tempfile.TemporaryDirectory(prefix='linux-gencontrol') as config_dir:
                base_path = pathlib.Path('debian/installer').absolute()
                config_path = pathlib.Path(config_dir)
                (config_path / 'modules').symlink_to(base_path / 'modules')
                (config_path / 'package-list').symlink_to(base_path / 'package-list')

                with (config_path / 'kernel-versions').open('w') as versions:
                    versions.write(f'{arch} - {vars["flavour"]} - - -\n')

                # Add udebs using kernel-wedge
                kw_env = os.environ.copy()
                kw_env['KW_DEFCONFIG_DIR'] = config_dir
                kw_env['KW_CONFIG_DIR'] = config_dir
                kw_proc = subprocess.Popen(
                    ['kernel-wedge', 'gen-control', vars['abiname']],
                    stdout=subprocess.PIPE,
                    text=True,
                    env=kw_env)
                assert kw_proc.stdout is not None
                udeb_packages_base = list(read_deb822(BinaryPackage, kw_proc.stdout))
                kw_proc.wait()
                if kw_proc.returncode != 0:
                    raise RuntimeError('kernel-wedge exited with code %d' %
                                       kw_proc.returncode)

            udeb_packages = [
                dataclasses.replace(
                    package_base,
                    # kernel-wedge currently chokes on Build-Profiles so add it now
                    build_profiles=PackageBuildprofile.parse(
                        '<!noudeb !pkg.linux.nokernel !pkg.linux.quick>',
                    ),
                    meta_rules_target='installer',
                )
                for package_base in udeb_packages_base
            ]

            makeflags_local = makeflags.copy()
            makeflags_local['IMAGE_PACKAGE_NAME'] = udeb_packages[0].name

            bundle_signed.add_packages(
                udeb_packages,
                (config.name_debianarch, config.name_featureset, config.name_flavour),
                makeflags_local, arch=arch,
            )

            if build_signed:
                # XXX This is a hack to exclude the udebs from
                # the package list while still being able to
                # convince debhelper and kernel-wedge to go
                # part way to building them.
                udeb_packages = [
                    dataclasses.replace(
                        package_base,
                        # kernel-wedge currently chokes on Build-Profiles so add it now
                        build_profiles=PackageBuildprofile.parse(
                            '<pkg.linux.udeb-unsigned-test-build !noudeb'
                            ' !pkg.linux.nokernel !pkg.linux.quick>',
                        ),
                        meta_rules_target='installer-test',
                    )
                    for package_base in udeb_packages_base
                ]

                self.bundle.add_packages(
                    udeb_packages,
                    (config.name_debianarch, config.name_featureset, config.name_flavour),
                    makeflags_local, arch=arch, check_packages=False,
                )

    def process_changelog(self) -> None:
        version = self.version = self.changelog[0].version

        if self.changelog[0].distribution == 'UNRELEASED':
            self.abiname = f'{version.linux_upstream}+unreleased'
        elif self.changelog[0].distribution == 'experimental':
            self.abiname = f'{version.linux_upstream}'
        elif version.linux_revision_backports:
            self.abiname = f'{version.linux_upstream_full}+bpo'
        else:
            self.abiname = f'{version.linux_upstream_full}'

        self.vars = {
            'upstreamversion': self.version.linux_upstream,
            'version': self.version.linux_version,
            'version_complete': self.version.complete,
            'source_basename': re.sub(r'-[\d.]+$', '',
                                      self.changelog[0].source),
            'source_upstream': self.version.upstream,
            'source_package': self.changelog[0].source,
            'abiname': self.abiname,
        }
        self.vars['source_suffix'] = \
            self.changelog[0].source[len(self.vars['source_basename']):]

        distribution = self.changelog[0].distribution
        if distribution in ('unstable', ):
            if version.linux_revision_experimental or \
               version.linux_revision_backports or \
               version.linux_revision_other:
                raise RuntimeError("Can't upload to %s with a version of %s" %
                                   (distribution, version))
        if distribution in ('experimental', ):
            if not version.linux_revision_experimental:
                raise RuntimeError("Can't upload to %s with a version of %s" %
                                   (distribution, version))
        if distribution.endswith('-security') or distribution.endswith('-lts'):
            if version.linux_revision_backports or \
               version.linux_revision_other:
                raise RuntimeError("Can't upload to %s with a version of %s" %
                                   (distribution, version))
        if distribution.endswith('-backports'):
            if not version.linux_revision_backports:
                raise RuntimeError("Can't upload to %s with a version of %s" %
                                   (distribution, version))

    def write(self) -> None:
        super().write()
        self.write_tests_control()
        self.write_signed()

    def write_signed(self) -> None:
        for bundle in self.bundles.values():
            pkg_sign_entries = {}

            for p in bundle.packages.values():
                if not isinstance(p, BinaryPackage):
                    continue

                if pkg_sign_pkg := p.meta_sign_package:
                    pkg_sign_entries[pkg_sign_pkg] = {
                        'trusted_certs': [],
                        'files': [
                            {
                                'sig_type': e.split(':', 1)[-1],
                                'file': e.split(':', 1)[0],
                            }
                            for e in p.meta_sign_files
                        ],
                    }

            if pkg_sign_entries:
                with bundle.path('files.json').open('w') as f:
                    json.dump({'packages': pkg_sign_entries}, f, indent=2)

    def write_tests_control(self) -> None:
        with open("debian/tests/control", 'w') as f:
            write_deb822(self.tests_control, f)


if __name__ == '__main__':
    Gencontrol()()
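A hedged note on invocation: run from the top of the packaging tree, this regenerates the generated packaging files (debian/tests/control is written by write_tests_control above; the control and rules output comes from the PackagesBundle machinery in the base class). The two environment flags are the ones declared in env_flags and, per __init__, are only honoured when the changelog entry is UNRELEASED.

    debian/bin/gencontrol.py
    DEBIAN_KERNEL_DISABLE_SIGNED=1 DEBIAN_KERNEL_DISABLE_INSTALLER=1 debian/bin/gencontrol.py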
109
debian/bin/genpatch-lockdown
vendored
Executable file
@@ -0,0 +1,109 @@
#!/usr/bin/python3

import io
import os
import os.path
import re
import subprocess
import sys


def main(repo, range='torvalds/master..dhowells/efi-lock-down'):
    patch_dir = 'debian/patches'
    lockdown_patch_dir = 'features/all/lockdown'
    series_name = 'series'

    # Only replace patches in this subdirectory and starting with a digit
    # - the others are presumably Debian-specific for now
    lockdown_patch_name_re = re.compile(
        r'^' + re.escape(lockdown_patch_dir) + r'/\d')
    series_before = []
    series_after = []

    old_series = set()
    new_series = set()

    try:
        with open(os.path.join(patch_dir, series_name), 'r') as series_fh:
            for line in series_fh:
                name = line.strip()
                if lockdown_patch_name_re.match(name):
                    old_series.add(name)
                elif len(old_series) == 0:
                    series_before.append(line)
                else:
                    series_after.append(line)
    except FileNotFoundError:
        pass

    with open(os.path.join(patch_dir, series_name), 'w') as series_fh:
        for line in series_before:
            series_fh.write(line)

        # Add directory prefix to all filenames.
        # Add Origin to all patch headers.
        def add_patch(name, source_patch, origin):
            name = os.path.join(lockdown_patch_dir, name)
            path = os.path.join(patch_dir, name)
            try:
                os.unlink(path)
            except FileNotFoundError:
                pass
            with open(path, 'w') as patch:
                in_header = True
                for line in source_patch:
                    if in_header and re.match(r'^(\n|[^\w\s]|Index:)', line):
                        patch.write('Origin: %s\n' % origin)
                        if line != '\n':
                            patch.write('\n')
                        in_header = False
                    patch.write(line)
            series_fh.write(name)
            series_fh.write('\n')
            new_series.add(name)

        # XXX No signature to verify

        env = os.environ.copy()
        env['GIT_DIR'] = os.path.join(repo, '.git')
        args = ['git', 'format-patch', '--subject-prefix=', range]
        format_proc = subprocess.Popen(args,
                                       cwd=os.path.join(patch_dir,
                                                        lockdown_patch_dir),
                                       env=env, stdout=subprocess.PIPE)
        with io.open(format_proc.stdout.fileno(), encoding='utf-8') as pipe:
            for line in pipe:
                name = line.strip('\n')
                with open(os.path.join(patch_dir, lockdown_patch_dir, name)) \
                        as source_patch:
                    patch_from = source_patch.readline()
                    match = re.match(r'From ([0-9a-f]{40}) ', patch_from)
                    assert match
                    origin = ('https://git.kernel.org/pub/scm/linux/kernel/'
                              'git/dhowells/linux-fs.git/commit?id=%s' %
                              match.group(1))
                    add_patch(name, source_patch, origin)

        for line in series_after:
            series_fh.write(line)

    for name in new_series:
        if name in old_series:
            old_series.remove(name)
        else:
            print('Added patch', os.path.join(patch_dir, name))

    for name in old_series:
        print('Obsoleted patch', os.path.join(patch_dir, name))


if __name__ == '__main__':
    if not (2 <= len(sys.argv) <= 3):
        sys.stderr.write('''\
Usage: %s REPO [REVISION-RANGE]
REPO is a git repo containing the REVISION-RANGE. The default range is
torvalds/master..dhowells/efi-lock-down.
''' % sys.argv[0])
        print('BASE is the base branch (default: torvalds/master).')
        sys.exit(2)
    main(*sys.argv[1:])
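Hedged invocations matching the usage text above; the checkout path is illustrative and is assumed to provide the torvalds and dhowells refs named in the default range.

    debian/bin/genpatch-lockdown ~/src/linux
    debian/bin/genpatch-lockdown ~/src/linux torvalds/master..dhowells/efi-lock-down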
160
debian/bin/genpatch-rt
vendored
Executable file
@@ -0,0 +1,160 @@
#!/usr/bin/python3

import argparse
import io
import os
import os.path
import re
import shutil
import subprocess
import sys
import tempfile


def main(source, version, verify_signature):
    patch_dir = 'debian/patches-rt'
    series_name = 'series'
    old_series = set()
    new_series = set()

    try:
        with open(os.path.join(patch_dir, series_name), 'r') as series_fh:
            for line in series_fh:
                name = line.strip()
                if name != '' and name[0] != '#':
                    old_series.add(name)
    except FileNotFoundError:
        pass

    with open(os.path.join(patch_dir, series_name), 'w') as series_fh:
        # Add Origin to all patch headers.
        def add_patch(name, source_patch, origin):
            path = os.path.join(patch_dir, name)
            try:
                os.unlink(path)
            except FileNotFoundError:
                pass
            with open(path, 'w') as patch:
                in_header = True
                for line in source_patch:
                    if in_header and re.match(r'^(\n|[^\w\s]|Index:)', line):
                        patch.write('Origin: %s\n' % origin)
                        if line != '\n':
                            patch.write('\n')
                        in_header = False
                    patch.write(line)
            new_series.add(name)

        if os.path.isdir(os.path.join(source, '.git')):
            # Export rebased branch from stable-rt git as patch series
            up_ver = re.sub(r'-rt\d+$', '', version)
            env = os.environ.copy()
            env['GIT_DIR'] = os.path.join(source, '.git')
            env['DEBIAN_KERNEL_KEYRING'] = 'rt-signing-key.pgp'

            if verify_signature:
                # Validate tag signature
                gpg_wrapper = os.path.join(os.getcwd(),
                                           "debian/bin/git-tag-gpg-wrapper")
                verify_proc = subprocess.Popen(
                    ['git', '-c', 'gpg.program=%s' % gpg_wrapper,
                     'tag', '-v', 'v%s-rebase' % version],
                    env=env)
                if verify_proc.wait():
                    raise RuntimeError("GPG tag verification failed")

            args = ['git', 'format-patch',
                    'v%s..v%s-rebase' % (up_ver, version)]
            format_proc = subprocess.Popen(args,
                                           cwd=patch_dir,
                                           env=env, stdout=subprocess.PIPE)
            with io.open(format_proc.stdout.fileno(), encoding='utf-8') \
                    as pipe:
                for line in pipe:
                    name = line.strip('\n')
                    with open(os.path.join(patch_dir, name)) as source_patch:
                        patch_from = source_patch.readline()
                        match = re.match(r'From ([0-9a-f]{40}) ', patch_from)
                        assert match
                        origin = ('https://git.kernel.org/cgit/linux/kernel/'
                                  'git/rt/linux-stable-rt.git/commit?id=%s' %
                                  match.group(1))
                        add_patch(name, source_patch, origin)
                    series_fh.write(line)

        else:
            # Get version and upstream version
            if version is None:
                match = re.search(r'(?:^|/)patches-(.+)\.tar\.[gx]z$', source)
                assert match, 'no version specified or found in filename'
                version = match.group(1)
            match = re.match(r'^(\d+\.\d+)(?:\.\d+|-rc\d+)?-rt\d+$', version)
            assert match, 'could not parse version string'
            up_ver = match.group(1)

            if verify_signature:
                # Expect an accompanying signature, and validate it
                source_sig = re.sub(r'.[gx]z$', '.sign', source)
                unxz_proc = subprocess.Popen(['xzcat', source],
                                             stdout=subprocess.PIPE)
                verify_output = subprocess.check_output(
                    ['gpgv', '--status-fd', '1',
                     '--keyring', 'debian/upstream/rt-signing-key.pgp',
                     '--ignore-time-conflict', source_sig, '-'],
                    stdin=unxz_proc.stdout,
                    text=True)
                if unxz_proc.wait() or \
                   not re.search(r'^\[GNUPG:\]\s+VALIDSIG\s',
                                 verify_output, re.MULTILINE):
                    sys.stderr.write(verify_output)
                    raise RuntimeError("GPG signature verification failed")

            temp_dir = tempfile.mkdtemp(prefix='rt-genpatch', dir='debian')
            try:
                # Unpack tarball
                subprocess.check_call(['tar', '-C', temp_dir, '-xaf', source])
                source_dir = os.path.join(temp_dir, 'patches')
                assert os.path.isdir(source_dir), \
                    'tarball does not contain patches directory'

                # Copy patch series
                origin = ('https://www.kernel.org/pub/linux/kernel/projects/'
                          'rt/%s/older/patches-%s.tar.xz' %
                          (up_ver, version))
                with open(os.path.join(source_dir, 'series'), 'r') \
                        as source_series_fh:
                    for line in source_series_fh:
                        name = line.strip()
                        if name != '' and name[0] != '#':
                            with open(os.path.join(source_dir, name)) \
                                    as source_patch:
                                add_patch(name, source_patch, origin)
                        series_fh.write(line)
            finally:
                shutil.rmtree(temp_dir)

    for name in new_series:
        if name in old_series:
            old_series.remove(name)
        else:
            print('Added patch', os.path.join(patch_dir, name))

    for name in old_series:
        print('Obsoleted patch', os.path.join(patch_dir, name))


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Generate or update the rt featureset patch series')
    parser.add_argument(
        'source', metavar='SOURCE', type=str,
        help='tarball of patches or git repo containing the given RT-VERSION')
    parser.add_argument(
        'version', metavar='RT-VERSION', type=str, nargs='?',
        help='rt kernel version (optional for tarballs)')
    parser.add_argument(
        '--verify-signature', action=argparse.BooleanOptionalAction,
        default=True,
        help='verify signature on tarball (detached in .sign file) or git tag')
    args = parser.parse_args()
    main(args.source, args.version, args.verify_signature)
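Hedged invocations built from the argparse definitions above; the RT version and paths are illustrative. A tarball may omit the version (it is parsed from the filename), while a git checkout needs it spelled out.

    debian/bin/genpatch-rt ../patches-6.11-rt7.tar.xz
    debian/bin/genpatch-rt ~/src/linux-stable-rt 6.11-rt7
    debian/bin/genpatch-rt --no-verify-signature ../patches-6.11-rt7.tar.xz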
42
debian/bin/git-tag-gpg-wrapper
vendored
Executable file
@@ -0,0 +1,42 @@
#!/bin/bash -e

# Instead of calling gpg, call gpgv and provide a local keyring

debian_dir="$(readlink -f "$(dirname "$0")/..")"

# Parse the expected options. If the next two lines are combined, a
# failure of getopt won't cause the script to exit.
ordered_args="$(getopt -n "$0" -o "" -l "status-fd:" -l "keyid-format:" -l "verify" -- "$@")"
eval "set -- $ordered_args"
gpgv_opts=()
while true; do
    case "$1" in
        --status-fd)
            gpgv_opts+=(--status-fd $2)
            shift 2
            ;;
        --keyid-format)
            # ignore
            shift 2
            ;;
        --verify)
            # ignore
            shift 1
            ;;
        --)
            shift 1
            break
            ;;
    esac
done

keyring="$debian_dir/upstream/${DEBIAN_KERNEL_KEYRING:-signing-key.asc}"
case "$keyring" in
    *.asc)
        keyring_armored="$keyring"
        keyring="$(mktemp)"
        trap 'rm -f "$keyring"' EXIT
        gpg --dearmor <"$keyring_armored" > "$keyring"
        ;;
esac
gpgv "${gpgv_opts[@]}" --keyring "$keyring" -- "$@"
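For context, this is how genpatch-rt above drives the wrapper: git is told to use it in place of gpg so tag verification runs against the keyring shipped under debian/upstream/. The tag name below is illustrative.

    DEBIAN_KERNEL_KEYRING=rt-signing-key.pgp \
        git -c gpg.program="$PWD/debian/bin/git-tag-gpg-wrapper" tag -v v6.11-rt7-rebase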
39
debian/bin/kconfig.py
vendored
Executable file
@@ -0,0 +1,39 @@
#!/usr/bin/python3

import optparse
import re

from debian_linux.kconfig import KconfigFile


def merge(output, configs, overrides):
    kconfig = KconfigFile()
    for c in configs:
        kconfig.read(open(c))
    for key, value in overrides.items():
        kconfig.set(key, value)
    open(output, "w").write(str(kconfig))


def opt_callback_dict(option, opt, value, parser):
    match = re.match(r'^\s*(\S+)=(\S+)\s*$', value)
    if not match:
        raise optparse.OptionValueError('not key=value')
    dest = option.dest
    data = getattr(parser.values, dest)
    data[match.group(1)] = match.group(2)


if __name__ == '__main__':
    parser = optparse.OptionParser(usage="%prog [OPTION]... FILE...")
    parser.add_option(
        '-o', '--override',
        action='callback',
        callback=opt_callback_dict,
        default={},
        dest='overrides',
        help="Override option",
        type='string')
    options, args = parser.parse_args()

    merge(args[0], args[1:], options.overrides)
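A hedged sketch of a manual merge; the file names and the FOO option are illustrative. The first FILE argument is the output, the remaining files are read in order, and -o key=value overrides win over everything read from the files.

    debian/bin/kconfig.py debian/build/config.amd64_none_amd64 \
        debian/config/config debian/config/amd64/config -o FOO=m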
18
debian/bin/no-depmod
vendored
Executable file
@@ -0,0 +1,18 @@
#!/bin/sh

set -e

# This is a dummy substitute for depmod. Since we run depmod during
# postinst, we do not need or want to package the files that it
# generates.

if [ "x$1" = x-V ]; then
    # Satisfy version test
    echo 'not really module-init-tools'
elif [ "x$1" = x-b -a "${2%/depmod.??????}" != "$2" ]; then
    # Satisfy test of short kernel versions
    mkdir -p "$2/lib/modules/$3"
    touch "$2/lib/modules/$3/modules.dep"
else
    echo 'skipping depmod'
fi
135
debian/bin/stable-update
vendored
Executable file
@@ -0,0 +1,135 @@
#!/usr/bin/python3

import sys
import os
import re
import subprocess

from debian_linux.debian import Changelog, VersionLinux


def base_version(ver):
    # Assume base version is at least 3.0, thus only 2 components wanted
    match = re.match(r'^(\d+\.\d+)', ver)
    assert match
    return match.group(1)


def add_update(ver, inc):
    base = base_version(ver)
    if base == ver:
        update = 0
    else:
        update = int(ver[len(base)+1:])
    update += inc
    if update == 0:
        return base
    else:
        return '{}.{}'.format(base, update)


def next_update(ver):
    return add_update(ver, 1)


def print_stable_log(log, cur_ver, new_ver):
    major_ver = re.sub(r'^(\d+)\..*', r'\1', cur_ver)
    while cur_ver != new_ver:
        next_ver = next_update(cur_ver)
        print('    https://www.kernel.org/pub/linux/kernel/v{}.x/ChangeLog-{}'
              .format(major_ver, next_ver),
              file=log)
        log.flush()  # serialise our output with git's
        subprocess.check_call(['git', 'log', '--reverse',
                               '--pretty=    - %s',
                               'v{}..v{}^'.format(cur_ver, next_ver)],
                              stdout=log)
        cur_ver = next_ver


def main(repo, new_ver):
    if os.path.exists(os.path.join(repo, '.git')):
        os.environ['GIT_DIR'] = os.path.join(repo, '.git')
    else:
        os.environ['GIT_DIR'] = repo

    changelog = Changelog(version=VersionLinux)
    cur_pkg_ver = changelog[0].version
    cur_ver = cur_pkg_ver.linux_upstream_full

    if base_version(new_ver) != base_version(cur_ver):
        print('{} is not on the same stable series as {}'
              .format(new_ver, cur_ver),
              file=sys.stderr)
        sys.exit(2)

    new_pkg_ver = new_ver + '-1'
    if cur_pkg_ver.linux_revision_experimental:
        new_pkg_ver += '~exp1'

    # Three possible cases:
    # 1. The current version has been released so we need to add a new
    #    version to the changelog.
    # 2. The current version has not been released so we're changing its
    #    version string.
    #    (a) There are no stable updates included in the current version,
    #        so we need to insert an introductory line, the URL(s) and
    #        git log(s) and a blank line at the top.
    #    (b) One or more stable updates are already included in the current
    #        version, so we need to insert the URL(s) and git log(s) after
    #        them.

    changelog_intro = 'New upstream stable update:'

    # Case 1
    if changelog[0].distribution != 'UNRELEASED':
        subprocess.check_call(['dch', '-v', new_pkg_ver, '-D', 'UNRELEASED',
                               changelog_intro])

    with open('debian/changelog', 'r') as old_log:
        with open('debian/changelog.new', 'w') as new_log:
            line_no = 0
            inserted = False
            intro_line = '  * {}\n'.format(changelog_intro)

            for line in old_log:
                line_no += 1

                # Case 2
                if changelog[0].distribution == 'UNRELEASED' and line_no == 1:
                    print('{} ({}) UNRELEASED; urgency={}'
                          .format(changelog[0].source, new_pkg_ver,
                                  changelog[0].urgency),
                          file=new_log)
                    continue

                if not inserted:
                    # Case 2(a)
                    if line_no == 3 and line != intro_line:
                        new_log.write(intro_line)
                        print_stable_log(new_log, cur_ver, new_ver)
                        new_log.write('\n')
                        inserted = True
                    # Case 1 or 2(b)
                    elif line_no > 3 and line == '\n':
                        print_stable_log(new_log, cur_ver, new_ver)
                        inserted = True

                # Check that we inserted before hitting the end of the
                # first version entry
                assert not (line.startswith(' -- ') and not inserted)

                new_log.write(line)

    os.rename('debian/changelog.new', 'debian/changelog')


if __name__ == '__main__':
    if len(sys.argv) != 3:
        print('''\
Usage: {} REPO VERSION
REPO is the git repository to generate a changelog from
VERSION is the stable version (without leading v)'''.format(sys.argv[0]),
              file=sys.stderr)
        sys.exit(2)
    main(*sys.argv[1:])
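A hedged example matching the usage text above; the checkout path and stable version are illustrative.

    debian/bin/stable-update ~/src/linux-stable 6.11.5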
2
debian/bin/stable-update.sh
vendored
Executable file
@@ -0,0 +1,2 @@
#!/bin/sh -e
exec "$(dirname "$0")/stable-update" "$@"
140
debian/bin/test-patches
vendored
Executable file
@@ -0,0 +1,140 @@
#!/bin/bash

set -e
shopt -s extglob

# Set defaults from the running kernel
arch="$(dpkg --print-architecture)"
kernelabi="$(uname -r)"
ff="${kernelabi##+([^-])-?(@(trunk|?(rc)+([0-9])|0.@(bpo|deb+([0-9])).+([0-9]))-)}"
if [ "x$ff" != "x$kernelabi" ]; then
    flavour="${ff#rt-}"
    if [ "x$flavour" != "x$ff" ]; then
        featureset="${ff%-$flavour}"
    else
        featureset=none
    fi
else
    flavour=
    featureset=none
fi

dbginfo=
fuzz=0
jobs=$(nproc)

eval "set -- $(getopt -n "$0" -o "f:gj:s:" -l "fuzz:" -- "$@")"
while true; do
    case "$1" in
        -f) flavour="$2"; shift 2 ;;
        -g) dbginfo=y; shift 1 ;;
        -j) jobs="$2"; shift 2 ;;
        -s) featureset="$2"; shift 2 ;;
        --fuzz) fuzz="$2"; shift 2;;
        --) shift 1; break ;;
    esac
done

if [ $# -lt 1 ]; then
    echo >&2 "Usage: $0 [<options>] <patch>..."
    cat >&2 <<EOF
Options:
    -f <flavour>     specify the 'flavour' of kernel to build, e.g. 686-pae
    -g               enable debug info
    -j <jobs>        specify number of compiler jobs to run in parallel
                     (default: number of available processors)
    -s <featureset>  specify an optional featureset to apply, e.g. rt
    --fuzz <num>     set the maximum patch fuzz factor (default: 0)
EOF
    exit 2
fi

if [ -z "$flavour" ]; then
    echo >&2 "You must specify a flavour to build with the -f option"
    exit 2
fi

profiles=nodoc,noudeb,pkg.linux.nosource,pkg.linux.mintools
if [ -z "$dbginfo" ]; then
    profiles="$profiles,pkg.linux.nokerneldbg,pkg.linux.nokerneldbginfo"
fi

# Check build-dependencies early if possible
if [ -f debian/control ]; then
    dpkg-checkbuilddeps -P"$profiles"
fi

# Append 'a~test' to Debian version; this should be less than any official
# successor and easily recognisable
version="$(dpkg-parsechangelog | sed 's/^Version: //; t; d')"
if [ "${version%a~test}" = "$version" ]; then
    version="$version"a~test
    dch -v "$version" --distribution UNRELEASED "Testing patches $*"
fi

# Ignore user's .quiltrc
alias quilt='quilt --quiltrc -'

# Try to clean up any previous test patches
if [ "$featureset" = none ]; then
    patchdir=debian/patches
    while patch="$(quilt top 2>/dev/null)" && \
          [ "${patch#test/}" != "$patch" ]; do
        quilt pop -f
    done
    while patch="$(quilt next 2>/dev/null)" && \
          [ "${patch#test/}" != "$patch" ]; do
        quilt delete -r "$patch"
    done
else
    patchdir=debian/patches-${featureset}
    sed -i '/^test\//d' $patchdir/series
fi

# Prepare a new directory for the patches
rm -rf $patchdir/test/
mkdir $patchdir/test

# Prepare a new directory for the config; override ABI name, featuresets, flavours
rm -rf debian/config.local
mkdir debian/config.local debian/config.local/"$arch"
for other_fs in none rt; do
    if [ "$other_fs" != "$featureset" ]; then
        cat >debian/config.local/defines.toml <<EOF
[[featureset]]
name = '$other_fs'
enable = false
EOF
    fi
done
cat >debian/config.local/"$arch"/defines.toml <<EOF
[[featureset]]
name = '$featureset'

[[featureset.flavour]]
name = '$flavour'
EOF

# Regenerate control and included rules
rm -f debian/control debian/rules.gen
debian/rules debian/control-real && exit 1 || true
test -f debian/control
test -f debian/rules.gen

# Check build-dependencies now that we know debian/control exists
dpkg-checkbuilddeps -P"$profiles"

# Clean up old build; apply existing patches for featureset
debian/rules clean
debian/rules source

# Apply the additional patches
for patch in "$@"; do
    patch_abs="$(readlink -f "$patch")"
    (cd "debian/build/source_${featureset}" && \
     quilt import -P "test/$(basename "$patch")" "$patch_abs" && \
     quilt push --fuzz="$fuzz")
done

# Build selected binaries
dpkg-buildpackage -b -P"$profiles" -j"$jobs" -nc -uc
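Hedged examples built from the option parsing above; the patch file name is made up, and the flavour/featureset values are only illustrative.

    debian/bin/test-patches -f amd64 ../fix-foo.patch
    debian/bin/test-patches -f amd64 -s rt -g --fuzz 1 ../fix-foo.patch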
24
debian/bin/update-bug-taint-list
vendored
Executable file
@@ -0,0 +1,24 @@
#!/bin/sh -eu

temp="$(mktemp)"
trap 'rm -f "$temp"' EXIT

# Copy everything above the existing flag checks.
sed -rne '/^ +_check /q; p' \
    < debian/templates/image.bug/include-1tainted >"$temp"

# Generate flag checks from the table in tainted-kernels.rst. We
# could alternatively extract them from sysctl/kernel.rst or in the C
# sources, but this is easy to find and parse and is likely to have
# the most useful descriptions.
sed -rne '/^Bit +Log +Number +Reason/,/^$/ {
    s/^ *([0-9]+) +.\/(.) +[0-9]+ +(.*)/ _check \1 \2 '\''\3'\''/p
}' \
    < Documentation/admin-guide/tainted-kernels.rst >>"$temp"

# Copy everything below the existing flag checks.
sed -rne '/^ +echo "\*\* Tainted:/,$p' \
    < debian/templates/image.bug/include-1tainted >>"$temp"

# Update the bug script in-place.
cp "$temp" debian/templates/image.bug/include-1tainted
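A note on where this runs, inferred from the paths above: both the template under debian/templates/ and Documentation/admin-guide/tainted-kernels.rst are addressed relative to the working directory, so it is assumed to be run from the top of an unpacked kernel source tree with the debian/ directory in place.

    debian/bin/update-bug-taint-list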