Compare commits
5 Commits
31231013eb ... 18d03694bf
Author | SHA1 | Date
---|---|---
Federico Giovanardi | 18d03694bf |
Daan De Meyer | 4a346b779a |
Yu Watanabe | 0e42004f3e |
Luca Boccassi | 6fd3496cfd |
Daan De Meyer | bb486fe9df |

@@ -38,9 +38,8 @@ SignExpectedPcr=yes

[Content]
ExtraTrees=
        mkosi.extra.common
        mkosi.crt:/usr/lib/verity.d/mkosi.crt # sysext verification key
        mkosi.leak-sanitizer-suppressions:/usr/lib/systemd/leak-sanitizer-suppressions
        mkosi.coredump-journal-storage.conf:/usr/lib/systemd/coredump.conf.d/10-coredump-journal-storage.conf
        %O/minimal-0.root-%a.raw:/usr/share/minimal_0.raw
        %O/minimal-0.root-%a-verity.raw:/usr/share/minimal_0.verity
        %O/minimal-0.root-%a-verity-sig.raw:/usr/share/minimal_0.verity.sig

@@ -6,9 +6,7 @@ Include=
        %D/mkosi.sanitizers

[Content]
ExtraTrees=
        %D/mkosi.leak-sanitizer-suppressions:/usr/lib/systemd/leak-sanitizer-suppressions
        %D/mkosi.coredump-journal-storage.conf:/usr/lib/systemd/coredump.conf.d/10-coredump-journal-storage.conf
ExtraTrees=%D/mkosi.extra.common

Packages=
        findutils

@@ -3,6 +3,7 @@
integration_tests += [
        integration_test_template + {
                'name' : fs.name(meson.current_source_dir()),
                'coredump-exclude-regex' : '/(bash|python3.[0-9]+|systemd-executor)$',
                'cmdline' : integration_test_template['cmdline'] + [
                        '''

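The per-test 'coredump-exclude-regex' values in these meson hunks are ordinary Python regular expressions that the test wrapper applies with re.search() against the executable path of each coredump (see the process_coredumps hunk further down). A minimal sketch of that matching, using made-up executable paths purely for illustration:

```python
import re

# Pattern taken from the hunk above; the executable paths below are invented examples.
exclude_regex = re.compile(r'/(bash|python3.[0-9]+|systemd-executor)$')

for exe in ['/usr/bin/bash', '/usr/bin/python3.12', '/usr/bin/sleep']:
    # re.search() matches anywhere in the path; the trailing $ anchors on the basename.
    print(exe, '-> expected (excluded)' if exclude_regex.search(exe) else '-> unexpected coredump')
```
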
@@ -4,6 +4,7 @@ integration_tests += [
        integration_test_template + {
                'name' : fs.name(meson.current_source_dir()),
                'unit' : files('TEST-16-EXTEND-TIMEOUT.service'),
                'coredump-exclude-regex' : '/(bash|sleep)$',
        },
]

@@ -4,5 +4,6 @@ integration_tests += [
        integration_test_template + {
                'name' : fs.name(meson.current_source_dir()),
                'vm' : true,
                'coredump-exclude-regex' : '/(sleep|udevadm)$',
        },
]

@@ -3,5 +3,6 @@
integration_tests += [
        integration_test_template + {
                'name' : fs.name(meson.current_source_dir()),
                'coredump-exclude-regex' : '/(sleep|bash|systemd-notify)$',
        },
]

@@ -4,5 +4,7 @@ integration_tests += [
        integration_test_template + {
                'name' : fs.name(meson.current_source_dir()),
                'priority' : 10,
                # TODO: Remove when https://github.com/systemd/systemd/issues/35335 is fixed.
                'coredump-exclude-regex' : '/systemd-localed',
        },
]

@@ -5,6 +5,7 @@ integration_tests += [
                'name' : fs.name(meson.current_source_dir()),
                'storage': 'persistent',
                'vm' : true,
                'coredump-exclude-regex' : '/(test-usr-dump|test-dump|bash)$',
        },
]

@@ -6,6 +6,7 @@
import argparse
import json
import os
import re
import shlex
import subprocess
import sys

@@ -32,6 +33,59 @@ ExecStart=false
"""


def process_coredumps(args: argparse.Namespace, journal_file: Path) -> bool:
    # Collect executable paths of all coredumps and filter out the expected ones.

    if args.coredump_exclude_regex:
        exclude_regex = re.compile(args.coredump_exclude_regex)
    else:
        exclude_regex = None

    result = subprocess.run(
        [
            args.mkosi,
            '--directory', os.fspath(args.meson_source_dir),
            '--extra-search-path', os.fspath(args.meson_build_dir),
            'sandbox',
            'coredumpctl',
            '--file', journal_file,
            '--json=short',
        ],
        stdout=subprocess.PIPE,
        text=True,
    ) # fmt: skip

    # coredumpctl returns a non-zero exit status if there are no coredumps.
    if result.returncode != 0:
        return False

    coredumps = json.loads(result.stdout)

    coredumps = [
        coredump for coredump in coredumps if not exclude_regex or not exclude_regex.search(coredump['exe'])
    ]

    if not coredumps:
        return False

    subprocess.run(
        [
            args.mkosi,
            '--directory', os.fspath(args.meson_source_dir),
            '--extra-search-path', os.fspath(args.meson_build_dir),
            'sandbox',
            'coredumpctl',
            '--file', journal_file,
            '--no-pager',
            'info',
            *(coredump['exe'] for coredump in coredumps),
        ],
        check=True,
    ) # fmt: skip

    return True


def main() -> None:
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--mkosi', required=True)

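As a rough illustration of what process_coredumps() does with the `coredumpctl --json=short` output, here is a self-contained sketch that filters a hand-written sample payload the same way. The sample entries are invented for the example; only the 'exe' field and the filtering expression come from the hunk above:

```python
import json
import re

# Hand-written stand-in for `coredumpctl --file ... --json=short` output (illustrative only).
sample_stdout = json.dumps([
    {'exe': '/usr/bin/bash', 'pid': 101},
    {'exe': '/usr/lib/systemd/systemd-journald', 'pid': 202},
])

exclude_regex = re.compile(r'/(bash|sleep)$')  # one of the per-test patterns shown earlier

coredumps = json.loads(sample_stdout)
unexpected = [c for c in coredumps if not exclude_regex.search(c['exe'])]

# Anything left over makes the wrapper print `coredumpctl info` output and treat the run as failed.
print([c['exe'] for c in unexpected])  # -> ['/usr/lib/systemd/systemd-journald']
```
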
@@ -44,6 +98,7 @@ def main() -> None:
    parser.add_argument('--slow', action=argparse.BooleanOptionalAction)
    parser.add_argument('--vm', action=argparse.BooleanOptionalAction)
    parser.add_argument('--exit-code', required=True, type=int)
    parser.add_argument('--coredump-exclude-regex', required=True)
    parser.add_argument('mkosi_args', nargs='*')
    args = parser.parse_args()

@@ -114,7 +169,9 @@ def main() -> None:
        """
    )

    journal_file = None
    journal_file = (args.meson_build_dir / (f'test/journal/{name}.journal')).absolute()
    journal_file.unlink(missing_ok=True)

    if not sys.stderr.isatty():
        dropin += textwrap.dedent(
            """

@@ -122,9 +179,6 @@ def main() -> None:
            FailureAction=exit
            """
        )

        journal_file = (args.meson_build_dir / (f'test/journal/{name}.journal')).absolute()
        journal_file.unlink(missing_ok=True)
    elif not shell:
        dropin += textwrap.dedent(
            """

@@ -194,44 +248,42 @@ def main() -> None:
        )
        exit(77)

    if journal_file and (
        keep_journal == '0' or (result.returncode in (args.exit_code, 77) and keep_journal == 'fail')
    coredumps = process_coredumps(args, journal_file)

    if keep_journal == '0' or (
        keep_journal == 'fail' and result.returncode in (args.exit_code, 77) and not coredumps
    ):
        journal_file.unlink(missing_ok=True)

    if shell or result.returncode in (args.exit_code, 77):
    if shell or (result.returncode in (args.exit_code, 77) and not coredumps):
        exit(0 if shell or result.returncode == args.exit_code else 77)

    if journal_file:
        ops = []
    ops = []

        if os.getenv('GITHUB_ACTIONS'):
            id = os.environ['GITHUB_RUN_ID']
            iteration = os.environ['GITHUB_RUN_ATTEMPT']
            j = json.loads(
                subprocess.run(
                    [
                        args.mkosi,
                        '--directory', os.fspath(args.meson_source_dir),
                        '--json',
                        'summary',
                    ],
                    stdout=subprocess.PIPE,
                    text=True,
                ).stdout
            ) # fmt: skip
            distribution = j['Images'][-1]['Distribution']
            release = j['Images'][-1]['Release']
            artifact = f'ci-mkosi-{id}-{iteration}-{distribution}-{release}-failed-test-journals'
            ops += [f'gh run download {id} --name {artifact} -D ci/{artifact}']
            journal_file = Path(f'ci/{artifact}/test/journal/{name}.journal')
    if os.getenv('GITHUB_ACTIONS'):
        id = os.environ['GITHUB_RUN_ID']
        iteration = os.environ['GITHUB_RUN_ATTEMPT']
        j = json.loads(
            subprocess.run(
                [
                    args.mkosi,
                    '--directory', os.fspath(args.meson_source_dir),
                    '--json',
                    'summary',
                ],
                stdout=subprocess.PIPE,
                text=True,
            ).stdout
        ) # fmt: skip
        distribution = j['Images'][-1]['Distribution']
        release = j['Images'][-1]['Release']
        artifact = f'ci-mkosi-{id}-{iteration}-{distribution}-{release}-failed-test-journals'
        ops += [f'gh run download {id} --name {artifact} -D ci/{artifact}']
        journal_file = Path(f'ci/{artifact}/test/journal/{name}.journal')

        ops += [f'journalctl --file {journal_file} --no-hostname -o short-monotonic -u {args.unit} -p info']
    ops += [f'journalctl --file {journal_file} --no-hostname -o short-monotonic -u {args.unit} -p info']

        print(
            "Test failed, relevant logs can be viewed with: \n\n" f"{(' && '.join(ops))}\n",
            file=sys.stderr,
        )
    print("Test failed, relevant logs can be viewed with: \n\n" f"{(' && '.join(ops))}\n", file=sys.stderr)

    # 0 also means we failed so translate that to a non-zero exit code to mark the test as failed.
    exit(result.returncode or 1)

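Read together, the reworked logic keeps the journal whenever unexpected coredumps were found and lets a coredump turn an otherwise passing run into a failure. A simplified restatement of just the journal-cleanup decision, reusing the variable names from the hunk (the sample values are made up):

```python
def should_delete_journal(keep_journal: str, returncode: int, exit_code: int, coredumps: bool) -> bool:
    # Mirrors the condition in the hunk above: always delete with keep_journal == '0',
    # and with keep_journal == 'fail' only delete when the test passed and left no coredumps.
    return keep_journal == '0' or (
        keep_journal == 'fail' and returncode in (exit_code, 77) and not coredumps
    )


print(should_delete_journal('fail', 123, 123, coredumps=False))  # True: clean pass, journal removed
print(should_delete_journal('fail', 123, 123, coredumps=True))   # False: coredumps found, journal kept
```
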
@@ -297,6 +297,7 @@ integration_test_template = {
        'qemu-args' : [],
        'exit-code' : 123,
        'vm' : false,
        'coredump-exclude-regex' : '',
}
testdata_subdirs = [
        'auxv',

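The template default of '' matters because the wrapper only compiles a pattern when the string is non-empty (`if args.coredump_exclude_regex:` in the hunk further up), so tests that do not override 'coredump-exclude-regex' get no filtering and every coredump counts as unexpected. A tiny sketch of that truthiness check (the helper name is invented for the example):

```python
import re


def build_exclude_regex(pattern: str):
    # Empty string (the meson template default) disables filtering entirely.
    return re.compile(pattern) if pattern else None


print(build_exclude_regex(''))                 # -> None: nothing is excluded
print(build_exclude_regex('/(bash|sleep)$'))   # per-test override: a compiled pattern
```
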
@@ -391,6 +392,7 @@ foreach integration_test : integration_tests
                '--storage', integration_test['storage'],
                '--firmware', integration_test['firmware'],
                '--exit-code', integration_test['exit-code'].to_string(),
                '--coredump-exclude-regex', integration_test['coredump-exclude-regex'],
        ]

        if 'unit' in integration_test

@@ -248,6 +248,7 @@ Bridge=mybridge
[Match]
Name=mybridge
[Network]
IPv6AcceptRA=no
DNS=192.168.250.1
Address=192.168.250.33/24
Gateway=192.168.250.1

@@ -540,6 +541,7 @@ MACAddress=12:34:56:78:9a:bc
[Match]
Name=dummy0
[Network]
IPv6AcceptRA=no
Address=192.168.42.100/24
DNS=192.168.42.1
Domains= ~company

@@ -573,6 +575,7 @@ MACAddress=12:34:56:78:9a:bc
        self.write_network('50-myvpn.network', '''[Match]
Name=dummy0
[Network]
IPv6AcceptRA=no
Address=192.168.42.100/24
DNS=192.168.42.1
Domains= ~company ~.

@@ -927,6 +930,7 @@ cat <<EOF >/run/systemd/network/50-test.network
Name={ifr}

[Network]
IPv6AcceptRA=no
Address=192.168.5.1/24
{addr6}
DHCPServer=yes

@@ -1006,6 +1010,7 @@ MACAddress=12:34:56:78:9a:bc
[Match]
Name=dummy0
[Network]
IPv6AcceptRA=no
Address=192.168.42.100/24
DNS=192.168.42.1
Domains= one two three four five six seven eight nine ten

@@ -1035,6 +1040,7 @@ MACAddress=12:34:56:78:9a:bc
[Match]
Name=dummy0
[Network]
IPv6AcceptRA=no
Address=192.168.42.100/24
DNS=192.168.42.1
''')

@@ -1107,7 +1113,12 @@ class MatchClientTest(unittest.TestCase, NetworkdTestingUtilities):
    def test_basic_matching(self):
        """Verify the Name= line works throughout this class."""
        self.add_veth_pair('test_if1', 'fake_if2')
        self.write_network('50-test.network', "[Match]\nName=test_*\n[Network]")
        self.write_network('50-test.network', '''\
[Match]
Name=test_*
[Network]
IPv6AcceptRA=no
''')
        subprocess.check_call(['systemctl', 'start', 'systemd-networkd'])
        self.assert_link_states(test_if1='managed', fake_if2='unmanaged')

@@ -1118,11 +1129,13 @@ class MatchClientTest(unittest.TestCase, NetworkdTestingUtilities):
        mac = '00:01:02:03:98:99'
        self.add_veth_pair('test_veth', 'test_peer',
                           ['addr', mac], ['addr', mac])
        self.write_network('50-no-veth.network', """\
        self.write_network('50-no-veth.network', '''\
[Match]
MACAddress={}
Name=!nonexistent *peer*
[Network]""".format(mac))
[Network]
IPv6AcceptRA=no
'''.format(mac))
        subprocess.check_call(['systemctl', 'start', 'systemd-networkd'])
        self.assert_link_states(test_veth='managed', test_peer='unmanaged')

@@ -6,6 +6,14 @@ set -o pipefail
# shellcheck source=test/units/test-control.sh
. "$(dirname "$0")"/test-control.sh

if systemd-detect-virt --quiet --container; then
    # This comes from the selinux package and tries to write
    # some files under sysfs, which will be read-only in a container,
    # so mask it. It's not our tmpfiles.d file anyway.
    mkdir -p /run/tmpfiles.d/
    ln -s /dev/null /run/tmpfiles.d/selinux-policy.conf
fi

run_subtests

touch /testok