Add scaling tests #140

Merged: 19 commits, Aug 21, 2024

scale: redo the test to avoid assuming resolution

31dbc8a
GitHub Actions / Test results failed Aug 21, 2024 in 0s

224 passed, 12 failed and 10 skipped

Tests failed

Report Passed Failed Skipped Time
test-results-confined_shell-beta/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-confined_shell-.xml 17✅ 0❌ 2⚪ 133s
test-results-confined_shell-edge/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-confined_shell-.xml 17✅ 0❌ 2⚪ 130s
test-results-gnome_shell/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-gnome_shell-.xml 8✅ 0❌ 0⚪ 37s
test-results-mir_demo_server-mir-team_dev/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_demo_server-.xml 21✅ 2❌ 1⚪ 148s
test-results-mir_demo_server/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_demo_server-.xml 18✅ 5❌ 1⚪ 156s
test-results-mir_kiosk-stable/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_kiosk-.xml 15✅ 0❌ 0⚪ 107s
test-results-mir_test_tools-24_edge/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_test_tools-.xml 19✅ 0❌ 1⚪ 190s
test-results-mir_test_tools-stable/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_test_tools-.xml 16✅ 3❌ 1⚪ 243s
test-results-self/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-self-.xml 61✅ 0❌ 0⚪ 29s
test-results-ubuntu_frame-24_edge/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-ubuntu_frame-.xml 17✅ 0❌ 1⚪ 122s
test-results-ubuntu_frame-stable/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-ubuntu_frame-.xml 15✅ 2❌ 1⚪ 120s

✅ test-results-confined_shell-beta/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-confined_shell-.xml

19 tests were completed in 133s with 17 passed, 0 failed and 2 skipped.

Test suite Passed Failed Skipped Time
pytest 17✅ 0❌ 2⚪ 133s

✅ pytest

tests.test_apps_can_run.TestAppsCanRun
  ✅ test_app_can_run[confined_shell-wpe-webkit-mir-kiosk]
  ✅ test_app_can_run[confined_shell-mir-kiosk-neverputt]
  ✅ test_app_can_run[confined_shell-mir-kiosk-scummvm]
  ✅ test_app_can_run[confined_shell-mir-kiosk-kodi]
  ✅ test_app_can_run[confined_shell-pluma]
  ✅ test_app_can_run[confined_shell-qterminal]
tests.test_drag_and_drop.TestDragAndDrop
  ✅ test_drag_and_drop[confined_shell]
tests.test_osk.TestOSK
  ⚪ test_osk_typing[pluma-ubuntu-frame-osk-confined_shell-wayland]
  ⚪ test_osk_typing[pluma-ubuntu-frame-osk-confined_shell-zapper]
tests.test_screencopy_bandwidth.TestScreencopyBandwidth
  ✅ test_active_app[asciinema-confined_shell]
  ✅ test_active_app[mir-kiosk-neverputt-confined_shell]
  ✅ test_compositor_alone[confined_shell]
  ✅ test_inactive_app[qterminal-confined_shell]
  ✅ test_inactive_app[pluma-confined_shell]
  ✅ test_inactive_app[mir-kiosk-kodi-confined_shell]
  ✅ test_app_dragged_around[confined_shell]
tests.test_server.TestServerCanRun
  ✅ test_server_can_run[confined_shell]
tests.test_tests.TestDisplayServer
  ✅ test_can_get_cgroup[confined_shell]
tests.test_vnc.TestVnc
  ✅ test_vnc[qterminal-confined_shell]

✅ test-results-confined_shell-edge/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-confined_shell-.xml

19 tests were completed in 130s with 17 passed, 0 failed and 2 skipped.

Test suite Passed Failed Skipped Time
pytest 17✅ 0❌ 2⚪ 130s

✅ pytest

tests.test_apps_can_run.TestAppsCanRun
  ✅ test_app_can_run[confined_shell-wpe-webkit-mir-kiosk]
  ✅ test_app_can_run[confined_shell-mir-kiosk-neverputt]
  ✅ test_app_can_run[confined_shell-mir-kiosk-scummvm]
  ✅ test_app_can_run[confined_shell-mir-kiosk-kodi]
  ✅ test_app_can_run[confined_shell-pluma]
  ✅ test_app_can_run[confined_shell-qterminal]
tests.test_drag_and_drop.TestDragAndDrop
  ✅ test_drag_and_drop[confined_shell]
tests.test_osk.TestOSK
  ⚪ test_osk_typing[pluma-ubuntu-frame-osk-confined_shell-wayland]
  ⚪ test_osk_typing[pluma-ubuntu-frame-osk-confined_shell-zapper]
tests.test_screencopy_bandwidth.TestScreencopyBandwidth
  ✅ test_active_app[asciinema-confined_shell]
  ✅ test_active_app[mir-kiosk-neverputt-confined_shell]
  ✅ test_compositor_alone[confined_shell]
  ✅ test_inactive_app[qterminal-confined_shell]
  ✅ test_inactive_app[pluma-confined_shell]
  ✅ test_inactive_app[mir-kiosk-kodi-confined_shell]
  ✅ test_app_dragged_around[confined_shell]
tests.test_server.TestServerCanRun
  ✅ test_server_can_run[confined_shell]
tests.test_tests.TestDisplayServer
  ✅ test_can_get_cgroup[confined_shell]
tests.test_vnc.TestVnc
  ✅ test_vnc[qterminal-confined_shell]

✅ test-results-gnome_shell/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-gnome_shell-.xml

8 tests were completed in 37s with 8 passed, 0 failed and 0 skipped.

Test suite Passed Failed Skipped Time
pytest 8✅ 0❌ 0⚪ 37s

✅ pytest

tests.test_apps_can_run.TestAppsCanRun
  ✅ test_app_can_run[gnome_shell-wpe-webkit-mir-kiosk]
  ✅ test_app_can_run[gnome_shell-mir-kiosk-neverputt]
  ✅ test_app_can_run[gnome_shell-mir-kiosk-scummvm]
  ✅ test_app_can_run[gnome_shell-mir-kiosk-kodi]
  ✅ test_app_can_run[gnome_shell-pluma]
  ✅ test_app_can_run[gnome_shell-qterminal]
tests.test_server.TestServerCanRun
  ✅ test_server_can_run[gnome_shell]
tests.test_tests.TestDisplayServer
  ✅ test_can_get_cgroup[gnome_shell]

❌ test-results-mir_demo_server-mir-team_dev/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_demo_server-.xml

24 tests were completed in 148s with 21 passed, 2 failed and 1 skipped.

Test suite Passed Failed Skipped Time
pytest 21✅ 2❌ 1⚪ 148s

❌ pytest

tests.test_apps_can_run.TestAppsCanRun
  ✅ test_app_can_run[mir_demo_server-wpe-webkit-mir-kiosk]
  ✅ test_app_can_run[mir_demo_server-mir-kiosk-neverputt]
  ✅ test_app_can_run[mir_demo_server-mir-kiosk-scummvm]
  ✅ test_app_can_run[mir_demo_server-mir-kiosk-kodi]
  ✅ test_app_can_run[mir_demo_server-pluma]
  ✅ test_app_can_run[mir_demo_server-qterminal]
tests.test_display_configuration.TestDisplayConfiguration
  ✅ test_can_update_scale[mir_demo_server]
  ✅ test_can_update_position[mir_demo_server]
tests.test_drag_and_drop.TestDragAndDrop
  ✅ test_drag_and_drop[mir_demo_server]
tests.test_osk.TestOSK
  ❌ test_osk_typing[pluma-ubuntu-frame-osk-mir_demo_server-wayland]
	self = <test_osk.TestOSK object at 0x7f65d0776f90>
  ⚪ test_osk_typing[pluma-ubuntu-frame-osk-mir_demo_server-zapper]
tests.test_scale.TestScale
  ✅ test_scale[1.0-mir_demo_server]
  ✅ test_scale[1.5-mir_demo_server]
  ❌ test_scale[2.0-mir_demo_server]
	self = <test_scale.TestScale object at 0x7f65d07887d0>
tests.test_screencopy_bandwidth.TestScreencopyBandwidth
  ✅ test_active_app[asciinema-mir_demo_server]
  ✅ test_active_app[mir-kiosk-neverputt-mir_demo_server]
  ✅ test_compositor_alone[mir_demo_server]
  ✅ test_inactive_app[qterminal-mir_demo_server]
  ✅ test_inactive_app[pluma-mir_demo_server]
  ✅ test_inactive_app[mir-kiosk-kodi-mir_demo_server]
  ✅ test_app_dragged_around[mir_demo_server]
tests.test_server.TestServerCanRun
  ✅ test_server_can_run[mir_demo_server]
tests.test_tests.TestDisplayServer
  ✅ test_can_get_cgroup[mir_demo_server]
tests.test_vnc.TestVnc
  ✅ test_vnc[qterminal-mir_demo_server]

❌ test-results-mir_demo_server/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_demo_server-.xml

24 tests were completed in 156s with 18 passed, 5 failed and 1 skipped.

Test suite Passed Failed Skipped Time
pytest 18✅ 5❌ 1⚪ 156s

❌ pytest

tests.test_apps_can_run.TestAppsCanRun
  ✅ test_app_can_run[mir_demo_server-wpe-webkit-mir-kiosk]
  ❌ test_app_can_run[mir_demo_server-mir-kiosk-neverputt]
	self = <mir_ci.lib.cgroups.Cgroup object at 0x7eff696618d0>
  ✅ test_app_can_run[mir_demo_server-mir-kiosk-scummvm]
  ✅ test_app_can_run[mir_demo_server-mir-kiosk-kodi]
  ✅ test_app_can_run[mir_demo_server-pluma]
  ✅ test_app_can_run[mir_demo_server-qterminal]
tests.test_display_configuration.TestDisplayConfiguration
  ✅ test_can_update_scale[mir_demo_server]
  ✅ test_can_update_position[mir_demo_server]
tests.test_drag_and_drop.TestDragAndDrop
  ✅ test_drag_and_drop[mir_demo_server]
tests.test_osk.TestOSK
  ❌ test_osk_typing[pluma-ubuntu-frame-osk-mir_demo_server-wayland]
	self = <test_osk.TestOSK object at 0x7eff697977d0>
  ⚪ test_osk_typing[pluma-ubuntu-frame-osk-mir_demo_server-zapper]
tests.test_scale.TestScale
  ✅ test_scale[1.0-mir_demo_server]
  ❌ test_scale[1.5-mir_demo_server]
	self = <test_scale.TestScale object at 0x7eff697a8310>
  ❌ test_scale[2.0-mir_demo_server]
	self = <test_scale.TestScale object at 0x7eff697a85d0>
tests.test_screencopy_bandwidth.TestScreencopyBandwidth
  ✅ test_active_app[asciinema-mir_demo_server]
  ❌ test_active_app[mir-kiosk-neverputt-mir_demo_server]
	self = <test_screencopy_bandwidth.TestScreencopyBandwidth object at 0x7eff697bcf50>
  ✅ test_compositor_alone[mir_demo_server]
  ✅ test_inactive_app[qterminal-mir_demo_server]
  ✅ test_inactive_app[pluma-mir_demo_server]
  ✅ test_inactive_app[mir-kiosk-kodi-mir_demo_server]
  ✅ test_app_dragged_around[mir_demo_server]
tests.test_server.TestServerCanRun
  ✅ test_server_can_run[mir_demo_server]
tests.test_tests.TestDisplayServer
  ✅ test_can_get_cgroup[mir_demo_server]
tests.test_vnc.TestVnc
  ✅ test_vnc[qterminal-mir_demo_server]

✅ test-results-mir_kiosk-stable/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_kiosk-.xml

15 tests were completed in 107s with 15 passed, 0 failed and 0 skipped.

Test suite Passed Failed Skipped Time
pytest 15✅ 0❌ 0⚪ 107s

✅ pytest

tests.test_apps_can_run.TestAppsCanRun
  ✅ test_app_can_run[mir_kiosk-wpe-webkit-mir-kiosk]
  ✅ test_app_can_run[mir_kiosk-mir-kiosk-neverputt]
  ✅ test_app_can_run[mir_kiosk-mir-kiosk-scummvm]
  ✅ test_app_can_run[mir_kiosk-mir-kiosk-kodi]
  ✅ test_app_can_run[mir_kiosk-pluma]
  ✅ test_app_can_run[mir_kiosk-qterminal]
tests.test_screencopy_bandwidth.TestScreencopyBandwidth
  ✅ test_active_app[asciinema-mir_kiosk]
  ✅ test_active_app[mir-kiosk-neverputt-mir_kiosk]
  ✅ test_compositor_alone[mir_kiosk]
  ✅ test_inactive_app[qterminal-mir_kiosk]
  ✅ test_inactive_app[pluma-mir_kiosk]
  ✅ test_inactive_app[mir-kiosk-kodi-mir_kiosk]
tests.test_server.TestServerCanRun
  ✅ test_server_can_run[mir_kiosk]
tests.test_tests.TestDisplayServer
  ✅ test_can_get_cgroup[mir_kiosk]
tests.test_vnc.TestVnc
  ✅ test_vnc[qterminal-mir_kiosk]

✅ test-results-mir_test_tools-24_edge/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_test_tools-.xml

20 tests were completed in 190s with 19 passed, 0 failed and 1 skipped.

Test suite Passed Failed Skipped Time
pytest 19✅ 0❌ 1⚪ 190s

✅ pytest

tests.test_apps_can_run.TestAppsCanRun
  ✅ test_app_can_run[mir_test_tools-wpe-webkit-mir-kiosk]
  ✅ test_app_can_run[mir_test_tools-mir-kiosk-neverputt]
  ✅ test_app_can_run[mir_test_tools-mir-kiosk-scummvm]
  ✅ test_app_can_run[mir_test_tools-mir-kiosk-kodi]
  ✅ test_app_can_run[mir_test_tools-pluma]
  ✅ test_app_can_run[mir_test_tools-qterminal]
tests.test_drag_and_drop.TestDragAndDrop
  ✅ test_drag_and_drop[mir_test_tools]
tests.test_mir_flutter_app.TestMirFlutterApp
  ✅ test_mir_flutter_app[mir-test-tools-mir_test_tools]
tests.test_osk.TestOSK
  ✅ test_osk_typing[pluma-ubuntu-frame-osk-mir_test_tools-wayland]
  ⚪ test_osk_typing[pluma-ubuntu-frame-osk-mir_test_tools-zapper]
tests.test_screencopy_bandwidth.TestScreencopyBandwidth
  ✅ test_active_app[asciinema-mir_test_tools]
  ✅ test_active_app[mir-kiosk-neverputt-mir_test_tools]
  ✅ test_compositor_alone[mir_test_tools]
  ✅ test_inactive_app[qterminal-mir_test_tools]
  ✅ test_inactive_app[pluma-mir_test_tools]
  ✅ test_inactive_app[mir-kiosk-kodi-mir_test_tools]
  ✅ test_app_dragged_around[mir_test_tools]
tests.test_server.TestServerCanRun
  ✅ test_server_can_run[mir_test_tools]
tests.test_tests.TestDisplayServer
  ✅ test_can_get_cgroup[mir_test_tools]
tests.test_vnc.TestVnc
  ✅ test_vnc[qterminal-mir_test_tools]

❌ test-results-mir_test_tools-stable/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_test_tools-.xml

20 tests were completed in 243s with 16 passed, 3 failed and 1 skipped.

Test suite Passed Failed Skipped Time
pytest 16✅ 3❌ 1⚪ 243s

❌ pytest

tests.test_apps_can_run.TestAppsCanRun
  ✅ test_app_can_run[mir_test_tools-wpe-webkit-mir-kiosk]
  ❌ test_app_can_run[mir_test_tools-mir-kiosk-neverputt]
	self = <mir_ci.lib.cgroups.Cgroup object at 0x7f0021aaa050>
  ✅ test_app_can_run[mir_test_tools-mir-kiosk-scummvm]
  ✅ test_app_can_run[mir_test_tools-mir-kiosk-kodi]
  ✅ test_app_can_run[mir_test_tools-pluma]
  ✅ test_app_can_run[mir_test_tools-qterminal]
tests.test_drag_and_drop.TestDragAndDrop
  ✅ test_drag_and_drop[mir_test_tools]
tests.test_mir_flutter_app.TestMirFlutterApp
  ❌ test_mir_flutter_app[mir-test-tools-mir_test_tools]
	self = <test_mir_flutter_app.TestMirFlutterApp object at 0x7f0021bca890>
tests.test_osk.TestOSK
  ✅ test_osk_typing[pluma-ubuntu-frame-osk-mir_test_tools-wayland]
  ⚪ test_osk_typing[pluma-ubuntu-frame-osk-mir_test_tools-zapper]
tests.test_screencopy_bandwidth.TestScreencopyBandwidth
  ✅ test_active_app[asciinema-mir_test_tools]
  ❌ test_active_app[mir-kiosk-neverputt-mir_test_tools]
	self = <test_screencopy_bandwidth.TestScreencopyBandwidth object at 0x7f0021c00ad0>
  ✅ test_compositor_alone[mir_test_tools]
  ✅ test_inactive_app[qterminal-mir_test_tools]
  ✅ test_inactive_app[pluma-mir_test_tools]
  ✅ test_inactive_app[mir-kiosk-kodi-mir_test_tools]
  ✅ test_app_dragged_around[mir_test_tools]
tests.test_server.TestServerCanRun
  ✅ test_server_can_run[mir_test_tools]
tests.test_tests.TestDisplayServer
  ✅ test_can_get_cgroup[mir_test_tools]
tests.test_vnc.TestVnc
  ✅ test_vnc[qterminal-mir_test_tools]

✅ test-results-self/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-self-.xml

61 tests were completed in 29s with 61 passed, 0 failed and 0 skipped.

Test suite Passed Failed Skipped Time
pytest 61✅ 0❌ 0⚪ 29s

✅ pytest

tests.test_tests.TestProgram
  ✅ test_program_gives_output
  ✅ test_program_can_be_waited_for
  ✅ test_program_can_be_terminated
  ✅ test_program_is_killed_when_terminate_fails
  ✅ test_program_runs_with_systemd_when_flag_is_set
  ✅ test_program_can_get_cgroup
  ✅ test_passes_when_cgroup_not_got
  ✅ test_get_cgroup_asserts_without_cgroupv2
tests.test_tests.TestBenchmarker
  ✅ test_benchmarker_can_generate_report
  ✅ test_benchmarker_cant_enter_twice
  ✅ test_benchmarker_unwinds_programs
  ✅ test_benchmarker_unwinds_programs_on_enter_failure
  ✅ test_benchmarker_unwinds_programs_on_exit_failure
  ✅ test_benchmarker_unwinds_programs_on_task_failure
  ✅ test_benchmarker_with_program
tests.test_tests.TestCGroupsBackend
  ✅ test_eats_runtime_error_on_poll
  ✅ test_converts_max_to_peak
  ✅ test_raises_runtime_error_on_empty
tests.test_tests.TestCgroup
  ✅ test_cgroup_can_get_cpu_time_microseconds
  ✅ test_cgroup_get_cpu_time_microseconds_raises_when_not_integer
  ✅ test_cgroup_get_cpu_time_microseconds_raises_when_usage_usec_not_found
  ✅ test_cgroup_can_get_current_memory
  ✅ test_cgroup_get_current_memory_raises_when_not_integer
  ✅ test_cgroup_can_get_peak_memory
  ✅ test_cgroup_get_peak_memory_raises_when_not_integer
  ✅ test_cgroup_path_raises_assertion_error_when_contents_are_incorrect
  ✅ test_cgroup_path_raises_runtime_error_when_contents_are_none
tests.test_tests.TestDisplayServer
  ✅ test_can_get_cgroup[mir_demo_server]
  ✅ test_can_get_cgroup[mir_test_tools]
  ✅ test_can_get_cgroup[gnome_shell]
  ✅ test_can_get_cgroup[ubuntu_frame]
  ✅ test_can_get_cgroup[confined_shell]
  ✅ test_can_get_cgroup[mir_kiosk]
  ✅ test_can_get_cgroup[miriway]
  ✅ test_display_server_records_mode
tests.test_tests.TestOutputWatcher
  ✅ test_can_register
tests.test_tests.TestServers
  ✅ test_can_parse_mir_ci_server[AppType.snap]
  ✅ test_can_parse_mir_ci_server[AppType.deb]
  ✅ test_can_parse_mir_ci_server[AppType.pip]
  ✅ test_mir_ci_server_string_missing_capabilities[AppType.snap]
  ✅ test_mir_ci_server_string_missing_capabilities[AppType.deb]
  ✅ test_mir_ci_server_string_missing_capabilities[AppType.pip]
  ✅ test_mir_ci_server_string_app_type_is_invalid
  ✅ test_mir_ci_server_string_capability_is_invalid[AppType.snap]
  ✅ test_mir_ci_server_string_capability_is_invalid[AppType.deb]
  ✅ test_mir_ci_server_string_capability_is_invalid[AppType.pip]
  ✅ test_mir_ci_server_is_present_in_server_list[AppType.snap]
  ✅ test_mir_ci_server_is_present_in_server_list[AppType.deb]
  ✅ test_mir_ci_server_is_present_in_server_list[AppType.pip]
  ✅ test_mir_ci_server_can_be_found_by_capability[capabilities0-AppType.snap]
  ✅ test_mir_ci_server_can_be_found_by_capability[capabilities0-AppType.deb]
  ✅ test_mir_ci_server_can_be_found_by_capability[capabilities0-AppType.pip]
  ✅ test_mir_ci_server_can_be_found_by_capability[capabilities1-AppType.snap]
  ✅ test_mir_ci_server_can_be_found_by_capability[capabilities1-AppType.deb]
  ✅ test_mir_ci_server_can_be_found_by_capability[capabilities1-AppType.pip]
  ✅ test_mir_ci_server_can_be_found_by_capability[capabilities2-AppType.snap]
  ✅ test_mir_ci_server_can_be_found_by_capability[capabilities2-AppType.deb]
  ✅ test_mir_ci_server_can_be_found_by_capability[capabilities2-AppType.pip]
  ✅ test_mir_ci_server_cannot_be_found_if_it_lacks_capability[AppType.snap]
  ✅ test_mir_ci_server_cannot_be_found_if_it_lacks_capability[AppType.deb]
  ✅ test_mir_ci_server_cannot_be_found_if_it_lacks_capability[AppType.pip]

✅ test-results-ubuntu_frame-24_edge/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-ubuntu_frame-.xml

18 tests were completed in 122s with 17 passed, 0 failed and 1 skipped.

Test suite Passed Failed Skipped Time
pytest 17✅ 0❌ 1⚪ 122s

✅ pytest

tests.test_apps_can_run.TestAppsCanRun
  ✅ test_app_can_run[ubuntu_frame-wpe-webkit-mir-kiosk]
  ✅ test_app_can_run[ubuntu_frame-mir-kiosk-neverputt]
  ✅ test_app_can_run[ubuntu_frame-mir-kiosk-scummvm]
  ✅ test_app_can_run[ubuntu_frame-mir-kiosk-kodi]
  ✅ test_app_can_run[ubuntu_frame-pluma]
  ✅ test_app_can_run[ubuntu_frame-qterminal]
tests.test_drag_and_drop.TestDragAndDrop
  ✅ test_drag_and_drop[ubuntu_frame]
tests.test_osk.TestOSK
  ✅ test_osk_typing[pluma-ubuntu-frame-osk-ubuntu_frame-wayland]
  ⚪ test_osk_typing[pluma-ubuntu-frame-osk-ubuntu_frame-zapper]
tests.test_screencopy_bandwidth.TestScreencopyBandwidth
  ✅ test_active_app[asciinema-ubuntu_frame]
  ✅ test_active_app[mir-kiosk-neverputt-ubuntu_frame]
  ✅ test_compositor_alone[ubuntu_frame]
  ✅ test_inactive_app[qterminal-ubuntu_frame]
  ✅ test_inactive_app[pluma-ubuntu_frame]
  ✅ test_inactive_app[mir-kiosk-kodi-ubuntu_frame]
tests.test_server.TestServerCanRun
  ✅ test_server_can_run[ubuntu_frame]
tests.test_tests.TestDisplayServer
  ✅ test_can_get_cgroup[ubuntu_frame]
tests.test_vnc.TestVnc
  ✅ test_vnc[qterminal-ubuntu_frame]

❌ test-results-ubuntu_frame-stable/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-ubuntu_frame-.xml

18 tests were completed in 120s with 15 passed, 2 failed and 1 skipped.

Test suite Passed Failed Skipped Time
pytest 15✅ 2❌ 1⚪ 120s

❌ pytest

tests.test_apps_can_run.TestAppsCanRun
  ✅ test_app_can_run[ubuntu_frame-wpe-webkit-mir-kiosk]
  ❌ test_app_can_run[ubuntu_frame-mir-kiosk-neverputt]
	self = <mir_ci.lib.cgroups.Cgroup object at 0x7f57f0ea9f90>
  ✅ test_app_can_run[ubuntu_frame-mir-kiosk-scummvm]
  ✅ test_app_can_run[ubuntu_frame-mir-kiosk-kodi]
  ✅ test_app_can_run[ubuntu_frame-pluma]
  ✅ test_app_can_run[ubuntu_frame-qterminal]
tests.test_drag_and_drop.TestDragAndDrop
  ✅ test_drag_and_drop[ubuntu_frame]
tests.test_osk.TestOSK
  ✅ test_osk_typing[pluma-ubuntu-frame-osk-ubuntu_frame-wayland]
  ⚪ test_osk_typing[pluma-ubuntu-frame-osk-ubuntu_frame-zapper]
tests.test_screencopy_bandwidth.TestScreencopyBandwidth
  ✅ test_active_app[asciinema-ubuntu_frame]
  ❌ test_active_app[mir-kiosk-neverputt-ubuntu_frame]
	self = <test_screencopy_bandwidth.TestScreencopyBandwidth object at 0x7f57f10009d0>
  ✅ test_compositor_alone[ubuntu_frame]
  ✅ test_inactive_app[qterminal-ubuntu_frame]
  ✅ test_inactive_app[pluma-ubuntu_frame]
  ✅ test_inactive_app[mir-kiosk-kodi-ubuntu_frame]
tests.test_server.TestServerCanRun
  ✅ test_server_can_run[ubuntu_frame]
tests.test_tests.TestDisplayServer
  ✅ test_can_get_cgroup[ubuntu_frame]
tests.test_vnc.TestVnc
  ✅ test_vnc[qterminal-ubuntu_frame]

Annotations

Check failure on line 0 in test-results-mir_demo_server-mir-team_dev/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_demo_server-.xml

pytest ► tests.test_osk.TestOSK ► test_osk_typing[pluma-ubuntu-frame-osk-mir_demo_server-wayland]

Failed test found in:
  test-results-mir_demo_server-mir-team_dev/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_demo_server-.xml
Error:
  self = <test_osk.TestOSK object at 0x7f65d0776f90>
Raw output
self = <test_osk.TestOSK object at 0x7f65d0776f90>
robot_log = PosixPath('log.html'), platform = 'wayland'
server = <mir_ci.program.app.App object at 0x7f65d0767a50>
osk = <mir_ci.program.program.Program object at 0x7f65d06dc210>
app = <mir_ci.program.program.Program object at 0x7f65d06dd3d0>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-1/test_osk_typing_pluma_ubuntu_f0')

    async def test_osk_typing(self, robot_log, platform, server, osk, app, tmp_path):
        extensions = VirtualPointer.required_extensions + ScreencopyTracker.required_extensions + osk.extensions
        server_instance = DisplayServer(
            server,
            add_extensions=extensions,
        )
        assets = collect_assets(platform, ("kvm", "osk"), "osk")
    
        async with server_instance, server_instance.program(app) as app, server_instance.program(osk) as osk:
            if platform == "wayland":
                tuple((tmp_path / k).symlink_to(v) for k, v in assets.items())
                robot = server_instance.program(App(("robot", "-d", tmp_path, "--log", robot_log, tmp_path)))
                async with robot:
>                   await robot.wait(120)

tests/test_osk.py:100: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.program.program.Program object at 0x7f65d06ce9d0>, timeout = 120
term_timeout = 10

    async def wait(self, timeout=default_wait_timeout, term_timeout=default_term_timeout) -> None:
        if self.is_running():
            self.send_signals_task = asyncio.create_task(self.send_kill_signals(timeout, term_timeout))
        if self.process_end is not None:
            await self.process_end
            self.process_end = None
            print("\n" + format_output(self.name, self.output))
            assert self.process
            if self.process.returncode != 0:
                message = self.name
                if self.sigkill_sent:
                    message += " refused to terminate"
                else:
                    message += " closed with exit code " + str(self.process.returncode)
>               raise ProgramError(message)
E               mir_ci.program.program.ProgramError: robot closed with exit code 1

program/program.py:97: ProgramError
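
A small readability note on the test code shown above: the tuple((tmp_path / k).symlink_to(v) for k, v in assets.items()) line builds a throwaway tuple purely for the symlink side effects. An equivalent, more conventional spelling, sketched here under the assumption that assets maps asset names to source paths as the fixture suggests (illustrative, not the mir-ci code):

    from pathlib import Path

    def link_assets(assets: dict[str, Path], tmp_path: Path) -> None:
        # Same effect as tuple((tmp_path / k).symlink_to(v) for k, v in assets.items()),
        # without materializing a tuple of None values just for the side effects.
        for name, source in assets.items():
            (tmp_path / name).symlink_to(source)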

Check failure on line 0 in test-results-mir_demo_server-mir-team_dev/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_demo_server-.xml

pytest ► tests.test_scale.TestScale ► test_scale[2.0-mir_demo_server]

Failed test found in:
  test-results-mir_demo_server-mir-team_dev/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_demo_server-.xml
Error:
  self = <test_scale.TestScale object at 0x7f65d07887d0>
Raw output
self = <test_scale.TestScale object at 0x7f65d07887d0>
robot_log = PosixPath('log.html')
server = <mir_ci.program.app.App object at 0x7f65d077e550>, scale = 2.0
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-1/test_scale_2_0_mir_demo_server0')

    async def test_scale(self, robot_log, server, scale, tmp_path) -> None:
        extensions = ("all",)  # TODO no need to enable all extension
    
        server_instance = DisplayServer(
            server,
            add_extensions=extensions,
            env={"MIR_SERVER_X11_OUTPUT": "1024x768", "MIR_SERVER_DISPLAY_SCALE": str(scale)},
        )
    
        assets = collect_assets("wayland", ["kvm"], "scale")
    
        async with server_instance, server_instance.program(App(APP_PATH, AppType.deb)):
            tuple((tmp_path / k).symlink_to(v) for k, v in assets.items())
    
            robot = server_instance.program(
                App(("robot", "-d", tmp_path, "--log", robot_log, "--variable", f"SCALE:{scale}", tmp_path))
            )
    
            async with robot:
>               await robot.wait(120)

tests/test_scale.py:85: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.program.program.Program object at 0x7f65d06cf790>, timeout = 120
term_timeout = 10

    async def wait(self, timeout=default_wait_timeout, term_timeout=default_term_timeout) -> None:
        if self.is_running():
            self.send_signals_task = asyncio.create_task(self.send_kill_signals(timeout, term_timeout))
        if self.process_end is not None:
            await self.process_end
            self.process_end = None
            print("\n" + format_output(self.name, self.output))
            assert self.process
            if self.process.returncode != 0:
                message = self.name
                if self.sigkill_sent:
                    message += " refused to terminate"
                else:
                    message += " closed with exit code " + str(self.process.returncode)
>               raise ProgramError(message)
E               mir_ci.program.program.ProgramError: robot closed with exit code 1

program/program.py:97: ProgramError
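
For context on what this suite exercises: the server is started with a fixed MIR_SERVER_X11_OUTPUT of 1024x768 plus a MIR_SERVER_DISPLAY_SCALE, and the Robot suite receives the scale as a variable. Under the usual Wayland convention the logical size a client observes is the physical size divided by the scale, which is presumably what "avoid assuming resolution" in the commit message refers to: derive expectations from the advertised output rather than hard-coding them. A worked sketch of that arithmetic (a hypothetical helper, not part of mir-ci):

    import math

    def logical_size(physical: tuple[int, int], scale: float) -> tuple[float, float]:
        # Wayland convention: logical size = physical pixel size / output scale.
        width, height = physical
        return (width / scale, height / scale)

    # The 1024x768 output at the scales parametrized in test_scale:
    assert logical_size((1024, 768), 1.0) == (1024.0, 768.0)
    assert logical_size((1024, 768), 2.0) == (512.0, 384.0)
    w, h = logical_size((1024, 768), 1.5)
    assert math.isclose(w, 2048 / 3) and h == 512.0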

Check failure on line 0 in test-results-mir_demo_server/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_demo_server-.xml

pytest ► tests.test_apps_can_run.TestAppsCanRun ► test_app_can_run[mir_demo_server-mir-kiosk-neverputt]

Failed test found in:
  test-results-mir_demo_server/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_demo_server-.xml
Error:
  self = <mir_ci.lib.cgroups.Cgroup object at 0x7eff696618d0>
Raw output
self = <mir_ci.lib.cgroups.Cgroup object at 0x7eff696618d0>

    def get_cpu_time_microseconds(self) -> int:
        try:
>           for line in self._read_file("cpu.stat"):

lib/cgroups.py:51: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.lib.cgroups.Cgroup object at 0x7eff696618d0>
file_name = 'cpu.stat'

    def _read_file(self, file_name: str) -> Iterator[str]:
        file_path = f"{self.path}/{file_name}"
>       with open(file_path, "r") as file:
E       FileNotFoundError: [Errno 2] No such file or directory: '/sys/fs/cgroup/user.slice/user-1001.slice/user@1001.service/app.slice/snap.mir-kiosk-neverputt.mir-kiosk-neverputt-5216dca0-4405-4eca-bca3-5d92d689df10.scope/cpu.stat'

lib/cgroups.py:46: FileNotFoundError

The above exception was the direct cause of the following exception:

self = <mir_ci.lib.benchmarker.CgroupsBackend object at 0x7eff69660e10>

    async def poll(self) -> None:
        for name, info in self.data_records.items():
            try:
                cgroup = await info.program.get_cgroup()
>               cpu_ms = cgroup.get_cpu_time_microseconds()

lib/benchmarker.py:94: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.lib.cgroups.Cgroup object at 0x7eff696618d0>

    def get_cpu_time_microseconds(self) -> int:
        try:
            for line in self._read_file("cpu.stat"):
                split_line = line.split(" ")
                if split_line[0] == "usage_usec":
                    return int(split_line[1])
    
            raise RuntimeError("usage_usec line not found")
        except Exception as ex:
>           raise RuntimeError(f"Unable to get the cpu time for cgroup: {self.path}") from ex
E           RuntimeError: Unable to get the cpu time for cgroup: /sys/fs/cgroup/user.slice/user-1001.slice/user@1001.service/app.slice/snap.mir-kiosk-neverputt.mir-kiosk-neverputt-5216dca0-4405-4eca-bca3-5d92d689df10.scope

lib/cgroups.py:58: RuntimeError

During handling of the above exception, another exception occurred:

self = <mir_ci.lib.benchmarker.Benchmarker object at 0x7eff69660110>
args = (None, None, None)
exs = [AssertionError('mir-kiosk-neverputt died without being waited for or killed')]
program = <mir_ci.program.display_server.DisplayServer object at 0x7eff696601d0>

    async def __aexit__(self, *args):
        if self.running is False:
            return
    
        self.running = False
        try:
            if self.task:
                self.task.cancel()
                with suppress(asyncio.CancelledError):
                    await self.task
        except Exception as e:
>           raise e

lib/benchmarker.py:56: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
lib/benchmarker.py:54: in __aexit__
    await self.task
lib/benchmarker.py:24: in _run
    await self.backend.poll()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.lib.benchmarker.CgroupsBackend object at 0x7eff69660e10>

    async def poll(self) -> None:
        for name, info in self.data_records.items():
            try:
                cgroup = await info.program.get_cgroup()
                cpu_ms = cgroup.get_cpu_time_microseconds()
                mem_current = cgroup.get_current_memory()
                try:
                    mem_max = cgroup.get_peak_memory()
                except RuntimeError:
                    mem_max = max(self.data_records[name].mem_bytes_max, mem_current)
            except RuntimeError as ex:
>               warnings.warn(f"Ignoring cgroup read failure: {ex}")
E               UserWarning: Ignoring cgroup read failure: Unable to get the cpu time for cgroup: /sys/fs/cgroup/user.slice/user-1001.slice/user@1001.service/app.slice/snap.mir-kiosk-neverputt.mir-kiosk-neverputt-5216dca0-4405-4eca-bca3-5d92d689df10.scope

lib/benchmarker.py:101: UserWarning

During handling of the above exception, another exception occurred:

self = <test_apps_can_run.TestAppsCanRun object at 0x7eff69a4a690>
any_server = <mir_ci.program.app.App object at 0x7eff696602d0>
app = <mir_ci.program.app.App object at 0x7eff69a23e50>
record_property = <function record_property.<locals>.append_property at 0x7eff698568e0>

    @pytest.mark.smoke
    @pytest.mark.parametrize(
        "app",
        [
            apps.wpe(),
            apps.snap("mir-kiosk-neverputt"),
            apps.snap("mir-kiosk-scummvm"),
            apps.snap("mir-kiosk-kodi"),
            apps.pluma(),
            apps.qterminal(),
        ],
    )
    async def test_app_can_run(self, any_server, app, record_property) -> None:
        server_instance = DisplayServer(any_server)
        program = server_instance.program(app)
        benchmarker = Benchmarker(OrderedDict(compositor=server_instance, client=program), poll_time_seconds=0.1)
>       async with benchmarker:

tests/test_apps_can_run.py:30: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.lib.benchmarker.Benchmarker object at 0x7eff69660110>
args = (None, None, None)
exs = [AssertionError('mir-kiosk-neverputt died without being waited for or killed')]
program = <mir_ci.program.display_server.DisplayServer object at 0x7eff696601d0>

    async def __aexit__(self, *args):
        if self.running is False:
            return
    
        self.running = False
        try:
            if self.task:
                self.task.cancel()
                with suppress(asyncio.CancelledError):
                    await self.task
        except Exception as e:
            raise e
        finally:
            exs = []
            for program in self.running_programs:
                try:
                    await program.__aexit__()
                except Exception as e:
                    exs.append(e)
            if exs:
>               raise Exception("; ".join(str(ex) for ex in (exs)))
E               Exception: mir-kiosk-neverputt died without being waited for or killed

lib/benchmarker.py:65: Exception
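
The chain above bottoms out in a race rather than a logic error: the snap runs in a transient systemd scope, and once mir-kiosk-neverputt dies the whole snap.mir-kiosk-neverputt.*.scope directory is removed, so a poll landing in that window finds no cpu.stat. A poller can treat a vanished cgroup as "no sample this tick"; a minimal sketch under that assumption, using only the path layout from the traceback (illustrative only, not the mir-ci implementation):

    from pathlib import Path
    from typing import Optional

    def read_usage_usec(cgroup_path: str) -> Optional[int]:
        # Return the usage_usec counter from <cgroup>/cpu.stat, or None if the
        # cgroup was already cleaned up (e.g. the transient snap scope vanished
        # because the client exited between polls).
        stat = Path(cgroup_path) / "cpu.stat"
        try:
            text = stat.read_text()
        except FileNotFoundError:
            return None
        for line in text.splitlines():
            key, _, value = line.partition(" ")
            if key == "usage_usec":
                return int(value)
        raise RuntimeError(f"usage_usec not found in {stat}")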

Check failure on line 0 in test-results-mir_demo_server/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_demo_server-.xml

pytest ► tests.test_osk.TestOSK ► test_osk_typing[pluma-ubuntu-frame-osk-mir_demo_server-wayland]

Failed test found in:
  test-results-mir_demo_server/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_demo_server-.xml
Error:
  self = <test_osk.TestOSK object at 0x7eff697977d0>
Raw output
self = <test_osk.TestOSK object at 0x7eff697977d0>
robot_log = PosixPath('log.html'), platform = 'wayland'
server = <mir_ci.program.app.App object at 0x7eff69787990>
osk = <mir_ci.program.program.Program object at 0x7eff697088d0>
app = <mir_ci.program.program.Program object at 0x7eff6970bad0>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-1/test_osk_typing_pluma_ubuntu_f0')

    async def test_osk_typing(self, robot_log, platform, server, osk, app, tmp_path):
        extensions = VirtualPointer.required_extensions + ScreencopyTracker.required_extensions + osk.extensions
        server_instance = DisplayServer(
            server,
            add_extensions=extensions,
        )
        assets = collect_assets(platform, ("kvm", "osk"), "osk")
    
        async with server_instance, server_instance.program(app) as app, server_instance.program(osk) as osk:
            if platform == "wayland":
                tuple((tmp_path / k).symlink_to(v) for k, v in assets.items())
                robot = server_instance.program(App(("robot", "-d", tmp_path, "--log", robot_log, tmp_path)))
                async with robot:
>                   await robot.wait(120)

tests/test_osk.py:100: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.program.program.Program object at 0x7eff697038d0>, timeout = 120
term_timeout = 10

    async def wait(self, timeout=default_wait_timeout, term_timeout=default_term_timeout) -> None:
        if self.is_running():
            self.send_signals_task = asyncio.create_task(self.send_kill_signals(timeout, term_timeout))
        if self.process_end is not None:
            await self.process_end
            self.process_end = None
            print("\n" + format_output(self.name, self.output))
            assert self.process
            if self.process.returncode != 0:
                message = self.name
                if self.sigkill_sent:
                    message += " refused to terminate"
                else:
                    message += " closed with exit code " + str(self.process.returncode)
>               raise ProgramError(message)
E               mir_ci.program.program.ProgramError: robot closed with exit code 1

program/program.py:97: ProgramError

Check failure on line 0 in test-results-mir_demo_server/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_demo_server-.xml

pytest ► tests.test_scale.TestScale ► test_scale[1.5-mir_demo_server]

Failed test found in:
  test-results-mir_demo_server/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_demo_server-.xml
Error:
  self = <test_scale.TestScale object at 0x7eff697a8310>
Raw output
self = <test_scale.TestScale object at 0x7eff697a8310>
robot_log = PosixPath('log.html')
server = <mir_ci.program.app.App object at 0x7eff697a2310>, scale = 1.5
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-1/test_scale_1_5_mir_demo_server0')

    async def test_scale(self, robot_log, server, scale, tmp_path) -> None:
        extensions = ("all",)  # TODO no need to enable all extension
    
        server_instance = DisplayServer(
            server,
            add_extensions=extensions,
            env={"MIR_SERVER_X11_OUTPUT": "1024x768", "MIR_SERVER_DISPLAY_SCALE": str(scale)},
        )
    
        assets = collect_assets("wayland", ["kvm"], "scale")
    
        async with server_instance, server_instance.program(App(APP_PATH, AppType.deb)):
            tuple((tmp_path / k).symlink_to(v) for k, v in assets.items())
    
            robot = server_instance.program(
                App(("robot", "-d", tmp_path, "--log", robot_log, "--variable", f"SCALE:{scale}", tmp_path))
            )
    
            async with robot:
>               await robot.wait(120)

tests/test_scale.py:85: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.program.program.Program object at 0x7eff692a7e50>, timeout = 120
term_timeout = 10

    async def wait(self, timeout=default_wait_timeout, term_timeout=default_term_timeout) -> None:
        if self.is_running():
            self.send_signals_task = asyncio.create_task(self.send_kill_signals(timeout, term_timeout))
        if self.process_end is not None:
            await self.process_end
            self.process_end = None
            print("\n" + format_output(self.name, self.output))
            assert self.process
            if self.process.returncode != 0:
                message = self.name
                if self.sigkill_sent:
                    message += " refused to terminate"
                else:
                    message += " closed with exit code " + str(self.process.returncode)
>               raise ProgramError(message)
E               mir_ci.program.program.ProgramError: robot closed with exit code 2

program/program.py:97: ProgramError

Check failure on line 0 in test-results-mir_demo_server/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_demo_server-.xml

pytest ► tests.test_scale.TestScale ► test_scale[2.0-mir_demo_server]

Failed test found in:
  test-results-mir_demo_server/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_demo_server-.xml
Error:
  self = <test_scale.TestScale object at 0x7eff697a85d0>
Raw output
self = <test_scale.TestScale object at 0x7eff697a85d0>
robot_log = PosixPath('log.html')
server = <mir_ci.program.app.App object at 0x7eff697a2310>, scale = 2.0
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-1/test_scale_2_0_mir_demo_server0')

    async def test_scale(self, robot_log, server, scale, tmp_path) -> None:
        extensions = ("all",)  # TODO no need to enable all extension
    
        server_instance = DisplayServer(
            server,
            add_extensions=extensions,
            env={"MIR_SERVER_X11_OUTPUT": "1024x768", "MIR_SERVER_DISPLAY_SCALE": str(scale)},
        )
    
        assets = collect_assets("wayland", ["kvm"], "scale")
    
        async with server_instance, server_instance.program(App(APP_PATH, AppType.deb)):
            tuple((tmp_path / k).symlink_to(v) for k, v in assets.items())
    
            robot = server_instance.program(
                App(("robot", "-d", tmp_path, "--log", robot_log, "--variable", f"SCALE:{scale}", tmp_path))
            )
    
            async with robot:
>               await robot.wait(120)

tests/test_scale.py:85: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.program.program.Program object at 0x7eff692f0250>, timeout = 120
term_timeout = 10

    async def wait(self, timeout=default_wait_timeout, term_timeout=default_term_timeout) -> None:
        if self.is_running():
            self.send_signals_task = asyncio.create_task(self.send_kill_signals(timeout, term_timeout))
        if self.process_end is not None:
            await self.process_end
            self.process_end = None
            print("\n" + format_output(self.name, self.output))
            assert self.process
            if self.process.returncode != 0:
                message = self.name
                if self.sigkill_sent:
                    message += " refused to terminate"
                else:
                    message += " closed with exit code " + str(self.process.returncode)
>               raise ProgramError(message)
E               mir_ci.program.program.ProgramError: robot closed with exit code 2

program/program.py:97: ProgramError

Check failure on line 0 in test-results-mir_demo_server/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_demo_server-.xml

pytest ► tests.test_screencopy_bandwidth.TestScreencopyBandwidth ► test_active_app[mir-kiosk-neverputt-mir_demo_server]

Failed test found in:
  test-results-mir_demo_server/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_demo_server-.xml
Error:
  self = <test_screencopy_bandwidth.TestScreencopyBandwidth object at 0x7eff697bcf50>
Raw output
self = <test_screencopy_bandwidth.TestScreencopyBandwidth object at 0x7eff697bcf50>
record_property = <function record_property.<locals>.append_property at 0x7eff69707ba0>
server = <mir_ci.program.display_server.DisplayServer object at 0x7eff69696590>
app = <mir_ci.program.app.App object at 0x7eff697aa950>

    @pytest.mark.parametrize("server", servers.servers(servers.ServerCap.SCREENCOPY))
    @pytest.mark.parametrize(
        "app",
        [
            apps.qterminal(
                "--execute",
                f"python3 -m asciinema play {ASCIINEMA_CAST}",
                pip_pkgs=("asciinema",),
                id="asciinema",
                extra=20 + SLOWDOWN,
            ),
            apps.snap("mir-kiosk-neverputt", extra=False),
        ],
    )
    async def test_active_app(self, record_property, server, app) -> None:
        server = DisplayServer(server, add_extensions=ScreencopyTracker.required_extensions)
        tracker = ScreencopyTracker(server.display_name)
>       async with server as s, tracker, s.program(App(app.command[0], app.app_type)) as p:

tests/test_screencopy_bandwidth.py:45: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.program.program.Program object at 0x7eff69695b90>
args = (None, None, None)

    async def __aexit__(self, *args) -> None:
        if self.cgroups_task:
            self.cgroups_task.cancel()
    
        if self.process_end is not None:
            if not self.is_running():
                await self.process_end
>               raise AssertionError(f"{self.name} died without being waited for or killed")
E               AssertionError: mir-kiosk-neverputt died without being waited for or killed

program/program.py:153: AssertionError
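
The assertion in Program.__aexit__ above encodes a contract: if a process can exit on its own, the caller must consume that exit (wait or kill) before leaving the context. A minimal sketch of the usage pattern that contract implies, using only names visible in the traceback (whether the fix for this flake belongs in the test or in the harness is a separate question):

    # Hypothetical usage: explicitly wait for a client that may finish on its
    # own, so Program.__aexit__ never observes an unconsumed exit.
    async def run_client(server_instance, app):
        async with server_instance.program(app) as p:
            await p.wait(120)  # consume the exit status here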

Check failure on line 0 in test-results-mir_test_tools-stable/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_test_tools-.xml

pytest ► tests.test_apps_can_run.TestAppsCanRun ► test_app_can_run[mir_test_tools-mir-kiosk-neverputt]

Failed test found in:
  test-results-mir_test_tools-stable/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_test_tools-.xml
Error:
  self = <mir_ci.lib.cgroups.Cgroup object at 0x7f0021aaa050>
Raw output
self = <mir_ci.lib.cgroups.Cgroup object at 0x7f0021aaa050>

    def get_cpu_time_microseconds(self) -> int:
        try:
>           for line in self._read_file("cpu.stat"):

lib/cgroups.py:51: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.lib.cgroups.Cgroup object at 0x7f0021aaa050>
file_name = 'cpu.stat'

    def _read_file(self, file_name: str) -> Iterator[str]:
        file_path = f"{self.path}/{file_name}"
>       with open(file_path, "r") as file:
E       FileNotFoundError: [Errno 2] No such file or directory: '/sys/fs/cgroup/user.slice/user-1001.slice/user@1001.service/app.slice/snap.mir-kiosk-neverputt.mir-kiosk-neverputt-4e2ceb7f-f29e-4c72-95d6-ab470af86918.scope/cpu.stat'

lib/cgroups.py:46: FileNotFoundError

The above exception was the direct cause of the following exception:

self = <mir_ci.lib.benchmarker.CgroupsBackend object at 0x7f0021aa9010>

    async def poll(self) -> None:
        for name, info in self.data_records.items():
            try:
                cgroup = await info.program.get_cgroup()
>               cpu_ms = cgroup.get_cpu_time_microseconds()

lib/benchmarker.py:94: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.lib.cgroups.Cgroup object at 0x7f0021aaa050>

    def get_cpu_time_microseconds(self) -> int:
        try:
            for line in self._read_file("cpu.stat"):
                split_line = line.split(" ")
                if split_line[0] == "usage_usec":
                    return int(split_line[1])
    
            raise RuntimeError("usage_usec line not found")
        except Exception as ex:
>           raise RuntimeError(f"Unable to get the cpu time for cgroup: {self.path}") from ex
E           RuntimeError: Unable to get the cpu time for cgroup: /sys/fs/cgroup/user.slice/user-1001.slice/user@1001.service/app.slice/snap.mir-kiosk-neverputt.mir-kiosk-neverputt-4e2ceb7f-f29e-4c72-95d6-ab470af86918.scope

lib/cgroups.py:58: RuntimeError

During handling of the above exception, another exception occurred:

self = <mir_ci.lib.benchmarker.Benchmarker object at 0x7f0021aa8fd0>
args = (None, None, None)
exs = [AssertionError('mir-kiosk-neverputt died without being waited for or killed')]
program = <mir_ci.program.display_server.DisplayServer object at 0x7f0021aa8e90>

    async def __aexit__(self, *args):
        if self.running is False:
            return
    
        self.running = False
        try:
            if self.task:
                self.task.cancel()
                with suppress(asyncio.CancelledError):
                    await self.task
        except Exception as e:
>           raise e

lib/benchmarker.py:56: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
lib/benchmarker.py:54: in __aexit__
    await self.task
lib/benchmarker.py:24: in _run
    await self.backend.poll()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.lib.benchmarker.CgroupsBackend object at 0x7f0021aa9010>

    async def poll(self) -> None:
        for name, info in self.data_records.items():
            try:
                cgroup = await info.program.get_cgroup()
                cpu_ms = cgroup.get_cpu_time_microseconds()
                mem_current = cgroup.get_current_memory()
                try:
                    mem_max = cgroup.get_peak_memory()
                except RuntimeError:
                    mem_max = max(self.data_records[name].mem_bytes_max, mem_current)
            except RuntimeError as ex:
>               warnings.warn(f"Ignoring cgroup read failure: {ex}")
E               UserWarning: Ignoring cgroup read failure: Unable to get the cpu time for cgroup: /sys/fs/cgroup/user.slice/user-1001.slice/user@1001.service/app.slice/snap.mir-kiosk-neverputt.mir-kiosk-neverputt-4e2ceb7f-f29e-4c72-95d6-ab470af86918.scope

lib/benchmarker.py:101: UserWarning

During handling of the above exception, another exception occurred:

self = <test_apps_can_run.TestAppsCanRun object at 0x7f0021e657d0>
any_server = <mir_ci.program.app.App object at 0x7f0021aa8cd0>
app = <mir_ci.program.app.App object at 0x7f0021fc2bd0>
record_property = <function record_property.<locals>.append_property at 0x7f0021a9ea20>

    @pytest.mark.smoke
    @pytest.mark.parametrize(
        "app",
        [
            apps.wpe(),
            apps.snap("mir-kiosk-neverputt"),
            apps.snap("mir-kiosk-scummvm"),
            apps.snap("mir-kiosk-kodi"),
            apps.pluma(),
            apps.qterminal(),
        ],
    )
    async def test_app_can_run(self, any_server, app, record_property) -> None:
        server_instance = DisplayServer(any_server)
        program = server_instance.program(app)
        benchmarker = Benchmarker(OrderedDict(compositor=server_instance, client=program), poll_time_seconds=0.1)
>       async with benchmarker:

tests/test_apps_can_run.py:30: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.lib.benchmarker.Benchmarker object at 0x7f0021aa8fd0>
args = (None, None, None)
exs = [AssertionError('mir-kiosk-neverputt died without being waited for or killed')]
program = <mir_ci.program.display_server.DisplayServer object at 0x7f0021aa8e90>

    async def __aexit__(self, *args):
        if self.running is False:
            return
    
        self.running = False
        try:
            if self.task:
                self.task.cancel()
                with suppress(asyncio.CancelledError):
                    await self.task
        except Exception as e:
            raise e
        finally:
            exs = []
            for program in self.running_programs:
                try:
                    await program.__aexit__()
                except Exception as e:
                    exs.append(e)
            if exs:
>               raise Exception("; ".join(str(ex) for ex in (exs)))
E               Exception: mir-kiosk-neverputt died without being waited for or killed

lib/benchmarker.py:65: Exception

Check failure on line 0 in test-results-mir_test_tools-stable/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_test_tools-.xml

pytest ► tests.test_mir_flutter_app.TestMirFlutterApp ► test_mir_flutter_app[mir-test-tools-mir_test_tools]

Failed test found in:
  test-results-mir_test_tools-stable/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_test_tools-.xml
Error:
  self = <test_mir_flutter_app.TestMirFlutterApp object at 0x7f0021bca890>
Raw output
self = <test_mir_flutter_app.TestMirFlutterApp object at 0x7f0021bca890>
robot_log = PosixPath('log.html')
server = <mir_ci.program.app.App object at 0x7f0021bc8c90>
app = <mir_ci.program.program.Program object at 0x7f0021aabfd0>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-1/test_mir_flutter_app_mir_test_0')

    async def test_mir_flutter_app(self, robot_log, server, app, tmp_path) -> None:
        extensions = VirtualPointer.required_extensions + ScreencopyTracker.required_extensions
        server_instance = DisplayServer(server, add_extensions=extensions)
        assets = collect_assets("wayland", ["kvm"], "mir_flutter_app")
    
        async with server_instance, server_instance.program(App(app.command[0], app.app_type)) as app:
            tuple((tmp_path / k).symlink_to(v) for k, v in assets.items())
            robot = server_instance.program(App(("robot", "-d", tmp_path, "--log", robot_log, tmp_path)))
            async with robot:
>               await robot.wait(120)

tests/test_mir_flutter_app.py:55: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.program.program.Program object at 0x7f0021aabb10>, timeout = 120
term_timeout = 10

    async def wait(self, timeout=default_wait_timeout, term_timeout=default_term_timeout) -> None:
        if self.is_running():
            self.send_signals_task = asyncio.create_task(self.send_kill_signals(timeout, term_timeout))
        if self.process_end is not None:
            await self.process_end
            self.process_end = None
            print("\n" + format_output(self.name, self.output))
            assert self.process
            if self.process.returncode != 0:
                message = self.name
                if self.sigkill_sent:
                    message += " refused to terminate"
                else:
                    message += " closed with exit code " + str(self.process.returncode)
>               raise ProgramError(message)
E               mir_ci.program.program.ProgramError: robot closed with exit code 16

program/program.py:97: ProgramError
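
One reading of the robot exit codes in these annotations: Robot Framework's documented return codes use 0-250 for the number of failed tests and 251-255 for framework errors, so exit code 1 or 2 elsewhere in this report means one or two failed test cases, and exit code 16 here suggests sixteen failed mir_flutter_app cases rather than a distinct error class. A small decoder for reference (hypothetical helper, based on the documented convention):

    def describe_robot_rc(rc: int) -> str:
        # Robot Framework convention: 0 = all tests passed, 1-249 = number of
        # failed tests, 250 = 250 or more failures, 251-255 = framework errors.
        if rc == 0:
            return "all tests passed"
        if rc <= 249:
            return f"{rc} test(s) failed"
        if rc == 250:
            return "250 or more tests failed"
        return {
            251: "help or version information printed",
            252: "invalid test data or command line options",
            253: "test execution stopped by user",
            255: "unexpected internal error",
        }.get(rc, "reserved")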

Check failure on line 0 in test-results-mir_test_tools-stable/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_test_tools-.xml

pytest ► tests.test_screencopy_bandwidth.TestScreencopyBandwidth ► test_active_app[mir-kiosk-neverputt-mir_test_tools]

Failed test found in:
  test-results-mir_test_tools-stable/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-mir_test_tools-.xml
Error:
  self = <test_screencopy_bandwidth.TestScreencopyBandwidth object at 0x7f0021c00ad0>
Raw output
self = <test_screencopy_bandwidth.TestScreencopyBandwidth object at 0x7f0021c00ad0>
record_property = <function record_property.<locals>.append_property at 0x7f0021ab8a40>
server = <mir_ci.program.display_server.DisplayServer object at 0x7f0021ac74d0>
app = <mir_ci.program.app.App object at 0x7f0021beead0>

    @pytest.mark.parametrize("server", servers.servers(servers.ServerCap.SCREENCOPY))
    @pytest.mark.parametrize(
        "app",
        [
            apps.qterminal(
                "--execute",
                f"python3 -m asciinema play {ASCIINEMA_CAST}",
                pip_pkgs=("asciinema",),
                id="asciinema",
                extra=20 + SLOWDOWN,
            ),
            apps.snap("mir-kiosk-neverputt", extra=False),
        ],
    )
    async def test_active_app(self, record_property, server, app) -> None:
        server = DisplayServer(server, add_extensions=ScreencopyTracker.required_extensions)
        tracker = ScreencopyTracker(server.display_name)
>       async with server as s, tracker, s.program(App(app.command[0], app.app_type)) as p:

tests/test_screencopy_bandwidth.py:45: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.program.program.Program object at 0x7f0021b54510>
args = (None, None, None)

    async def __aexit__(self, *args) -> None:
        if self.cgroups_task:
            self.cgroups_task.cancel()
    
        if self.process_end is not None:
            if not self.is_running():
                await self.process_end
>               raise AssertionError(f"{self.name} died without being waited for or killed")
E               AssertionError: mir-kiosk-neverputt died without being waited for or killed

program/program.py:153: AssertionError

Check failure on line 0 in test-results-ubuntu_frame-stable/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-ubuntu_frame-.xml

pytest ► tests.test_apps_can_run.TestAppsCanRun ► test_app_can_run[ubuntu_frame-mir-kiosk-neverputt]

Failed test found in:
  test-results-ubuntu_frame-stable/home/runner/work/mir-ci/mir-ci/mir-ci/mir_ci/junit-ubuntu_frame-.xml
Error:
  self = <mir_ci.lib.cgroups.Cgroup object at 0x7f57f0ea9f90>
Raw output
self = <mir_ci.lib.cgroups.Cgroup object at 0x7f57f0ea9f90>

    def get_cpu_time_microseconds(self) -> int:
        try:
>           for line in self._read_file("cpu.stat"):

lib/cgroups.py:51: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.lib.cgroups.Cgroup object at 0x7f57f0ea9f90>
file_name = 'cpu.stat'

    def _read_file(self, file_name: str) -> Iterator[str]:
        file_path = f"{self.path}/{file_name}"
>       with open(file_path, "r") as file:
E       FileNotFoundError: [Errno 2] No such file or directory: '/sys/fs/cgroup/user.slice/user-1001.slice/user@1001.service/app.slice/snap.mir-kiosk-neverputt.mir-kiosk-neverputt-daf64387-8368-44c7-a40c-8788b00422dc.scope/cpu.stat'

lib/cgroups.py:46: FileNotFoundError

The above exception was the direct cause of the following exception:

self = <mir_ci.lib.benchmarker.CgroupsBackend object at 0x7f57f0ea8f50>

    async def poll(self) -> None:
        for name, info in self.data_records.items():
            try:
                cgroup = await info.program.get_cgroup()
>               cpu_ms = cgroup.get_cpu_time_microseconds()

lib/benchmarker.py:94: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.lib.cgroups.Cgroup object at 0x7f57f0ea9f90>

    def get_cpu_time_microseconds(self) -> int:
        try:
            for line in self._read_file("cpu.stat"):
                split_line = line.split(" ")
                if split_line[0] == "usage_usec":
                    return int(split_line[1])
    
            raise RuntimeError("usage_usec line not found")
        except Exception as ex:
>           raise RuntimeError(f"Unable to get the cpu time for cgroup: {self.path}") from ex
E           RuntimeError: Unable to get the cpu time for cgroup: /sys/fs/cgroup/user.slice/user-1001.slice/user@1001.service/app.slice/snap.mir-kiosk-neverputt.mir-kiosk-neverputt-daf64387-8368-44c7-a40c-8788b00422dc.scope

lib/cgroups.py:58: RuntimeError

During handling of the above exception, another exception occurred:

self = <mir_ci.lib.benchmarker.Benchmarker object at 0x7f57f0ea8f10>
args = (None, None, None)
exs = [AssertionError('mir-kiosk-neverputt died without being waited for or killed')]
program = <mir_ci.program.display_server.DisplayServer object at 0x7f57f0ea8dd0>

    async def __aexit__(self, *args):
        if self.running is False:
            return
    
        self.running = False
        try:
            if self.task:
                self.task.cancel()
                with suppress(asyncio.CancelledError):
                    await self.task
        except Exception as e:
>           raise e

lib/benchmarker.py:56: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
lib/benchmarker.py:54: in __aexit__
    await self.task
lib/benchmarker.py:24: in _run
    await self.backend.poll()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.lib.benchmarker.CgroupsBackend object at 0x7f57f0ea8f50>

    async def poll(self) -> None:
        for name, info in self.data_records.items():
            try:
                cgroup = await info.program.get_cgroup()
                cpu_ms = cgroup.get_cpu_time_microseconds()
                mem_current = cgroup.get_current_memory()
                try:
                    mem_max = cgroup.get_peak_memory()
                except RuntimeError:
                    mem_max = max(self.data_records[name].mem_bytes_max, mem_current)
            except RuntimeError as ex:
>               warnings.warn(f"Ignoring cgroup read failure: {ex}")
E               UserWarning: Ignoring cgroup read failure: Unable to get the cpu time for cgroup: /sys/fs/cgroup/user.slice/user-1001.slice/user@1001.service/app.slice/snap.mir-kiosk-neverputt.mir-kiosk-neverputt-daf64387-8368-44c7-a40c-8788b00422dc.scope

lib/benchmarker.py:101: UserWarning

During handling of the above exception, another exception occurred:

self = <test_apps_can_run.TestAppsCanRun object at 0x7f57f1265910>
any_server = <mir_ci.program.app.App object at 0x7f57f0ea8c10>
app = <mir_ci.program.app.App object at 0x7f57f13fd910>
record_property = <function record_property.<locals>.append_property at 0x7f57f0e9ea20>

    @pytest.mark.smoke
    @pytest.mark.parametrize(
        "app",
        [
            apps.wpe(),
            apps.snap("mir-kiosk-neverputt"),
            apps.snap("mir-kiosk-scummvm"),
            apps.snap("mir-kiosk-kodi"),
            apps.pluma(),
            apps.qterminal(),
        ],
    )
    async def test_app_can_run(self, any_server, app, record_property) -> None:
        server_instance = DisplayServer(any_server)
        program = server_instance.program(app)
        benchmarker = Benchmarker(OrderedDict(compositor=server_instance, client=program), poll_time_seconds=0.1)
>       async with benchmarker:

tests/test_apps_can_run.py:30: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <mir_ci.lib.benchmarker.Benchmarker object at 0x7f57f0ea8f10>
args = (None, None, None)
exs = [AssertionError('mir-kiosk-neverputt died without being waited for or killed')]
program = <mir_ci.program.display_server.DisplayServer object at 0x7f57f0ea8dd0>

    async def __aexit__(self, *args):
        if self.running is False:
            return
    
        self.running = False
        try:
            if self.task:
                self.task.cancel()
                with suppress(asyncio.CancelledError):
                    await self.task
        except Exception as e:
            raise e
        finally:
            exs = []
            for program in self.running_programs:
                try:
                    await program.__aexit__()
                except Exception as e:
                    exs.append(e)
            if exs:
>               raise Exception("; ".join(str(ex) for ex in (exs)))
E               Exception: mir-kiosk-neverputt died without being waited for or killed

lib/benchmarker.py:65: Exception