use_pty:FALSE /usr/share/restraint/plugins/run_task_plugins bash ./runtest.sh
::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
:: Test
::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::

:: [ 17:37:37 ] :: [ BEGIN ] :: Running 'uname -a'
Linux hpe-dl360gen8-01.hpe2.lab.eng.bos.redhat.com 6.2.0-rc5 #1 SMP PREEMPT_DYNAMIC Sun Jan 22 18:37:13 UTC 2023 x86_64 GNU/Linux
:: [ 17:37:37 ] :: [ PASS ] :: Command 'uname -a' (Expected 0, got 0)
:: [ 17:37:38 ] :: [ BEGIN ] :: Running 'rpm -q mdadm || dnf install -y mdadm'
mdadm-4.2-2.fc37.x86_64
:: [ 17:37:38 ] :: [ PASS ] :: Command 'rpm -q mdadm || dnf install -y mdadm' (Expected 0, got 0)
:: [ 17:37:38 ] :: [ LOG ] :: ./runtest.sh
:: [ 17:37:38 ] :: [ BEGIN ] :: Running 'modprobe raid456 devices_handle_discard_safely=Y'
:: [ 17:37:38 ] :: [ PASS ] :: Command 'modprobe raid456 devices_handle_discard_safely=Y' (Expected 0, got 0)
:: [ 17:37:38 ] :: [ BEGIN ] :: Running 'echo Y >/sys/module/raid456/parameters/devices_handle_discard_safely'
:: [ 17:37:38 ] :: [ PASS ] :: Command 'echo Y >/sys/module/raid456/parameters/devices_handle_discard_safely' (Expected 0, got 0)
/usr/sbin/mkfs.xfs
INFO: Executing MD_Create_RAID() to create raid 0
INFO: Created md raid with these raid devices " /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4".
:: [ 17:37:46 ] :: [ BEGIN ] :: Running 'mdadm --create --run /dev/md1 --level 0 --metadata 1.2 --raid-devices 5 /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 --chunk 512'
mdadm: array /dev/md1 started.
:: [ 17:37:54 ] :: [ PASS ] :: Command 'mdadm --create --run /dev/md1 --level 0 --metadata 1.2 --raid-devices 5 /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 --chunk 512' (Expected 0, got 0)
INFO:cat /proc/mdstat######################
:: [ 17:37:54 ] :: [ BEGIN ] :: Running 'cat /proc/mdstat'
Personalities : [raid6] [raid5] [raid4] [raid0]
md1 : active raid0 loop4[4] loop3[3] loop2[2] loop1[1] loop0[0]
      2549760 blocks super 1.2 512k chunks

unused devices: <none>
:: [ 17:37:54 ] :: [ PASS ] :: Command 'cat /proc/mdstat' (Expected 0, got 0)
:: [ 17:37:54 ] :: [ BEGIN ] :: Running 'lsblk'
NAME                                 MAJ:MIN RM   SIZE RO TYPE  MOUNTPOINTS
loop0                                  7:0    0   500M  0 loop
└─md1                                  9:1    0   2.4G  0 raid0
loop1                                  7:1    0   500M  0 loop
└─md1                                  9:1    0   2.4G  0 raid0
loop2                                  7:2    0   500M  0 loop
└─md1                                  9:1    0   2.4G  0 raid0
loop3                                  7:3    0   500M  0 loop
└─md1                                  9:1    0   2.4G  0 raid0
loop4                                  7:4    0   500M  0 loop
└─md1                                  9:1    0   2.4G  0 raid0
loop5                                  7:5    0   500M  0 loop
sda                                    8:0    0 279.4G  0 disk
├─sda1                                 8:1    0     1M  0 part
├─sda2                                 8:2    0     1G  0 part  /boot
└─sda3                                 8:3    0 278.4G  0 part
  └─fedora_hpe--dl360gen8--01-root   253:0    0 278.4G  0 lvm   /
zram0                                252:0    0     8G  0 disk  [SWAP]
:: [ 17:37:54 ] :: [ PASS ] :: Command 'lsblk' (Expected 0, got 0)
egrep: warning: egrep is obsolescent; using grep -E
/dev/md1
INFO:mdadm -D /dev/md1 #########################
:: [ 17:37:54 ] :: [ BEGIN ] :: Running 'mdadm --detail /dev/md1'
/dev/md1:
           Version : 1.2
     Creation Time : Sun Jan 22 17:37:46 2023
        Raid Level : raid0
        Array Size : 2549760 (2.43 GiB 2.61 GB)
      Raid Devices : 5
     Total Devices : 5
       Persistence : Superblock is persistent

       Update Time : Sun Jan 22 17:37:46 2023
             State : clean
    Active Devices : 5
   Working Devices : 5
    Failed Devices : 0
     Spare Devices : 0

            Layout : -unknown-
        Chunk Size : 512K

Consistency Policy : none

              Name : 1
              UUID : 0307afb2:ef73df3d:213b51dd:d86fc6bc
            Events : 0

    Number   Major   Minor   RaidDevice State
       0       7        0        0      active sync   /dev/loop0
       1       7        1        1      active sync   /dev/loop1
       2       7        2        2      active sync   /dev/loop2
       3       7        3        3      active sync   /dev/loop3
       4       7        4        4      active sync   /dev/loop4
:: [ 17:37:54 ] :: [ PASS ] :: Command 'mdadm --detail /dev/md1' (Expected 0, got 0)
:: [ 17:37:54 ] :: [ LOG ] :: INFO: Successfully created md raid /dev/md1
1674427074 start_time against this md array: /dev/md1 state is clean
:: [ 17:37:54 ] :: [ LOG ] :: mkfs -t xfs /dev/md1
log stripe unit (524288 bytes) is too large (maximum is 256KiB)
log stripe unit adjusted to 32KiB
meta-data=/dev/md1               isize=512    agcount=8, agsize=79744 blks
         =                       sectsz=512   attr=2, projid32bit=1
         =                       crc=1        finobt=1, sparse=1, rmapbt=0
         =                       reflink=1    bigtime=1 inobtcount=1 nrext64=0
data     =                       bsize=4096   blocks=637440, imaxpct=25
         =                       sunit=128    swidth=640 blks
naming   =version 2              bsize=4096   ascii-ci=0, ftype=1
log      =internal log           bsize=4096   blocks=16384, version=2
         =                       sectsz=512   sunit=8 blks, lazy-count=1
realtime =none                   extsz=4096   blocks=0, rtextents=0
Discarding blocks...Done.
:: [ 17:38:00 ] :: [ BEGIN ] :: Running 'mount -t xfs /dev/md1 /mnt/md_test '
:: [ 17:38:00 ] :: [ PASS ] :: Command 'mount -t xfs /dev/md1 /mnt/md_test ' (Expected 0, got 0)
:: [ 17:38:00 ] :: [ BEGIN ] :: Running 'fstrim -v /mnt/md_test'
/mnt/md_test: 2.4 GiB (2543484928 bytes) trimmed
:: [ 17:38:00 ] :: [ PASS ] :: Command 'fstrim -v /mnt/md_test' (Expected 0, got 0)
:: [ 17:38:00 ] :: [ BEGIN ] :: Running 'umount /dev/md1'
:: [ 17:38:01 ] :: [ PASS ] :: Command 'umount /dev/md1' (Expected 0, got 0)
INFO: Executing MD_Clean_RAID() against this md device:
mdadm --stop /dev/md1
mdadm: stopped /dev/md1
clean devs : /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4
mdadm --zero-superblock /dev/loop0
mdadm --zero-superblock /dev/loop1
mdadm --zero-superblock /dev/loop2
mdadm --zero-superblock /dev/loop3
mdadm --zero-superblock /dev/loop4
ret is 0
ls /dev/md1
ls: cannot access '/dev/md1': No such file or directory
mdadm --stop can delete md node name /dev/md1 in /dev
INFO: Executing MD_Create_RAID() to create raid 1
INFO: Created md raid with these raid devices " /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4".
INFO: Created md raid with these spare disks " /dev/loop5".
:: [ 17:38:26 ] :: [ BEGIN ] :: Running 'mdadm --create --run /dev/md1 --level 1 --metadata 1.2 --raid-devices 5 /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 --spare-devices 1 /dev/loop5 --bitmap=internal --bitmap-chunk=64M'
mdadm: array /dev/md1 started.
:: [ 17:38:26 ] :: [ PASS ] :: Command 'mdadm --create --run /dev/md1 --level 1 --metadata 1.2 --raid-devices 5 /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 --spare-devices 1 /dev/loop5 --bitmap=internal --bitmap-chunk=64M' (Expected 0, got 0)
INFO:cat /proc/mdstat######################
:: [ 17:38:26 ] :: [ BEGIN ] :: Running 'cat /proc/mdstat'
Personalities : [raid6] [raid5] [raid4] [raid0] [raid1]
md1 : active raid1 loop5[5](S) loop4[4] loop3[3] loop2[2] loop1[1] loop0[0]
      510976 blocks super 1.2 [5/5] [UUUUU]
      [>....................]  resync = 3.8% (19712/510976) finish=0.4min speed=19712K/sec
      bitmap: 1/1 pages [4KB], 65536KB chunk

unused devices: <none>
:: [ 17:38:26 ] :: [ PASS ] :: Command 'cat /proc/mdstat' (Expected 0, got 0)
:: [ 17:38:26 ] :: [ BEGIN ] :: Running 'lsblk'
NAME                                 MAJ:MIN RM   SIZE RO TYPE  MOUNTPOINTS
loop0                                  7:0    0   500M  0 loop
└─md1                                  9:1    0   499M  0 raid1
loop1                                  7:1    0   500M  0 loop
└─md1                                  9:1    0   499M  0 raid1
loop2                                  7:2    0   500M  0 loop
└─md1                                  9:1    0   499M  0 raid1
loop3                                  7:3    0   500M  0 loop
└─md1                                  9:1    0   499M  0 raid1
loop4                                  7:4    0   500M  0 loop
└─md1                                  9:1    0   499M  0 raid1
loop5                                  7:5    0   500M  0 loop
└─md1                                  9:1    0   499M  0 raid1
sda                                    8:0    0 279.4G  0 disk
├─sda1                                 8:1    0     1M  0 part
├─sda2                                 8:2    0     1G  0 part  /boot
└─sda3                                 8:3    0 278.4G  0 part
  └─fedora_hpe--dl360gen8--01-root   253:0    0 278.4G  0 lvm   /
zram0                                252:0    0     8G  0 disk  [SWAP]
:: [ 17:38:26 ] :: [ PASS ] :: Command 'lsblk' (Expected 0, got 0)
egrep: warning: egrep is obsolescent; using grep -E
/dev/md1
INFO:mdadm -D /dev/md1 #########################
:: [ 17:38:26 ] :: [ BEGIN ] :: Running 'mdadm --detail /dev/md1'
/dev/md1:
           Version : 1.2
     Creation Time : Sun Jan 22 17:38:26 2023
        Raid Level : raid1
        Array Size : 510976 (499.00 MiB 523.24 MB)
     Used Dev Size : 510976 (499.00 MiB 523.24 MB)
      Raid Devices : 5
     Total Devices : 6
       Persistence : Superblock is persistent

     Intent Bitmap : Internal

       Update Time : Sun Jan 22 17:38:26 2023
             State : clean, resyncing
    Active Devices : 5
   Working Devices : 6
    Failed Devices : 0
     Spare Devices : 1

Consistency Policy : bitmap

     Resync Status : 10% complete

              Name : 1
              UUID : fdb61428:46a22a21:facc9406:631086c6
            Events : 1

    Number   Major   Minor   RaidDevice State
       0       7        0        0      active sync   /dev/loop0
       1       7        1        1      active sync   /dev/loop1
       2       7        2        2      active sync   /dev/loop2
       3       7        3        3      active sync   /dev/loop3
       4       7        4        4      active sync   /dev/loop4
       5       7        5        -      spare   /dev/loop5
:: [ 17:38:26 ] :: [ PASS ] :: Command 'mdadm --detail /dev/md1' (Expected 0, got 0)
:: [ 17:38:26 ] :: [ LOG ] :: INFO: Successfully created md raid /dev/md1
1674427106 start_time against this md array: /dev/md1 state is clean,
1674427111 start_time against this md array: /dev/md1 state is clean
:: [ 17:38:31 ] :: [ LOG ] :: mkfs -t xfs /dev/md1
meta-data=/dev/md1               isize=512    agcount=4, agsize=31936 blks
         =                       sectsz=512   attr=2, projid32bit=1
         =                       crc=1        finobt=1, sparse=1, rmapbt=0
         =                       reflink=1    bigtime=1 inobtcount=1 nrext64=0
data     =                       bsize=4096   blocks=127744, imaxpct=25
         =                       sunit=0      swidth=0 blks
naming   =version 2              bsize=4096   ascii-ci=0, ftype=1
log      =internal log           bsize=4096   blocks=16384, version=2
         =                       sectsz=512   sunit=0 blks, lazy-count=1
realtime =none                   extsz=4096   blocks=0, rtextents=0
Discarding blocks...Done.
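
The repeated "start_time against this md array: /dev/md1 state is clean" lines above suggest the harness polls the array state and only moves on to mkfs once the array has settled. A minimal sketch of such a wait loop follows; the helper name wait_md_clean and the exact check are assumptions for illustration (only the 5-second interval is implied by the poll timestamps), not the test's actual implementation.

    # Hypothetical helper: poll 'mdadm --detail' until the array State is exactly "clean".
    wait_md_clean() {
        local md=$1 state
        while true; do
            state=$(mdadm --detail "$md" | sed -n 's/^ *State : //p')
            echo "$(date +%s) start_time against this md array: $md state is $state"
            case "$state" in
                clean) return 0 ;;   # resync/recovery finished, safe to mkfs
            esac
            sleep 5
        done
    }

    # Example usage:
    # wait_md_clean /dev/md1
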
:: [ 17:38:32 ] :: [ BEGIN ] :: Running 'mount -t xfs /dev/md1 /mnt/md_test '
:: [ 17:38:32 ] :: [ PASS ] :: Command 'mount -t xfs /dev/md1 /mnt/md_test ' (Expected 0, got 0)
:: [ 17:38:32 ] :: [ BEGIN ] :: Running 'fstrim -v /mnt/md_test'
/mnt/md_test: 434.8 MiB (455933952 bytes) trimmed
:: [ 17:38:32 ] :: [ PASS ] :: Command 'fstrim -v /mnt/md_test' (Expected 0, got 0)
:: [ 17:38:32 ] :: [ BEGIN ] :: Running 'umount /dev/md1'
:: [ 17:38:32 ] :: [ PASS ] :: Command 'umount /dev/md1' (Expected 0, got 0)
INFO: Executing MD_Clean_RAID() against this md device:
mdadm --stop /dev/md1
mdadm: stopped /dev/md1
clean devs : /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 /dev/loop5
mdadm --zero-superblock /dev/loop0
mdadm --zero-superblock /dev/loop1
mdadm --zero-superblock /dev/loop2
mdadm --zero-superblock /dev/loop3
mdadm --zero-superblock /dev/loop4
mdadm --zero-superblock /dev/loop5
ret is 0
ls /dev/md1
ls: cannot access '/dev/md1': No such file or directory
mdadm --stop can delete md node name /dev/md1 in /dev
INFO: Executing MD_Create_RAID() to create raid 4
INFO: Created md raid with these raid devices " /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4".
INFO: Created md raid with these spare disks " /dev/loop5".
:: [ 17:38:57 ] :: [ BEGIN ] :: Running 'mdadm --create --run /dev/md1 --level 4 --metadata 1.2 --raid-devices 5 /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 --spare-devices 1 /dev/loop5 --chunk 512 --bitmap=internal --bitmap-chunk=64M'
mdadm: array /dev/md1 started.
:: [ 17:38:57 ] :: [ PASS ] :: Command 'mdadm --create --run /dev/md1 --level 4 --metadata 1.2 --raid-devices 5 /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 --spare-devices 1 /dev/loop5 --chunk 512 --bitmap=internal --bitmap-chunk=64M' (Expected 0, got 0)
INFO:cat /proc/mdstat######################
:: [ 17:38:57 ] :: [ BEGIN ] :: Running 'cat /proc/mdstat'
Personalities : [raid6] [raid5] [raid4] [raid0] [raid1]
md1 : active raid4 loop4[6] loop5[5](S) loop3[3] loop2[2] loop1[1] loop0[0]
      2039808 blocks super 1.2 level 4, 512k chunk, algorithm 0 [5/4] [UUUU_]
      [>....................]  recovery = 0.9% (4616/509952) finish=1.8min speed=4616K/sec
      bitmap: 1/1 pages [4KB], 65536KB chunk

unused devices: <none>
:: [ 17:38:57 ] :: [ PASS ] :: Command 'cat /proc/mdstat' (Expected 0, got 0)
:: [ 17:38:57 ] :: [ BEGIN ] :: Running 'lsblk'
NAME                                 MAJ:MIN RM   SIZE RO TYPE  MOUNTPOINTS
loop0                                  7:0    0   500M  0 loop
└─md1                                  9:1    0   1.9G  0 raid4
loop1                                  7:1    0   500M  0 loop
└─md1                                  9:1    0   1.9G  0 raid4
loop2                                  7:2    0   500M  0 loop
└─md1                                  9:1    0   1.9G  0 raid4
loop3                                  7:3    0   500M  0 loop
└─md1                                  9:1    0   1.9G  0 raid4
loop4                                  7:4    0   500M  0 loop
└─md1                                  9:1    0   1.9G  0 raid4
loop5                                  7:5    0   500M  0 loop
└─md1                                  9:1    0   1.9G  0 raid4
sda                                    8:0    0 279.4G  0 disk
├─sda1                                 8:1    0     1M  0 part
├─sda2                                 8:2    0     1G  0 part  /boot
└─sda3                                 8:3    0 278.4G  0 part
  └─fedora_hpe--dl360gen8--01-root   253:0    0 278.4G  0 lvm   /
zram0                                252:0    0     8G  0 disk  [SWAP]
:: [ 17:38:57 ] :: [ PASS ] :: Command 'lsblk' (Expected 0, got 0)
egrep: warning: egrep is obsolescent; using grep -E
/dev/md1
INFO:mdadm -D /dev/md1 #########################
:: [ 17:38:57 ] :: [ BEGIN ] :: Running 'mdadm --detail /dev/md1'
/dev/md1:
           Version : 1.2
     Creation Time : Sun Jan 22 17:38:57 2023
        Raid Level : raid4
        Array Size : 2039808 (1992.00 MiB 2088.76 MB)
     Used Dev Size : 509952 (498.00 MiB 522.19 MB)
      Raid Devices : 5
     Total Devices : 6
       Persistence : Superblock is persistent

     Intent Bitmap : Internal

       Update Time : Sun Jan 22 17:38:57 2023
             State : clean, degraded, recovering
    Active Devices : 4
   Working Devices : 6
    Failed Devices : 0
     Spare Devices : 2

        Chunk Size : 512K

Consistency Policy : bitmap

    Rebuild Status : 3% complete

              Name : 1
              UUID : 6f21c496:4f8bf2a5:a8909621:6b09dc60
            Events : 1

    Number   Major   Minor   RaidDevice State
       0       7        0        0      active sync   /dev/loop0
       1       7        1        1      active sync   /dev/loop1
       2       7        2        2      active sync   /dev/loop2
       3       7        3        3      active sync   /dev/loop3
       6       7        4        4      spare rebuilding   /dev/loop4
       5       7        5        -      spare   /dev/loop5
:: [ 17:38:57 ] :: [ PASS ] :: Command 'mdadm --detail /dev/md1' (Expected 0, got 0)
:: [ 17:38:57 ] :: [ LOG ] :: INFO: Successfully created md raid /dev/md1
1674427137 start_time against this md array: /dev/md1 state is clean,
1674427142 start_time against this md array: /dev/md1 state is clean,
1674427147 start_time against this md array: /dev/md1 state is clean
:: [ 17:39:07 ] :: [ LOG ] :: mkfs -t xfs /dev/md1
log stripe unit (524288 bytes) is too large (maximum is 256KiB)
log stripe unit adjusted to 32KiB
meta-data=/dev/md1               isize=512    agcount=8, agsize=63744 blks
         =                       sectsz=512   attr=2, projid32bit=1
         =                       crc=1        finobt=1, sparse=1, rmapbt=0
         =                       reflink=1    bigtime=1 inobtcount=1 nrext64=0
data     =                       bsize=4096   blocks=509952, imaxpct=25
         =                       sunit=128    swidth=512 blks
naming   =version 2              bsize=4096   ascii-ci=0, ftype=1
log      =internal log           bsize=4096   blocks=16384, version=2
         =                       sectsz=512   sunit=8 blks, lazy-count=1
realtime =none                   extsz=4096   blocks=0, rtextents=0
Discarding blocks...Done.
:: [ 17:39:13 ] :: [ BEGIN ] :: Running 'mount -t xfs /dev/md1 /mnt/md_test '
:: [ 17:39:13 ] :: [ PASS ] :: Command 'mount -t xfs /dev/md1 /mnt/md_test ' (Expected 0, got 0)
:: [ 17:39:13 ] :: [ BEGIN ] :: Running 'fstrim -v /mnt/md_test'
/mnt/md_test: 1.9 GiB (2020802560 bytes) trimmed
:: [ 17:39:16 ] :: [ PASS ] :: Command 'fstrim -v /mnt/md_test' (Expected 0, got 0)
:: [ 17:39:16 ] :: [ BEGIN ] :: Running 'umount /dev/md1'
:: [ 17:39:16 ] :: [ PASS ] :: Command 'umount /dev/md1' (Expected 0, got 0)
INFO: Executing MD_Clean_RAID() against this md device:
mdadm --stop /dev/md1
mdadm: stopped /dev/md1
clean devs : /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 /dev/loop5
mdadm --zero-superblock /dev/loop0
mdadm --zero-superblock /dev/loop1
mdadm --zero-superblock /dev/loop2
mdadm --zero-superblock /dev/loop3
mdadm --zero-superblock /dev/loop4
mdadm --zero-superblock /dev/loop5
ret is 0
ls /dev/md1
ls: cannot access '/dev/md1': No such file or directory
mdadm --stop can delete md node name /dev/md1 in /dev
INFO: Executing MD_Create_RAID() to create raid 5
INFO: Created md raid with these raid devices " /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4".
INFO: Created md raid with these spare disks " /dev/loop5".
:: [ 17:39:41 ] :: [ BEGIN ] :: Running 'mdadm --create --run /dev/md1 --level 5 --metadata 1.2 --raid-devices 5 /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 --spare-devices 1 /dev/loop5 --chunk 512 --bitmap=internal --bitmap-chunk=64M'
mdadm: array /dev/md1 started.
:: [ 17:39:41 ] :: [ PASS ] :: Command 'mdadm --create --run /dev/md1 --level 5 --metadata 1.2 --raid-devices 5 /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 --spare-devices 1 /dev/loop5 --chunk 512 --bitmap=internal --bitmap-chunk=64M' (Expected 0, got 0)
INFO:cat /proc/mdstat######################
:: [ 17:39:41 ] :: [ BEGIN ] :: Running 'cat /proc/mdstat'
Personalities : [raid6] [raid5] [raid4] [raid0] [raid1]
md1 : active raid5 loop4[6] loop5[5](S) loop3[3] loop2[2] loop1[1] loop0[0]
      2039808 blocks super 1.2 level 5, 512k chunk, algorithm 2 [5/4] [UUUU_]
      [>....................]  recovery = 0.7% (3968/509952) finish=2.1min speed=3968K/sec
      bitmap: 1/1 pages [4KB], 65536KB chunk

unused devices: <none>
:: [ 17:39:41 ] :: [ PASS ] :: Command 'cat /proc/mdstat' (Expected 0, got 0)
:: [ 17:39:41 ] :: [ BEGIN ] :: Running 'lsblk'
NAME                                 MAJ:MIN RM   SIZE RO TYPE  MOUNTPOINTS
loop0                                  7:0    0   500M  0 loop
└─md1                                  9:1    0   1.9G  0 raid5
loop1                                  7:1    0   500M  0 loop
└─md1                                  9:1    0   1.9G  0 raid5
loop2                                  7:2    0   500M  0 loop
└─md1                                  9:1    0   1.9G  0 raid5
loop3                                  7:3    0   500M  0 loop
└─md1                                  9:1    0   1.9G  0 raid5
loop4                                  7:4    0   500M  0 loop
└─md1                                  9:1    0   1.9G  0 raid5
loop5                                  7:5    0   500M  0 loop
└─md1                                  9:1    0   1.9G  0 raid5
sda                                    8:0    0 279.4G  0 disk
├─sda1                                 8:1    0     1M  0 part
├─sda2                                 8:2    0     1G  0 part  /boot
└─sda3                                 8:3    0 278.4G  0 part
  └─fedora_hpe--dl360gen8--01-root   253:0    0 278.4G  0 lvm   /
zram0                                252:0    0     8G  0 disk  [SWAP]
:: [ 17:39:41 ] :: [ PASS ] :: Command 'lsblk' (Expected 0, got 0)
egrep: warning: egrep is obsolescent; using grep -E
/dev/md1
INFO:mdadm -D /dev/md1 #########################
:: [ 17:39:41 ] :: [ BEGIN ] :: Running 'mdadm --detail /dev/md1'
/dev/md1:
           Version : 1.2
     Creation Time : Sun Jan 22 17:39:41 2023
        Raid Level : raid5
        Array Size : 2039808 (1992.00 MiB 2088.76 MB)
     Used Dev Size : 509952 (498.00 MiB 522.19 MB)
      Raid Devices : 5
     Total Devices : 6
       Persistence : Superblock is persistent

     Intent Bitmap : Internal

       Update Time : Sun Jan 22 17:39:41 2023
             State : clean, degraded, recovering
    Active Devices : 4
   Working Devices : 6
    Failed Devices : 0
     Spare Devices : 2

            Layout : left-symmetric
        Chunk Size : 512K

Consistency Policy : bitmap

    Rebuild Status : 3% complete

              Name : 1
              UUID : c92c34cf:0a475b77:ccd0f01a:3c7a6fb1
            Events : 1

    Number   Major   Minor   RaidDevice State
       0       7        0        0      active sync   /dev/loop0
       1       7        1        1      active sync   /dev/loop1
       2       7        2        2      active sync   /dev/loop2
       3       7        3        3      active sync   /dev/loop3
       6       7        4        4      spare rebuilding   /dev/loop4
       5       7        5        -      spare   /dev/loop5
:: [ 17:39:41 ] :: [ PASS ] :: Command 'mdadm --detail /dev/md1' (Expected 0, got 0)
:: [ 17:39:41 ] :: [ LOG ] :: INFO: Successfully created md raid /dev/md1
1674427181 start_time against this md array: /dev/md1 state is clean,
1674427186 start_time against this md array: /dev/md1 state is clean,
1674427191 start_time against this md array: /dev/md1 state is clean
:: [ 17:39:51 ] :: [ LOG ] :: mkfs -t xfs /dev/md1
mkfs.xfs: /dev/md1 appears to contain an existing filesystem (xfs).
mkfs.xfs: Use the -f option to force overwrite.
log stripe unit (524288 bytes) is too large (maximum is 256KiB)
log stripe unit adjusted to 32KiB
meta-data=/dev/md1               isize=512    agcount=8, agsize=63744 blks
         =                       sectsz=512   attr=2, projid32bit=1
         =                       crc=1        finobt=1, sparse=1, rmapbt=0
         =                       reflink=1    bigtime=1 inobtcount=1 nrext64=0
data     =                       bsize=4096   blocks=509952, imaxpct=25
         =                       sunit=128    swidth=512 blks
naming   =version 2              bsize=4096   ascii-ci=0, ftype=1
log      =internal log           bsize=4096   blocks=16384, version=2
         =                       sectsz=512   sunit=8 blks, lazy-count=1
realtime =none                   extsz=4096   blocks=0, rtextents=0
Discarding blocks...Done.
:: [ 17:39:58 ] :: [ BEGIN ] :: Running 'mount -t xfs /dev/md1 /mnt/md_test '
:: [ 17:39:58 ] :: [ PASS ] :: Command 'mount -t xfs /dev/md1 /mnt/md_test ' (Expected 0, got 0)
:: [ 17:39:58 ] :: [ BEGIN ] :: Running 'fstrim -v /mnt/md_test'
/mnt/md_test: 1.9 GiB (2020802560 bytes) trimmed
:: [ 17:40:00 ] :: [ PASS ] :: Command 'fstrim -v /mnt/md_test' (Expected 0, got 0)
:: [ 17:40:00 ] :: [ BEGIN ] :: Running 'umount /dev/md1'
:: [ 17:40:00 ] :: [ PASS ] :: Command 'umount /dev/md1' (Expected 0, got 0)
INFO: Executing MD_Clean_RAID() against this md device:
mdadm --stop /dev/md1
mdadm: stopped /dev/md1
clean devs : /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 /dev/loop5
mdadm --zero-superblock /dev/loop0
mdadm --zero-superblock /dev/loop1
mdadm --zero-superblock /dev/loop2
mdadm --zero-superblock /dev/loop3
mdadm --zero-superblock /dev/loop4
mdadm --zero-superblock /dev/loop5
ret is 0
ls /dev/md1
ls: cannot access '/dev/md1': No such file or directory
mdadm --stop can delete md node name /dev/md1 in /dev
INFO: Executing MD_Create_RAID() to create raid 6
INFO: Created md raid with these raid devices " /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4".
INFO: Created md raid with these spare disks " /dev/loop5".
:: [ 17:40:25 ] :: [ BEGIN ] :: Running 'mdadm --create --run /dev/md1 --level 6 --metadata 1.2 --raid-devices 5 /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 --spare-devices 1 /dev/loop5 --chunk 512 --bitmap=internal --bitmap-chunk=64M'
mdadm: array /dev/md1 started.
:: [ 17:40:25 ] :: [ PASS ] :: Command 'mdadm --create --run /dev/md1 --level 6 --metadata 1.2 --raid-devices 5 /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 --spare-devices 1 /dev/loop5 --chunk 512 --bitmap=internal --bitmap-chunk=64M' (Expected 0, got 0)
INFO:cat /proc/mdstat######################
:: [ 17:40:25 ] :: [ BEGIN ] :: Running 'cat /proc/mdstat'
Personalities : [raid6] [raid5] [raid4] [raid0] [raid1]
md1 : active raid6 loop5[5](S) loop4[4] loop3[3] loop2[2] loop1[1] loop0[0]
      1529856 blocks super 1.2 level 6, 512k chunk, algorithm 2 [5/5] [UUUUU]
      [>....................]  resync = 1.5% (7680/509952) finish=1.0min speed=7680K/sec
      bitmap: 1/1 pages [4KB], 65536KB chunk

unused devices: <none>
:: [ 17:40:25 ] :: [ PASS ] :: Command 'cat /proc/mdstat' (Expected 0, got 0)
:: [ 17:40:25 ] :: [ BEGIN ] :: Running 'lsblk'
NAME                                 MAJ:MIN RM   SIZE RO TYPE  MOUNTPOINTS
loop0                                  7:0    0   500M  0 loop
└─md1                                  9:1    0   1.5G  0 raid6
loop1                                  7:1    0   500M  0 loop
└─md1                                  9:1    0   1.5G  0 raid6
loop2                                  7:2    0   500M  0 loop
└─md1                                  9:1    0   1.5G  0 raid6
loop3                                  7:3    0   500M  0 loop
└─md1                                  9:1    0   1.5G  0 raid6
loop4                                  7:4    0   500M  0 loop
└─md1                                  9:1    0   1.5G  0 raid6
loop5                                  7:5    0   500M  0 loop
└─md1                                  9:1    0   1.5G  0 raid6
sda                                    8:0    0 279.4G  0 disk
├─sda1                                 8:1    0     1M  0 part
├─sda2                                 8:2    0     1G  0 part  /boot
└─sda3                                 8:3    0 278.4G  0 part
  └─fedora_hpe--dl360gen8--01-root   253:0    0 278.4G  0 lvm   /
zram0                                252:0    0     8G  0 disk  [SWAP]
:: [ 17:40:25 ] :: [ PASS ] :: Command 'lsblk' (Expected 0, got 0)
egrep: warning: egrep is obsolescent; using grep -E
/dev/md1
INFO:mdadm -D /dev/md1 #########################
:: [ 17:40:25 ] :: [ BEGIN ] :: Running 'mdadm --detail /dev/md1'
/dev/md1:
           Version : 1.2
     Creation Time : Sun Jan 22 17:40:25 2023
        Raid Level : raid6
        Array Size : 1529856 (1494.00 MiB 1566.57 MB)
     Used Dev Size : 509952 (498.00 MiB 522.19 MB)
      Raid Devices : 5
     Total Devices : 6
       Persistence : Superblock is persistent

     Intent Bitmap : Internal

       Update Time : Sun Jan 22 17:40:25 2023
             State : clean, resyncing
    Active Devices : 5
   Working Devices : 6
    Failed Devices : 0
     Spare Devices : 1

            Layout : left-symmetric
        Chunk Size : 512K

Consistency Policy : bitmap

     Resync Status : 6% complete

              Name : 1
              UUID : e410210c:46cd3298:8e08b086:7c5385b0
            Events : 1

    Number   Major   Minor   RaidDevice State
       0       7        0        0      active sync   /dev/loop0
       1       7        1        1      active sync   /dev/loop1
       2       7        2        2      active sync   /dev/loop2
       3       7        3        3      active sync   /dev/loop3
       4       7        4        4      active sync   /dev/loop4
       5       7        5        -      spare   /dev/loop5
:: [ 17:40:25 ] :: [ PASS ] :: Command 'mdadm --detail /dev/md1' (Expected 0, got 0)
:: [ 17:40:25 ] :: [ LOG ] :: INFO: Successfully created md raid /dev/md1
1674427225 start_time against this md array: /dev/md1 state is clean,
1674427230 start_time against this md array: /dev/md1 state is clean
:: [ 17:40:30 ] :: [ LOG ] :: mkfs -t xfs /dev/md1
log stripe unit (524288 bytes) is too large (maximum is 256KiB)
log stripe unit adjusted to 32KiB
meta-data=/dev/md1               isize=512    agcount=8, agsize=47872 blks
         =                       sectsz=512   attr=2, projid32bit=1
         =                       crc=1        finobt=1, sparse=1, rmapbt=0
         =                       reflink=1    bigtime=1 inobtcount=1 nrext64=0
data     =                       bsize=4096   blocks=382464, imaxpct=25
         =                       sunit=128    swidth=384 blks
naming   =version 2              bsize=4096   ascii-ci=0, ftype=1
log      =internal log           bsize=4096   blocks=16384, version=2
         =                       sectsz=512   sunit=8 blks, lazy-count=1
realtime =none                   extsz=4096   blocks=0, rtextents=0
Discarding blocks...Done.
:: [ 17:40:33 ] :: [ BEGIN ] :: Running 'mount -t xfs /dev/md1 /mnt/md_test '
:: [ 17:40:33 ] :: [ PASS ] :: Command 'mount -t xfs /dev/md1 /mnt/md_test ' (Expected 0, got 0)
:: [ 17:40:33 ] :: [ BEGIN ] :: Running 'fstrim -v /mnt/md_test'
/mnt/md_test: 1.4 GiB (1498611712 bytes) trimmed
:: [ 17:40:35 ] :: [ PASS ] :: Command 'fstrim -v /mnt/md_test' (Expected 0, got 0)
:: [ 17:40:35 ] :: [ BEGIN ] :: Running 'umount /dev/md1'
:: [ 17:40:35 ] :: [ PASS ] :: Command 'umount /dev/md1' (Expected 0, got 0)
INFO: Executing MD_Clean_RAID() against this md device:
mdadm --stop /dev/md1
mdadm: stopped /dev/md1
clean devs : /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 /dev/loop5
mdadm --zero-superblock /dev/loop0
mdadm --zero-superblock /dev/loop1
mdadm --zero-superblock /dev/loop2
mdadm --zero-superblock /dev/loop3
mdadm --zero-superblock /dev/loop4
mdadm --zero-superblock /dev/loop5
ret is 0
ls /dev/md1
ls: cannot access '/dev/md1': No such file or directory
mdadm --stop can delete md node name /dev/md1 in /dev
INFO: Executing MD_Create_RAID() to create raid 10
INFO: Created md raid with these raid devices " /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4".
INFO: Created md raid with these spare disks " /dev/loop5".
:: [ 17:41:00 ] :: [ BEGIN ] :: Running 'mdadm --create --run /dev/md1 --level 10 --metadata 1.2 --raid-devices 5 /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 --spare-devices 1 /dev/loop5 --chunk 512 --bitmap=internal --bitmap-chunk=64M'
mdadm: array /dev/md1 started.
:: [ 17:41:00 ] :: [ PASS ] :: Command 'mdadm --create --run /dev/md1 --level 10 --metadata 1.2 --raid-devices 5 /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 --spare-devices 1 /dev/loop5 --chunk 512 --bitmap=internal --bitmap-chunk=64M' (Expected 0, got 0)
INFO:cat /proc/mdstat######################
:: [ 17:41:00 ] :: [ BEGIN ] :: Running 'cat /proc/mdstat'
Personalities : [raid6] [raid5] [raid4] [raid0] [raid1] [raid10]
md1 : active raid10 loop5[5](S) loop4[4] loop3[3] loop2[2] loop1[1] loop0[0]
      1274880 blocks super 1.2 512K chunks 2 near-copies [5/5] [UUUUU]
      [>....................]  resync = 3.7% (47680/1274880) finish=0.4min speed=47680K/sec
      bitmap: 1/1 pages [4KB], 65536KB chunk

unused devices: <none>
:: [ 17:41:00 ] :: [ PASS ] :: Command 'cat /proc/mdstat' (Expected 0, got 0)
:: [ 17:41:00 ] :: [ BEGIN ] :: Running 'lsblk'
NAME                                 MAJ:MIN RM   SIZE RO TYPE   MOUNTPOINTS
loop0                                  7:0    0   500M  0 loop
└─md1                                  9:1    0   1.2G  0 raid10
loop1                                  7:1    0   500M  0 loop
└─md1                                  9:1    0   1.2G  0 raid10
loop2                                  7:2    0   500M  0 loop
└─md1                                  9:1    0   1.2G  0 raid10
loop3                                  7:3    0   500M  0 loop
└─md1                                  9:1    0   1.2G  0 raid10
loop4                                  7:4    0   500M  0 loop
└─md1                                  9:1    0   1.2G  0 raid10
loop5                                  7:5    0   500M  0 loop
└─md1                                  9:1    0   1.2G  0 raid10
sda                                    8:0    0 279.4G  0 disk
├─sda1                                 8:1    0     1M  0 part
├─sda2                                 8:2    0     1G  0 part   /boot
└─sda3                                 8:3    0 278.4G  0 part
  └─fedora_hpe--dl360gen8--01-root   253:0    0 278.4G  0 lvm    /
zram0                                252:0    0     8G  0 disk   [SWAP]
:: [ 17:41:00 ] :: [ PASS ] :: Command 'lsblk' (Expected 0, got 0)
egrep: warning: egrep is obsolescent; using grep -E
/dev/md1
INFO:mdadm -D /dev/md1 #########################
:: [ 17:41:00 ] :: [ BEGIN ] :: Running 'mdadm --detail /dev/md1'
/dev/md1:
           Version : 1.2
     Creation Time : Sun Jan 22 17:41:00 2023
        Raid Level : raid10
        Array Size : 1274880 (1245.00 MiB 1305.48 MB)
     Used Dev Size : 509952 (498.00 MiB 522.19 MB)
      Raid Devices : 5
     Total Devices : 6
       Persistence : Superblock is persistent

     Intent Bitmap : Internal

       Update Time : Sun Jan 22 17:41:00 2023
             State : clean, resyncing
    Active Devices : 5
   Working Devices : 6
    Failed Devices : 0
     Spare Devices : 1

            Layout : near=2
        Chunk Size : 512K

Consistency Policy : bitmap

     Resync Status : 17% complete

              Name : 1
              UUID : b7da6e1d:8f1201ad:08cb9d37:7b2aa007
            Events : 2

    Number   Major   Minor   RaidDevice State
       0       7        0        0      active sync   /dev/loop0
       1       7        1        1      active sync   /dev/loop1
       2       7        2        2      active sync   /dev/loop2
       3       7        3        3      active sync   /dev/loop3
       4       7        4        4      active sync   /dev/loop4
       5       7        5        -      spare   /dev/loop5
:: [ 17:41:00 ] :: [ PASS ] :: Command 'mdadm --detail /dev/md1' (Expected 0, got 0)
:: [ 17:41:00 ] :: [ LOG ] :: INFO: Successfully created md raid /dev/md1
1674427260 start_time against this md array: /dev/md1 state is clean,
1674427265 start_time against this md array: /dev/md1 state is clean,
1674427270 start_time against this md array: /dev/md1 state is clean
:: [ 17:41:10 ] :: [ LOG ] :: mkfs -t xfs /dev/md1
log stripe unit (524288 bytes) is too large (maximum is 256KiB)
log stripe unit adjusted to 32KiB
meta-data=/dev/md1               isize=512    agcount=8, agsize=39936 blks
         =                       sectsz=512   attr=2, projid32bit=1
         =                       crc=1        finobt=1, sparse=1, rmapbt=0
         =                       reflink=1    bigtime=1 inobtcount=1 nrext64=0
data     =                       bsize=4096   blocks=318720, imaxpct=25
         =                       sunit=128    swidth=640 blks
naming   =version 2              bsize=4096   ascii-ci=0, ftype=1
log      =internal log           bsize=4096   blocks=16384, version=2
         =                       sectsz=512   sunit=8 blks, lazy-count=1
realtime =none                   extsz=4096   blocks=0, rtextents=0
Discarding blocks...Done.
:: [ 17:41:11 ] :: [ BEGIN ] :: Running 'mount -t xfs /dev/md1 /mnt/md_test '
:: [ 17:41:11 ] :: [ PASS ] :: Command 'mount -t xfs /dev/md1 /mnt/md_test ' (Expected 0, got 0)
:: [ 17:41:11 ] :: [ BEGIN ] :: Running 'fstrim -v /mnt/md_test'
/mnt/md_test: 1.2 GiB (1238007808 bytes) trimmed
:: [ 17:41:11 ] :: [ PASS ] :: Command 'fstrim -v /mnt/md_test' (Expected 0, got 0)
:: [ 17:41:11 ] :: [ BEGIN ] :: Running 'umount /dev/md1'
:: [ 17:41:11 ] :: [ PASS ] :: Command 'umount /dev/md1' (Expected 0, got 0)
INFO: Executing MD_Clean_RAID() against this md device:
mdadm --stop /dev/md1
mdadm: stopped /dev/md1
clean devs : /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 /dev/loop5
mdadm --zero-superblock /dev/loop0
mdadm --zero-superblock /dev/loop1
mdadm --zero-superblock /dev/loop2
mdadm --zero-superblock /dev/loop3
mdadm --zero-superblock /dev/loop4
mdadm --zero-superblock /dev/loop5
ret is 0
ls /dev/md1
ls: cannot access '/dev/md1': No such file or directory
mdadm --stop can delete md node name /dev/md1 in /dev
losetup -d /dev/loop0
losetup -d /dev/loop1
losetup -d /dev/loop2
losetup -d /dev/loop3
losetup -d /dev/loop4
losetup -d /dev/loop5
:: [ 17:41:31 ] :: [ BEGIN ] :: check the errors :: actually running 'dmesg | grep -i 'Call Trace:''
:: [ 17:41:31 ] :: [ PASS ] :: check the errors (Expected 1, got 1)
::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
:: Duration: 234s
:: Assertions: 47 good, 0 bad
:: RESULT: PASS (Test)
** Test PASS Score:0
Uploading resultoutputfile.log .done
::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
:: TEST PROTOCOL
::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    Test run ID   : 6
    Package       : unknown
    beakerlib RPM : beakerlib-1.29.3-1.fc38.noarch
    bl-redhat RPM : beakerlib-redhat-1-33.fc37eng.noarch
    Test name     : unknown
    Test started  : 2023-01-22 17:37:37 EST
    Test finished : 2023-01-22 17:41:32 EST (still running)
    Test duration : 235 seconds
    Distro        : Fedora release 38 (Rawhide)
    Hostname      : hpe-dl360gen8-01.hpe2.lab.eng.bos.redhat.com
    Architecture  : unknown
    CPUs          : 32 x Intel(R) Xeon(R) CPU E5-2650 0 @ 2.00GHz
    RAM size      : 32091 MB
    HDD size      : 279.23 GB
::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
:: Test description
::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
PURPOSE of trim
Description: Test the TRIM function, which quickly discards invalid data, on RAID arrays created with mdadm.
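
The per-level cycle recorded above can be summarized in a short bash sketch. This is an illustrative reconstruction from the commands visible in this log, not the actual runtest.sh: the helper names setup_loop_devices and trim_cycle, the backing-file paths, and the 500 MiB file size are assumptions, and the RAID 0 pass (which uses no spare device or internal bitmap in the log) is left out of the loop for brevity.

    #!/bin/bash

    # Allow TRIM/discard to pass through raid456 (raid4/5/6), as done at the top of the log.
    modprobe raid456 devices_handle_discard_safely=Y
    echo Y >/sys/module/raid456/parameters/devices_handle_discard_safely

    # Hypothetical helper: back six loop devices with 500 MiB sparse files.
    setup_loop_devices() {
        for i in 0 1 2 3 4 5; do
            truncate -s 500M "/var/tmp/md_disk$i.img"
            losetup "/dev/loop$i" "/var/tmp/md_disk$i.img"
        done
    }

    # Hypothetical helper: one create/mkfs/mount/fstrim/umount/clean pass for a given RAID level.
    trim_cycle() {
        local level=$1
        local chunk=()
        if [ "$level" != 1 ]; then chunk=(--chunk 512); fi   # the log omits --chunk for RAID 1
        mdadm --create --run /dev/md1 --level "$level" --metadata 1.2 \
              --raid-devices 5 /dev/loop{0..4} \
              --spare-devices 1 /dev/loop5 \
              --bitmap=internal --bitmap-chunk=64M "${chunk[@]}"
        mkfs.xfs -f /dev/md1                  # the log runs 'mkfs -t xfs'; -f added here for reruns
        mkdir -p /mnt/md_test
        mount -t xfs /dev/md1 /mnt/md_test
        fstrim -v /mnt/md_test                # the behaviour under test: discard must succeed
        umount /dev/md1
        mdadm --stop /dev/md1
        mdadm --zero-superblock /dev/loop{0..5}
    }

    setup_loop_devices
    for level in 1 4 5 6 10; do
        trim_cycle "$level"
    done
    losetup -d /dev/loop{0..5}
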
::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
:: Test
::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::

:: [ 17:37:37 ] :: [ PASS ] :: Command 'uname -a' (Expected 0, got 0)
:: [ 17:37:38 ] :: [ PASS ] :: Command 'rpm -q mdadm || dnf install -y mdadm' (Expected 0, got 0)
:: [ 17:37:38 ] :: [ LOG ] :: ./runtest.sh
:: [ 17:37:38 ] :: [ PASS ] :: Command 'modprobe raid456 devices_handle_discard_safely=Y' (Expected 0, got 0)
:: [ 17:37:38 ] :: [ PASS ] :: Command 'echo Y >/sys/module/raid456/parameters/devices_handle_discard_safely' (Expected 0, got 0)
:: [ 17:37:54 ] :: [ PASS ] :: Command 'mdadm --create --run /dev/md1 --level 0 --metadata 1.2 --raid-devices 5 /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 --chunk 512' (Expected 0, got 0)
:: [ 17:37:54 ] :: [ PASS ] :: Command 'cat /proc/mdstat' (Expected 0, got 0)
:: [ 17:37:54 ] :: [ PASS ] :: Command 'lsblk' (Expected 0, got 0)
:: [ 17:37:54 ] :: [ PASS ] :: Command 'mdadm --detail /dev/md1' (Expected 0, got 0)
:: [ 17:37:54 ] :: [ LOG ] :: INFO: Successfully created md raid /dev/md1
:: [ 17:37:54 ] :: [ LOG ] :: mkfs -t xfs /dev/md1
:: [ 17:38:00 ] :: [ PASS ] :: Command 'mount -t xfs /dev/md1 /mnt/md_test ' (Expected 0, got 0)
:: [ 17:38:00 ] :: [ PASS ] :: Command 'fstrim -v /mnt/md_test' (Expected 0, got 0)
:: [ 17:38:01 ] :: [ PASS ] :: Command 'umount /dev/md1' (Expected 0, got 0)
:: [ 17:38:26 ] :: [ PASS ] :: Command 'mdadm --create --run /dev/md1 --level 1 --metadata 1.2 --raid-devices 5 /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 --spare-devices 1 /dev/loop5 --bitmap=internal --bitmap-chunk=64M' (Expected 0, got 0)
:: [ 17:38:26 ] :: [ PASS ] :: Command 'cat /proc/mdstat' (Expected 0, got 0)
:: [ 17:38:26 ] :: [ PASS ] :: Command 'lsblk' (Expected 0, got 0)
:: [ 17:38:26 ] :: [ PASS ] :: Command 'mdadm --detail /dev/md1' (Expected 0, got 0)
:: [ 17:38:26 ] :: [ LOG ] :: INFO: Successfully created md raid /dev/md1
:: [ 17:38:31 ] :: [ LOG ] :: mkfs -t xfs /dev/md1
:: [ 17:38:32 ] :: [ PASS ] :: Command 'mount -t xfs /dev/md1 /mnt/md_test ' (Expected 0, got 0)
:: [ 17:38:32 ] :: [ PASS ] :: Command 'fstrim -v /mnt/md_test' (Expected 0, got 0)
:: [ 17:38:32 ] :: [ PASS ] :: Command 'umount /dev/md1' (Expected 0, got 0)
:: [ 17:38:57 ] :: [ PASS ] :: Command 'mdadm --create --run /dev/md1 --level 4 --metadata 1.2 --raid-devices 5 /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 --spare-devices 1 /dev/loop5 --chunk 512 --bitmap=internal --bitmap-chunk=64M' (Expected 0, got 0)
:: [ 17:38:57 ] :: [ PASS ] :: Command 'cat /proc/mdstat' (Expected 0, got 0)
:: [ 17:38:57 ] :: [ PASS ] :: Command 'lsblk' (Expected 0, got 0)
:: [ 17:38:57 ] :: [ PASS ] :: Command 'mdadm --detail /dev/md1' (Expected 0, got 0)
:: [ 17:38:57 ] :: [ LOG ] :: INFO: Successfully created md raid /dev/md1
:: [ 17:39:07 ] :: [ LOG ] :: mkfs -t xfs /dev/md1
:: [ 17:39:13 ] :: [ PASS ] :: Command 'mount -t xfs /dev/md1 /mnt/md_test ' (Expected 0, got 0)
:: [ 17:39:16 ] :: [ PASS ] :: Command 'fstrim -v /mnt/md_test' (Expected 0, got 0)
:: [ 17:39:16 ] :: [ PASS ] :: Command 'umount /dev/md1' (Expected 0, got 0)
:: [ 17:39:41 ] :: [ PASS ] :: Command 'mdadm --create --run /dev/md1 --level 5 --metadata 1.2 --raid-devices 5 /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 --spare-devices 1 /dev/loop5 --chunk 512 --bitmap=internal --bitmap-chunk=64M' (Expected 0, got 0)
:: [ 17:39:41 ] :: [ PASS ] :: Command 'cat /proc/mdstat' (Expected 0, got 0)
:: [ 17:39:41 ] :: [ PASS ] :: Command 'lsblk' (Expected 0, got 0)
:: [ 17:39:41 ] :: [ PASS ] :: Command 'mdadm --detail /dev/md1' (Expected 0, got 0)
:: [ 17:39:41 ] :: [ LOG ] :: INFO: Successfully created md raid /dev/md1
:: [ 17:39:51 ] :: [ LOG ] :: mkfs -t xfs /dev/md1
:: [ 17:39:58 ] :: [ PASS ] :: Command 'mount -t xfs /dev/md1 /mnt/md_test ' (Expected 0, got 0)
:: [ 17:40:00 ] :: [ PASS ] :: Command 'fstrim -v /mnt/md_test' (Expected 0, got 0)
:: [ 17:40:00 ] :: [ PASS ] :: Command 'umount /dev/md1' (Expected 0, got 0)
:: [ 17:40:25 ] :: [ PASS ] :: Command 'mdadm --create --run /dev/md1 --level 6 --metadata 1.2 --raid-devices 5 /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 --spare-devices 1 /dev/loop5 --chunk 512 --bitmap=internal --bitmap-chunk=64M' (Expected 0, got 0)
:: [ 17:40:25 ] :: [ PASS ] :: Command 'cat /proc/mdstat' (Expected 0, got 0)
:: [ 17:40:25 ] :: [ PASS ] :: Command 'lsblk' (Expected 0, got 0)
:: [ 17:40:25 ] :: [ PASS ] :: Command 'mdadm --detail /dev/md1' (Expected 0, got 0)
:: [ 17:40:25 ] :: [ LOG ] :: INFO: Successfully created md raid /dev/md1
:: [ 17:40:30 ] :: [ LOG ] :: mkfs -t xfs /dev/md1
:: [ 17:40:33 ] :: [ PASS ] :: Command 'mount -t xfs /dev/md1 /mnt/md_test ' (Expected 0, got 0)
:: [ 17:40:35 ] :: [ PASS ] :: Command 'fstrim -v /mnt/md_test' (Expected 0, got 0)
:: [ 17:40:35 ] :: [ PASS ] :: Command 'umount /dev/md1' (Expected 0, got 0)
:: [ 17:41:00 ] :: [ PASS ] :: Command 'mdadm --create --run /dev/md1 --level 10 --metadata 1.2 --raid-devices 5 /dev/loop0 /dev/loop1 /dev/loop2 /dev/loop3 /dev/loop4 --spare-devices 1 /dev/loop5 --chunk 512 --bitmap=internal --bitmap-chunk=64M' (Expected 0, got 0)
:: [ 17:41:00 ] :: [ PASS ] :: Command 'cat /proc/mdstat' (Expected 0, got 0)
:: [ 17:41:00 ] :: [ PASS ] :: Command 'lsblk' (Expected 0, got 0)
:: [ 17:41:00 ] :: [ PASS ] :: Command 'mdadm --detail /dev/md1' (Expected 0, got 0)
:: [ 17:41:00 ] :: [ LOG ] :: INFO: Successfully created md raid /dev/md1
:: [ 17:41:10 ] :: [ LOG ] :: mkfs -t xfs /dev/md1
:: [ 17:41:11 ] :: [ PASS ] :: Command 'mount -t xfs /dev/md1 /mnt/md_test ' (Expected 0, got 0)
:: [ 17:41:11 ] :: [ PASS ] :: Command 'fstrim -v /mnt/md_test' (Expected 0, got 0)
:: [ 17:41:11 ] :: [ PASS ] :: Command 'umount /dev/md1' (Expected 0, got 0)
:: [ 17:41:31 ] :: [ PASS ] :: check the errors (Expected 1, got 1)
::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
:: Duration: 234s
:: Assertions: 47 good, 0 bad
:: RESULT: PASS (Test)
::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
:: unknown
::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::

:: [ 17:41:32 ] :: [ LOG ] :: Phases fingerprint: L5rLAvqh
:: [ 17:41:32 ] :: [ LOG ] :: Asserts fingerprint: IcvkrVvU
Uploading journal.xml .done
::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
:: Duration: 235s
:: Phases: 1 good, 0 bad
:: OVERALL RESULT: PASS (unknown)