[ 169.529958] run blktests nvmeof-mp/009 at 2022-12-12 18:39:59
[ 169.659094] null_blk: module loaded
[ 169.678293] null_blk: disk nullb0 created
[ 169.687079] null_blk: disk nullb1 created
[ 169.736865] device-mapper: table: 253:3: multipath: error getting device (-EBUSY)
[ 169.736889] device-mapper: ioctl: error adding target to table
[ 169.775803] TECH PREVIEW: Software iWARP Driver may not be fully supported. Please review provided documentation for limitations.
[ 169.775925] SoftiWARP attached
[ 169.980022] nvmet: adding nsid 1 to subsystem nvme-test
[ 170.000254] nvmet_rdma: enabling port 1 (10.16.233.132:7777)
[ 170.087450] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 00000000645379db
[ 170.087624] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 00000000645379db
[ 170.087977] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 000000001f64e15f
[ 170.087987] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added env2_siw.
[ 170.088211] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 129 cm_id= 000000001f64e15f
[ 170.088264] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 000000001f64e15f
[ 170.088276] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 00000000645379db
[ 170.088455] nvmet:nvmet_start_keep_alive_timer: nvmet: ctrl 1 start keep-alive timer for 5 secs
[ 170.088460] nvmet: creating nvm controller 1 for subsystem nvme-test for NQN nqn.2014-08.org.nvmexpress:uuid:49c2a18c-36ff-463b-998e-e5a0d1723671.
[ 170.089060] nvme nvme0: creating 8 I/O queues.
[ 170.089094] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 00000000032cc847
[ 170.089421] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 00000000032cc847
[ 170.089525] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000f46ffbf1
[ 170.089533] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added env2_siw.
[ 170.089951] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 00000000f46ffbf1
[ 170.090017] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000f46ffbf1
[ 170.090024] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 00000000032cc847
[ 170.090080] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 00000000c7d539f5
[ 170.095391] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 00000000c7d539f5
[ 170.095463] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000472f0691
[ 170.095468] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added env2_siw.
[ 170.095708] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 00000000472f0691
[ 170.095756] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000472f0691
[ 170.095765] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 00000000c7d539f5
[ 170.095821] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 00000000b25684eb
[ 170.096105] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 00000000b25684eb
[ 170.096171] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000a141e3f0
[ 170.096175] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added env2_siw.
[ 170.096424] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 00000000a141e3f0
[ 170.096470] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000a141e3f0
[ 170.096479] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 00000000b25684eb
[ 170.096533] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 0000000071554012
[ 170.096810] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 0000000071554012
[ 170.096873] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000cc6e2c6d
[ 170.096877] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added env2_siw.
[ 170.097114] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 00000000cc6e2c6d
[ 170.097159] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000cc6e2c6d
[ 170.097167] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 0000000071554012
[ 170.097222] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 00000000f2c11c9a
[ 170.097492] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 00000000f2c11c9a
[ 170.097556] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000a2337204
[ 170.097559] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added env2_siw.
[ 170.097806] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 00000000a2337204
[ 170.097850] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000a2337204
[ 170.097858] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 00000000f2c11c9a
[ 170.097911] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 000000004c7c28d6
[ 170.098219] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 000000004c7c28d6
[ 170.098278] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000a213f3b3
[ 170.098282] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added env2_siw.
[ 170.098522] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 00000000a213f3b3
[ 170.098568] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000a213f3b3
[ 170.098578] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 000000004c7c28d6
[ 170.098624] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 00000000dcc92746
[ 170.098812] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 00000000dcc92746
[ 170.098867] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 000000006bdb7d9d
[ 170.098872] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added env2_siw.
[ 170.099213] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 000000006bdb7d9d
[ 170.099269] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 000000006bdb7d9d
[ 170.099276] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 00000000dcc92746
[ 170.099329] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 00000000366dbe64
[ 170.099606] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 00000000366dbe64
[ 170.099664] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 0000000054197448
[ 170.099669] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added env2_siw.
[ 170.099917] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 0000000054197448
[ 170.099963] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 0000000054197448
[ 170.099972] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 00000000366dbe64
[ 170.100012] nvme nvme0: mapped 8/0/0 default/read/poll queues.
[ 170.100259] nvmet:nvmet_execute_io_connect: nvmet: adding queue 1 to ctrl 1.
[ 170.105352] nvmet:nvmet_execute_io_connect: nvmet: adding queue 2 to ctrl 1.
[ 170.105419] nvmet:nvmet_execute_io_connect: nvmet: adding queue 3 to ctrl 1.
[ 170.105487] nvmet:nvmet_execute_io_connect: nvmet: adding queue 4 to ctrl 1.
[ 170.105555] nvmet:nvmet_execute_io_connect: nvmet: adding queue 5 to ctrl 1.
[ 170.105613] nvmet:nvmet_execute_io_connect: nvmet: adding queue 6 to ctrl 1.
[ 170.105674] nvmet:nvmet_execute_io_connect: nvmet: adding queue 7 to ctrl 1.
[ 170.105730] nvmet:nvmet_execute_io_connect: nvmet: adding queue 8 to ctrl 1.
[ 170.105809] nvme nvme0: new ctrl: NQN "nvme-test", addr 10.16.233.132:7777
[ 170.105851] nvmet:nvmet_req_cns_error_complete: nvmet: unhandled identify cns 6 on qid 0
[ 170.129728] device-mapper: table: 253:3: multipath: error getting device (-EBUSY)
[ 170.129752] device-mapper: ioctl: error adding target to table
[ 170.278168] device-mapper: table: 253:4: multipath: error getting device (-EBUSY)
[ 170.278183] device-mapper: ioctl: error adding target to table
[ 175.148408] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 1 reschedule traffic based keep-alive timer
[ 180.178703] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 1 reschedule traffic based keep-alive timer
[ 181.573319] nvme nvme0: Removing ctrl: NQN "nvme-test"
[ 181.778985] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 00000000032cc847
[ 181.778996] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 181.779048] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000f46ffbf1
[ 181.779055] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000f46ffbf1 queue->state= 1
[ 181.779065] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 1
[ 181.779088] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 00000000c7d539f5
[ 181.779092] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 181.779133] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000472f0691
[ 181.779137] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000472f0691 queue->state= 1
[ 181.779144] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 2
[ 181.779157] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 00000000b25684eb
[ 181.779162] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 181.779203] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000a141e3f0
[ 181.779206] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000a141e3f0 queue->state= 1
[ 181.779212] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 3
[ 181.779227] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 0000000071554012
[ 181.779231] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 181.779271] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000cc6e2c6d
[ 181.779274] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000cc6e2c6d queue->state= 1
[ 181.779293] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 00000000f2c11c9a
[ 181.779296] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 181.779340] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 4
[ 181.779353] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000a2337204
[ 181.779357] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000a2337204 queue->state= 1
[ 181.779377] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 000000004c7c28d6
[ 181.779380] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 181.779398] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 5
[ 181.779432] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000a213f3b3
[ 181.779437] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000a213f3b3 queue->state= 1
[ 181.779471] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 00000000dcc92746
[ 181.779479] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 181.779487] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 6
[ 181.779517] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 000000006bdb7d9d
[ 181.779521] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 000000006bdb7d9d queue->state= 1
[ 181.779527] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 7
[ 181.779546] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 00000000366dbe64
[ 181.779550] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 181.779587] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 0000000054197448
[ 181.779590] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 0000000054197448 queue->state= 1
[ 181.779596] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 8
[ 181.878819] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 00000000645379db
[ 181.878828] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 181.878864] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 000000001f64e15f
[ 181.878870] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 000000001f64e15f queue->state= 1
[ 181.878882] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 0
[ 181.958857] nvmet:nvmet_stop_keep_alive_timer: nvmet: ctrl 1 stop keep-alive
[ 182.446540] SoftiWARP detached
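
For context, the target-side messages at 169.980022 and 170.000254 ("adding nsid 1 to subsystem nvme-test", "enabling port 1 (10.16.233.132:7777)") come from the nvmet configfs setup that the test performs before the host connects. The sketch below is only an illustration of that setup, not the blktests code itself: blktests does this in its own shell helpers, and the device, NQN, address and port values here are simply copied from the log above. The paths follow the standard nvmet configfs layout under /sys/kernel/config/nvmet/ and require root plus the nvmet and nvmet-rdma modules.

#!/usr/bin/env python3
# Hedged sketch: approximate nvmet configfs sequence behind the log messages
# "nvmet: adding nsid 1 to subsystem nvme-test" and
# "nvmet_rdma: enabling port 1 (10.16.233.132:7777)".
import os
from pathlib import Path

NVMET = Path("/sys/kernel/config/nvmet")
SUBSYS = NVMET / "subsystems" / "nvme-test"
PORT = NVMET / "ports" / "1"

def write(attr: Path, value: str) -> None:
    # configfs attributes are written as plain text
    attr.write_text(value)

# Subsystem with one namespace backed by null_blk (nullb0, created earlier in the log).
SUBSYS.mkdir(parents=True, exist_ok=True)
write(SUBSYS / "attr_allow_any_host", "1")
ns = SUBSYS / "namespaces" / "1"
ns.mkdir(parents=True, exist_ok=True)
write(ns / "device_path", "/dev/nullb0")
write(ns / "enable", "1")          # -> "nvmet: adding nsid 1 to subsystem nvme-test"

# RDMA port listening on the address/port seen in the log (carried by the siw device here).
PORT.mkdir(parents=True, exist_ok=True)
write(PORT / "addr_trtype", "rdma")
write(PORT / "addr_adrfam", "ipv4")
write(PORT / "addr_traddr", "10.16.233.132")
write(PORT / "addr_trsvcid", "7777")

# Exposing the subsystem on the port is what triggers
# "nvmet_rdma: enabling port 1 (10.16.233.132:7777)".
os.symlink(SUBSYS, PORT / "subsystems" / "nvme-test")

The host side then connects with nvme-cli (roughly `nvme connect -t rdma -a 10.16.233.132 -s 7777 -n nvme-test`), which corresponds to the queue-creation messages and the `new ctrl: NQN "nvme-test"` line at 170.105809; removing the controller produces the disconnect/free-queue sequence at the end of the log.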