[ 155.462613] run blktests nvmeof-mp/009 at 2023-01-12 12:38:20
[ 155.603709] null_blk: module loaded
[ 155.622544] null_blk: disk nullb0 created
[ 155.631032] null_blk: disk nullb1 created
[ 155.681039] device-mapper: table: 253:3: multipath: error getting device (-EBUSY)
[ 155.681061] device-mapper: ioctl: error adding target to table
[ 155.709718] TECH PREVIEW: Software iWARP Driver may not be fully supported. Please review provided documentation for limitations.
[ 155.709829] SoftiWARP attached
[ 155.904768] nvmet: adding nsid 1 to subsystem nvme-test
[ 155.922504] nvmet_rdma: enabling port 1 (10.16.233.89:7777)
[ 156.021973] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 000000003f60fac3
[ 156.022177] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 000000003f60fac3
[ 156.022354] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 000000002e819b8f
[ 156.022364] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added env2_siw.
[ 156.022549] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 129 cm_id= 000000002e819b8f
[ 156.022601] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 000000002e819b8f
[ 156.022612] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 000000003f60fac3
[ 156.023665] nvmet:nvmet_start_keep_alive_timer: nvmet: ctrl 1 start keep-alive timer for 5 secs
[ 156.023669] nvmet: creating nvm controller 1 for subsystem nvme-test for NQN nqn.2014-08.org.nvmexpress:uuid:655e4bd0-cc8c-4265-bc3e-3ff09e2f7242.
[ 156.024064] nvme nvme0: creating 8 I/O queues.
[ 156.024089] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 0000000030c1a307
[ 156.029431] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 0000000030c1a307
[ 156.029575] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000b328a805
[ 156.029581] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added env2_siw.
[ 156.029941] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 00000000b328a805
[ 156.029997] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000b328a805
[ 156.030009] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 0000000030c1a307
[ 156.030057] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 0000000051910e63
[ 156.030264] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 0000000051910e63
[ 156.030332] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 000000007966bce3
[ 156.030336] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added env2_siw.
[ 156.030594] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 000000007966bce3
[ 156.030641] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 000000007966bce3
[ 156.030652] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 0000000051910e63
[ 156.030697] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 00000000a3bffbae
[ 156.030891] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 00000000a3bffbae
[ 156.030949] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 000000005dc61abf
[ 156.030953] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added env2_siw.
[ 156.031196] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 000000005dc61abf
[ 156.031242] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 000000005dc61abf
[ 156.031253] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 00000000a3bffbae
[ 156.031295] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 000000005996e41c
[ 156.031489] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 000000005996e41c
[ 156.031547] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000c82f74db
[ 156.031551] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added env2_siw.
[ 156.031802] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 00000000c82f74db
[ 156.031849] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000c82f74db
[ 156.031859] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 000000005996e41c
[ 156.031909] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 00000000411a5cb3
[ 156.032102] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 00000000411a5cb3
[ 156.032160] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000f6711670
[ 156.032164] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added env2_siw.
[ 156.032410] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 00000000f6711670
[ 156.032457] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000f6711670
[ 156.032469] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 00000000411a5cb3
[ 156.032517] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 000000006c5d16f2
[ 156.032708] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 000000006c5d16f2
[ 156.032764] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 0000000021f90d2f
[ 156.032768] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added env2_siw.
[ 156.033005] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 0000000021f90d2f
[ 156.033051] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 0000000021f90d2f
[ 156.033062] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 000000006c5d16f2
[ 156.033104] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 000000004736aa50
[ 156.033297] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 000000004736aa50
[ 156.033350] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000eee27c74
[ 156.033355] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added env2_siw.
[ 156.033604] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 00000000eee27c74
[ 156.033649] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000eee27c74
[ 156.033659] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 000000004736aa50
[ 156.033700] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 00000000c2d6c608
[ 156.033891] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 00000000c2d6c608
[ 156.033946] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 0000000067601e0d
[ 156.033950] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added env2_siw.
[ 156.039348] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 0000000067601e0d
[ 156.039396] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 0000000067601e0d
[ 156.039406] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 00000000c2d6c608
[ 156.039443] nvme nvme0: mapped 8/0/0 default/read/poll queues.
[ 156.039698] nvmet:nvmet_execute_io_connect: nvmet: adding queue 1 to ctrl 1.
[ 156.039766] nvmet:nvmet_execute_io_connect: nvmet: adding queue 2 to ctrl 1.
[ 156.039824] nvmet:nvmet_execute_io_connect: nvmet: adding queue 3 to ctrl 1.
[ 156.039879] nvmet:nvmet_execute_io_connect: nvmet: adding queue 4 to ctrl 1.
[ 156.039947] nvmet:nvmet_execute_io_connect: nvmet: adding queue 5 to ctrl 1.
[ 156.039995] nvmet:nvmet_execute_io_connect: nvmet: adding queue 6 to ctrl 1.
[ 156.040048] nvmet:nvmet_execute_io_connect: nvmet: adding queue 7 to ctrl 1.
[ 156.040099] nvmet:nvmet_execute_io_connect: nvmet: adding queue 8 to ctrl 1.
[ 156.040175] nvme nvme0: new ctrl: NQN "nvme-test", addr 10.16.233.89:7777
[ 156.040223] nvmet:nvmet_req_cns_error_complete: nvmet: unhandled identify cns 6 on qid 0
[ 156.079383] device-mapper: table: 253:3: multipath: error getting device (-EBUSY)
[ 156.079402] device-mapper: ioctl: error adding target to table
[ 161.079160] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 1 reschedule traffic based keep-alive timer
[ 166.109161] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 1 reschedule traffic based keep-alive timer
[ 167.193511] nvme nvme0: Removing ctrl: NQN "nvme-test"
[ 167.399443] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 0000000030c1a307
[ 167.399456] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 167.399506] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000b328a805
[ 167.399512] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000b328a805 queue->state= 1
[ 167.399520] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 1
[ 167.399540] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 0000000051910e63
[ 167.399544] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 167.399587] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 000000007966bce3
[ 167.399591] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 000000007966bce3 queue->state= 1
[ 167.399597] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 2
[ 167.399611] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 00000000a3bffbae
[ 167.399615] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 167.399651] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 000000005dc61abf
[ 167.399654] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 000000005dc61abf queue->state= 1
[ 167.399660] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 3
[ 167.399676] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 000000005996e41c
[ 167.399679] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 167.399720] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000c82f74db
[ 167.399723] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000c82f74db queue->state= 1
[ 167.399728] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 4
[ 167.399743] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 00000000411a5cb3
[ 167.399746] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 167.399787] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000f6711670
[ 167.399791] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000f6711670 queue->state= 1
[ 167.399796] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 5
[ 167.399810] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 000000006c5d16f2
[ 167.399813] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 167.399853] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 0000000021f90d2f
[ 167.399856] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 0000000021f90d2f queue->state= 1
[ 167.399861] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 6
[ 167.399876] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 000000004736aa50
[ 167.399879] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 167.399920] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000eee27c74
[ 167.399923] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000eee27c74 queue->state= 1
[ 167.399928] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 7
[ 167.399943] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 00000000c2d6c608
[ 167.399946] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 167.399983] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 0000000067601e0d
[ 167.399986] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 0000000067601e0d queue->state= 1
[ 167.399991] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 8
[ 167.489259] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 000000003f60fac3
[ 167.489268] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 167.489292] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 000000002e819b8f
[ 167.489299] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 000000002e819b8f queue->state= 1
[ 167.489310] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 0
[ 167.539346] nvmet:nvmet_stop_keep_alive_timer: nvmet: ctrl 1 stop keep-alive
[ 168.110052] SoftiWARP detached