[ 421.240079] run blktests nvmeof-mp/011 at 2022-09-23 14:53:54
[ 421.305226] null_blk: module loaded
[ 421.326412] null_blk: disk nullb0 created
[ 421.333115] null_blk: disk nullb1 created
[ 421.365907] SoftiWARP attached
[ 421.523712] nvmet: adding nsid 1 to subsystem nvme-test
[ 421.550807] nvmet_rdma: enabling port 1 (169.254.95.120:7777)
[ 421.559111] nvmet_rdma: enabling port 2 (10.19.35.243:7777)
[ 421.567268] nvmet_rdma: enabling port 3 (10.19.34.33:7777)
[ 421.613054] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 000000004c5a1414
[ 421.613197] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 000000004c5a1414
[ 421.613306] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000f4c3cfb7
[ 421.613311] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp0s29u1u1u5_siw.
[ 421.613467] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 129 cm_id= 00000000f4c3cfb7
[ 421.613508] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000f4c3cfb7
[ 421.613513] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 000000004c5a1414
[ 421.613712] nvmet:nvmet_start_keep_alive_timer: nvmet: ctrl 1 start keep-alive timer for 5 secs
[ 421.613716] nvmet: creating nvm controller 1 for subsystem nvme-test for NQN nqn.2014-08.org.nvmexpress:uuid:03f11f39-aee8-4c79-a6c8-a42dc09320d2.
[ 421.628856] nvme nvme0: creating 4 I/O queues.
[ 421.633913] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 000000000991e984
[ 421.634144] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 000000000991e984
[ 421.634223] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 000000003b47e658
[ 421.634227] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp0s29u1u1u5_siw.
[ 421.634499] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 000000003b47e658
[ 421.634541] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 000000003b47e658
[ 421.634545] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 000000000991e984
[ 421.634574] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 000000004ffea3eb
[ 421.634790] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 000000004ffea3eb
[ 421.634861] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000471f3272
[ 421.634866] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp0s29u1u1u5_siw.
[ 421.635158] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 00000000471f3272
[ 421.635194] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000471f3272
[ 421.635198] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 000000004ffea3eb
[ 421.635224] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 00000000539e65b5
[ 421.635453] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 00000000539e65b5
[ 421.635525] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000cf785911
[ 421.635528] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp0s29u1u1u5_siw.
[ 421.635808] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 00000000cf785911
[ 421.635840] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000cf785911
[ 421.635851] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 00000000539e65b5
[ 421.635875] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 0000000086404a08
[ 421.636111] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 0000000086404a08
[ 421.636183] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 000000008bd56edf
[ 421.636186] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp0s29u1u1u5_siw.
[ 421.636462] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 000000008bd56edf
[ 421.636501] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 000000008bd56edf
[ 421.636504] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 0000000086404a08
[ 421.636520] nvme nvme0: mapped 4/0/0 default/read/poll queues.
[ 421.643248] nvmet:nvmet_execute_io_connect: nvmet: adding queue 1 to ctrl 1.
[ 421.643366] nvmet:nvmet_execute_io_connect: nvmet: adding queue 2 to ctrl 1.
[ 421.643476] nvmet:nvmet_execute_io_connect: nvmet: adding queue 3 to ctrl 1.
[ 421.643550] nvmet:nvmet_execute_io_connect: nvmet: adding queue 4 to ctrl 1.
[ 421.643639] nvme nvme0: new ctrl: NQN "nvme-test", addr 169.254.95.120:7777
[ 421.651444] nvmet:nvmet_req_cns_error_complete: nvmet: unhandled identify cns 6 on qid 0
[ 421.657313] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: address resolved (0): status 0 id 00000000790bdb8f
[ 421.657451] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: route resolved (2): status 0 id 00000000790bdb8f
[ 421.657533] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 000000000621ec83
[ 421.657541] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp11s0_siw.
[ 421.657670] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 129 cm_id= 000000000621ec83
[ 421.657705] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 000000000621ec83
[ 421.657710] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: established (9): status 0 id 00000000790bdb8f
[ 421.657827] nvmet:nvmet_start_keep_alive_timer: nvmet: ctrl 2 start keep-alive timer for 5 secs
[ 421.657831] nvmet: creating nvm controller 2 for subsystem nvme-test for NQN nqn.2014-08.org.nvmexpress:uuid:03f11f39-aee8-4c79-a6c8-a42dc09320d2.
[ 421.673088] nvme nvme1: creating 4 I/O queues.
[ 421.678103] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: address resolved (0): status 0 id 0000000032ab51bd
[ 421.678346] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: route resolved (2): status 0 id 0000000032ab51bd
[ 421.678429] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 000000001c0854dd
[ 421.678433] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp11s0_siw.
[ 421.678707] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 000000001c0854dd
[ 421.678749] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 000000001c0854dd
[ 421.678753] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: established (9): status 0 id 0000000032ab51bd
[ 421.678780] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: address resolved (0): status 0 id 00000000b98ff9d6
[ 421.679004] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: route resolved (2): status 0 id 00000000b98ff9d6
[ 421.679065] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000a1c9ad29
[ 421.679068] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp11s0_siw.
[ 421.679334] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 00000000a1c9ad29
[ 421.679369] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000a1c9ad29
[ 421.679372] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: established (9): status 0 id 00000000b98ff9d6
[ 421.679400] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: address resolved (0): status 0 id 000000000ef7021b
[ 421.679619] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: route resolved (2): status 0 id 000000000ef7021b
[ 421.679684] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 000000000f2bd58d
[ 421.679689] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp11s0_siw.
[ 421.679983] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 000000000f2bd58d
[ 421.680018] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 000000000f2bd58d
[ 421.680022] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: established (9): status 0 id 000000000ef7021b
[ 421.680090] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: address resolved (0): status 0 id 000000005259e6d0
[ 421.680325] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: route resolved (2): status 0 id 000000005259e6d0
[ 421.680393] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000f602af9f
[ 421.680396] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp11s0_siw.
[ 421.680678] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 00000000f602af9f
[ 421.680709] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000f602af9f
[ 421.680713] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: established (9): status 0 id 000000005259e6d0
[ 421.680729] nvme nvme1: mapped 4/0/0 default/read/poll queues.
[ 421.688645] nvmet:nvmet_execute_io_connect: nvmet: adding queue 1 to ctrl 2.
[ 421.688727] nvmet:nvmet_execute_io_connect: nvmet: adding queue 2 to ctrl 2.
[ 421.688912] nvmet:nvmet_execute_io_connect: nvmet: adding queue 3 to ctrl 2.
[ 421.689149] nvmet:nvmet_execute_io_connect: nvmet: adding queue 4 to ctrl 2.
[ 421.689300] nvme nvme1: new ctrl: NQN "nvme-test", addr 10.19.35.243:7777
[ 421.697029] nvmet:nvmet_req_cns_error_complete: nvmet: unhandled identify cns 6 on qid 0
[ 421.697384] nvme nvme1: Found shared namespace 1, but multipathing not supported.
[ 421.701429] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: address resolved (0): status 0 id 00000000eec8ba18
[ 421.705866] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: route resolved (2): status 0 id 00000000eec8ba18
[ 421.706038] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000fd9e99c9
[ 421.706046] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp6s0_siw.
[ 421.706164] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 129 cm_id= 00000000fd9e99c9
[ 421.706186] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000fd9e99c9
[ 421.706206] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: established (9): status 0 id 00000000eec8ba18
[ 421.706940] nvmet:nvmet_start_keep_alive_timer: nvmet: ctrl 3 start keep-alive timer for 5 secs
[ 421.706944] nvmet: creating nvm controller 3 for subsystem nvme-test for NQN nqn.2014-08.org.nvmexpress:uuid:03f11f39-aee8-4c79-a6c8-a42dc09320d2.
[ 421.722233] nvme nvme2: creating 4 I/O queues.
[ 421.727372] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: address resolved (0): status 0 id 00000000eebed547
[ 421.727617] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: route resolved (2): status 0 id 00000000eebed547
[ 421.727704] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 000000006a4e67c5
[ 421.727708] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp6s0_siw.
[ 421.727998] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 000000006a4e67c5
[ 421.728032] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 000000006a4e67c5
[ 421.728041] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: established (9): status 0 id 00000000eebed547
[ 421.728072] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: address resolved (0): status 0 id 0000000093a43b77
[ 421.728289] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: route resolved (2): status 0 id 0000000093a43b77
[ 421.728353] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 000000007a5f7b1d
[ 421.728358] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp6s0_siw.
[ 421.728639] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 000000007a5f7b1d
[ 421.728675] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 000000007a5f7b1d
[ 421.728679] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: established (9): status 0 id 0000000093a43b77
[ 421.728706] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: address resolved (0): status 0 id 0000000096ba0d17
[ 421.728966] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: route resolved (2): status 0 id 0000000096ba0d17
[ 421.729027] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000565c2d2f
[ 421.729031] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp6s0_siw.
[ 421.729304] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 00000000565c2d2f
[ 421.729337] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000565c2d2f
[ 421.729347] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: established (9): status 0 id 0000000096ba0d17
[ 421.729372] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: address resolved (0): status 0 id 00000000ecc8a979
[ 421.729591] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: route resolved (2): status 0 id 00000000ecc8a979
[ 421.729653] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 000000007ea7267b
[ 421.729656] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp6s0_siw.
[ 421.729957] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 4096 max_sge= 6 sq_size = 513 cm_id= 000000007ea7267b
[ 421.729988] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 000000007ea7267b
[ 421.729998] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: established (9): status 0 id 00000000ecc8a979
[ 421.730014] nvme nvme2: mapped 4/0/0 default/read/poll queues.
[ 421.736728] nvmet:nvmet_execute_io_connect: nvmet: adding queue 1 to ctrl 3.
[ 421.736799] nvmet:nvmet_execute_io_connect: nvmet: adding queue 2 to ctrl 3.
[ 421.736847] nvmet:nvmet_execute_io_connect: nvmet: adding queue 3 to ctrl 3.
[ 421.736945] nvmet:nvmet_execute_io_connect: nvmet: adding queue 4 to ctrl 3.
[ 421.737163] nvme nvme2: new ctrl: NQN "nvme-test", addr 10.19.34.33:7777
[ 421.737203] nvmet:nvmet_req_cns_error_complete: nvmet: unhandled identify cns 6 on qid 0
[ 421.745266] nvme nvme2: Found shared namespace 1, but multipathing not supported.
[ 427.046793] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 3 reschedule traffic based keep-alive timer
[ 427.046796] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 2 reschedule traffic based keep-alive timer
[ 427.046800] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 1 reschedule traffic based keep-alive timer
[ 429.350777] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 429.414776] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 431.910731] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 431.974725] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 432.167702] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 1 reschedule traffic based keep-alive timer
[ 437.286598] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 1 reschedule traffic based keep-alive timer
[ 437.286604] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 3 reschedule traffic based keep-alive timer
[ 437.287593] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 2 reschedule traffic based keep-alive timer
[ 439.590583] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 439.654591] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 442.150536] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 442.215589] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 442.406497] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 1 reschedule traffic based keep-alive timer
[ 444.710497] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 444.774481] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 447.270458] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 447.334448] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 447.526408] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 1 reschedule traffic based keep-alive timer
[ 449.830395] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 449.895382] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 452.390492] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 452.454444] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 452.646343] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 1 reschedule traffic based keep-alive timer
[ 454.886439] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 1 update keep-alive timer for 5 secs
[ 454.950395] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 455.014431] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 457.364805] nvme nvme0: Removing ctrl: NQN "nvme-test"
[ 457.364924] nvme nvme1: Removing ctrl: NQN "nvme-test"
[ 457.365069] nvme nvme2: Removing ctrl: NQN "nvme-test"
[ 457.402325] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: disconnected (10): status 0 id 0000000032ab51bd
[ 457.402332] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: disconnect received - connection closed
[ 457.402362] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 000000001c0854dd
[ 457.402368] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 000000001c0854dd queue->state= 1
[ 457.402375] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: disconnected (10): status 0 id 00000000b98ff9d6
[ 457.402378] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: disconnect received - connection closed
[ 457.402382] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 6
[ 457.402387] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000a1c9ad29
[ 457.402389] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000a1c9ad29 queue->state= 1
[ 457.402393] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: disconnected (10): status 0 id 000000000ef7021b
[ 457.402396] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: disconnect received - connection closed
[ 457.402399] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 7
[ 457.402413] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 000000000f2bd58d
[ 457.402418] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 000000000f2bd58d queue->state= 1
[ 457.402424] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: disconnected (10): status 0 id 000000005259e6d0
[ 457.402427] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: disconnect received - connection closed
[ 457.402431] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 8
[ 457.402448] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000f602af9f
[ 457.402453] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000f602af9f queue->state= 1
[ 457.402460] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 9
[ 457.403272] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnected (10): status 0 id 00000000eebed547
[ 457.403277] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnect received - connection closed
[ 457.403300] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 000000000991e984
[ 457.403303] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 457.403333] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 000000006a4e67c5
[ 457.403336] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 000000006a4e67c5 queue->state= 1
[ 457.403363] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 000000004ffea3eb
[ 457.403365] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 457.403387] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 000000003b47e658
[ 457.403390] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 000000003b47e658 queue->state= 1
[ 457.403412] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 00000000539e65b5
[ 457.403414] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 457.403437] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000471f3272
[ 457.403439] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000471f3272 queue->state= 1
[ 457.403441] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 0000000086404a08
[ 457.403443] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 457.403445] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000cf785911
[ 457.403447] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000cf785911 queue->state= 1
[ 457.403449] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 000000008bd56edf
[ 457.403451] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 000000008bd56edf queue->state= 1
[ 457.403514] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 11
[ 457.403518] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 1
[ 457.403522] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 2
[ 457.403526] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 3
[ 457.403530] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 4
[ 457.403560] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnected (10): status 0 id 0000000093a43b77
[ 457.403562] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnect received - connection closed
[ 457.403598] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 000000007a5f7b1d
[ 457.403600] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 000000007a5f7b1d queue->state= 1
[ 457.403617] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 12
[ 457.403648] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnected (10): status 0 id 0000000096ba0d17
[ 457.403650] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnect received - connection closed
[ 457.403684] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000565c2d2f
[ 457.403686] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000565c2d2f queue->state= 1
[ 457.403690] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 13
[ 457.403733] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnected (10): status 0 id 00000000ecc8a979
[ 457.403736] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnect received - connection closed
[ 457.403768] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 000000007ea7267b
[ 457.403771] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 000000007ea7267b queue->state= 1
[ 457.403788] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 14
[ 457.416335] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 000000004c5a1414
[ 457.416341] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 457.416360] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000f4c3cfb7
[ 457.416366] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000f4c3cfb7 queue->state= 1
[ 457.416373] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 0
[ 457.418286] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: disconnected (10): status 0 id 00000000790bdb8f
[ 457.418291] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: disconnect received - connection closed
[ 457.418315] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 000000000621ec83
[ 457.418320] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 000000000621ec83 queue->state= 1
[ 457.418325] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 5
[ 457.418385] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnected (10): status 0 id 00000000eec8ba18
[ 457.418390] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnect received - connection closed
[ 457.418405] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000fd9e99c9
[ 457.418408] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000fd9e99c9 queue->state= 1
[ 457.418414] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 10
[ 457.420221] nvmet:nvmet_stop_keep_alive_timer: nvmet: ctrl 1 stop keep-alive
[ 457.427314] nvmet:nvmet_stop_keep_alive_timer: nvmet: ctrl 3 stop keep-alive
[ 457.427385] nvmet:nvmet_stop_keep_alive_timer: nvmet: ctrl 2 stop keep-alive
[ 457.654332] SoftiWARP detached