[ 384.249452] run blktests nvmeof-mp/011 at 2022-09-23 14:53:17
[ 384.309104] null_blk: module loaded
[ 384.325582] null_blk: disk nullb0 created
[ 384.332102] null_blk: disk nullb1 created
[ 384.388408] rdma_rxe: loaded
[ 384.400497] infiniband enp0s29u1u1u5_rxe: set active
[ 384.406054] infiniband enp0s29u1u1u5_rxe: added enp0s29u1u1u5
[ 384.421737] infiniband enp11s0_rxe: set active
[ 384.426704] infiniband enp11s0_rxe: added enp11s0
[ 384.441195] infiniband enp6s0_rxe: set active
[ 384.446063] infiniband enp6s0_rxe: added enp6s0
[ 384.589302] nvmet: adding nsid 1 to subsystem nvme-test
[ 384.616478] nvmet_rdma: enabling port 1 (169.254.95.120:7777)
[ 384.624696] nvmet_rdma: enabling port 2 (10.19.35.243:7777)
[ 384.632679] nvmet_rdma: enabling port 3 (10.19.34.33:7777)
[ 384.678031] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 000000009d488195
[ 384.678492] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 000000009d488195
[ 384.678576] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000d19c7ee5
[ 384.678583] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp0s29u1u1u5_rxe.
[ 384.679113] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 8191 max_sge= 32 sq_size = 289 cm_id= 00000000d19c7ee5
[ 384.679217] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 000000009d488195
[ 384.679262] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000d19c7ee5
[ 384.679342] nvmet:nvmet_start_keep_alive_timer: nvmet: ctrl 1 start keep-alive timer for 5 secs
[ 384.679345] nvmet: creating nvm controller 1 for subsystem nvme-test for NQN nqn.2014-08.org.nvmexpress:uuid:03f11f39-aee8-4c79-a6c8-a42dc09320d2.
[ 384.694414] nvme nvme0: creating 4 I/O queues.
[ 384.699451] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 00000000f3dd0b08
[ 384.699646] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 00000000f3dd0b08
[ 384.699785] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000906c9f1c
[ 384.699790] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp0s29u1u1u5_rxe.
[ 384.700062] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 8191 max_sge= 32 sq_size = 1153 cm_id= 00000000906c9f1c
[ 384.700146] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 00000000f3dd0b08
[ 384.700184] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 0000000003a70ee0
[ 384.700185] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000906c9f1c
[ 384.700371] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 0000000003a70ee0
[ 384.700407] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 0000000051c855df
[ 384.700409] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp0s29u1u1u5_rxe.
[ 384.700712] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 8191 max_sge= 32 sq_size = 1153 cm_id= 0000000051c855df
[ 384.700788] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 0000000003a70ee0
[ 384.700833] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 0000000051c855df
[ 384.700859] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 00000000d04f62be
[ 384.701047] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 00000000d04f62be
[ 384.701106] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 000000002f47bf3f
[ 384.701111] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp0s29u1u1u5_rxe.
[ 384.701384] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 8191 max_sge= 32 sq_size = 1153 cm_id= 000000002f47bf3f
[ 384.701458] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 00000000d04f62be
[ 384.701503] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 000000002f47bf3f
[ 384.701512] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 0000000014c72103
[ 384.701692] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 0000000014c72103
[ 384.701787] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000e8cf3a1f
[ 384.701790] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp0s29u1u1u5_rxe.
[ 384.702060] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 8191 max_sge= 32 sq_size = 1153 cm_id= 00000000e8cf3a1f
[ 384.702118] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 0000000014c72103
[ 384.702144] nvme nvme0: mapped 4/0/0 default/read/poll queues.
[ 384.702160] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000e8cf3a1f
[ 384.708894] nvmet:nvmet_execute_io_connect: nvmet: adding queue 1 to ctrl 1.
[ 384.708989] nvmet:nvmet_execute_io_connect: nvmet: adding queue 2 to ctrl 1.
[ 384.709049] nvmet:nvmet_execute_io_connect: nvmet: adding queue 3 to ctrl 1.
[ 384.709140] nvmet:nvmet_execute_io_connect: nvmet: adding queue 4 to ctrl 1.
[ 384.709218] nvme nvme0: new ctrl: NQN "nvme-test", addr 169.254.95.120:7777
[ 384.709270] nvmet:nvmet_req_cns_error_complete: nvmet: unhandled identify cns 6 on qid 0
[ 384.722727] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: address resolved (0): status 0 id 000000007fe24300
[ 384.723185] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: route resolved (2): status 0 id 000000007fe24300
[ 384.723255] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 0000000000b9100b
[ 384.723262] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp11s0_rxe.
[ 384.723764] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 8191 max_sge= 32 sq_size = 289 cm_id= 0000000000b9100b
[ 384.724458] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: established (9): status 0 id 000000007fe24300
[ 384.724484] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 0000000000b9100b
[ 384.724561] nvmet:nvmet_start_keep_alive_timer: nvmet: ctrl 2 start keep-alive timer for 5 secs
[ 384.724565] nvmet: creating nvm controller 2 for subsystem nvme-test for NQN nqn.2014-08.org.nvmexpress:uuid:03f11f39-aee8-4c79-a6c8-a42dc09320d2.
[ 384.739604] nvme nvme1: creating 4 I/O queues.
[ 384.744633] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: address resolved (0): status 0 id 000000003ece31fb
[ 384.744814] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: route resolved (2): status 0 id 000000003ece31fb
[ 384.744881] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000d7fa308b
[ 384.744885] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp11s0_rxe.
[ 384.745153] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 8191 max_sge= 32 sq_size = 1153 cm_id= 00000000d7fa308b
[ 384.745232] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: established (9): status 0 id 000000003ece31fb
[ 384.745261] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: address resolved (0): status 0 id 00000000b8105348
[ 384.745268] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000d7fa308b
[ 384.745431] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: route resolved (2): status 0 id 00000000b8105348
[ 384.745469] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000a9d773f6
[ 384.745472] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp11s0_rxe.
[ 384.745745] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 8191 max_sge= 32 sq_size = 1153 cm_id= 00000000a9d773f6
[ 384.745820] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: established (9): status 0 id 00000000b8105348
[ 384.745849] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: address resolved (0): status 0 id 00000000de829415
[ 384.745855] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000a9d773f6
[ 384.746021] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: route resolved (2): status 0 id 00000000de829415
[ 384.746064] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000de6b392c
[ 384.746066] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp11s0_rxe.
[ 384.746329] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 8191 max_sge= 32 sq_size = 1153 cm_id= 00000000de6b392c
[ 384.746392] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: established (9): status 0 id 00000000de829415
[ 384.746426] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000de6b392c
[ 384.746429] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: address resolved (0): status 0 id 0000000071c00c62
[ 384.746597] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: route resolved (2): status 0 id 0000000071c00c62
[ 384.746655] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 000000004915e063
[ 384.746658] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp11s0_rxe.
[ 384.746986] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 8191 max_sge= 32 sq_size = 1153 cm_id= 000000004915e063
[ 384.747066] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: established (9): status 0 id 0000000071c00c62
[ 384.747086] nvme nvme1: mapped 4/0/0 default/read/poll queues.
[ 384.747105] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 000000004915e063
[ 384.753882] nvmet:nvmet_execute_io_connect: nvmet: adding queue 1 to ctrl 2.
[ 384.754987] nvmet:nvmet_execute_io_connect: nvmet: adding queue 2 to ctrl 2.
[ 384.755092] nvmet:nvmet_execute_io_connect: nvmet: adding queue 3 to ctrl 2.
[ 384.755141] nvmet:nvmet_execute_io_connect: nvmet: adding queue 4 to ctrl 2.
[ 384.755192] nvme nvme1: new ctrl: NQN "nvme-test", addr 10.19.35.243:7777
[ 384.755253] nvmet:nvmet_req_cns_error_complete: nvmet: unhandled identify cns 6 on qid 0
[ 384.763143] nvme nvme1: Found shared namespace 1, but multipathing not supported.
[ 384.769083] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: address resolved (0): status 0 id 00000000058182d6
[ 384.771953] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: route resolved (2): status 0 id 00000000058182d6
[ 384.772018] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000f09108dc
[ 384.772024] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp6s0_rxe.
[ 384.772494] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 8191 max_sge= 32 sq_size = 289 cm_id= 00000000f09108dc
[ 384.772573] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: established (9): status 0 id 00000000058182d6
[ 384.773202] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000f09108dc
[ 384.773232] nvmet:nvmet_start_keep_alive_timer: nvmet: ctrl 3 start keep-alive timer for 5 secs
[ 384.773235] nvmet: creating nvm controller 3 for subsystem nvme-test for NQN nqn.2014-08.org.nvmexpress:uuid:03f11f39-aee8-4c79-a6c8-a42dc09320d2.
[ 384.788314] nvme nvme2: creating 4 I/O queues.
[ 384.793324] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: address resolved (0): status 0 id 00000000b0c97194
[ 384.793510] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: route resolved (2): status 0 id 00000000b0c97194
[ 384.793604] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 000000009b79a865
[ 384.793669] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp6s0_rxe.
[ 384.793950] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 8191 max_sge= 32 sq_size = 1153 cm_id= 000000009b79a865
[ 384.794019] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: established (9): status 0 id 00000000b0c97194
[ 384.794048] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 000000009b79a865
[ 384.794450] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: address resolved (0): status 0 id 00000000f472579b
[ 384.794629] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: route resolved (2): status 0 id 00000000f472579b
[ 384.794673] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000af45e2da
[ 384.794676] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp6s0_rxe.
[ 384.794981] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 8191 max_sge= 32 sq_size = 1153 cm_id= 00000000af45e2da
[ 384.795063] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: established (9): status 0 id 00000000f472579b
[ 384.795100] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000af45e2da
[ 384.795134] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: address resolved (0): status 0 id 00000000eff2fa30
[ 384.795306] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: route resolved (2): status 0 id 00000000eff2fa30
[ 384.795552] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000f7160f21
[ 384.795556] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp6s0_rxe.
[ 384.795830] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 8191 max_sge= 32 sq_size = 1153 cm_id= 00000000f7160f21
[ 384.795872] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: established (9): status 0 id 00000000eff2fa30
[ 384.795914] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: address resolved (0): status 0 id 00000000d644586b
[ 384.796073] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: route resolved (2): status 0 id 00000000d644586b
[ 384.796095] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000f7160f21
[ 384.796109] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000107ac035
[ 384.796111] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enp6s0_rxe.
[ 384.796382] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 8191 max_sge= 32 sq_size = 1153 cm_id= 00000000107ac035
[ 384.796424] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: established (9): status 0 id 00000000d644586b
[ 384.796441] nvme nvme2: mapped 4/0/0 default/read/poll queues.
[ 384.803013] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000107ac035
[ 384.803187] nvmet:nvmet_execute_io_connect: nvmet: adding queue 1 to ctrl 3.
[ 384.803237] nvmet:nvmet_execute_io_connect: nvmet: adding queue 2 to ctrl 3.
[ 384.803320] nvmet:nvmet_execute_io_connect: nvmet: adding queue 3 to ctrl 3.
[ 384.803402] nvmet:nvmet_execute_io_connect: nvmet: adding queue 4 to ctrl 3.
[ 384.803463] nvme nvme2: new ctrl: NQN "nvme-test", addr 10.19.34.33:7777
[ 384.803476] nvmet:nvmet_req_cns_error_complete: nvmet: unhandled identify cns 6 on qid 0
[ 384.811248] nvme nvme2: Found shared namespace 1, but multipathing not supported.
[ 389.863620] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 389.863631] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 390.183518] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 1 reschedule traffic based keep-alive timer
[ 392.423518] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 392.423528] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 394.984505] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 394.984516] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 395.303421] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 1 reschedule traffic based keep-alive timer
[ 400.103396] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 400.103406] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 400.423341] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 1 reschedule traffic based keep-alive timer
[ 402.663305] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 402.663309] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 405.223298] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 405.223310] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 405.543232] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 1 reschedule traffic based keep-alive timer
[ 407.784244] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 407.784255] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 410.343160] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 410.343169] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 410.663121] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 1 reschedule traffic based keep-alive timer
[ 412.903146] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 412.903150] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 415.463159] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 2 update keep-alive timer for 5 secs
[ 415.463173] nvmet:nvmet_execute_keep_alive: nvmet: ctrl 3 update keep-alive timer for 5 secs
[ 415.783067] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 1 reschedule traffic based keep-alive timer
[ 420.384557] nvme nvme0: Removing ctrl: NQN "nvme-test"
[ 420.384682] nvme nvme1: Removing ctrl: NQN "nvme-test"
[ 420.384811] nvme nvme2: Removing ctrl: NQN "nvme-test"
[ 420.413128] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 000000009b79a865
[ 420.413135] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 000000009b79a865 queue->state= 1
[ 420.413146] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 11
[ 420.413192] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000af45e2da
[ 420.413198] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000af45e2da queue->state= 1
[ 420.413208] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 12
[ 420.413211] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000107ac035
[ 420.413214] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000107ac035 queue->state= 1
[ 420.413220] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnected (10): status 0 id 00000000b0c97194
[ 420.413221] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 14
[ 420.413223] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnect received - connection closed
[ 420.413226] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000f7160f21
[ 420.413228] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000f7160f21 queue->state= 1
[ 420.413235] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 13
[ 420.413285] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnected (10): status 0 id 00000000f472579b
[ 420.413289] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnect received - connection closed
[ 420.413292] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnected (10): status 0 id 00000000d644586b
[ 420.413295] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnect received - connection closed
[ 420.413299] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnected (10): status 0 id 00000000eff2fa30
[ 420.413302] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnect received - connection closed
[ 420.419077] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000d7fa308b
[ 420.419083] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000d7fa308b queue->state= 1
[ 420.419094] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 6
[ 420.419101] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000a9d773f6
[ 420.419104] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000a9d773f6 queue->state= 1
[ 420.419110] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 7
[ 420.419119] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000de6b392c
[ 420.419122] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000de6b392c queue->state= 1
[ 420.419127] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 8
[ 420.419218] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: disconnected (10): status 0 id 000000003ece31fb
[ 420.419221] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: disconnect received - connection closed
[ 420.419223] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 000000004915e063
[ 420.419225] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 000000004915e063 queue->state= 1
[ 420.419231] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 9
[ 420.419237] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: disconnected (10): status 0 id 00000000b8105348
[ 420.419239] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: disconnect received - connection closed
[ 420.419242] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: disconnected (10): status 0 id 00000000de829415
[ 420.419243] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: disconnect received - connection closed
[ 420.419284] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: disconnected (10): status 0 id 0000000071c00c62
[ 420.419286] nvme_rdma:nvme_rdma_cm_handler: nvme nvme1: disconnect received - connection closed
[ 420.419288] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000906c9f1c
[ 420.419290] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000906c9f1c queue->state= 1
[ 420.419296] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 1
[ 420.419303] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 0000000051c855df
[ 420.419305] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 0000000051c855df queue->state= 1
[ 420.419310] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 2
[ 420.419375] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 00000000f3dd0b08
[ 420.419377] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 420.419380] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 0000000003a70ee0
[ 420.419382] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 420.419384] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 000000002f47bf3f
[ 420.419386] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 000000002f47bf3f queue->state= 1
[ 420.419391] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 3
[ 420.419396] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000e8cf3a1f
[ 420.419397] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000e8cf3a1f queue->state= 1
[ 420.419402] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 4
[ 420.419460] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 00000000d04f62be
[ 420.419463] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 420.419465] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 0000000014c72103
[ 420.419467] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 420.428092] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000f09108dc
[ 420.428097] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000f09108dc queue->state= 1
[ 420.428108] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 10
[ 420.428170] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnected (10): status 0 id 00000000058182d6
[ 420.428176] nvme_rdma:nvme_rdma_cm_handler: nvme nvme2: disconnect received - connection closed
[ 420.430025] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000d19c7ee5
[ 420.430030] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000d19c7ee5 queue->state= 1
[ 420.430041] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 0
[ 420.438002] nvmet:nvmet_stop_keep_alive_timer: nvmet: ctrl 1 stop keep-alive
[ 420.438135] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 0000000000b9100b
[ 420.438140] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 0000000000b9100b queue->state= 1
[ 420.438150] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 5
[ 420.438931] nvmet:nvmet_stop_keep_alive_timer: nvmet: ctrl 3 stop keep-alive
[ 420.441944] nvmet:nvmet_stop_keep_alive_timer: nvmet: ctrl 2 stop keep-alive
[ 420.594855] rdma_rxe: unloaded