[ 264.082565] run blktests nvmeof-mp/009 at 2022-09-30 17:04:23
[ 264.178253] null_blk: module loaded
[ 264.225931] null_blk: disk nullb0 created
[ 264.230964] null_blk: disk nullb1 created
[ 264.255414] rdma_rxe: loaded
[ 264.262365] infiniband enc600_rxe: set active
[ 264.262370] infiniband enc600_rxe: added enc600
[ 264.378209] nvmet: adding nsid 1 to subsystem nvme-test
[ 264.386744] nvmet_rdma: enabling port 1 (10.0.160.124:7777)
[ 264.429006] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 00000000f5689220
[ 264.429722] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 00000000f5689220
[ 264.429840] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000eae02ad0
[ 264.429848] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enc600_rxe.
[ 264.430657] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 8191 max_sge= 32 sq_size = 289 cm_id= 00000000eae02ad0
[ 264.430731] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 00000000f5689220
[ 264.430745] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000eae02ad0
[ 264.430909] nvmet:nvmet_start_keep_alive_timer: nvmet: ctrl 1 start keep-alive timer for 5 secs
[ 264.430912] nvmet: creating nvm controller 1 for subsystem nvme-test for NQN nqn.2014-08.org.nvmexpress:uuid:de740a86-6540-4779-9ba5-ca86a99714d3.
[ 264.431559] nvme nvme0: creating 2 I/O queues.
[ 264.431568] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 0000000017f577e9
[ 264.431778] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 0000000017f577e9
[ 264.432339] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 0000000016c91267
[ 264.432343] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enc600_rxe.
[ 264.432810] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 8191 max_sge= 32 sq_size = 1153 cm_id= 0000000016c91267
[ 264.432847] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 0000000017f577e9
[ 264.432864] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: address resolved (0): status 0 id 00000000dcdc4639
[ 264.432865] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 0000000016c91267
[ 264.433075] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: route resolved (2): status 0 id 00000000dcdc4639
[ 264.433108] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: connect request (4): status 0 id 00000000ed764972
[ 264.433112] nvmet_rdma:nvmet_rdma_find_get_device: nvmet_rdma: added enc600_rxe.
[ 264.433576] nvmet_rdma:nvmet_rdma_create_queue_ib: nvmet_rdma: nvmet_rdma_create_queue_ib: max_cqe= 8191 max_sge= 32 sq_size = 1153 cm_id= 00000000ed764972
[ 264.433604] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: established (9): status 0 id 00000000dcdc4639
[ 264.433623] nvme nvme0: mapped 2/0/0 default/read/poll queues.
[ 264.433623] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: established (9): status 0 id 00000000ed764972
[ 264.433765] nvmet:nvmet_execute_io_connect: nvmet: adding queue 1 to ctrl 1.
[ 264.433812] nvmet:nvmet_execute_io_connect: nvmet: adding queue 2 to ctrl 1.
[ 264.433849] nvme nvme0: new ctrl: NQN "nvme-test", addr 10.0.160.124:7777
[ 264.433873] nvmet:nvmet_req_cns_error_complete: nvmet: unhandled identify cns 6 on qid 0
[ 269.431392] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 1 reschedule traffic based keep-alive timer
[ 274.471371] nvmet:nvmet_keep_alive_timer: nvmet: ctrl 1 reschedule traffic based keep-alive timer
[ 275.254915] nvme nvme0: Removing ctrl: NQN "nvme-test"
[ 275.401619] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 0000000016c91267
[ 275.401638] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 0000000016c91267 queue->state= 1
[ 275.401693] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 1
[ 275.401706] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 0000000017f577e9
[ 275.401709] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 275.401755] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000ed764972
[ 275.401757] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000ed764972 queue->state= 1
[ 275.401760] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 2
[ 275.401809] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 00000000dcdc4639
[ 275.401812] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 275.481548] nvmet_rdma:nvmet_rdma_cm_handler: nvmet_rdma: disconnected (10): status 0 id 00000000eae02ad0
[ 275.481561] nvmet_rdma:__nvmet_rdma_queue_disconnect: nvmet_rdma: cm_id= 00000000eae02ad0 queue->state= 1
[ 275.481594] nvmet_rdma:nvmet_rdma_free_queue: nvmet_rdma: freeing queue 0
[ 275.481614] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnected (10): status 0 id 00000000f5689220
[ 275.481617] nvme_rdma:nvme_rdma_cm_handler: nvme nvme0: disconnect received - connection closed
[ 275.531615] nvmet:nvmet_stop_keep_alive_timer: nvmet: ctrl 1 stop keep-alive
[ 275.862409] rdma_rxe: unloaded