ci: add support for TLS connection
Signed-off-by: Boris Glimcher <[email protected]>
glimchb committed Sep 28, 2023
1 parent 202c6ed commit 1f5d185
Showing 6 changed files with 60 additions and 12 deletions.
3 changes: 2 additions & 1 deletion docker-compose.yml
@@ -16,6 +16,7 @@ services:
- "9009:9009"
- "4444:4444"
- "5555:5555"
- "7772:7772"
- "7777:7777"
privileged: true
networks:
@@ -66,7 +67,7 @@ services:
condition: service_healthy
redis:
condition: service_healthy
-command: sh -c "/opi-spdk-bridge -grpc_port=50051 -http_port=8082 -spdk_addr=/var/tmp/spdk.sock -tcp_trid=$$(getent hosts spdk | awk '{ print $$1 }'):7777"
+command: sh -c "/opi-spdk-bridge -grpc_port=50051 -http_port=8082 -spdk_addr=/var/tmp/spdk.sock -tcp_trid=$$(getent hosts spdk | awk '{ print $$1 }'):777"
healthcheck:
test: grpcurl -plaintext localhost:50051 list || exit 1

2 changes: 1 addition & 1 deletion pkg/backend/nvme_path.go
@@ -295,7 +295,7 @@ func (s *Server) numberOfPathsForController(controllerName string) int {
}

func (s *Server) keyToTemporaryFile(pskKey []byte) (string, error) {
-keyFile, err := s.psk.createTempFile("", "opikey")
+keyFile, err := s.psk.createTempFile("/var/tmp", "opikey")
if err != nil {
log.Printf("error: failed to create file for key: %v", err)
return "", status.Error(codes.Internal, "failed to handle key")
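A note on the "/var/tmp" argument introduced above: the bridge already reaches SPDK through /var/tmp/spdk.sock (see the docker-compose command), so /var/tmp is presumably a path both containers share, and a PSK file created there resolves to the same location inside the SPDK container. A minimal sketch of the idea, assuming the project's createTempFile helper behaves like os.CreateTemp (the real helper may differ):

package main

import (
	"fmt"
	"log"
	"os"
)

func main() {
	// Assumption: the key file must live on a path the SPDK container can also see,
	// hence "/var/tmp" rather than the default temporary directory.
	keyFile, err := os.CreateTemp("/var/tmp", "opikey")
	if err != nil {
		log.Fatalf("failed to create file for key: %v", err)
	}
	defer os.Remove(keyFile.Name())
	fmt.Println(keyFile.Name()) // e.g. /var/tmp/opikey1234567890
}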
37 changes: 31 additions & 6 deletions pkg/frontend/nvme_controller.go
@@ -24,7 +24,6 @@ import (
"go.einride.tech/aip/resourceid"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
"google.golang.org/protobuf/proto"
"google.golang.org/protobuf/types/known/emptypb"
)

@@ -75,15 +74,25 @@ func NewNvmeTCPTransport(listenAddr string) NvmeTransport {
}
}

-func (c *nvmeTCPTransport) Params(_ *pb.NvmeController, nqn string) (spdk.NvmfSubsystemAddListenerParams, error) {
+func (c *nvmeTCPTransport) Params(ctrl *pb.NvmeController, nqn string) (spdk.NvmfSubsystemAddListenerParams, error) {
result := spdk.NvmfSubsystemAddListenerParams{}
result.Nqn = nqn
-result.SecureChannel = false

+var port int32
+if *ctrl.Spec.NvmeControllerId < 0 {
+result.SecureChannel = true
+port = -*ctrl.Spec.NvmeControllerId
+} else {
+result.SecureChannel = false
+port = *ctrl.Spec.NvmeControllerId
+}

result.ListenAddress.Trtype = "tcp"
result.ListenAddress.Traddr = c.listenAddr.String()
-result.ListenAddress.Trsvcid = c.listenPort
+result.ListenAddress.Trsvcid = fmt.Sprintf("%s%d", c.listenPort, port)
result.ListenAddress.Adrfam = c.protocol

log.Printf("NvmfSubsystemAddListener params: %v", result)
return result, nil
}
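With this change the controller ID does double duty: its sign selects plain TCP versus TLS (SecureChannel), and its magnitude is appended to the listen-port prefix handed in via -tcp_trid (777 in the docker-compose.yml change above). A standalone sketch of that mapping, using the prefix and the example IDs from this commit:

package main

import "fmt"

// listenerFor mirrors the sign/port logic of Params above; it is a sketch, not the bridge API.
func listenerFor(prefix string, nvmeControllerID int32) (trsvcid string, secureChannel bool) {
	port := nvmeControllerID
	if port < 0 {
		secureChannel = true // a negative controller ID requests a TLS (secure channel) listener
		port = -port
	}
	trsvcid = fmt.Sprintf("%s%d", prefix, port)
	return trsvcid, secureChannel
}

func main() {
	fmt.Println(listenerFor("777", 7))  // "7777 false" -> the plain NVMe/TCP test below
	fmt.Println(listenerFor("777", -2)) // "7772 true"  -> the TLS test below
}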

@@ -129,11 +138,27 @@ func (s *Server) CreateNvmeController(_ context.Context, in *pb.CreateNvmeContro
msg := fmt.Sprintf("Could not create CTRL: %s", in.NvmeController.Name)
return nil, status.Errorf(codes.InvalidArgument, msg)
}

+// TODO: this is hard coded to specific host
+params2 := spdk.NvmfSubsystemAddHostParams{
+Nqn: subsys.Spec.Nqn,
+Host: "nqn.2014-08.org.nvmexpress:uuid:feb98abe-d51f-40c8-b348-2753f3571d3c",
+Psk: "/tmp/opikey.txt",
+}
+var result2 spdk.NvmfSubsystemAddHostResult
+err = s.rpc.Call("nvmf_subsystem_add_host", &params2, &result2)
+if err != nil {
+return nil, err
+}
+log.Printf("Received from SPDK: %v", result2)
+if !result2 {
+msg := fmt.Sprintf("Could not create CTRL: %s", in.NvmeController.Name)
+return nil, status.Errorf(codes.InvalidArgument, msg)
+}

response := utils.ProtoClone(in.NvmeController)
-response.Spec.NvmeControllerId = proto.Int32(-1)
response.Status = &pb.NvmeControllerStatus{Active: true}
s.Nvme.Controllers[in.NvmeController.Name] = response

return response, nil
}

2 changes: 1 addition & 1 deletion pkg/frontend/nvme_controller_test.go
@@ -160,7 +160,7 @@ func TestFrontEnd_CreateNvmeController(t *testing.T) {
Name: testControllerName,
Spec: &pb.NvmeControllerSpec{
PcieId: testController.Spec.PcieId,
-NvmeControllerId: proto.Int32(-1),
+NvmeControllerId: proto.Int32(17),
},
Status: &pb.NvmeControllerStatus{
Active: true,
2 changes: 1 addition & 1 deletion pkg/frontend/nvme_subsystem.go
@@ -62,7 +62,7 @@ func (s *Server) CreateNvmeSubsystem(_ context.Context, in *pb.CreateNvmeSubsyst
Nqn: in.NvmeSubsystem.Spec.Nqn,
SerialNumber: in.NvmeSubsystem.Spec.SerialNumber,
ModelNumber: in.NvmeSubsystem.Spec.ModelNumber,
-AllowAnyHost: true,
+AllowAnyHost: false,
MaxNamespaces: int(in.NvmeSubsystem.Spec.MaxNamespaces),
}
var result spdk.NvmfCreateSubsystemResult
26 changes: 24 additions & 2 deletions scripts/tests.sh
@@ -21,6 +21,7 @@ do
done
curl --fail --insecure --user spdkuser:spdkpass -X POST -H 'Content-Type: application/json' -d '{"id": 1, "method": "bdev_get_bdevs"}' http://127.0.0.1:9009

+# wait for client completes and return exit code
STORAGE_CLIENT_NAME=$(docker-compose ps | grep opi-spdk-client | awk '{print $1}')
STORAGE_CLIENT_RC=$(docker wait "${STORAGE_CLIENT_NAME}")
if [ "${STORAGE_CLIENT_RC}" != "0" ]; then
@@ -31,6 +32,7 @@ fi

# Check exported port also works (host network)
docker run --network=host --rm docker.io/namely/grpc-cli ls 127.0.0.1:50051
docker run --network=host --rm docker.io/curlimages/curl:8.3.0 curl -qkL http://127.0.0.1:8082/v1/inventory/1/inventory/2

# check reflection
grpc_cli=(docker run --network=opi-spdk-bridge_opi --rm docker.io/namely/grpc-cli)
@@ -62,12 +64,12 @@ grep "Total" log.txt

# test nvme
"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 CreateNvmeSubsystem "{nvme_subsystem_id: 'subsystem1', nvme_subsystem : {spec : {nqn: 'nqn.2022-09.io.spdk:opitest1', serial_number: 'myserial1', model_number: 'mymodel1', max_namespaces: 11} } }"
"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 CreateNvmeController "{nvme_controller_id: 'controller1', parent: '//storage.opiproject.org/subsystems/subsystem1', nvme_controller : {spec : {nvme_controller_id: 2, pcie_id : {physical_function : 0, virtual_function : 0, port_id: 0}, max_nsq:5, max_ncq:5 } } }"
"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 CreateNvmeController "{nvme_controller_id: 'controller1', parent: '//storage.opiproject.org/subsystems/subsystem1', nvme_controller : {spec : {nvme_controller_id: 7, pcie_id : {physical_function : 0, virtual_function : 0, port_id: 0}, max_nsq:5, max_ncq:5 } } }"
"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 CreateNvmeNamespace "{nvme_namespace_id: 'namespace1', parent: '//storage.opiproject.org/subsystems/subsystem1', nvme_namespace : {spec : {volume_name_ref : 'Malloc1', host_nsid : 1 } } }"
"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 GetNvmeSubsystem "{name : '//storage.opiproject.org/subsystems/subsystem1'}"
"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 GetNvmeController "{name : '//storage.opiproject.org/subsystems/subsystem1/controllers/controller1'}"
"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 GetNvmeNamespace "{name : '//storage.opiproject.org/subsystems/subsystem1/namespaces/namespace1'}"
-docker run --rm --network=host --privileged -v /dev/hugepages:/dev/hugepages ghcr.io/opiproject/spdk:main spdk_nvme_identify -r 'traddr:127.0.0.1 trtype:TCP adrfam:IPv4 trsvcid:7777'
+docker run --rm --network=host --privileged -v /dev/hugepages:/dev/hugepages ghcr.io/opiproject/spdk:main spdk_nvme_identify -r 'traddr:127.0.0.1 trtype:TCP adrfam:IPv4 trsvcid:7777 hostnqn:nqn.2014-08.org.nvmexpress:uuid:feb98abe-d51f-40c8-b348-2753f3571d3c'
docker run --rm --network=host --privileged -v /dev/hugepages:/dev/hugepages ghcr.io/opiproject/spdk:main spdk_nvme_perf -r 'traddr:127.0.0.1 trtype:TCP adrfam:IPv4 trsvcid:7777 subnqn:nqn.2022-09.io.spdk:opitest1 hostnqn:nqn.2014-08.org.nvmexpress:uuid:feb98abe-d51f-40c8-b348-2753f3571d3c' -c 0x1 -q 1 -o 4096 -w randread -t 10 | tee log.txt
grep "Total" log.txt
"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 CreateNvmeRemoteController "{nvme_remote_controller : {multipath: 'NVME_MULTIPATH_MULTIPATH'}, nvme_remote_controller_id: 'nvmetcp12'}"
@@ -80,5 +82,25 @@ grep "Total" log.txt
"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 DeleteNvmeController "{name : '//storage.opiproject.org/subsystems/subsystem1/controllers/controller1'}"
"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 DeleteNvmeSubsystem "{name : '//storage.opiproject.org/subsystems/subsystem1'}"

+# test nvme with TLS
+"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 CreateNvmeSubsystem "{nvme_subsystem_id: 'subsystem2', nvme_subsystem : {spec : {nqn: 'nqn.2022-09.io.spdk:opitest2', serial_number: 'myserial1', model_number: 'mymodel1', max_namespaces: 11} } }"
+"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 CreateNvmeController "{nvme_controller_id: 'controller2', parent: '//storage.opiproject.org/subsystems/subsystem2', nvme_controller : {spec : {nvme_controller_id:-2, pcie_id : {physical_function : 0, virtual_function : 0, port_id: 0}, max_nsq:5, max_ncq:5 } } }"
+"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 CreateNvmeNamespace "{nvme_namespace_id: 'namespace2', parent: '//storage.opiproject.org/subsystems/subsystem2', nvme_namespace : {spec : {volume_name_ref : 'Malloc1', host_nsid : 1 } } }"
+"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 GetNvmeSubsystem "{name : '//storage.opiproject.org/subsystems/subsystem2'}"
+"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 GetNvmeController "{name : '//storage.opiproject.org/subsystems/subsystem2/controllers/controller2'}"
+"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 GetNvmeNamespace "{name : '//storage.opiproject.org/subsystems/subsystem2/namespaces/namespace2'}"
+# docker run --rm --network=host --privileged -v /dev/hugepages:/dev/hugepages ghcr.io/opiproject/spdk:main spdk_nvme_identify -r 'traddr:127.0.0.1 trtype:TCP adrfam:IPv4 trsvcid:7772 hostnqn:nqn.2014-08.org.nvmexpress:uuid:feb98abe-d51f-40c8-b348-2753f3571d3c'
+docker run --rm --network=host --privileged -v /dev/hugepages:/dev/hugepages -v /tmp/opikey.txt:/tmp/opikey.txt ghcr.io/opiproject/spdk:main spdk_nvme_perf -r 'traddr:127.0.0.1 trtype:TCP adrfam:IPv4 trsvcid:7772 subnqn:nqn.2022-09.io.spdk:opitest2 hostnqn:nqn.2014-08.org.nvmexpress:uuid:feb98abe-d51f-40c8-b348-2753f3571d3c' -c 0x1 -q 1 -o 4096 -w randread -t 10 -S ssl --psk-path /tmp/opikey.txt | tee log.txt
+grep "Total" log.txt
+"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 CreateNvmeRemoteController "{nvme_remote_controller : {multipath: 'NVME_MULTIPATH_MULTIPATH', tcp: {hdgst: false, ddgst: false, psk: 'TlZNZVRMU2tleS0xOjAxOk1EQXhNVEl5TXpNME5EVTFOalkzTnpnNE9UbGhZV0ppWTJOa1pHVmxabVp3SkVpUTo='}}, nvme_remote_controller_id: 'nvmetls17'}"
+"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 CreateNvmePath "{nvme_path : {controller_name_ref: '//storage.opiproject.org/volumes/nvmetls17', traddr:\"$SPDK_IP\", trtype:'NVME_TRANSPORT_TCP', fabrics: { subnqn:'nqn.2022-09.io.spdk:opitest2', trsvcid:'7772', adrfam:'NVME_ADRFAM_IPV4', hostnqn:'nqn.2014-08.org.nvmexpress:uuid:feb98abe-d51f-40c8-b348-2753f3571d3c'}}, nvme_path_id: 'nvmetls17path0'}"
+"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 GetNvmeRemoteController "{name: '//storage.opiproject.org/volumes/nvmetls17'}"
+"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 GetNvmePath "{name: '//storage.opiproject.org/volumes/nvmetls17path0'}"
+"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 DeleteNvmePath "{name: '//storage.opiproject.org/volumes/nvmetls17path0'}"
+"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 DeleteNvmeRemoteController "{name: '//storage.opiproject.org/volumes/nvmetls17'}"
+"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 DeleteNvmeNamespace "{name : '//storage.opiproject.org/subsystems/subsystem2/namespaces/namespace2'}"
+"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 DeleteNvmeController "{name : '//storage.opiproject.org/subsystems/subsystem2/controllers/controller2'}"
+"${grpc_cli[@]}" call --json_input --json_output opi-spdk-server:50051 DeleteNvmeSubsystem "{name : '//storage.opiproject.org/subsystems/subsystem2'}"

# this is last line
docker-compose ps -a
