
Bump aiohttp from 3.9.5 to 3.10.2 #8

Workflow file for this run

name: CSI test
on: [push, pull_request]
jobs:
  build-csi:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
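      # helm/kind-action provisions a local Kubernetes-in-Docker (KinD) cluster that the rest of the job runs against.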
      - name: Create k8s Kind Cluster
        uses: helm/kind-action@v1
        with:
          cluster_name: kind
      - name: Set VERSION to latest
        run: echo "VERSION=latest" >> $GITHUB_ENV
      - name: Install KServe from tutorial
        run: |
          curl -s "https://raw.githubusercontent.com/kserve/kserve/release-0.12/hack/quick_install.sh" | bash
      - name: Make local image, inject it in KinD
        run: |
          make VERSION=${VERSION} image-build
          kind load docker-image quay.io/${USER}/oci-storage-initializer:${VERSION}
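      # A ClusterStorageContainer registers a custom storage-initializer container with KServe and
      # associates it with the storageUri prefixes it supports (here: oci-artifact://).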
      - name: Apply the ClusterStorageContainer resource
        run: |
          kubectl apply -f - <<EOF
          apiVersion: "serving.kserve.io/v1alpha1"
          kind: ClusterStorageContainer
          metadata:
            name: oci-storage-initializer
          spec:
            container:
              name: storage-initializer
              image: quay.io/$USER/oci-storage-initializer:${VERSION}
              imagePullPolicy: IfNotPresent # NOT FOR PROD but allows easier testing of local images with KinD (just remove for prod)
              resources:
                requests:
                  memory: 100Mi
                  cpu: 100m
                limits:
                  memory: 1Gi
                  cpu: "1"
            supportedUriFormats:
              - prefix: oci-artifact://
          EOF
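      # The InferenceService deploys a sklearn predictor; its storageUri uses the oci-artifact:// prefix,
      # so the model is fetched by the custom storage initializer registered above.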
      - name: Apply the InferenceService resource
        run: |
          kubectl create namespace kserve-test
          kubectl apply -n kserve-test -f - <<EOF
          apiVersion: "serving.kserve.io/v1beta1"
          kind: "InferenceService"
          metadata:
            name: "sklearn-iris"
          spec:
            predictor:
              model:
                modelFormat:
                  name: sklearn
                storageUri: "oci-artifact://quay.io/mmortari/demo20240606-orascsi-ociartifactrepo:latest"
          EOF
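      # Wait up to 10 minutes for the InferenceService to report Ready before describing it.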
      - name: Check the InferenceService
        run: |
          kubectl get inferenceservices sklearn-iris -n kserve-test
          kubectl wait --for=condition=Ready --timeout=600s inferenceservice/sklearn-iris -n kserve-test
          kubectl describe inferenceservices sklearn-iris -n kserve-test
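      # The model is exercised through the Istio ingress gateway: the gateway service is port-forwarded
      # to localhost:8081 and the InferenceService hostname is sent as the Host header so the request is
      # routed to the sklearn-iris predictor.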
      - name: Check the model
        run: |
          kubectl get svc istio-ingressgateway -n istio-system
          INGRESS_GATEWAY_SERVICE=$(kubectl get svc --namespace istio-system --selector="app=istio-ingressgateway" --output jsonpath='{.items[0].metadata.name}')
          nohup kubectl port-forward --namespace istio-system svc/${INGRESS_GATEWAY_SERVICE} 8081:80 &
          while ! curl localhost:8081; do echo waiting for port-forwarding; sleep 1; done; echo port-forwarding ready
          # local envs
          export INGRESS_HOST=localhost
          export INGRESS_PORT=8081
          cat <<EOF > "/tmp/iris-input.json"
          {
            "instances": [
              [6.8, 2.8, 4.8, 1.4],
              [6.0, 3.4, 4.5, 1.6]
            ]
          }
          EOF
          SERVICE_HOSTNAME=$(kubectl get inferenceservice sklearn-iris -n kserve-test -o jsonpath='{.status.url}' | cut -d "/" -f 3)
          curl -s -v -H "Host: ${SERVICE_HOSTNAME}" -H "Content-Type: application/json" "http://${INGRESS_HOST}:${INGRESS_PORT}/v1/models/sklearn-iris:predict" -d @/tmp/iris-input.json
          curl -s -H "Host: ${SERVICE_HOSTNAME}" -H "Content-Type: application/json" "http://${INGRESS_HOST}:${INGRESS_PORT}/v1/models/sklearn-iris:predict" -d @/tmp/iris-input.json | jq .