ci: deploy with kustomize #18

Merged (1 commit) on Jan 3, 2023
21 changes: 12 additions & 9 deletions .github/workflows/deploy-prod&demo.yaml
@@ -13,22 +13,25 @@ jobs:
     secrets:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

-  build:
-    name: Build with reusable workflow when merging to main
+  build-dataset:
+    name: Build dataset-event-publisher when pull request is created
     uses: Informasjonsforvaltning/workflows/.github/workflows/build-push.yaml@main
     with:
       app_name: fdk-dataset-event-publisher
       environment: prod
+      build_env: true
+      build_env_name: BINARY
+      build_env_value: fdk-dataset-event-publisher
     secrets:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       GCP_SA_DIGDIR_FDK_GCR_KEY: ${{ secrets.GCP_SA_DIGDIR_FDK_GCR_KEY }}

   deploy:
-    name: Deploy to prod environment with reusable workflow when test and build is successful
-    needs: [test, build]
-    uses: Informasjonsforvaltning/workflows/.github/workflows/deploy.yaml@main
+    name: Deploy publishers to prod environment with reusable workflow
+    needs: [test, build-dataset]
+    uses: Informasjonsforvaltning/workflows/.github/workflows/kustomize-deploy.yaml@main
     with:
-      app_name: fdk-dataset-event-publisher
+      app_name: fdk-kafka-event-publisher
       environment: prod
       cluster: digdir-fdk-prod
     secrets:
@@ -37,11 +40,11 @@ jobs:
       SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

   deploy_demo:
-    name: Deploy to demo environment with reusable workflow when prod deploy is successful
+    name: Deploy publishers to demo environment with reusable workflow
     needs: [deploy]
-    uses: Informasjonsforvaltning/workflows/.github/workflows/deploy.yaml@main
+    uses: Informasjonsforvaltning/workflows/.github/workflows/kustomize-deploy.yaml@main
     with:
-      app_name: fdk-dataset-event-publisher
+      app_name: fdk-kafka-event-publisher
       environment: demo
       cluster: digdir-fdk-dev
     secrets:
15 changes: 9 additions & 6 deletions .github/workflows/deploy-staging.yaml
@@ -14,24 +14,27 @@ jobs:
     secrets:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

-  build:
-    name: Build with reusable workflow when pull request is created
+  build-dataset:
+    name: Build dataset-event-publisher when pull request is created
     if: github.event.pull_request.draft == false
     uses: Informasjonsforvaltning/workflows/.github/workflows/build-push.yaml@main
     with:
       app_name: fdk-dataset-event-publisher
       environment: staging
+      build_env: true
+      build_env_name: BINARY
+      build_env_value: fdk-dataset-event-publisher
     secrets:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       GCP_SA_DIGDIR_FDK_GCR_KEY: ${{ secrets.GCP_SA_DIGDIR_FDK_GCR_KEY }}

   deploy:
-    name: Deploy to staging environment with reusable workflow when test and build is successful
+    name: Deploy publishers to staging environment with reusable workflow
     if: github.event.pull_request.draft == false
-    needs: [test, build]
-    uses: Informasjonsforvaltning/workflows/.github/workflows/deploy.yaml@main
+    needs: [test, build-dataset]
+    uses: Informasjonsforvaltning/workflows/.github/workflows/kustomize-deploy.yaml@main
     with:
-      app_name: fdk-dataset-event-publisher
+      app_name: fdk-kafka-event-publisher
       environment: staging
       cluster: digdir-fdk-dev
     secrets:
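Both deploy jobs now call the shared kustomize-deploy.yaml reusable workflow instead of deploy.yaml. That workflow is not part of this diff; judging only from the call sites above, its workflow_call interface presumably looks roughly like the sketch below (input and secret names inferred, so treat them as assumptions):

on:
  workflow_call:
    inputs:
      app_name:
        required: true
        type: string
      environment:
        required: true
        type: string
      cluster:
        required: true
        type: string
    secrets:
      SLACK_WEBHOOK_URL:
        required: true
      # additional secrets are collapsed in this diff view and omitted here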
1 change: 1 addition & 0 deletions .gitignore
@@ -1 +1,2 @@
 /target
+.idea
2 changes: 1 addition & 1 deletion Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -1,5 +1,5 @@
 [package]
-name = "fdk-event-publisher"
+name = "fdk-kafka-event-publisher"
 version = "0.1.0"
 edition = "2021"
8 changes: 5 additions & 3 deletions Dockerfile
@@ -6,7 +6,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     clang

 COPY ./ ./
-RUN cargo build --release
+ARG BINARY
+RUN cargo build --release --bin ${BINARY}


 FROM debian:bookworm-slim
@@ -20,6 +21,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
 ENV TZ=Europe/Oslo
 RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone

-COPY --from=builder /build/target/release/fdk-dataset-event-publisher /fdk-dataset-event-publisher
+ARG BINARY
+COPY --from=builder /build/target/release/${BINARY} /release

-CMD ["/fdk-dataset-event-publisher"]
+CMD ["/release"]
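With the new ARG BINARY instructions the same Dockerfile can build any binary in the crate, but the build argument has to be supplied at image-build time. This is presumably what build_env_name: BINARY / build_env_value in the workflows end up doing; a hypothetical, roughly equivalent manual step (not taken from the reusable build-push workflow) would be:

- name: Build image for a single binary (illustrative only)
  run: |
    docker build \
      --build-arg BINARY=fdk-dataset-event-publisher \
      -t eu.gcr.io/digdir-fdk-infra/fdk-dataset-event-publisher:${GITHUB_SHA} .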
2 changes: 1 addition & 1 deletion README.md
@@ -1 +1 @@
-# fdk-dataset-event-publisher
+# fdk-kafka-event-publisher
9 changes: 9 additions & 0 deletions deploy/base/kustomization.yaml
@@ -0,0 +1,9 @@
---
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
resources:
  - service-fdk-dataset-event-publisher.yaml
images:
  - name: fdk-dataset-event-publisher
    newName: eu.gcr.io/digdir-fdk-infra/fdk-dataset-event-publisher
    newTag: $(GIT_COMMIT_SHA)
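The newTag: $(GIT_COMMIT_SHA) value is a placeholder: no kustomize vars are defined here, so the reusable kustomize-deploy workflow presumably substitutes the commit SHA before rendering. A hypothetical step with the same effect (mechanism assumed, not taken from that workflow) could be:

- name: Pin image tag to the current commit
  run: |
    cd deploy/base
    kustomize edit set image \
      fdk-dataset-event-publisher=eu.gcr.io/digdir-fdk-infra/fdk-dataset-event-publisher:${GITHUB_SHA}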
14 changes: 14 additions & 0 deletions deploy/base/service-fdk-dataset-event-publisher.yaml
@@ -0,0 +1,14 @@
---
apiVersion: v1
kind: Service
metadata:
  name: fdk-dataset-event-publisher
spec:
  selector:
    fdk.service: fdk-dataset-event-publisher
  ports:
    - name: http
      protocol: TCP
      port: 8080
      targetPort: 8081
  type: NodePort
80 changes: 80 additions & 0 deletions deploy/demo/deployment-fdk-dataset-event-publisher.yaml
@@ -0,0 +1,80 @@
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: fdk-dataset-event-publisher
  labels:
    fdk.service: fdk-dataset-event-publisher
spec:
  replicas: 1
  selector:
    matchLabels:
      fdk.service: fdk-dataset-event-publisher
  strategy:
    type: RollingUpdate
  template:
    metadata:
      labels:
        fdk.service: fdk-dataset-event-publisher
      annotations:
        prometheus.io/scrape: "true"
    spec:
      containers:
        - env:
            - name: SCHEMA_REGISTRY
              valueFrom:
                secretKeyRef:
                  name: fdk-metadata-quality-demo
                  key: SCHEMA_REGISTRY
            - name: BROKERS
              valueFrom:
                secretKeyRef:
                  name: fdk-metadata-quality-demo
                  key: BROKERS
            - name: RABBITMQ_USERNAME
              valueFrom:
                secretKeyRef:
                  name: rabbitmq-demo
                  key: RABBITMQ_USERNAME
            - name: RABBITMQ_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: rabbitmq-demo
                  key: RABBITMQ_PASSWORD
            - name: RABBITMQ_HOST
              valueFrom:
                secretKeyRef:
                  name: rabbitmq-demo
                  key: RABBITMQ_HOST
            - name: RABBITMQ_PORT
              valueFrom:
                secretKeyRef:
                  name: rabbitmq-demo
                  key: RABBITMQ_PORT
            - name: HARVESTER_API_URL
              valueFrom:
                secretKeyRef:
                  name: commonurl-demo
                  key: FDK_DATASET_HARVESTER_URI
            - name: REASONING_API_URL
              valueFrom:
                secretKeyRef:
                  name: commonurl-demo
                  key: FDK_REASONING_SERVICE_URI
          name: fdk-dataset-event-publisher
          image: eu.gcr.io/digdir-fdk-infra/fdk-dataset-event-publisher:$(GIT_COMMIT_SHA)
          imagePullPolicy: Always
          ports:
            - containerPort: 8080
          resources:
            requests:
              memory: "500Mi"
          livenessProbe:
            exec:
              command:
                - "true"
          readinessProbe:
            exec:
              command:
                - "true"
      restartPolicy: Always
7 changes: 7 additions & 0 deletions deploy/demo/kustomization.yaml
@@ -0,0 +1,7 @@
---
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: demo
resources:
  - deployment-fdk-dataset-event-publisher.yaml
  - ../base
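The demo overlay composes the base Service with its own Deployment and namespace. A quick local sanity check of the composed manifests (a hypothetical step, not part of this PR's workflows):

- name: Render demo manifests
  run: kustomize build deploy/demo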
80 changes: 80 additions & 0 deletions deploy/prod/deployment-fdk-dataset-event-publisher.yaml
@@ -0,0 +1,80 @@
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: fdk-dataset-event-publisher
  labels:
    fdk.service: fdk-dataset-event-publisher
spec:
  replicas: 1
  selector:
    matchLabels:
      fdk.service: fdk-dataset-event-publisher
  strategy:
    type: RollingUpdate
  template:
    metadata:
      labels:
        fdk.service: fdk-dataset-event-publisher
      annotations:
        prometheus.io/scrape: "true"
    spec:
      containers:
        - env:
            - name: SCHEMA_REGISTRY
              valueFrom:
                secretKeyRef:
                  name: fdk-metadata-quality-prod
                  key: SCHEMA_REGISTRY
            - name: BROKERS
              valueFrom:
                secretKeyRef:
                  name: fdk-metadata-quality-prod
                  key: BROKERS
            - name: RABBITMQ_USERNAME
              valueFrom:
                secretKeyRef:
                  name: rabbitmq-prod
                  key: RABBITMQ_USERNAME
            - name: RABBITMQ_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: rabbitmq-prod
                  key: RABBITMQ_PASSWORD
            - name: RABBITMQ_HOST
              valueFrom:
                secretKeyRef:
                  name: rabbitmq-prod
                  key: RABBITMQ_HOST
            - name: RABBITMQ_PORT
              valueFrom:
                secretKeyRef:
                  name: rabbitmq-prod
                  key: RABBITMQ_PORT
            - name: HARVESTER_API_URL
              valueFrom:
                secretKeyRef:
                  name: commonurl-prod
                  key: FDK_DATASET_HARVESTER_URI
            - name: REASONING_API_URL
              valueFrom:
                secretKeyRef:
                  name: commonurl-prod
                  key: FDK_REASONING_SERVICE_URI
          name: fdk-dataset-event-publisher
          image: eu.gcr.io/digdir-fdk-infra/fdk-dataset-event-publisher:$(GIT_COMMIT_SHA)
          imagePullPolicy: Always
          ports:
            - containerPort: 8080
          resources:
            requests:
              memory: "500Mi"
          livenessProbe:
            exec:
              command:
                - "true"
          readinessProbe:
            exec:
              command:
                - "true"
      restartPolicy: Always
7 changes: 7 additions & 0 deletions deploy/prod/kustomization.yaml
@@ -0,0 +1,7 @@
---
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: prod
resources:
  - deployment-fdk-dataset-event-publisher.yaml
  - ../base