diff --git a/plgen/gen.go b/plgen/gen.go new file mode 100644 index 00000000..873bb0c8 --- /dev/null +++ b/plgen/gen.go @@ -0,0 +1,511 @@ +// Copyright © 2019 IBM Corporation and others. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "fmt" + "gopkg.in/yaml.v2" + "os" + "regexp" + "strconv" + "strings" +) + +// Master structure that represents the entire pipeline +type PlGen struct { + pr PipelineResource + pspecs Spec + psteps []Steps + plt PipelineTask + pl Pipeline + role Role + rolebinding RoleBinding + plr PipelineRun +} + +// Used by PipelineResource, +type Metadata struct { + Name string `yaml:"name"` +} + +// Used by PipelineResource, through PipelineResource->Items->PipelineResourceSpec +type PipelineResourceParams struct { + Name string `yaml:"name"` + Value string `yaml:"value"` +} + +// Used by PipelineResource, through PipelineResource->Items +type PipelineResourceSpec struct { + Type string `yaml:"type"` + Params []PipelineResourceParams `yaml:"params"` +} + +// Used by PipelineResource, top level to PipelineResource +type Items struct { + APIVersion string `yaml:"apiVersion"` + Kind string `yaml:"kind"` + Metadata Metadata `yaml:"metadata"` + Spec PipelineResourceSpec `yaml:"spec"` +} + +// Used by PlGen, top level. 
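+// When marshalled (see GenResource), this becomes a `kind: List` document whose
+// items are the individual PipelineResource objects (see the example output in plgen.md).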
+type PipelineResource struct { + APIVersion string `yaml:"apiVersion"` + Items []Items `yaml:"items"` + Kind string `yaml:"kind"` +} + +// Used by PlGen, top level +type Role struct { + APIVersion string `yaml:"apiVersion"` + Kind string `yaml:"kind"` + Metadata Metadata `yaml:"metadata"` +} + +// Used by RoleBinding, through Rolebinding +type Subjects struct { + Kind string `yaml:"kind"` + Name string `yaml:"name"` + Namespace string `yaml:"namespace"` +} + +// Used by RoleBinding, through Rolebinding +type RoleRef struct { + Kind string `yaml:"kind"` + Name string `yaml:"name"` + APIGroup string `yaml:"apiGroup"` +} + +// Used by PlGen, top level +type RoleBinding struct { + APIVersion string `yaml:"apiVersion"` + Kind string `yaml:"kind,omitempty"` + Metadata Metadata `yaml:"metadata,omitempty"` + Subjects []Subjects `yaml:"subjects,omitempty"` + RoleRef RoleRef `yaml:"roleRef,omitempty"` +} + +// Used by Pipeline, through Pipeline->PipelineSpec->Tasks +type Params struct { + Name string `yaml:"name"` + Value string `yaml:"default"` +} + +// Used by Pipeline, through Pipeline->PipelineSpec->Tasks->PipelineResources +type PipelineInputs struct { + Name string `yaml:"name"` + Resource string `yaml:"resource,omitempty"` +} + +// Used by Pipeline, through Pipeline->PipelineSpec->Tasks->PipelineResources +type PipelineOutputs struct { + Name string `yaml:"name"` + Resource string `yaml:"resource,omitempty"` +} + +// Used by Pipeline, through Pipeline->PipelineSpec->Tasks +type PipelineResources struct { + Inputs []PipelineInputs `yaml:"inputs,omitempty"` + Outputs []PipelineOutputs `yaml:"outputs,omitempty"` +} + +// Used by Pipeline, through Pipeline->PipelineSpec->Tasks +type TaskRef struct { + Name string `yaml:"name"` + Kind string `yaml:"kind"` +} + +// Used by Pipeline, through Pipeline->PipelineSpec +type Resources struct { + Name string `yaml:"name"` + Type string `yaml:"type"` +} + +// Used by Pipeline, through Pipeline->PipelineSpec +type Tasks struct { + Name string `yaml:"name,omitempty"` + Taskref TaskRef `yaml:"taskRef,omitempty"` + Resources PipelineResources `yaml:"resources,omitempty"` + Params []Params `yaml:"params,omitempty"` +} + +// Used by Pipeline, through Pipeline +type PipelineSpec struct { + Resources []Resources `yaml:"resources,omitempty"` + Tasks [1]Tasks `yaml:"tasks,omitempty"` +} + +// Used by PlGen, top level +type Pipeline struct { + APIVersion string `yaml:"apiVersion"` + Kind string `yaml:"kind"` + Meta Metadata `yaml:"metadata"` + Spec PipelineSpec `yaml:"spec"` +} + +// Used by PipelineTask, through PipelineTask->Spec->Steps +type Arg struct { + Name string `yaml:"name"` + Value string `yaml:"value"` +} + +// Used by PipelineTask, through PipelineTask->Spec->Steps +type Mount struct { + Name string `yaml:"name"` + Value string `yaml:"mountPath"` +} + +// Used by PipelineTask, through PipelineTask->Spec->Steps +type Env struct { + Name string `yaml:"name"` + Value string `yaml:"value"` +} + +// Used by PipelineTask, through PipelineTask->Spec->Volumes +type HostPath struct { + Path string `yaml:"path"` + Type string `yaml:"type"` +} + +// Used by PipelineTask, through PipelineTask->Spec +type Volumes struct { + Name string `yaml:"name"` + HostPath HostPath `yaml:"hostPath"` +} + +// Used by PipelineTask, through PipelineTask->Spec +type Steps struct { + Name string `yaml:"name"` + Image string `yaml:"image"` + Env []Env `yaml:"env,omitempty"` + Command []string `yaml:"command,omitempty` + Args []string `yaml:"args,omitempty"` + Mount []Mount 
`yaml:"volumeMounts,omitempty"`
+	Arg     []Arg    `yaml:"arg,omitempty"`
+}
+
+// Used by PipelineTask, through PipelineTask->Spec
+type Inputs struct {
+	Resources []Resources `yaml:"resources,omitempty"`
+	Params    []Params    `yaml:"params,omitempty"`
+}
+
+// Used by PipelineTask, through PipelineTask->Spec
+type Outputs struct {
+	Resources []Resources `yaml:"resources,omitempty"`
+	Params    []Params    `yaml:"params,omitempty"`
+}
+
+// Used by PipelineTask, through PipelineTask
+type Spec struct {
+	Inputs  Inputs    `yaml:"inputs,omitempty"`
+	Outputs Outputs   `yaml:"outputs,omitempty"`
+	Steps   []Steps   `yaml:"steps,omitempty"`
+	Volumes []Volumes `yaml:"volumes,omitempty"`
+}
+
+// Used by PlGen, top level
+type PipelineTask struct {
+	APIVersion string   `yaml:"apiVersion"`
+	Kind       string   `yaml:"kind"`
+	Meta       Metadata `yaml:"metadata"`
+	Spec       Spec     `yaml:"spec"`
+}
+
+// Used by PipelineRun, through PipelineRun->PipelineRunSpec->PipelineRunResources
+type PipelineRunResourceRef struct {
+	Name string `yaml:"name"`
+}
+
+// Used by PipelineRun, through PipelineRun->PipelineRunSpec
+type PipelineRunResources struct {
+	Name        string                 `yaml:"name"`
+	ResourceRef PipelineRunResourceRef `yaml:"resourceRef"`
+}
+
+// Used by PipelineRun, through PipelineRun->PipelineRunSpec
+type PipelineRunPipelineRef struct {
+	Name string `yaml:"name"`
+}
+
+// Used by PipelineRun, through PipelineRun
+type PipelineRunSpec struct {
+	ServiceAccount string                 `yaml:"serviceAccount"`
+	Timeout        string                 `yaml:"timeout"`
+	PipelineRef    PipelineRunPipelineRef `yaml:"pipelineRef"`
+	Resources      []PipelineRunResources `yaml:"resources,omitempty"`
+}
+
+// Used by PlGen, top level
+type PipelineRun struct {
+	APIVersion string          `yaml:"apiVersion"`
+	Kind       string          `yaml:"kind"`
+	Metadata   Metadata        `yaml:"metadata"`
+	Spec       PipelineRunSpec `yaml:"spec"`
+}
+
+// Marshals a Go struct into its YAML definition
+// and prints a --- separator.
Any error, exit +func Marshal(in interface{}) { + data, err := yaml.Marshal(in) + if err != nil { + fmt.Println("Error while marshalling into yaml:", err) + os.Exit(1) + } + fmt.Print(string(data)) + fmt.Println("---") +} + +// Generates a Role from a USER verb +func GenRole(plg PlGen) { + role := plg.role + Marshal(&role) +} + +// Generates a RoleBinding from a USER verb +func GenRoleBinding(plg PlGen) { + rolebinding := plg.rolebinding + Marshal(&rolebinding) +} + +// Generates a pipeline, binds it with a pipeline task +func GenPipeline(plg PlGen) { + pl := plg.pl + pl.APIVersion = apiVersion + pl.Kind = "Pipeline" + pl.Meta.Name = nomenClature + "-pipeline" + pl.Spec.Tasks[0].Name = nomenClature + pl.Spec.Tasks[0].Taskref.Name = nomenClature + "-task" + pl.Spec.Tasks[0].Taskref.Kind = "Task" + Marshal(&pl) +} + +// Generates a pipeline run, binds it with a pipeline +func GenPipelineRun(plg PlGen) { + plr := plg.plr + plr.APIVersion = apiVersion + plr.Kind = "PipelineRun" + plr.Metadata.Name = nomenClature + "-pipeline-run" + plr.Spec.Timeout = pipelineTimeout + plr.Spec.PipelineRef.Name = nomenClature + "-pipeline" + Marshal(&plr) +} + +// Arg was used for internal purposes: +// spreading the input/output, +// as a cache for dollar variable lookup +// remove this before pipeline generation +// use normal for loop as opposed to iterator +// to effect the change in the actual structure +func removeArgs(plg *PlGen) { + steps := plg.pspecs.Steps + for i := 0; i < len(steps); i++ { + steps[i].Arg = []Arg{} + } +} + +// Generates a pipeline task +func GenPipelineTask(plg PlGen) { + plg.pspecs.Steps = plg.psteps + plg.plt.Spec = plg.pspecs + removeArgs(&plg) + plt := plg.plt + plt.APIVersion = apiVersion + plt.Kind = "Task" + plt.Meta.Name = nomenClature + "-task" + Marshal(&plt) +} + +// Generates a list of pipeline resources +func GenResource(plg PlGen) { + pr := plg.pr + pr.APIVersion = "v1" + pr.Kind = "List" + Marshal(&pr) +} + +// Transform a RUN step. Basically: +// 1. Transalate any $ variables +// search in the old steps, and the current one +// 2. suffix the commands under /bin/bash +func TransformRun(line string, step *Steps, steps *[]Steps) { + u := strings.Split(line, " ")[1:] + var v = strings.Join(u, " ") + for _, old := range u { + re := regexp.MustCompile("\\$([^\\s]+)") + words := re.FindAllString(old, -1) + for _, token := range words { + v = strings.ReplaceAll(v, token, replace(steps, token)) + current := []Steps{ + *step, + } + v = strings.ReplaceAll(v, token, replace(¤t, token)) + } + } + step.Command = []string{"/bin/bash"} + step.Args = append(step.Args, "-c", v) + debuglog("processing RUN", v, "as", step.Command) +} + +// Transform a USER verb. 
+// Create a ServiceAccount for the given user name
+// Bind it to the cluster-admin ClusterRole via a ClusterRoleBinding
+func TransformRole(line string, plg *PlGen) {
+	name := strings.Split(line, " ")[1]
+	var role Role
+	var rolebinding RoleBinding
+	role.APIVersion = "v1"
+	role.Kind = "ServiceAccount"
+	role.Metadata = Metadata{Name: name}
+
+	var sub Subjects
+	sub.Kind = role.Kind
+	sub.Name = name
+	sub.Namespace = namespace
+
+	var roleref RoleRef
+	roleref.Kind = "ClusterRole"
+	roleref.Name = "cluster-admin"
+	roleref.APIGroup = "rbac.authorization.k8s.io"
+
+	rolebinding.APIVersion = "rbac.authorization.k8s.io/v1"
+	rolebinding.Kind = "ClusterRoleBinding"
+	rolebinding.Metadata.Name = rolebindingname
+	rolebinding.Subjects = []Subjects{sub}
+	rolebinding.RoleRef = roleref
+
+	plg.plr.Spec.ServiceAccount = role.Metadata.Name
+	plg.role = role
+	plg.rolebinding = rolebinding
+
+	debuglog("processing USER", name, "as ClusterRoleBinding")
+}
+
+// Transform a MOUNT verb.
+// For a MOUNT A=B:
+// Create a volume named _A_ (slashes replaced with underscores) whose hostPath is A
+// Create a volumeMount named _A_ with mountPath B
+func TransformMount(step *Steps, name string, val string, plg *PlGen) {
+	mname := strings.ReplaceAll(name, "/", "_")
+	step.Mount = append(step.Mount, Mount{Name: mname, Value: val})
+	volumes := Volumes{Name: mname, HostPath: HostPath{Path: name, Type: "unknown"}}
+	plg.pspecs.Volumes = append(plg.pspecs.Volumes, volumes)
+	debuglog("processing MOUNT", mname, "as", volumes, "and", step.Mount)
+}
+
+// Transform an ENV verb.
+// Also record the key/value as an Arg, for future $ variable translations
+func TransformEnv(step *Steps, name string, val string, plg *PlGen) {
+	step.Env = append(step.Env, Env{Name: name, Value: val})
+	step.Arg = append(step.Arg, Arg{Name: name, Value: val})
+	debuglog("processing ENV", step.Env)
+}
+
+// Transform ARG, ARGIN, ARGOUT verbs
+// 1. From the value, try to `decipher` its resource type:
+//    ref: https://github.com/tektoncd/pipeline/blob/master/docs/resources.md
+// 2. compose headers
+// 3. create an artificial name for the resource
+// 4. bind the resource as input or output appropriately
+// 5. later, this will be looked up for $ variable translations.
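+//
+// Illustrative example (hypothetical value, not taken from this repo):
+//   ARGIN src=https://github.com/some-org/some-repo
+// produces a PipelineResource named "src" of type "git", references it from
+// both the Pipeline and the PipelineRun, and binds it as an input resource of
+// the generated Task.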
+func TransformArg(step *Steps, name string, key string, val string, plg *PlGen) { + itemName := "resource" + strconv.Itoa(rindex) + rindex++ + itemType := guessItemType(val) + params := []PipelineResourceParams{{Name: itemName, Value: val}} + plg.pr.Items = append(plg.pr.Items, Items{ + APIVersion: "tekton.dev/v1alpha1", + Kind: "PipelineResource", + Metadata: Metadata{Name: name}, + Spec: PipelineResourceSpec{Params: params, Type: itemType}}) + plrrr := PipelineRunResourceRef{Name: name} + plrr := PipelineRunResources{Name: name, ResourceRef: plrrr} + plg.plr.Spec.Resources = append(plg.plr.Spec.Resources, plrr) + r := Resources{Name: name, Type: itemType} + plg.pl.Spec.Resources = append(plg.pl.Spec.Resources, r) + if key == "ARGIN" || key == "ARG" { + r := Resources{Name: name, Type: itemType} + t := append(plg.pspecs.Inputs.Resources, r) + plg.pspecs.Inputs.Resources = t + pi := PipelineInputs{Name: name, Resource: name} + pq := append(plg.pl.Spec.Tasks[0].Resources.Inputs, pi) + plg.pl.Spec.Tasks[0].Resources.Inputs = pq + } else { + r := Resources{Name: name, Type: itemType} + s := append(plg.pspecs.Outputs.Resources, r) + plg.pspecs.Outputs.Resources = s + plo := PipelineOutputs{Name: name, Resource: name} + plp := append(plg.pl.Spec.Tasks[0].Resources.Outputs, plo) + plg.pl.Spec.Tasks[0].Resources.Outputs = plp + } + step.Arg = append(step.Arg, Arg{Name: name, Value: val}) + debuglog("processing ARG", step.Arg) +} + +func guessItemType(item string) string { + if strings.Contains(item, "github.com") { + if strings.Contains(item, "/pull/") { + return "pullRequest" + } + return "git" + } else if strings.Contains(item, "docker.io") || + strings.Contains(item, "gcr.io") || + strings.Contains(item, "registry.access.redhat.com") { + return "image" + } + // TODO: detect storage, cloud event, cluster patterns. + return "unknown" +} + +// Main translation loop. As we are not mandating any specific order +// in the pipeline script, the top level PlGen object is pre-created +// and passed to this so that as and when data elements (more import- +// antly resources) are encountered, they can be attached to it. +func transformSteps(plg *PlGen, stepstr string, index int) { + var step Steps + lines := strings.Split(stepstr, "\n") + for _, line := range lines { + if line != "" { + key := strings.Split(line, " ")[0] + switch key { + case "LABEL": + step.Name = strings.Split(line, " ")[1] + debuglog("processing LABEL", step.Name) + case "FROM": + step.Image = strings.Split(line, " ")[1] + debuglog("processing FROM", step.Image) + case "ARG", "ARGIN", "ARGOUT", "ENV", "MOUNT": + value := strings.Split(line, " ")[1] + name := strings.Split(value, "=")[0] + val := strings.Split(value, "=")[1] + if strings.HasPrefix(key, "ARG") { + TransformArg(&step, name, key, val, plg) + } else if key == "ENV" { + TransformEnv(&step, name, val, plg) + } else { + TransformMount(&step, name, val, plg) + } + case "RUN": + TransformRun(line, &step, &plg.psteps) + case "USER": + TransformRole(line, plg) + default: + fmt.Println("bad pipeline verb:", key) + os.Exit(1) + } + } + } + plg.psteps = append(plg.psteps, step) +} diff --git a/plgen/main.go b/plgen/main.go new file mode 100644 index 00000000..e09208d7 --- /dev/null +++ b/plgen/main.go @@ -0,0 +1,166 @@ +// Copyright © 2019 IBM Corporation and others. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "fmt" + "io/ioutil" + "os" + "regexp" + "strings" +) + +// Some static assumptions used in the tool +const ( + apiVersion = "tekton.dev/v1alpha1" + namespace = "default" + pipelineTimeout = "1h0m0s" + pipelineTrigger = "manual" + rolebindingname = "admin" + apiGroup = "rbac.authorization.k8s.io" +) + +// Name of the pipeline, pipelinerun, pipelinetask etc. +// are gleaned from the input file name. +// a future optimization is to replace this with a proper +// flag that receives the name of the pipeline objects +var ( + nomenClature = "test" + debug = false + rindex = 0 +) + +// Valid verbs at the moment. When adding one, all +// what you need to make sure is you have an entry +// for it in the switch case of transformSteps. +var verbs = []string{"ARG", "ARGIN", "ARGOUT", "FROM", "RUN", "LABEL", "ENV", "MOUNT", "USER"} + +func replace(steps *[]Steps, variable string) string { + if steps == nil { + return variable + } + for _, step := range *steps { + for _, e := range step.Arg { + if "$"+e.Name == variable { + return e.Value + } + } + } + return variable +} + +func isValidVerb(a string) bool { + for _, b := range verbs { + if b == a { + return true + } + } + return false +} + +func validateSanity(step string, sindex int) { + + // sanity #1: verbs start with LABEL + if !strings.HasPrefix(step, "LABEL ") { + fmt.Println("Pipeline steps start with LABEL verb") + os.Exit(1) + } + + // sanity #2: verbs only from within a known subset + lines := strings.Split(step, "\n") + for lindex, line := range lines { + var verbs = strings.Split(line, " ") + verb := strings.TrimSpace(verbs[0]) + if len(verb) != 0 && !isValidVerb(verb) { + fmt.Println("Invalid pipeline verb", verb, "in step #", sindex+1, "line #", lindex+1) + os.Exit(1) + } + } + + // sanity #3: no empty verbs + for lindex, line := range lines { + var verbs = strings.Split(line, " ") + if strings.TrimSpace(verbs[0]) != "" && len(verbs) <= 1 { + verb := strings.TrimSpace(verbs[0]) + fmt.Println("Verb", verb, "does not have a value in step #", sindex+1, "line #", lindex+1) + os.Exit(1) + } + } +} +func debuglog(args ...interface{}) { + if debug { + fmt.Print("[Debug] ") + fmt.Println(args...) 
+	}
+}
+
+func main() {
+
+	// Our master pipeline structure
+	var plg PlGen
+
+	// Validate proper usage
+	count := len(os.Args)
+	if count != 2 && count != 3 {
+		fmt.Println("Usage: plgen <pipeline-file> [-v]")
+		os.Exit(1)
+	}
+
+	if count == 3 {
+		debug = true
+	}
+
+	nomenClature = strings.ReplaceAll(os.Args[1], ".", "-")
+
+	// Get the file data
+	filename := os.Args[1]
+	rawdata, err := ioutil.ReadFile(filename)
+	if err != nil {
+		fmt.Println("error reading input file:", err)
+		os.Exit(1)
+	}
+	debuglog("reading", filename)
+	data := string(rawdata)
+
+	// Get the data split as steps
+	re := regexp.MustCompile("\nLABEL ")
+	steps := re.Split(data, -1)
+	for index := range steps {
+		if index > 0 {
+			steps[index] = "LABEL " + steps[index]
+		}
+	}
+
+	// Do a basic sanity check on the data
+	debuglog("doing basic sanity checking on the input")
+	for index, step := range steps {
+		validateSanity(step, index)
+	}
+
+	// Perform transformation with 'step' as the unit of work
+	for index, step := range steps {
+		transformSteps(&plg, step, index)
+	}
+
+	// Print the pipeline definition
+	debuglog("generating the pipeline data")
+	debuglog("---")
+	GenRole(plg)
+	GenRoleBinding(plg)
+	GenResource(plg)
+	GenPipeline(plg)
+	GenPipelineTask(plg)
+	GenPipelineRun(plg)
+}
diff --git a/plgen/plgen.md b/plgen/plgen.md
new file mode 100644
index 00000000..c3d031a6
--- /dev/null
+++ b/plgen/plgen.md
@@ -0,0 +1,330 @@
+# plgen
+
+`plgen` generates Tekton pipelines that you can deploy onto a Kubernetes cluster with minimal or zero changes.
+
+The tool provides a high-level abstraction on top of the Tekton pipeline semantics, hiding most of the details and the `yaml complexity` altogether. The intent is to drastically improve the user experience of working with pipelines, and to make pipeline transformation an integral part of Kabanero's capability.
+
+The main attraction of `plgen` is its input syntax, which it derives from the Dockerfile format. While a Dockerfile uses these verbs to define the attributes, the execution environment and the sequence of actions that lead up to the generation of an image, `plgen` uses the same verbs to sequence discrete steps into a pipeline definition, with the meaning of most verbs intact.
+
+Supported verbs at the moment are:
+```
+ARG
+ARGIN
+ARGOUT
+FROM
+RUN
+LABEL
+ENV
+MOUNT
+USER
+```
+
+Feel free to raise an issue / RFE in this repo if there is a need to define new verbs.
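+
+Since every generated object is written to stdout separated by `---`, the output can be redirected to a file and applied to a cluster as-is (assuming `kubectl` is configured against a cluster with Tekton Pipelines installed):
+
+$ plgen pl.txt > pipeline.yaml
+$ kubectl apply -f pipeline.yaml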
+ +Here is an example input and the generated pipeline:\s\s +$ cat pl.txt + +```Dockerfile +LABEL targz +FROM ubuntu +USER kubernetes-user +MOUNT containers=/var/lib/containers +ARG input=http://example.com/archive.tar.gz +ENV foo=bar +RUN tar xzvf $input +RUN cat source/file.txt +``` + +$ plgen pl.txt +```yaml +apiVersion: v1 +kind: ServiceAccount +metadata: + name: kubernetes-user +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRoleBinding +metadata: + name: admin +subjects: + kind: ServiceAccount + name: kubernetes-user + namespace: default +roleRef: + kind: ClusterRole + name: cluster-admin + apiGroup: rbac.authorization.k8s.io +--- +apiVersion: v1 +items: +- apiVersion: tekton.dev/v1alpha1 + kind: PipelineResource + metadata: + name: input + spec: + type: url + params: + name: resource0 + value: http://example.com/archive.tar.gz +kind: List +--- +apiVersion: tekton.dev/v1alpha1 +kind: Pipeline +metadata: + name: pl_txt-pipeline +spec: + resources: + - name: input + type: url + tasks: + name: pl_txt + taskRef: + name: pl_txt-task + kind: "" + resources: + inputs: + - name: input + resource: url +--- +apiVersion: tekton.dev/v1alpha1 +kind: Pipeline +metadata: + name: pl_txt-task +spec: + inputs: + resources: + - name: input + type: url + outputs: {} + steps: + - name: targz + image: ubuntu + env: + - name: foo + value: bar + command: + - command: '["/bin/bash"]' + args: + - -c + - tar xzvf http://example.com/archive.tar.gz + - command: '["/bin/bash"]' + args: + - -c + - cat source/file.txt + volumeMounts: + - name: containers + mountPath: /var/lib/containers + arg: + - name: input + value: http://example.com/archive.tar.gz + volumes: + - name: containers + hostPath: + path: containers + type: unknown +--- +apiVersion: tekton.dev/v1alpha1 +kind: PipelineRun +metadata: + name: pl_txt-pipeline-run +spec: + serviceAccount: kubernetes-user + timeout: 1h0m0s + pipelineRef: + name: pl_txt-pipeline + trigger: + type: manual + resources: + - name: input + resourceref: + name: input +--- +``` + + +# The Translation Specification + +### ARG + +Alias for ARGIN +Defines an input argument to the pipeline step as key-value pair. +An entry for the key is made to the PipelineResource, and referred by PipelineRun and Pipeline. +The key and values are passed as arg field to the current pipeline step. + +### Example: + +Input: + +`ARG input=http://example.com/archive.tar.gz` + +Output: +In PipelineResource: +```yaml +- apiVersion: tekton.dev/v1alpha1 + kind: PipelineResource + metadata: + name: input + spec: + type: url + params: + name: resource0 + value: http://example.com/archive.tar.gz +``` +In Pipeline step: +```yaml + arg: + - name: input + value: http://example.com/archive.tar.gz +``` + +Again in Pipeline step, after $ variable translation: +```yaml + - command: '["/bin/bash"]' + args: + - -c + - tar xzvf http://example.com/archive.tar.gz +``` + +### ARGIN + +Same as ARG . + +### ARGOUT +Defines an output argument to the pipeline step as key-value pair. +An entry for the key is made to the PipelineResource, and referred by PipelineRun and Pipeline. +The key and values are passed as `arg` field to the current pipeline step. + +### ENV + +Defines an environment variable for the container in the pipeline step. +The key and values are passed as `env` field to the current pipeline step. + +### Example: + +Input: +`ENV foo=bar` + +Output: +In Pipeline step: +```yaml + env: + - name: foo + value: bar +``` + +### FROM + +Defines the container to spin up for the current pipeline step. 
+The image name is passed as `image` field to the current pipeline step. + +### Example: +Input: +FROM ubuntu + +Output: +In Pipeline step: +```yaml + steps: + - name: targz + image: ubuntu +``` + + +### LABEL + +Defines the name of the current pipeline step. It is a mandate that each step starts with a LABEL +The label name is passed as the `name` field to the current pipeline step. + +### Example: +Input: +`LABEL targz` + +Output: +In Pipeline step: +```yaml + steps: + - name: targz + image: ubuntu +``` + +### MOUNT + +Defines the mount bindings between the host and the container in the current pipeline step. +An entry for `volumeMounts` in the current pipeline step is created with `_host_` as the name and `container` as the `mountPath` +An entry for the `volumes` in the pipeline is created with `_host_` as the name and `host` as the `hostPath` + +### Example: +Input: +`MOUNT containers=/var/lib/containers` +Output: +In Pipeline step: + +```yaml + volumeMounts: + - name: containers + mountPath: /var/lib/containers +``` + +Again in the Pipeline step: +```yaml + volumes: + - name: containers + hostPath: + path: containers + type: unknown +``` + +### RUN + +Defines the shell command(s) that will be run in the target container. +A /bin/bash is spawned in the target container, and the entire command string is passed to it. +$ variable translations occur before the command is dispatched, by looking up in the resources. + +### Example: +Input: +`RUN tar xzvf $input` +Output: +In the Pipeline step: +```yaml + command: + - command: '["/bin/bash"]' + args: + - -c + - tar xzvf http://example.com/archive.tar.gz +``` + + +### USER + +Defines a kubernetes cluster service account that will `own` the generated pipeline. +A Role is created with the user. +A RoleBinding is created with the user, that is bound to `cluster-admin` role. + +### Example: +Input: +`USER kubernetes-user` + +Output: +In Role definition: +```yaml +apiVersion: v1 +kind: ServiceAccount +metadata: + name: kubernetes-user +``` + +In RoleBinding: +```yaml +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRoleBinding +metadata: + name: admin +subjects: + kind: ServiceAccount + name: kubernetes-user + namespace: default +roleRef: + kind: ClusterRole + name: cluster-admin + apiGroup: rbac.authorization.k8s.io +```
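+
+### Resource type detection
+
+The `type` of each generated PipelineResource is guessed from the argument value (see `guessItemType` in `gen.go`): values containing `github.com` map to `git` (or `pullRequest` when the URL contains `/pull/`), values containing `docker.io`, `gcr.io` or `registry.access.redhat.com` map to `image`, and anything else is emitted as `unknown`. As a hypothetical example, `ARG repo=https://github.com/example/project` would produce a PipelineResource fragment roughly like:
+
+```yaml
+  spec:
+    type: git
+    params:
+    - name: resource0
+      value: https://github.com/example/project
+```
+
+The `resourceN` index in the params depends on how many resources were declared before this one.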