diff --git a/.github/workflows/merge.yml b/.github/workflows/merge.yml
new file mode 100644
index 0000000..5b462a5
--- /dev/null
+++ b/.github/workflows/merge.yml
@@ -0,0 +1,93 @@
+name: Run Merge Script
+
+on:
+ workflow_dispatch:
+ # run after layout verification
+ workflow_run:
+ workflows: ["Run Layout Verification"]
+ types:
+ - completed
+
+jobs:
+ merge:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: checkout repo content
+ uses: actions/checkout@v2
+
+ # can also specify python version if needed
+ - name: setup python
+ uses: actions/setup-python@v5
+ with:
+ python-version: '3.11'
+ cache: 'pip'
+
+ - name: install python packages
+ run: |
+ #python -m pip install --upgrade pip
+ pip install klayout SiEPIC siepic_ebeam_pdk pandas
+ python -m pip install --upgrade SiEPIC
+
+ - name: run merge script
+ run: |
+
+ python merge/EBeam_merge.py
+
+ - name: move merge output files to new folder
+ run: |
+ #output_files="EBeam.gds EBeam.oas EBeam.txt EBeam.coords"
+ output_files="EBeam.oas EBeam.txt"
+
+ IFS=' '
+
+ mkdir -p merge_output
+
+ for file in $output_files; do
+ cp "merge/$file" merge_output/
+ done
+
+ - name: upload artifact
+ uses: actions/upload-artifact@v4
+ id: artifact-upload
+ with:
+ name: merge-files
+ path: merge_output/
+
+ - name: get artifact url
+ run: |
+ IFS='/' read -ra REPO <<< "$GITHUB_REPOSITORY"
+ OWNER="${REPO[0]}"
+ REPO_NAME="${REPO[1]}"
+ echo "Owner: $OWNER"
+ echo "Repository: $REPO_NAME"
+
+ RUN_ID=${{ github.run_id }}
+ ARTIFACT_ID=${{ steps.artifact-upload.outputs.artifact-id }}
+ ARTIFACT_URL="https://github.com/$OWNER/$REPO_NAME/actions/runs/$RUN_ID/artifacts/$ARTIFACT_ID"
+ echo "Artifact URL: $ARTIFACT_URL"
+
+ echo "ARTIFACT_URL=$ARTIFACT_URL" >> $GITHUB_ENV
+ echo "OWNER=$OWNER" >> $GITHUB_ENV
+
+ - name: update url in runner README
+ run: |
+ start_delim="<!-- artifact url: start -->"
+ end_delim="<!-- artifact url: end -->"
+
+ # remove current URL block; the \| address form matches nothing on the first run (before markers exist), whereas the previous empty delimiters made sed fail with "no previous regular expression"
+ sed -i "\|$start_delim|,\|$end_delim|d" README.md
+
+ # append new URL wrapped in markers so the next run can find and replace it; printf '%s' avoids treating the URL as a format string
+ printf '%s\n%s\n%s\n' "$start_delim" "$ARTIFACT_URL" "$end_delim" >> README.md
+
+ # merge script always runs on any PR, this ensures link is only updated after a PR is merged into SiEPIC
+ - name: commit and push changes to README if we are in SiEPIC repo
+ run: |
+ git diff
+ git config --local user.email "${{ github.actor }}@users.noreply.github.com"
+ git config --local user.name "${{ github.actor }}"
+ git add README.md
+ git commit -m "update README with new artifact url $ARTIFACT_URL"
+ git push
+ if: ${{ env.OWNER == 'SiEPIC'}}
diff --git a/.github/workflows/python-to-oas_gds.yml b/.github/workflows/python-to-oas_gds.yml
new file mode 100644
index 0000000..0c9610d
--- /dev/null
+++ b/.github/workflows/python-to-oas_gds.yml
@@ -0,0 +1,100 @@
+name: Run Python Files
+
+on:
+ workflow_dispatch:
+ push:
+ paths:
+ - "submissions/KLayout Python/**.py"
+ branches:
+ - '**'
+
+jobs:
+ run-python:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: checkout repo content
+ uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+
+ # can also specify python version if needed
+ - name: setup python
+ uses: actions/setup-python@v4
+
+ - name: install python packages
+ run: |
+ python -m pip install --upgrade pip
+ pip install klayout SiEPIC siepic_ebeam_pdk
+ python -m pip install --upgrade SiEPIC
+
+ - name: run python scripts and get output gds / oas file
+ run: |
+
+ # get added/modified py files
+ export FILES=$(git diff --name-only --diff-filter=ACM ${{ github.event.before }} ${{ github.sha }} -- "submissions/KLayout Python" | grep -E '\.py$')
+ echo "FILES=$FILES" >> $GITHUB_ENV
+
+ echo "Added / modified Python files; $FILES"
+
+ # delete .oas and .gds files in the runner's submissions folder
+ # this is needed in the case where someone already has file_name.gds and is now trying to generate file_name.oas (or vice versa)
+ rm -f submissions/*.gds submissions/*.oas
+
+ IFS=$'\n'
+
+ OUTPUT_FILES=""
+
+ for file in $FILES; do
+
+ echo "Getting oas/gds output for $file"
+
+ # run file and generate a gds / oas output
+ python "$file"
+
+ # get output and save to OUTPUT_FILES
+ gds_files=$(find submissions -type f -name "*.gds" -exec basename {} .gds \;)
+ oas_files=$(find submissions -type f -name "*.oas" -exec basename {} .oas \;) # was "*.gds": copy-paste bug, so .oas outputs were never collected
+
+ file_name=$(basename "$file")
+ file_name_no_py=$(basename "$file_name" .py)
+
+ output_files=""
+ if echo "$gds_files" | grep -q "$file_name_no_py"; then
+ output_file="${file_name_no_py}.gds"
+ else
+ output_file="${file_name_no_py}.oas"
+ fi
+
+ OUTPUT_FILES+="$output_file "
+
+ echo "Done for $file"
+
+ done
+
+ echo "output files; $OUTPUT_FILES"
+
+ echo "OUTPUT_FILES=$OUTPUT_FILES" >> $GITHUB_ENV
+
+ - name: write added oas and gds files to txt file
+ run: |
+ echo "$OUTPUT_FILES" > python-to-gds_oas.txt
+
+ - name: commit outputted oas and gds files into repository
+ run: |
+ git config --local user.email "${{ github.actor }}@users.noreply.github.com"
+ git config --local user.name "${{ github.actor }}"
+
+ git add python-to-gds_oas.txt
+
+ echo "git add python-to-gds_oas.txt"
+
+ # git add all produced oas files
+ for file in $OUTPUT_FILES; do
+ git add "submissions/$file"
+ echo "git add $file"
+ done
+
+ git commit -m "Add oas and gds files produced from .py files"
+ git push
+
diff --git a/.github/workflows/run-verification.yml b/.github/workflows/run-verification.yml
new file mode 100644
index 0000000..b33e90d
--- /dev/null
+++ b/.github/workflows/run-verification.yml
@@ -0,0 +1,121 @@
+name: Run Layout Verification
+
+on:
+ workflow_dispatch:
+ workflow_run:
+ workflows: ["Run Python Files"]
+ types:
+ - completed
+ push:
+ paths:
+ - 'submissions/**.gds'
+ - 'submissions/**.oas'
+ branches:
+ - '**'
+ pull_request:
+ branches:
+ - '**'
+
+
+jobs:
+ verification:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: checkout repo content
+ uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+
+ # can also specify python version if needed
+ - name: setup python
+ uses: actions/setup-python@v4
+
+ - name: install python packages
+ run: |
+ python -m pip install --upgrade pip
+ pip install klayout SiEPIC siepic_ebeam_pdk
+ python -m pip install --upgrade SiEPIC
+
+ - name: get .gds and .oas files, run example layout verification
+ id: run-script
+ run: |
+
+ # if the action is being triggered after running python files, get resulting oas files from txt file
+ # github actions is not configured to detect files pushed from another action, thus we cannot use the method below
+ if [ -s "python-to-gds_oas.txt" ] && [ -n "$(cat python-to-gds_oas.txt)" ]; then
+ export FILES=$(cat python-to-gds_oas.txt)
+ IFS=' '
+ echo "" > python-to-gds_oas.txt
+
+ # push empty text file to repo
+ git config --local user.email "${{ github.actor }}@users.noreply.github.com"
+ git config --local user.name "${{ github.actor }}"
+ git add python-to-gds_oas.txt
+ git commit -m "Emptying text file"
+ git push
+
+ else
+ if [ "${{ github.event_name }}" = "pull_request" ]; then
+ # triggered on pull request, get all changed / added files from forked repo
+ export FILES=$(git diff --name-only --diff-filter=ACM FETCH_HEAD | grep -E '\.(gds|oas)$' | sed 's|^submissions/||')
+ else
+ # triggered push, locate the changed / added .gds and .oas files in the submission folder
+ export FILES=$(git diff --name-status --diff-filter=ACM --relative=submissions ${{ github.event.before }} ${{ github.sha }} submissions | grep -E '\.(gds|oas)$' | awk '{print $2}')
+ fi
+ IFS=$'\n'
+ fi
+
+ # print the names of the files
+ echo "Files for verification; $FILES"
+
+ files_with_errors=""
+
+ # run verification on all files
+ for file in $FILES; do
+
+ echo "Running verification on $file"
+
+ output=$(python run_verification.py "submissions/$file")
+
+ # get number of errors
+ errors_from_output=$(echo "$output" | tail -n 1)
+
+ echo "$errors_from_output errors detected for $file"
+
+ # if file results in verification errors add to string files_with_errors
+ if [[ "$errors_from_output" -ge 1 ]]; then
+ files_with_errors+="$file, $errors_from_output errors. "
+ fi
+
+ echo "Done verification on $file"
+ done
+
+ echo "files_with_errors=$files_with_errors" >> $GITHUB_ENV
+
+ - name: move output files to new folder
+ run: |
+ export OUTPUT_FILES=$(find /home/runner/work/openEBL-2024-02/openEBL-2024-02/submissions -name "*.lyrdb")
+ echo "Output files: $OUTPUT_FILES"
+
+ mkdir -p verification_output
+
+ for file in $OUTPUT_FILES; do
+ cp "$file" verification_output/
+ done
+
+ - name: upload artifact
+ uses: actions/upload-artifact@v4 # v2 is deprecated and disabled on GitHub-hosted runners; v4 also matches merge.yml's version
+ with:
+ name: layout-errors
+ path: verification_output/
+
+ - name: fail if there are errors from layout verification
+ run: |
+ if [ -z "$files_with_errors" ]; then
+ echo "No errors detected."
+ else
+ echo "Errors detected: $files_with_errors"
+ exit 1
+ fi
+
diff --git a/.github/workflows/run-yaml-verification.yml b/.github/workflows/run-yaml-verification.yml
new file mode 100644
index 0000000..6d74703
--- /dev/null
+++ b/.github/workflows/run-yaml-verification.yml
@@ -0,0 +1,86 @@
+name: Run YAML verification
+
+on:
+ workflow_dispatch:
+ workflow_run:
+ workflows: ["Run Python Files"]
+ types:
+ - completed
+ push:
+ paths:
+ - 'submissions/**.yaml'
+ branches:
+ - '**'
+ pull_request:
+ branches:
+ - '**'
+
+
+jobs:
+ verification:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: checkout repo content
+ uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+
+ # can also specify python version if needed
+ - name: setup python
+ uses: actions/setup-python@v4
+
+ - name: install python packages
+ run: |
+ python -m pip install --upgrade pip
+ pip install PyYAML
+
+ - name: get .yaml files, run verification
+ id: run-script
+ run: |
+
+ if [ "${{ github.event_name }}" = "pull_request" ]; then
+ # triggered on pull request, get all changed / added files from forked repo
+ export FILES=$(git diff --name-only --diff-filter=ACM FETCH_HEAD | grep -E '\.(yaml|yml)$' | sed 's|^submissions/||')
+ else
+ # triggered push, locate the changed / added .yaml files in the submission folder
+ export FILES=$(git diff --name-status --diff-filter=ACM --relative=submissions ${{ github.event.before }} ${{ github.sha }} submissions | grep -E '\.(yaml|yml)$' | awk '{print $2}')
+ fi
+ IFS=$'\n'
+
+ # print the names of the files
+ echo "Files for verification; $FILES"
+
+ files_with_errors=""
+
+ # run verification on all files
+ for file in $FILES; do
+
+ echo "Running verification on $file"
+
+ output=$(python run_yaml_verification.py "submissions/$file")
+
+ # get number of errors
+ #errors_from_output=$(echo "$output" | tail -n 1)
+
+ #echo "$errors_from_output errors detected for $file"
+
+ # if file results in verification errors add to string files_with_errors
+ #if [[ "$errors_from_output" -ge 1 ]]; then
+ # files_with_errors+="$file, $errors_from_output errors. "
+ #fi
+
+ echo "Done verification on $file"
+ done
+
+ echo "files_with_errors=$files_with_errors" >> $GITHUB_ENV
+
+ - name: fail if there are errors from verification
+ run: |
+ if [ -z "$files_with_errors" ]; then
+ echo "No errors detected."
+ else
+ echo "Errors detected: $files_with_errors"
+ exit 1
+ fi
+
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..a4448cf
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,68 @@
+merge/EBeam.oas
+merge/EBeam.txt
+
+# Byte-compiled / optimized / DLL files
+*/*.pyc
+
+__pycache__/
+*.py[cod]
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*,cover
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# *.ldf
+
+*.DS_Store
+.DS_Store
+course.tar.gz
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..352220e
--- /dev/null
+++ b/README.md
@@ -0,0 +1,80 @@
+
+# openEBL Design Submissions
+
+- The Canadian Silicon Photonics Foundry, SiEPICfab, presents the open electron beam lithography (EBL) fabrication process, where former and current students of SiEPIC workshops and courses can submit their design for manufacturing and testing.
+- More details about openEBL.
+- The previous submission was in [February 2024](https://github.com/SiEPIC/openEBL-2024-02-Si-Heaters).
+
+# Fabrication process: Passive Silicon + Heaters
+## Technical summary:
+- SOI wafer, 220 nm silicon
+- Baseline process:
+ - Single full etch, using a negative resist (HSQ)
+ - Oxide cladding
+ - TiW metal heater, and Au metal bond pads
+- Details: [Slides](https://docs.google.com/presentation/d/1_ppHYec6LydML4RMRJdNI4DXHb0hgW7znToQCGgSF6M)
+- Process Design Kit: [SiEPIC-EBeam-PDK](https://github.com/siepic/SiEPIC_EBeam_PDK)
+
+## Layer table
+| Name | Layer/datatype | Description |
+|-----------------|----------------|--------------------------------------------------------------------------------------|
+| Si | 1/99 | Layer to draw silicon geometries |
+| M1_heater | 11/0 | Layer to draw metal heater, TiW |
+| M2_router | 12/0 | Layer to draw metal routing, Au |
+| Floorplan | 99/0 | Marks the layout design area |
+| Text | 10/0 | Text labels for automated measurements |
+| DevRec | 68/0 | Device recognition layer for component connectivity, netlist extraction, and verification|
+| PinRec | 1/10 | Port/pins recognition layer for component connectivity, netlist extraction, and verification|
+| Waveguide | 1/99 | Virtual layer, guiding shape for waveguides |
+| SEM | 200/0 | Requests for SEM images. Rectangles in a 4:3 aspect |
+
+
+# Submission instructions:
+
+The submission involves several steps. First, you need to create your design(s) using the process design kit (PDK) for this specific fabrication run. Then you need to create a Fork of this repository, commit your design(s), ensure that it passes the checks, and create a pull request. Once your pull request is approved, your design(s) will be merged into the layout for fabrication. You should verify that your design is correctly merged. Once the designs are fabricated, they will be tested, and the measurement results will be posted in this repository.
+
+## Design software and PDK installation instructions:
+ - Design tools and process design kit (SiEPIC-EBeam-PDK, KLayout implementation)installation instructions.
+ - Automated measurement test routine is submitted as a YAML file that is created using the following utility:
+
+pip install dreamcreator
+from dreamcreator import sequencecreator as sc
+sc.launch()
+
+## Submission via GitHub
+
+ - Create an account on GitHub
+ - Fork a copy of this GitHub repository into your own account: Create a new fork
+ - [Optional] Install GitHub Desktop (or git) on your computer, and Clone a local copy: Open with GitHub Desktop
+ - Create your design, and ensure that the filename contains your edX.org username, and be formatted according to the course/workshop as follows:
+ - EBeam_username.oas: for the edX Phot1x silicon photonics design course
+ - ELEC413_username.oas: for the UBC ELEC 413 course
+ - SiEPIC_Passives_username.oas: for the CMC SiEPIC Passives silicon photonics workshop
+ - For example: EBeam_LukasChrostowski_rings.oas
+ - Create your YAML test routines file, following the same filename requirements as above, but ending with extension .yaml.
+ - Upload your design(s) into the "submissions" folder, as a binary file, namely a .gds (GDSII format) or .oas (OASIS format) file, and the YAML test routine file.
+ - This can be done via the GitHub web page, by navigating to the submissions folder, then clicking on Add file, and Upload files.
+ - Click Commit changes, and wait for the verification to complete
+ - If there are errors, please review and correct the errors
+ - Alternatively upload your Python file, which will be compiled by a GitHub Action.
+ - For KLayout designs, use the "submissions/KLayout Python" folder, namely a .py (Python format) file. e.g., EBeam_LukasChrostowski_MZI.py. The Python file should save a gds or oas file into the parent "submissions" folder. The Python script needs to be executable in non-GUI mode, namely using "import klayout SiEPIC SiEPIC-EBeam-PDK"
+ - Check below for the merged design, and ensure that your design is correctly included
+ - Create a Pull Request -- this will notify the team of your contribution, which we can aggregate into the main design file
+ - Return to the main repository, and check for the merged design
+
+## Black-box cells (IP Replacement)
+- We perform IP replacement on several cells (grating couplers). We call these cells Black Box (BB), and you can identify them by _BB in the cell name, or the presence of the Blackbox layer 998/0 in the cell.
+- You must not change the name of the cell, the contents, nor cell origins. Otherwise, the replacement will not work correctly.
+
+## Automated GitHub Actions
+
+1) Running the files in the "submissions/KLayout Python" folder, to generate the designs
+2) Performing Manufacturing DRC verification on the designs in the "submissions" folder, and outputting the errors as an Artifact
+3) Performing Functional verification on the designs in the "submissions" folder, and outputting the errors as an Artifact
+4) Merging the designs from the "submissions" folder, and outputting merged layout as an Artifact
+
+## Latest Merge Layout File
+
+
+https://github.com/SiEPIC/openEBL-2024-02-Si-Heaters/actions/runs/7898151206/artifacts/1244008478
+
diff --git a/framework/EBL_Framework_1cm_PCM_static.oas b/framework/EBL_Framework_1cm_PCM_static.oas
new file mode 100644
index 0000000..6c770ee
Binary files /dev/null and b/framework/EBL_Framework_1cm_PCM_static.oas differ
diff --git a/merge/EBeam_merge.py b/merge/EBeam_merge.py
new file mode 100644
index 0000000..4e23cf2
--- /dev/null
+++ b/merge/EBeam_merge.py
@@ -0,0 +1,324 @@
+'''
+Automated merge for the edX Silicon Photonics course
+by Lukas Chrostowski, 2014-2024
+
+Run using Python, with import klayout and SiEPIC
+
+Input:
+- folder submissions
+- containing files {EBeam*, ELEC463*, ELEC413*, SiEPIC_Passives*, SiEPIC_Actives*}.{GDS,gds,OAS,oas,py}
+Output
+- in folder "merge"
+- files: EBeam.oas, EBeam.txt, EBeam.coords
+
+'''
+
+
+# configuration
+tech_name = 'EBeam_Si_Heaters'
+top_cell_name = 'EBeam_2024_08_Si_Heaters'
+cell_Width = 440000
+cell_Height = 470000
+cell_Gap_Width = 8000
+cell_Gap_Height = 8000
+chip_Width = 8650000
+chip_Height1 = 8490000
+chip_Height2 = 8780000
+br_cutout_x = 7484000
+br_cutout_y = 898000
+br_cutout2_x = 7855000
+br_cutout2_y = 5063000
+tr_cutout_x = 7037000
+tr_cutout_y = 8494000
+
+filename_out = 'EBeam_Si_Heaters'
+layers_keep = ['1/0', '1/10', '1/11', '11/0', '12/0', '13/0', '68/0', '81/0', '10/0', '99/0', '26/0', '31/0', '32/0', '33/0', '998/0']
+layer_text = '10/0'
+layer_SEM = '200/0'
+layer_SEM_allow = ['edXPhot1x'] # which submission folder is allowed to include SEM images
+layers_move = [[[31,0],[1,0]]] # move shapes from layer 1 to layer 2
+dbu = 0.001
+log_siepictools = False
+framework_file = 'EBL_Framework_1cm_PCM_static.oas'
+ubc_file = 'UBC_static.oas'
+
+
+# record processing time
+import time
+start_time = time.time()
+from datetime import datetime
+now = datetime.now()
+
+# KLayout
+import pya
+from pya import *
+
+# SiEPIC-Tools
+import SiEPIC
+from SiEPIC._globals import Python_Env, KLAYOUT_VERSION, KLAYOUT_VERSION_3
+from SiEPIC.scripts import zoom_out, export_layout
+from SiEPIC.utils import find_automated_measurement_labels
+import os
+
+if Python_Env == 'Script':
+ import importlib.util
+ spam_spec = importlib.util.find_spec("siepic_ebeam_pdk")
+ if spam_spec is not None:
+ # For external Python mode, when installed using pip install siepic_ebeam_pdk
+ import siepic_ebeam_pdk
+ else:
+ # Load the PDK from a folder, e.g, GitHub
+ import os, sys
+ path_GitHub = os.path.expanduser('~/Documents/GitHub/')
+ sys.path.append(os.path.join(path_GitHub, 'SiEPIC_EBeam_PDK/klayout'))
+ import siepic_ebeam_pdk
+
+# Output layout
+layout = pya.Layout()
+layout.dbu = dbu
+top_cell = layout.create_cell(top_cell_name)
+layerText = pya.LayerInfo(int(layer_text.split('/')[0]), int(layer_text.split('/')[1]))
+layerTextN = top_cell.layout().layer(layerText)
+
+def disable_libraries():
+ print('Disabling KLayout libraries')
+ for l in pya.Library().library_ids():
+ print(' - %s' % pya.Library().library_by_id(l).name())
+ pya.Library().library_by_id(l).delete()
+
+disable_libraries()
+
+# path for this python file
+path = os.path.dirname(os.path.realpath(__file__))
+
+# Log file
+global log_file
+log_file = open(os.path.join(path,filename_out+'.txt'), 'w')
+def log(text):
+ global log_file
+ log_file.write(text)
+ log_file.write('\n')
+
+log('SiEPIC-Tools %s, layout merge, running KLayout 0.%s.%s ' % (SiEPIC.__version__, KLAYOUT_VERSION,KLAYOUT_VERSION_3) )
+current_time = now.strftime("%Y-%m-%d, %H:%M:%S local time")
+log("Date: %s" % current_time)
+
+# Load all the GDS/OAS files from the "submissions" folder:
+path2 = os.path.abspath(os.path.join(path,"../submissions"))
+files_in = []
+_, _, files = next(os.walk(path2), (None, None, []))
+for f in sorted(files):
+ files_in.append(os.path.join(path2,f))
+
+# Load all the GDS/OAS files from the "framework" folder:
+path2 = os.path.abspath(os.path.join(path,"../framework"))
+_, _, files = next(os.walk(path2), (None, None, []))
+for f in sorted(files):
+ files_in.append(os.path.join(path2,f))
+
+# Create course cells using the folder name under the top cell
+cell_edXphot1x = layout.create_cell("edX")
+t = Trans(Trans.R0, 0,0)
+top_cell.insert(CellInstArray(cell_edXphot1x.cell_index(), t))
+cell_ELEC413 = layout.create_cell("ELEC413")
+top_cell.insert(CellInstArray(cell_ELEC413.cell_index(), t))
+cell_SiEPIC_Passives = layout.create_cell("SiEPIC_Passives")
+top_cell.insert(CellInstArray(cell_SiEPIC_Passives.cell_index(), t))
+
+# Create a date stamp cell
+cell_date = layout.create_cell('.merged:'+now.strftime("%Y-%m-%d-%H:%M:%S"))
+top_cell.insert(CellInstArray(cell_date.cell_index(), t))
+
+# Origins for the layouts
+x,y = 0,cell_Height+cell_Gap_Height
+
+import subprocess
+import pandas as pd
+for f in [f for f in files_in if '.oas' in f.lower() or '.gds' in f.lower()]:
+ basefilename = os.path.basename(f)
+ # get the time the file was last updated from the Git repository
+ a = subprocess.run(['git', '-C', os.path.dirname(f), 'log', '-1', '--pretty=%ci', basefilename], stdout = subprocess.PIPE)
+ filedate = pd.to_datetime(str(a.stdout.decode("utf-8"))).strftime("%Y%m%d_%H%M")
+ log("\nLoading: %s, dated %s" % (os.path.basename(f), filedate))
+ # rather than getting it from the disk, which is not correct:
+ # filedate = datetime.fromtimestamp(os.path.getmtime(f)).strftime("%Y%m%d_%H%M")
+
+ # Load layout
+ layout2 = pya.Layout()
+ layout2.read(f)
+
+ if 'ebeam' in f.lower():
+ course = 'edXphot1x'
+ elif 'elec413' in f.lower():
+ course = 'ELEC413'
+ elif 'siepic_passives' in f.lower():
+ course = 'SiEPIC_Passives'
+ else: course = 'edXphot1x' # default bucket: previously an unmatched filename (e.g. the framework .oas) reused the prior iteration's value, or raised NameError on the first file
+ cell_course = {'edXphot1x': cell_edXphot1x, 'ELEC413': cell_ELEC413, 'SiEPIC_Passives': cell_SiEPIC_Passives}[course] # explicit lookup instead of eval() on a derived name
+ log(" - course name: %s" % (course) )
+
+ # Check the DBU Database Unit, in case someone changed it, e.g., 5 nm, or 0.1 nm.
+ if round(layout2.dbu,10) != dbu:
+ log(' - WARNING: The database unit (%s dbu) in the layout does not match the required dbu of %s.' % (layout2.dbu, dbu))
+ print(' - WARNING: The database unit (%s dbu) in the layout does not match the required dbu of %s.' % (layout2.dbu, dbu))
+ # Step 1: change the DBU to match, but that magnifies the layout
+ wrong_dbu = layout2.dbu
+ layout2.dbu = dbu
+ # Step 2: scale the layout
+ try:
+ # determine the scaling required
+ scaling = round(wrong_dbu / dbu, 10)
+ layout2.transform (pya.ICplxTrans(scaling, 0, False, 0, 0))
+ log(' - WARNING: Database resolution has been corrected and the layout scaled by %s' % scaling)
+ except:
+ print('ERROR IN EBeam_merge.py: Incorrect DBU and scaling unsuccessful')
+
+ # check that there is one top cell in the layout
+ num_top_cells = len(layout2.top_cells())
+ if num_top_cells > 1:
+ log(' - layout should only contain one top cell; contains (%s): %s' % (num_top_cells, [c.name for c in layout2.top_cells()]) )
+ if num_top_cells == 0:
+ log(' - layout does not contain a top cell')
+
+ # Find the top cell
+ for cell in layout2.top_cells():
+ if os.path.basename(f) == framework_file:
+ # Create sub-cell using the filename under top cell
+ subcell2 = layout.create_cell(os.path.basename(f)+"_"+filedate)
+ t = Trans(Trans.R0, 0,0)
+ top_cell.insert(CellInstArray(subcell2.cell_index(), t))
+ # copy
+ subcell2.copy_tree(layout2.cell(cell.name))
+ break
+
+ if os.path.basename(f) == ubc_file:
+ # Create sub-cell using the filename under top cell
+ subcell2 = layout.create_cell(os.path.basename(f)+"_"+filedate)
+ t = Trans(Trans.R0, 8780000,8780000)
+ top_cell.insert(CellInstArray(subcell2.cell_index(), t))
+ # copy
+ subcell2.copy_tree(layout2.cell(cell.name))
+ break
+
+
+ if num_top_cells == 1 or cell.name.lower() == 'top':
+ log(" - top cell: %s" % cell.name)
+
+ # check layout height
+ if cell.bbox().top < cell.bbox().bottom:
+ log(' - WARNING: empty layout. Skipping.')
+ break
+
+ # Create sub-cell using the filename under course cell
+ subcell2 = layout.create_cell(os.path.basename(f)+"_"+filedate)
+ t = Trans(Trans.R0, x,y)
+ cell_course.insert(CellInstArray(subcell2.cell_index(), t))
+
+ # Clear extra layers
+ layers_keep2 = [layer_SEM] if course in layer_SEM_allow else []
+ for li in layout2.layer_infos():
+ if li.to_s() in layers_keep + layers_keep2:
+ log(' - loading layer: %s' % li.to_s())
+ else:
+ log(' - deleting layer: %s' % li.to_s())
+ layer_index = layout2.find_layer(li)
+ layout2.delete_layer(layer_index)
+
+ # Delete non-text geometries in the Text layer
+ layer_index = layout2.find_layer(int(layer_text.split('/')[0]), int(layer_text.split('/')[1]))
+ if type(layer_index) != type(None):
+ s = cell.begin_shapes_rec(layer_index)
+ shapes_to_delete = []
+ while not s.at_end():
+ if s.shape().is_text():
+ text = s.shape().text.string
+ if text.startswith('SiEPIC-Tools'):
+ if log_siepictools:
+ log(' - %s' % s.shape() )
+ s.shape().delete()
+ subcell2.shapes(layerTextN).insert(pya.Text(text, 0, 0))
+ elif text.startswith('opt_in'):
+ log(' - measurement label: %s' % text )
+ else:
+ shapes_to_delete.append( s.shape() )
+ s.next()
+ for s in shapes_to_delete:
+ s.delete()
+
+ # bounding box of the cell
+ bbox = cell.bbox()
+ log(' - bounding box: %s' % bbox.to_s() )
+
+ # Create sub-cell under subcell cell, using user's cell name
+ subcell = layout.create_cell(cell.name)
+ t = Trans(Trans.R0, -bbox.left,-bbox.bottom)
+ subcell2.insert(CellInstArray(subcell.cell_index(), t))
+
+ # clip cells
+ cell2 = layout2.clip(cell.cell_index(), pya.Box(bbox.left,bbox.bottom,bbox.left+cell_Width,bbox.bottom+cell_Height))
+ bbox2 = layout2.cell(cell2).bbox()
+ if bbox != bbox2:
+ log(' - WARNING: Cell was clipped to maximum size of %s X %s' % (cell_Width, cell_Height) )
+ log(' - clipped bounding box: %s' % bbox2.to_s() )
+
+ # copy
+ subcell.copy_tree(layout2.cell(cell2))
+
+ log(' - Placed at position: %s, %s' % (x,y) )
+
+ # Measure the height of the cell that was added, and move up
+ y += max (cell_Height, subcell.bbox().height()) + cell_Gap_Height
+ # move right and bottom when we reach the top of the chip
+ if y + cell_Height > chip_Height1 and x == 0:
+ y = cell_Height + cell_Gap_Height
+ x += cell_Width + cell_Gap_Width
+ if y + cell_Height > chip_Height2:
+ y = cell_Height + cell_Gap_Height
+ x += cell_Width + cell_Gap_Width
+ # check top right cutout for PCM
+ if x + cell_Width > tr_cutout_x and y + cell_Height > tr_cutout_y:
+ # go to the next column
+ y = cell_Height + cell_Gap_Height
+ x += cell_Width + cell_Gap_Width
+ # Check bottom right cutout for PCM
+ if x + cell_Width > br_cutout_x and y < br_cutout_y:
+ y = br_cutout_y
+ # Check bottom right cutout #2 for PCM
+ if x + cell_Width > br_cutout2_x and y < br_cutout2_y:
+ y = br_cutout2_y
+
+'''
+text_out,opt_in = find_automated_measurement_labels(topcell=top_cell, LayerTextN=layerTextN)
+coords_file = open(os.path.join(path,'merge',filename_out+'_coords.txt'), 'w')
+coords_file.write(text_out)
+coords_file.close()
+'''
+
+# move layers
+for i in range(0,len(layers_move)):
+ layer1=layout.find_layer(*layers_move[i][0])
+ layer2=layout.find_layer(*layers_move[i][1])
+ layout.move_layer(layer1, layer2)
+
+
+log('')
+
+#export_layout (top_cell, path, filename='EBeam', relative_path='', format='gds')
+file_out = export_layout (top_cell, path, filename='EBeam', relative_path='', format='oas')
+# log("Layout exported successfully %s: %s" % (save_options.format, file_out) )
+
+log("\nExecution time: %s seconds" % int((time.time() - start_time)))
+
+log_file.close()
+
+try:
+ # Display the layout in KLayout, using KLayout Package "klive", which needs to be installed in the KLayout Application
+ if Python_Env == 'Script':
+ from SiEPIC.utils import klive
+ klive.show(file_out, technology=tech_name)
+except:
+ pass
+
+print("KLayout EBeam_merge.py, completed in: %s seconds" % int((time.time() - start_time)))
+
+
diff --git a/python-to-gds_oas.txt b/python-to-gds_oas.txt
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/python-to-gds_oas.txt
@@ -0,0 +1 @@
+
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..7bfeef2
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,4 @@
+numpy
+scipy
+SiEPIC
+siepic_ebeam_pdk
diff --git a/run_verification.py b/run_verification.py
new file mode 100644
index 0000000..fdab294
--- /dev/null
+++ b/run_verification.py
@@ -0,0 +1,49 @@
+import pya
+from pya import *
+import SiEPIC
+from SiEPIC.verification import layout_check
+from SiEPIC.scripts import zoom_out
+from SiEPIC.utils import get_technology_by_name
+import siepic_ebeam_pdk
+import os
+import sys
+"""
+Script to load .gds file passed in through command line and run verification using layout_check().
+Output lyrdb file is saved to path specified by 'file_lyrdb' variable in the script.
+
+Jasmina Brar 12/08/23
+
+"""
+
+# gds file to run verification on
+gds_file = sys.argv[1]
+
+# load into layout
+layout = pya.Layout()
+layout.read(gds_file)
+
+# get top cell from layout
+top_cell = layout.top_cell()
+
+
+# set layout technology because the technology seems to be empty, and we cannot load the technology using TECHNOLOGY = get_technology() because this isn't GUI mode
+# refer to line 103 in layout_check()
+# tech = layout.technology()
+# print("Tech:", tech.name)
+layout.TECHNOLOGY = get_technology_by_name('EBeam')
+
+
+# run verification
+zoom_out(top_cell)
+
+# get file path, filename, path for output lyrdb file
+path = os.path.dirname(os.path.realpath(__file__))
+filename = gds_file.split(".")[0]
+file_lyrdb = os.path.join(path,filename+'.lyrdb')
+
+# run verification
+num_errors = layout_check(cell = top_cell, verbose=True, GUI=True, file_rdb=file_lyrdb)
+
+# Print the result value to standard output
+print(num_errors)
+
diff --git a/run_yaml_verification.py b/run_yaml_verification.py
new file mode 100644
index 0000000..f1fc30f
--- /dev/null
+++ b/run_yaml_verification.py
@@ -0,0 +1,20 @@
+import os, sys, yaml
+
+"""
+Load YAML file and check that the format is correct
+
+"""
+
+# gds file to run verification on
+yaml_file = sys.argv[1]
+print(yaml_file)
+
+with open(yaml_file, 'r') as file:
+ yaml_data = yaml.safe_load(file)
+
+print(yaml_data)
+
+print(' - number of devices: %s' % len(yaml_data['Devices']))
+print(' - number of routines: %s' % len(yaml_data['Routines']))
+for r in yaml_data['Routines']:
+ print(' - routine: %s' % r)
diff --git a/submissions/EBeam_heaters_BraggGratingwithHeater.gds b/submissions/EBeam_heaters_BraggGratingwithHeater.gds
new file mode 100644
index 0000000..c002a8e
Binary files /dev/null and b/submissions/EBeam_heaters_BraggGratingwithHeater.gds differ
diff --git a/submissions/EBeam_heaters_BraggGratingwithHeater.yaml b/submissions/EBeam_heaters_BraggGratingwithHeater.yaml
new file mode 100644
index 0000000..56690a9
--- /dev/null
+++ b/submissions/EBeam_heaters_BraggGratingwithHeater.yaml
@@ -0,0 +1,226 @@
+Devices:
+ BraggGratingwithHeater,comment,comment:
+ DeviceID: BraggGratingwithHeater,comment,comment
+ Electrical Coordinates: []
+ Optical Coordinates:
+ - 50.0
+ - 303.0
+ Polarization: TE
+ RoutineCheck: true
+ Routines:
+ - Set Wavelength Voltage Sweep:VoltageSweep_1480,1500,1530,1550
+ - Set Wavelength Current Sweep:CurrentSweep_1480,1500,1550
+ - Set Voltage Wavelength Sweep:WavelengthSweep_0.1,0.2,0.3
+ Type: device
+ Wavelength: '1550'
+Routines:
+ Current Sweep:
+ Default:
+ Channel A: true
+ Channel B: false
+ ELECflag: true
+ IV: true
+ Initialrange: ''
+ Laseroutput: High power
+ Max: '1'
+ Min: '0'
+ Numscans: '1'
+ OPTICflag: false
+ PV: true
+ RV: true
+ RangeDec: ''
+ Res: '1'
+ Start: ''
+ Stepsize: ''
+ Stop: ''
+ Sweeppower: ''
+ Sweepspeed: auto
+ Voltages: ''
+ Wavelengths: ''
+ setvflag: false
+ setwflag: false
+ Set Voltage Wavelength Sweep:
+ Default:
+ Channel A: true
+ Channel B: false
+ ELECflag: false
+ IV: false
+ Initialrange: '-20'
+ Laseroutput: High power
+ Max: ''
+ Min: ''
+ Numscans: '1'
+ OPTICflag: false
+ PV: false
+ RV: false
+ RangeDec: '20'
+ Res: ''
+ Start: '1480'
+ Stepsize: '1'
+ Stop: '1580'
+ Sweeppower: ''
+ Sweepspeed: auto
+ Voltages: '1, 2, 3'
+ Wavelengths: ''
+ setvflag: true
+ setwflag: false
+ WavelengthSweep_0.1,0.2,0.3:
+ Channel A: true
+ Channel B: false
+ IV: false
+ Initialrange: '-20'
+ Laseroutput: High power
+ Max: ''
+ Min: ''
+ Numscans: '1'
+ PV: false
+ RV: false
+ RangeDec: '20'
+ Res: ''
+ Start: '1480'
+ Stepsize: '1'
+ Stop: '1580'
+ Sweeppower: '0'
+ Sweepspeed: auto
+ Voltages: '0.1, 0.2, 0.3'
+ Wavelengths: ''
+ Set Wavelength Current Sweep:
+ CurrentSweep_1480,1500,1550:
+ Channel A: true
+ Channel B: false
+ IV: true
+ Initialrange: '-20'
+ Laseroutput: High power
+ Max: '5'
+ Min: '0'
+ Numscans: '1'
+ PV: true
+ RV: true
+ RangeDec: '20'
+ Res: '0.01'
+ Start: ''
+ Stepsize: ''
+ Stop: ''
+ Sweeppower: ''
+ Sweepspeed: auto
+ Voltages: ''
+ Wavelengths: '1480, 1500, 1550'
+ Default:
+ Channel A: true
+ Channel B: false
+ ELECflag: false
+ IV: false
+ Initialrange: '-20'
+ Laseroutput: High power
+ Max: '5'
+ Min: '0'
+ Numscans: '1'
+ OPTICflag: false
+ PV: false
+ RV: false
+ RangeDec: '20'
+ Res: '0.1'
+ Start: ''
+ Stepsize: ''
+ Stop: ''
+ Sweeppower: ''
+ Sweepspeed: auto
+ Voltages: ''
+ Wavelengths: '1480, 1500, 1550'
+ setvflag: false
+ setwflag: true
+ Set Wavelength Voltage Sweep:
+ Default:
+ Channel A: true
+ Channel B: false
+ ELECflag: false
+ IV: true
+ Initialrange: ''
+ Laseroutput: High power
+ Max: '1'
+ Min: '0'
+ Numscans: '1'
+ OPTICflag: false
+ PV: true
+ RV: true
+ RangeDec: ''
+ Res: '1'
+ Start: ''
+ Stepsize: ''
+ Stop: ''
+ Sweeppower: ''
+ Sweepspeed: auto
+ Voltages: ''
+ Wavelengths: ''
+ setvflag: false
+ setwflag: true
+ VoltageSweep_1480,1500,1530,1550:
+ Channel A: true
+ Channel B: false
+ IV: true
+ Initialrange: ''
+ Laseroutput: High power
+ Max: '1'
+ Min: '0'
+ Numscans: '1'
+ PV: true
+ RV: true
+ RangeDec: ''
+ Res: '0.01'
+ Start: ''
+ Stepsize: ''
+ Stop: ''
+ Sweeppower: ''
+ Sweepspeed: auto
+ Voltages: ''
+ Wavelengths: '1480,1500,1530,1550'
+ Voltage Sweep:
+ Default:
+ Channel A: true
+ Channel B: false
+ ELECflag: true
+ IV: true
+ Initialrange: ''
+ Laseroutput: High power
+ Max: '1'
+ Min: '0'
+ Numscans: '1'
+ OPTICflag: false
+ PV: true
+ RV: true
+ RangeDec: ''
+ Res: '100'
+ Start: ''
+ Stepsize: ''
+ Stop: ''
+ Sweeppower: ''
+ Sweepspeed: auto
+ Voltages: ''
+ Wavelengths: ''
+ setvflag: false
+ setwflag: false
+ Wavelength Sweep:
+ Default:
+ Channel A: false
+ Channel B: false
+ ELECflag: false
+ IV: false
+ Initialrange: '-20'
+ Laseroutput: High power
+ Max: ''
+ Min: ''
+ Numscans: '1'
+ OPTICflag: true
+ PV: false
+ RV: false
+ RangeDec: '20'
+ Res: ''
+ Start: '1480'
+ Stepsize: '1'
+ Stop: '1580'
+ Sweeppower: '0'
+ Sweepspeed: auto
+ Voltages: ''
+ Wavelengths: ''
+ setvflag: false
+ setwflag: false
diff --git a/submissions/KLayout Python/readme.md b/submissions/KLayout Python/readme.md
new file mode 100644
index 0000000..3412b53
--- /dev/null
+++ b/submissions/KLayout Python/readme.md
@@ -0,0 +1,6 @@
+This folder must contain Python files which output GDS/OAS files in the parent folder.
+
+Limitations of the present GitHub Action scripts:
+- The Python file's basename must match the basename of the GDS/OAS file it generates.
+- Each .py file must generate exactly one output file; one Python file cannot generate multiple outputs. Any helper Python files that do not generate a top cell must be placed elsewhere (e.g., in a subfolder).
+