Commit 04a2ee5

Merge pull request #433 from AllenInstitute/docker
try adding docker to workflow
2 parents: b3446f7 + ede92f4

7 files changed: +112 additions, -76 deletions


.github/workflows/build.yml
Lines changed: 14 additions & 10 deletions

@@ -12,6 +12,8 @@ jobs:
   build:
     runs-on:
       group: LargerInstance
+    container:
+      image: rcpeene/openscope_databook:latest
 
     env:
       DANDI_API_KEY: ${{ secrets.DANDI_API_KEY }}
@@ -21,27 +23,27 @@ jobs:
     - uses: actions/checkout@v3
       with:
         fetch-depth: 0
-        ref: main
+        ref: ${{ github.ref }}
 
     # - name: Set up Python
     #   uses: actions/setup-python@v4
     #   with:
     #     python-version: "3.11"
 
-    - name: Upgrading pip
-      run: pip install --upgrade pip
+    # - name: Upgrading pip
+    #   run: pip install --upgrade pip
 
-    - name: Install deps
-      run: pip install cython numpy
+    # - name: Install deps
+    #   run: pip install cython numpy
 
     - name: pip freeze
       run: pip freeze
 
-    - name: Installing packages again (this prevents a weird error)
-      run: pip install -r requirements.txt
+    # - name: Installing packages again (this prevents a weird error)
+    #   run: pip install -r requirements.txt
 
-    - name: Installing package
-      run: pip install -e .
+    # - name: Installing package
+    #   run: pip install -e .
 
     - name: Installing build dependencies
       run: |
@@ -85,7 +87,9 @@ jobs:
         rm ./docs/embargoed/*.nwb
 
     - name: Printing log
-      run: git status
+      run: |
+        git config --global --add safe.directory /__w/openscope_databook/openscope_databook
+        git status
 
     - name: Printing shortlog
       run: git log | git shortlog -sn

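With the new `container:` block, every step of the build job runs inside the published `rcpeene/openscope_databook:latest` image, which is why the pip setup steps are commented out (the dependencies come pre-installed in the image) and why the log step adds the `safe.directory` exception, presumably because the checkout inside the container is owned by a different user than the one running git. A minimal sketch of reproducing that CI environment locally, assuming Docker is installed and the image is publicly available on Docker Hub:
```
# Pull the same image the workflow's container: block references
docker pull rcpeene/openscope_databook:latest

# Open a shell inside it to inspect the CI environment interactively
docker run -it --rm rcpeene/openscope_databook:latest bash

# Inside the container the databook package and its requirements are already
# installed, so `pip freeze` should resemble the "pip freeze" step in the workflow log
```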
.github/workflows/test.yml
Lines changed: 10 additions & 8 deletions

@@ -11,6 +11,8 @@ jobs:
   test:
     runs-on:
       group: LargerInstance
+    container:
+      image: rcpeene/openscope_databook:latest
 
     env:
       DANDI_API_KEY: ${{ secrets.DANDI_API_KEY }}
@@ -19,20 +21,20 @@ jobs:
     steps:
     - uses: actions/checkout@v3
 
-    - name: Upgrading pip
-      run: pip install --upgrade pip
+    # - name: Upgrading pip
+    #   run: pip install --upgrade pip
 
     - name: print environment
       run: pip freeze
 
-    - name: Install cython
-      run: pip install cython numpy
+    # - name: Install cython
+    #   run: pip install cython numpy
 
-    - name: Installing package
-      run: pip install -e .
+    # - name: Installing package
+    #   run: pip install -e .
 
-    - name: Installing requirements
-      run: pip install -r ./requirements.txt
+    # - name: Installing requirements
+    #   run: pip install -r ./requirements.txt
 
     - name: Installing build dependencies
       run: |

Dockerfile
Lines changed: 22 additions & 0 deletions

@@ -0,0 +1,22 @@
+FROM ubuntu:22.04
+# base requirements
+RUN apt-get update
+RUN apt-get install -y coreutils
+RUN apt-get install -y libgl1-mesa-glx
+RUN apt-get install -y libglib2.0-0
+RUN apt-get install -y python3 python3-pip
+RUN apt-get install -y git
+
+RUN git config --global --add safe.directory /__w/openscope_databook/openscope_databook
+
+# copy databook setup files
+COPY requirements.txt ./openscope_databook/requirements.txt
+COPY setup.py ./openscope_databook/setup.py
+COPY README.md ./openscope_databook/README.md
+COPY LICENSE.txt ./openscope_databook/LICENSE.txt
+COPY databook_utils ./openscope_databook/databook_utils
+
+# for reasons I don't understand, these must be installed before the rest the requirements
+RUN pip install numpy cython
+# set up databook dependencies
+RUN pip install -e ./openscope_databook[dev]

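The Dockerfile copies only the packaging files (requirements.txt, setup.py, README, LICENSE, databook_utils) and installs the package with the `[dev]` extra, so the image carries the build and test dependencies but not the notebooks themselves. A sketch of building and tagging the image from the repository root; the push step is an assumption, since this commit does not show how the published image is produced:
```
# Build from the repo root; the trailing "." is the build context containing
# requirements.txt, setup.py, and databook_utils referenced by the COPY lines
docker build -t rcpeene/openscope_databook:latest .

# Publishing to Docker Hub (assumed) so the workflows can pull the image
docker push rcpeene/openscope_databook:latest
```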
docs/embargoed/cell_matching.ipynb
Lines changed: 35 additions & 54 deletions

@@ -50,12 +50,10 @@
 "import json\n",
 "import os\n",
 "\n",
-"import matplotlib as mpl\n",
 "import matplotlib.pyplot as plt\n",
 "import numpy as np\n",
 "\n",
-"from PIL import Image\n",
-"from time import sleep"
+"from PIL import Image"
 ]
 },
 {
@@ -93,6 +91,13 @@
 "id": "77d78e7d",
 "metadata": {},
 "outputs": [
+{
+"name": "stderr",
+"output_type": "stream",
+"text": [
+"A newer version (0.63.1) of dandi/dandi-cli is available. You are using 0.61.2\n"
+]
+},
 {
 "name": "stdout",
 "output_type": "stream",
@@ -255,66 +260,42 @@
 "name": "stderr",
 "output_type": "stream",
 "text": [
-"c:\\Users\\carter.peene\\Desktop\\Projects\\openscope_databook\\databook_env\\lib\\site-packages\\scipy\\__init__.py:169: UserWarning: A NumPy version >=1.18.5 and <1.26.0 is required for this version of SciPy (detected version 1.26.4\n",
-" warnings.warn(f\"A NumPy version >={np_minversion} and <{np_maxversion}\"\n",
 "WARNING:root:many=True not supported from argparse\n",
 "INFO:NwayMatching:NWAY_COMMIT_SHA None\n",
 "INFO:NwayMatching:Nway matching version 0.6.0\n",
-"c:\\Users\\carter.peene\\Desktop\\Projects\\openscope_databook\\databook_env\\lib\\site-packages\\scipy\\__init__.py:169: UserWarning: A NumPy version >=1.18.5 and <1.26.0 is required for this version of SciPy (detected version 1.26.4\n",
-" warnings.warn(f\"A NumPy version >={np_minversion} and <{np_maxversion}\"\n",
-"c:\\Users\\carter.peene\\Desktop\\Projects\\openscope_databook\\databook_env\\lib\\site-packages\\scipy\\__init__.py:169: UserWarning: A NumPy version >=1.18.5 and <1.26.0 is required for this version of SciPy (detected version 1.26.4\n",
-" warnings.warn(f\"A NumPy version >={np_minversion} and <{np_maxversion}\"\n",
-"c:\\Users\\carter.peene\\Desktop\\Projects\\openscope_databook\\databook_env\\lib\\site-packages\\scipy\\__init__.py:169: UserWarning: A NumPy version >=1.18.5 and <1.26.0 is required for this version of SciPy (detected version 1.26.4\n",
-" warnings.warn(f\"A NumPy version >={np_minversion} and <{np_maxversion}\"\n",
 "WARNING:root:many=True not supported from argparse\n",
 "WARNING:root:many=True not supported from argparse\n",
 "INFO:PairwiseMatching:Matching 1193675753 to 1194754135\n",
 "INFO:PairwiseMatching:Matching 1193675753 to 1194754135: best registration was ['Crop', 'CLAHE', 'PhaseCorrelate']\n",
-"multiprocessing.pool.RemoteTraceback: \n",
-"\"\"\"\n",
-"Traceback (most recent call last):\n",
-" File \"C:\\Users\\carter.peene\\AppData\\Local\\Programs\\Python\\Python310\\lib\\multiprocessing\\pool.py\", line 125, in worker\n",
-" result = (True, func(*args, **kwds))\n",
-" File \"C:\\Users\\carter.peene\\AppData\\Local\\Programs\\Python\\Python310\\lib\\multiprocessing\\pool.py\", line 48, in mapstar\n",
-" return list(map(*args))\n",
-" File \"c:\\Users\\carter.peene\\Desktop\\Projects\\openscope_databook\\databook_env\\lib\\site-packages\\nway\\nway_matching.py\", line 121, in pair_match_job\n",
-" pair_match.run()\n",
-" File \"c:\\Users\\carter.peene\\Desktop\\Projects\\openscope_databook\\databook_env\\lib\\site-packages\\nway\\pairwise_matching.py\", line 495, in run\n",
-" segmask_moving_3d_registered = transform_mask(\n",
-" File \"c:\\Users\\carter.peene\\Desktop\\Projects\\openscope_databook\\databook_env\\lib\\site-packages\\nway\\pairwise_matching.py\", line 384, in transform_mask\n",
-" dtype=np.int)\n",
-" File \"c:\\Users\\carter.peene\\Desktop\\Projects\\openscope_databook\\databook_env\\lib\\site-packages\\numpy\\__init__.py\", line 338, in __getattr__\n",
-" raise AttributeError(__former_attrs__[attr])\n",
-"AttributeError: module 'numpy' has no attribute 'int'.\n",
-"`np.int` was a deprecated alias for the builtin `int`. To avoid this error in existing code, use `int` by itself. Doing this will not modify any behavior and is safe. When replacing `np.int`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. If you wish to review your current use, check the release note link for additional information.\n",
-"The aliases was originally deprecated in NumPy 1.20; for more details and guidance see the original release note at:\n",
-" https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n",
-"\"\"\"\n",
-"\n",
-"The above exception was the direct cause of the following exception:\n",
-"\n",
-"Traceback (most recent call last):\n",
-" File \"C:\\Users\\carter.peene\\AppData\\Local\\Programs\\Python\\Python310\\lib\\runpy.py\", line 196, in _run_module_as_main\n",
-" return _run_code(code, main_globals, None,\n",
-" File \"C:\\Users\\carter.peene\\AppData\\Local\\Programs\\Python\\Python310\\lib\\runpy.py\", line 86, in _run_code\n",
-" exec(code, run_globals)\n",
-" File \"c:\\Users\\carter.peene\\Desktop\\Projects\\openscope_databook\\databook_env\\lib\\site-packages\\nway\\nway_matching.py\", line 502, in <module>\n",
-" nmod.run()\n",
-" File \"c:\\Users\\carter.peene\\Desktop\\Projects\\openscope_databook\\databook_env\\lib\\site-packages\\nway\\nway_matching.py\", line 462, in run\n",
-" self.pair_matches = pool.map(pair_match_job, pair_arg_list)\n",
-" File \"C:\\Users\\carter.peene\\AppData\\Local\\Programs\\Python\\Python310\\lib\\multiprocessing\\pool.py\", line 367, in map\n",
-" return self._map_async(func, iterable, mapstar, chunksize).get()\n",
-" File \"C:\\Users\\carter.peene\\AppData\\Local\\Programs\\Python\\Python310\\lib\\multiprocessing\\pool.py\", line 774, in get\n",
-" raise self._value\n",
-"AttributeError: module 'numpy' has no attribute 'int'.\n",
-"`np.int` was a deprecated alias for the builtin `int`. To avoid this error in existing code, use `int` by itself. Doing this will not modify any behavior and is safe. When replacing `np.int`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. If you wish to review your current use, check the release note link for additional information.\n",
-"The aliases was originally deprecated in NumPy 1.20; for more details and guidance see the original release note at:\n",
-" https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n"
+"c:\\Users\\carter.peene\\AppData\\Local\\Programs\\Python\\Python310\\lib\\site-packages\\nway\\utils.py:48: FutureWarning: In a future version of pandas all arguments of DataFrame.sort_index will be keyword-only.\n",
+" df = df.sort_index(0)\n",
+"c:\\Users\\carter.peene\\AppData\\Local\\Programs\\Python\\Python310\\lib\\site-packages\\nway\\utils.py:49: FutureWarning: In a future version of pandas all arguments of DataFrame.sort_index will be keyword-only.\n",
+" df = df.sort_index(1)\n",
+"INFO:NwayMatching:registration success(1) or failure (0):\n",
+" 0 1\n",
+"0 1 1\n",
+"1 1 1\n",
+"id map{\n",
+" \"0\": 1193675753,\n",
+" \"1\": 1194754135\n",
+"}\n",
+"c:\\Users\\carter.peene\\AppData\\Local\\Programs\\Python\\Python310\\lib\\site-packages\\nway\\nway_matching.py:208: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n",
+" matching_frame = matching_frame.append(pairframe)\n",
+"INFO:NwayMatching:Nway matching is done!\n",
+"INFO:NwayMatching:Creating match summary plots\n",
+"WARNING:root:setting Dict fields not supported from argparse\n",
+"c:\\Users\\carter.peene\\AppData\\Local\\Programs\\Python\\Python310\\lib\\site-packages\\argschema\\utils.py:346: FutureWarning: '--nway_output.nway_matches' is using old-style command-line syntax with each element as a separate argument. This will not be supported in argschema after 2.0. See http://argschema.readthedocs.io/en/master/user/intro.html#command-line-specification for details.\n",
+" warnings.warn(warn_msg, FutureWarning)\n",
+"WARNING:root:many=True not supported from argparse\n",
+"INFO:NwayMatching:wrote matching_output\\nway_match_fraction_plot_2024_11_14_13_37_50.png\n",
+"INFO:NwayMatching:wrote matching_output\\nway_warp_overlay_plot_2024_11_14_13_37_50.png\n",
+"INFO:NwayMatching:wrote matching_output\\nway_warp_summary_plot_2024_11_14_13_37_50.png\n",
+"INFO:NwayMatching:wrote ./output.json\n"
 ]
 }
 ],
 "source": [
-"!python -m nway.nway_matching --input_json input.json --output_json \"./output.json\" --output_dir matching_output"
+"!python3 -m nway.nway_matching --input_json input.json --output_json \"./output.json\" --output_dir matching_output"
 ]
 },
 {
@@ -385,7 +366,7 @@
 {
 "data": {
 "text/plain": [
-"<matplotlib.image.AxesImage at 0x1c3b53e35b0>"
+"<matplotlib.image.AxesImage at 0x21dff47bfa0>"
 ]
 },
 "execution_count": 13,
@@ -421,7 +402,7 @@
 {
 "data": {
 "text/plain": [
-"<matplotlib.image.AxesImage at 0x1c3b7dbdf00>"
+"<matplotlib.image.AxesImage at 0x21dff4fe680>"
 ]
 },
 "execution_count": 14,

docs/intro.md
Lines changed: 19 additions & 1 deletion

@@ -97,7 +97,9 @@ You can download an individual notebook by pressing the `Download` button in the
 ```
 pip install -e .
 ```
-It is recommended that this is done within a conda environment using Python 3.10 to minimize any interference with local machine environments. For information on installing and using conda, go [here](https://conda.io/projects/conda/en/latest/user-guide/getting-started.html). *Before* running the pip installation above, you can create a conda environment in the conda prompt with the command
+
+#### Locally (Conda)
+It is recommended that this is done within a conda environment using Python 3.10 or Docker to minimize any interference with local machine environments. For information on installing and using conda, go [here](https://conda.io/projects/conda/en/latest/user-guide/getting-started.html). *Before* running the pip installation above, you can create a conda environment in the conda prompt with the command
 ```
 conda create -n databook_env python=3.10
 ```
@@ -106,6 +108,22 @@ and you can run that environment with
 conda activate databook_env
 ```
 
+
+#### Locally (Docker)
+The Databook also includes a dockerfile. If you want to build a docker container for the Databook yourself (for some reason), you can do so by running the following command in the Databook main directory after you have docker installed and running
+```
+docker build -t openscope_databook
+```
+You can then activate the docker by running the following command. Note that, to access the databook in your host machine's web browser, the port 8888 should be mapped to the docker container's port.
+```
+docker run -p 8888:8888 openscope_databook
+```
+Instead of building the container yourself, you can use the main docker container that we maintain, registered publically on Docker hub with the following command
+```
+docker run -p 8888:8888 rcpeene/openscope_databook:latest
+```
+
+#### Locally (Running Notebook)
 Once you environment is setup, you can execute the notebooks in Jupyter by running the following command within the repo directory;
 ```
 Jupyter notebook

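The added instructions map port 8888 so the containerized Jupyter server is reachable from the host browser. If you also want notebook edits to persist outside the container, a bind mount is one option; this sketch assumes the repository is your current directory and that the image serves Jupyter on port 8888 as described above, and the mount itself is an illustration rather than part of the documented instructions:
```
# Map the container's Jupyter port to the host and mount the local repo
# so notebook edits survive after the container exits
docker run -p 8888:8888 -v "$PWD":/openscope_databook rcpeene/openscope_databook:latest
```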
requirements.txt
Lines changed: 2 additions & 1 deletion

@@ -1,3 +1,4 @@
+autograd==1.3
 ccfwidget==0.5.3
 cebra
 cython
@@ -21,7 +22,7 @@ quantities==0.14.1
 remfile==0.1.10
 scikit-image==0.19.3
 scipy==1.9.3
-ssm
+ssm @ git+https://github.com/lindermanlab/ssm
 statsmodels==0.14.0
 suite2p==0.12.1
 tensortools==0.4

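The `ssm` entry now uses a PEP 508 direct reference, pinning the dependency to the Linderman Lab GitHub repository rather than whatever `ssm` would otherwise resolve to on PyPI. As a standalone sketch of what pip does with that requirement line:
```
# Equivalent one-off install of the direct-reference requirement;
# pip clones the repo and builds ssm from source (git must be available)
pip install "ssm @ git+https://github.com/lindermanlab/ssm"
```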
setup.py
Lines changed: 10 additions & 2 deletions

@@ -1,4 +1,4 @@
-from setuptools import setup, find_packages
+from setuptools import setup
 
 with open("README.md", encoding="utf-8") as f:
     readme = f.read()
@@ -20,5 +20,13 @@
     url="https://github.com/AllenInstitute/openscope_databook",
     license=license,
     package_dir={"databook_utils": "databook_utils"},
-    install_requires=required
+    install_requires=required,
+    extras_require={
+        "dev": [
+            "markupsafe==2.0.1",
+            "jupyter-book==1.0.0",
+            "nbmake==1.5.3",
+            "pytest-xdist==3.5.0"
+        ]
+    }
 )

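The new `extras_require` block groups the documentation-build and test tooling under a `dev` extra, which is what the Dockerfile installs with `pip install -e ./openscope_databook[dev]`. Outside the container, the same extra can be pulled in from a checkout, for example:
```
# From the repository root: editable install plus the dev extra
# (markupsafe, jupyter-book, nbmake, pytest-xdist) defined in setup.py
pip install -e ".[dev]"
```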