Merge pull request #13 from taxe10/main
Update read and write directories and bug fixes
runboj authored Apr 6, 2024
2 parents 6bcc72e + 9455333 commit fb254a6
Showing 9 changed files with 208 additions and 164 deletions.
22 changes: 14 additions & 8 deletions .env.example
@@ -1,19 +1,25 @@
-DEFAULT_ALGORITHM_DESCRIPTION=/path/to/PCA_v1.0.0.json
+# Directories
+READ_DIR=/path/to/read/data
+WRITE_DIR=/path/to/write/results
 
 # Prefect
 PREFECT_API_URL=http://prefect:4200/api
 FLOW_NAME="Parent flow/launch_parent_flow"
 TIMEZONE="US/Pacific"
 PREFECT_TAGS='["latent-space-explorer"]'
 FLOW_TYPE="conda"
 
 # MLEx Content Registry API
 CONTENT_API_URL="http://content-api:8000/api/v0/models"
 
-TILED_API_KEY=<api key>
-
-READ_DIR=/path/to/read/data
-WRITE_DIR=/path/to/write/results
+# Tiled key
+API_KEY=<api key>
 
 # Slurm jobs
-PARTITIONS='["p1", "p2"]'
-RESERVATIONS='["r1", "r2"]'
-MAX_TIME="1:00:00"
+PARTITIONS_CPU='["p_cpu1", "p_cpu2"]'
+RESERVATIONS_CPU='["r_cpu1", "r_cpu2"]'
+MAX_TIME_CPU="1:00:00"
+
+PARTITIONS_GPU='["p_gpu1", "p_gpu2"]'
+RESERVATIONS_GPU='["r_gpu1", "r_gpu2"]'
+MAX_TIME_GPU="1:00:00"
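
For reference, these variables are read by the Dash app at startup (see the src/app_layout.py diff below). A minimal sketch of the loading pattern, with json.loads for the list-valued Slurm settings as an assumption (the repo may parse them differently):

import json
import os

from dotenv import load_dotenv

load_dotenv(".env", override=True)

READ_DIR = os.getenv("READ_DIR")    # host data directory
WRITE_DIR = os.getenv("WRITE_DIR")  # host results directory

# The Slurm settings are JSON-encoded lists, hence the single-quoted
# '["p_cpu1", "p_cpu2"]' values above; json.loads is an assumption here.
PARTITIONS_CPU = json.loads(os.getenv("PARTITIONS_CPU", "[]"))
MAX_TIME_CPU = os.getenv("MAX_TIME_CPU", "1:00:00")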
Binary file removed data/example_latentrepresentation/f_vectors.parquet
Binary file removed data/example_shapes/pacmacX.npy
24 changes: 13 additions & 11 deletions docker-compose.yml
@@ -8,24 +8,26 @@ services:
     build:
       context: "."
       dockerfile: "docker/Dockerfile"
+    mem_limit: 2g
     environment:
-      READ_DIR: "${READ_DIR}"
-      WRITE_DIR: "${WRITE_DIR}"
-      PREFECT_TAGS: "${PREFECT_TAGS}"
+      READ_DIR_MOUNT: "${READ_DIR}" # Used to mount the read directory in podman jobs
+      WRITE_DIR_MOUNT: "${WRITE_DIR}" # Used to mount the write directory in podman jobs
+      READ_DIR: "/app/work/data"
+      WRITE_DIR: "/app/work/mlex_store"
       PREFECT_API_URL: '${PREFECT_API_URL}'
-      CONTENT_API_URL: '${CONTENT_API_URL}'
-      TILED_API_KEY: '${TILED_API_KEY}'
       FLOW_NAME: '${FLOW_NAME}'
       TIMEZONE: "${TIMEZONE}"
-      PARTITIONS: "${PARTITIONS}"
-      RESERVATIONS: "${RESERVATIONS}"
-      MAX_TIME: "${MAX_TIME}"
+      PREFECT_TAGS: "${PREFECT_TAGS}"
+      CONTENT_API_URL: '${CONTENT_API_URL}'
+      API_KEY: '${API_KEY}'
+      PARTITIONS_CPU: "${PARTITIONS_CPU}"
+      MAX_TIME_CPU: "${MAX_TIME_CPU}"
+      RESERVATIONS_CPU: "${RESERVATIONS_CPU}"
+      PARTITIONS_GPU: "${PARTITIONS_GPU}"
+      RESERVATIONS_GPU: "${RESERVATIONS_GPU}"
+      MAX_TIME_GPU: "${MAX_TIME_GPU}"
     volumes:
       - $READ_DIR:/app/work/data
       - $WRITE_DIR:/app/work/mlex_store
       # - ./src:/app/work/src
       - ../mlex_file_manager/file_manager:/app/work/src/file_manager
     ports:
       - "127.0.0.1:8070:8070"
     networks:
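
The *_MOUNT variables carry the host paths so that podman jobs launched from inside the container can bind-mount the same directories the app sees. A hypothetical sketch of how they might be consumed (the helper name and flag layout are illustrative, not from this repo):

import os

def podman_volume_args():
    # Host paths, forwarded via docker-compose as *_MOUNT variables.
    read_mount = os.getenv("READ_DIR_MOUNT")
    write_mount = os.getenv("WRITE_DIR_MOUNT")
    # Mount them at the same container paths the app itself uses.
    return [
        "-v", f"{read_mount}:/app/work/data",
        "-v", f"{write_mount}:/app/work/mlex_store",
    ]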
85 changes: 41 additions & 44 deletions src/app_layout.py
@@ -1,7 +1,6 @@
 import os
 
 import dash_bootstrap_components as dbc
-import dash_uploader as du
 import diskcache
 import plotly.graph_objects as go
 from dash import Dash, dcc, html
@@ -14,30 +13,32 @@
 
 load_dotenv(".env", override=True)
 
 # GLOBAL VARIABLES
 ALGORITHM_DATABASE = {
     "PCA": "PCA",
     "UMAP": "UMAP",
 }
 
 CLUSTER_ALGORITHM_DATABASE = {
     "KMeans": "KMeans",
     "DBSCAN": "DBSCAN",
     "HDBSCAN": "HDBSCAN",
 }
 
-DATA_OPTION = [
-    {
-        "label": "Synthetic Shapes",
-        "value": f"{os.getcwd()}/data/example_shapes/Demoshapes.npz",
-    },
-    {
-        "label": "Latent representations from encoder-decoder model",
-        "value": f"{os.getcwd()}/data/example_latentrepresentation/f_vectors.parquet",
-    },
-]
-READ_DIR = "data"
-UPLOAD_FOLDER_ROOT = "data/upload"
-TILED_API_KEY = os.getenv("TILED_API_KEY", None)
+READ_DIR = os.getenv("READ_DIR")
+WRITE_DIR = os.getenv("WRITE_DIR")
+API_KEY = os.getenv("API_KEY", None)
+if API_KEY == "":
+    API_KEY = None
+
+if os.path.exists(f"{os.getcwd()}/src/example_dataset"):
+    EXAMPLE_DATASETS = [
+        {
+            "label": "Synthetic Shapes",
+            "value": f"{os.getcwd()}/src/example_dataset/Demoshapes.npz",
+        }
+    ]
+else:
+    EXAMPLE_DATASETS = []
 
 # SETUP DASH APP
 cache = diskcache.Cache("./cache")
@@ -52,11 +53,8 @@
 
 server = app.server
 
-dash_file_explorer = FileManager(
-    READ_DIR, UPLOAD_FOLDER_ROOT, open_explorer=False, api_key=TILED_API_KEY
-)
+dash_file_explorer = FileManager(READ_DIR, open_explorer=False, api_key=API_KEY)
 dash_file_explorer.init_callbacks(app)
-du.configure_upload(app, UPLOAD_FOLDER_ROOT, use_upload_id=False)
 
 # BEGIN DASH CODE
 header = templates.header()
@@ -164,19 +162,13 @@
         children=[
             dbc.CardHeader(
                 [
-                    dbc.Label("Upload your own zipped dataset", className="mr-2"),
+                    dbc.Label("Select a Dataset", className="mr-2"),
                     dash_file_explorer.file_explorer,
-                    dbc.Label("Or select Data Clinic modal", className="mr-2"),
-                    dcc.Dropdown(
-                        id="feature-vector-model-list",
-                        clearable=False,
-                        style={"margin-bottom": "1rem"},
-                    ),
                     dbc.Label("Or try Example Dataset", className="mr-2"),
                     dcc.Dropdown(
                         id="example-dataset-selection",
-                        options=DATA_OPTION,
-                        clearable=False,
+                        options=EXAMPLE_DATASETS,
+                        clearable=True,
                         style={"margin-bottom": "1rem"},
                     ),
                 ]
@@ -252,12 +244,18 @@
     [
         dbc.CardBody(
             [
-                dbc.Label("Algorithm", className="mr-2"),
+                dbc.Label("Optional: Select Pre-trained Autoencoder", className="mr-2"),
+                dcc.Dropdown(
+                    id="feature-vector-model-list",
+                    clearable=True,
+                    style={"margin-bottom": "1rem"},
+                ),
+                html.Hr(),
+                dbc.Label("Dimension Reduction Algorithm", className="mr-2"),
                 dcc.Dropdown(
                     id="algo-dropdown",
                     options=[
-                        {"label": entry, "value": entry}
-                        for entry in ALGORITHM_DATABASE
+                        {"label": entry, "value": entry} for entry in ALGORITHM_DATABASE
                     ],
                     style={"min-width": "250px"},
                     value="PCA",
@@ -296,6 +294,8 @@
                     },
                 ),
                 html.Hr(),
+                dbc.Alert(id="job-alert", is_open=False, dismissable=True),
+                html.Hr(),
                 html.Div(
                     [
                         dbc.Label("Select a job..."),
@@ -366,16 +366,9 @@
 )
 
 control_panel = dbc.Accordion(
-    [
-        algo_panel,
-        cluster_algo_panel
-    ],
-    style={
-        'position': 'sticky',
-        'top': '10%',
-        'width': '100%'
-    }
-)
+    [algo_panel, cluster_algo_panel],
+    style={"position": "sticky", "top": "10%", "width": "100%"},
+)
 
 
 # metadata
@@ -386,7 +379,7 @@
         # Store for user created contents
         dcc.Store(id="image-length", data=0),
         dcc.Store(id="user-upload-data-dir", data=None),
-        dcc.Store(id="dataset-options", data=DATA_OPTION),
+        dcc.Store(id="dataset-options", data=EXAMPLE_DATASETS),
         dcc.Store(id="run-counter", data=0),
         dcc.Store(id="experiment-id", data=None),
         # data_label_schema, latent vectors, clusters
@@ -408,8 +401,12 @@
     children=[
         dbc.Row(
             [
-                dbc.Col(control_panel, width=4, style={'display': 'flex', 'margin-top': '1em'}),
-                dbc.Col(image_panel, width=8)
+                dbc.Col(
+                    control_panel,
+                    width=4,
+                    style={"display": "flex", "margin-top": "1em"},
+                ),
+                dbc.Col(image_panel, width=8),
             ]
         ),
         dbc.Row(dbc.Col(modal)),
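
One detail worth noting in the src/app_layout.py changes: docker-compose always defines API_KEY, so a key left unset in .env reaches the app as an empty string rather than a missing variable. The PR normalizes that to None before passing it to FileManager; the standalone sketch below restates the pattern:

import os

# Empty string (variable defined but blank) and missing variable both
# end up as None, so downstream code only has to test "if API_KEY".
API_KEY = os.getenv("API_KEY", None)
if API_KEY == "":
    API_KEY = None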
File renamed without changes.
File renamed without changes.
File renamed without changes.