
Commit

Refactor demo notebook structure by converting a code cell to markdown for improved documentation

Removed redundant markdown cell and updated execution metadata for clarity. This enhances the user experience by providing better context and organization within the notebook.
xuwenyihust committed Dec 10, 2024
1 parent 0cf6d19 commit ce98b64
Showing 2 changed files with 14 additions and 115 deletions.
109 changes: 12 additions & 97 deletions examples/[email protected]/demo.ipynb
@@ -1,8 +1,19 @@
 {
  "cells": [
   {
-   "cell_type": "code",
+   "cell_type": "markdown",
+   "isExecuted": true,
+   "metadata": {},
+   "source": [
+    "# Demo Notebook\n",
+    "\n",
+    "- This is just a demo notebook\n",
+    "- For testing only"
+   ]
+  },
+  {
+   "cell_type": "code",
    "isExecuted": false,
    "lastExecutionResult": "success",
    "lastExecutionTime": "2024-12-10 03:27:50",
    "metadata": {},
@@ -64,102 +75,6 @@
"\n",
"spark\n"
]
-  },
-  {
-   "cell_type": "markdown",
-   "isExecuted": true,
-   "metadata": {},
-   "source": [
-    "# Demo Notebook\n",
-    "\n",
-    "- This is just a demo notebook\n",
-    "- For testing only"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "isExecuted": false,
-   "lastExecutionResult": "success",
-   "lastExecutionTime": "2024-08-04 15:29:17",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/html": [
-       "\n",
-       " <div style=\"border: 1px solid #e8e8e8; padding: 10px;\">\n",
-       " <h3>Spark Session Information</h3>\n",
-       " <p><strong>Application ID:</strong> app-20240804152430-0000</p>\n",
-       " <p><strong>Spark UI:</strong> <a href=\"http://localhost:18080/history/app-20240804152430-0000\">http://localhost:18080/history/app-20240804152430-0000</a></p>\n",
-       " </div>\n",
-       " "
-      ],
-      "text/plain": [
-       "Custom Spark Session (App ID: app-20240804152430-0000) - UI: http://66eef2d0ade3:4040"
-      ]
-     },
-     "execution_count": 4,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# SparkSession is already defined in `spark` variable\n",
-    "spark"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "isExecuted": false,
-   "metadata": {},
-   "outputs": [
-    {
-     "ename": "NameError",
-     "evalue": "name 'a' is not defined",
-     "output_type": "error",
-     "traceback": [
-      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
-      "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
-      "Cell \u001b[0;32mIn[1], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[43ma\u001b[49m \u001b[38;5;241m+\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m2233666777888\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n",
-      "\u001b[0;31mNameError\u001b[0m: name 'a' is not defined"
-     ]
-    }
-   ],
-   "source": [
-    "print(a + \"2233666777888\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "isExecuted": false,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "66\n",
-      "77\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(66)\n",
-    "print(77)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "isExecuted": false,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "spark.stop()"
-   ]
-  }
   }
  ],
  "metadata": {
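The notebook edit above amounts to moving the intro text into a leading markdown cell and deleting its duplicate plus the trailing scratch cells. Below is a rough, hypothetical sketch of the same surgery done programmatically with the nbformat library; the commit itself edits the JSON directly, and the custom isExecuted/lastExecution* fields are left to the app.

import nbformat

PATH = "examples/[email protected]/demo.ipynb"
INTRO = "# Demo Notebook\n\n- This is just a demo notebook\n- For testing only"

nb = nbformat.read(PATH, as_version=4)

# Put the intro at the top of the notebook as a markdown cell.
nb.cells.insert(0, nbformat.v4.new_markdown_cell(INTRO))

# Drop the now-redundant markdown copy of the intro further down.
nb.cells = [nb.cells[0]] + [
    c for c in nb.cells[1:]
    if not (c.cell_type == "markdown" and c.source.startswith("# Demo Notebook"))
]

nbformat.write(nb, PATH)

The commit also prunes the trailing demo cells (the NameError example, the print checks, and spark.stop()), which this sketch leaves out.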
20 changes: 2 additions & 18 deletions webapp/src/models/SparkModel.js
@@ -62,25 +62,9 @@ class SparkModel {

 // Create a cell with Spark initialization code that uses the config
 const sparkInitCode = `
-from pyspark.sql import SparkSession
-spark = SparkSession.builder\\
-.appName("${sparkAppId}")\\
-.master("spark://spark-master:7077")\\
-.config("spark.jars.packages", "io.delta:delta-spark_2.12:3.0.0")\\
-.config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")\\
-.config("spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog")\\
-.config("spark.eventLog.enabled", "true")\\
-.config("spark.eventLog.dir", "/opt/data/spark-events")\\
-.config("spark.history.fs.logDirectory", "/opt/data/spark-events")\\
-.config("spark.sql.warehouse.dir", "/opt/data/spark-warehouse")\\
-.config("spark.executor.memory", "${sparkConfig['spark.executor.memory']}")\\
-.config("spark.executor.cores", ${sparkConfig['spark.executor.cores']})\\
-.config("spark.executor.instances", ${sparkConfig['spark.executor.instances']})\\
-.config("spark.driver.memory", "${sparkConfig['spark.driver.memory']}")\\
-.config("spark.driver.cores", ${sparkConfig['spark.driver.cores']})\\
-.getOrCreate()
+from startup import create_spark_dev
+spark = create_spark_dev()
 spark
 `;
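Here the webapp stops inlining the full SparkSession builder into the generated notebook cell and delegates to a startup module in the notebook environment. That module is not part of this diff; the following is a plausible sketch of what create_spark_dev() might look like, assuming it simply wraps the configuration the inline snippet used to carry (the function body and the app_name parameter below are guesses).

from pyspark.sql import SparkSession

def create_spark_dev(app_name="notebook-dev"):
    # Hypothetical dev-profile builder mirroring the removed inline config:
    # standalone master, Delta Lake extensions, and event logging so the
    # history server can pick up finished applications.
    return (
        SparkSession.builder
        .appName(app_name)
        .master("spark://spark-master:7077")
        .config("spark.jars.packages", "io.delta:delta-spark_2.12:3.0.0")
        .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
        .config("spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog")
        .config("spark.eventLog.enabled", "true")
        .config("spark.eventLog.dir", "/opt/data/spark-events")
        .config("spark.history.fs.logDirectory", "/opt/data/spark-events")
        .config("spark.sql.warehouse.dir", "/opt/data/spark-warehouse")
        .getOrCreate()
    )

One behavioral consequence visible in the diff: the per-notebook sparkConfig values (executor memory, cores, and instances; driver memory and cores) are no longer interpolated into the generated cell, so resource sizing presumably now lives in the startup module rather than in the webapp's config.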

