
Commit ce98b64

Refactor demo notebook structure by converting a code cell to markdown for improved documentation, removing the redundant markdown cell, and updating execution metadata for clarity. This gives the notebook better context and organization.
1 parent 0cf6d19 commit ce98b64

2 files changed: +14, -115 lines

examples/[email protected]/demo.ipynb

Lines changed: 12 additions & 97 deletions
@@ -1,8 +1,19 @@
 {
   "cells": [
     {
-      "cell_type": "code",
+      "cell_type": "markdown",
       "isExecuted": true,
+      "metadata": {},
+      "source": [
+        "# Demo Notebook\n",
+        "\n",
+        "- This is just a demo notebook\n",
+        "- For testing only"
+      ]
+    },
+    {
+      "cell_type": "code",
+      "isExecuted": false,
       "lastExecutionResult": "success",
       "lastExecutionTime": "2024-12-10 03:27:50",
       "metadata": {},
@@ -64,102 +75,6 @@
       "\n",
       "spark\n"
     ]
-    },
-    {
-      "cell_type": "markdown",
-      "isExecuted": true,
-      "metadata": {},
-      "source": [
-        "# Demo Notebook\n",
-        "\n",
-        "- This is just a demo notebook\n",
-        "- For testing only"
-      ]
-    },
-    {
-      "cell_type": "code",
-      "execution_count": null,
-      "isExecuted": false,
-      "lastExecutionResult": "success",
-      "lastExecutionTime": "2024-08-04 15:29:17",
-      "metadata": {},
-      "outputs": [
-        {
-          "data": {
-            "text/html": [
-              "\n",
-              " <div style=\"border: 1px solid #e8e8e8; padding: 10px;\">\n",
-              " <h3>Spark Session Information</h3>\n",
-              " <p><strong>Application ID:</strong> app-20240804152430-0000</p>\n",
-              " <p><strong>Spark UI:</strong> <a href=\"http://localhost:18080/history/app-20240804152430-0000\">http://localhost:18080/history/app-20240804152430-0000</a></p>\n",
-              " </div>\n",
-              " "
-            ],
-            "text/plain": [
-              "Custom Spark Session (App ID: app-20240804152430-0000) - UI: http://66eef2d0ade3:4040"
-            ]
-          },
-          "execution_count": 4,
-          "metadata": {},
-          "output_type": "execute_result"
-        }
-      ],
-      "source": [
-        "# SparkSession is already defined in `spark` variable\n",
-        "spark"
-      ]
-    },
-    {
-      "cell_type": "code",
-      "execution_count": 1,
-      "isExecuted": false,
-      "metadata": {},
-      "outputs": [
-        {
-          "ename": "NameError",
-          "evalue": "name 'a' is not defined",
-          "output_type": "error",
-          "traceback": [
-            "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
-            "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
-            "Cell \u001b[0;32mIn[1], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[43ma\u001b[49m \u001b[38;5;241m+\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m2233666777888\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n",
-            "\u001b[0;31mNameError\u001b[0m: name 'a' is not defined"
-          ]
-        }
-      ],
-      "source": [
-        "print(a + \"2233666777888\")"
-      ]
-    },
-    {
-      "cell_type": "code",
-      "execution_count": null,
-      "isExecuted": false,
-      "metadata": {},
-      "outputs": [
-        {
-          "name": "stdout",
-          "output_type": "stream",
-          "text": [
-            "66\n",
-            "77\n"
-          ]
-        }
-      ],
-      "source": [
-        "print(66)\n",
-        "print(77)"
-      ]
-    },
-    {
-      "cell_type": "code",
-      "execution_count": null,
-      "isExecuted": false,
-      "metadata": {},
-      "outputs": [],
-      "source": [
-        "spark.stop()"
-      ]
     }
   ],
   "metadata": {

webapp/src/models/SparkModel.js

Lines changed: 2 additions & 18 deletions
@@ -62,25 +62,9 @@ class SparkModel {
 
     // Create a cell with Spark initialization code that uses the config
     const sparkInitCode = `
-from pyspark.sql import SparkSession
-
-spark = SparkSession.builder\\
-    .appName("${sparkAppId}")\\
-    .master("spark://spark-master:7077")\\
-    .config("spark.jars.packages", "io.delta:delta-spark_2.12:3.0.0")\\
-    .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")\\
-    .config("spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog")\\
-    .config("spark.eventLog.enabled", "true")\\
-    .config("spark.eventLog.dir", "/opt/data/spark-events")\\
-    .config("spark.history.fs.logDirectory", "/opt/data/spark-events")\\
-    .config("spark.sql.warehouse.dir", "/opt/data/spark-warehouse")\\
-    .config("spark.executor.memory", "${sparkConfig['spark.executor.memory']}")\\
-    .config("spark.executor.cores", ${sparkConfig['spark.executor.cores']})\\
-    .config("spark.executor.instances", ${sparkConfig['spark.executor.instances']})\\
-    .config("spark.driver.memory", "${sparkConfig['spark.driver.memory']}")\\
-    .config("spark.driver.cores", ${sparkConfig['spark.driver.cores']})\\
-    .getOrCreate()
+from startup import create_spark_dev
 
+spark = create_spark_dev()
 spark
 `;
 
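
The deleted inline builder spells out exactly what the new helper has to provide. The startup module itself is not part of this diff, so the following is only a sketch of what create_spark_dev might encapsulate, reconstructed from the removed code; the parameter names and defaults here are assumptions:

from pyspark.sql import SparkSession

def create_spark_dev(app_name="demo-app",
                     executor_memory="1g", executor_cores=1, executor_instances=1,
                     driver_memory="1g", driver_cores=1):
    """Hypothetical dev-session factory covering the config this commit removed.

    Defaults stand in for the values SparkModel.js previously interpolated
    from sparkConfig; the real module may source them differently.
    """
    return (SparkSession.builder
            .appName(app_name)
            .master("spark://spark-master:7077")
            .config("spark.jars.packages", "io.delta:delta-spark_2.12:3.0.0")
            .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
            .config("spark.sql.catalog.spark_catalog",
                    "org.apache.spark.sql.delta.catalog.DeltaCatalog")
            .config("spark.eventLog.enabled", "true")
            .config("spark.eventLog.dir", "/opt/data/spark-events")
            .config("spark.history.fs.logDirectory", "/opt/data/spark-events")
            .config("spark.sql.warehouse.dir", "/opt/data/spark-warehouse")
            .config("spark.executor.memory", executor_memory)
            .config("spark.executor.cores", str(executor_cores))
            .config("spark.executor.instances", str(executor_instances))
            .config("spark.driver.memory", driver_memory)
            .config("spark.driver.cores", str(driver_cores))
            .getOrCreate())

Moving the builder behind an import means the webapp no longer string-interpolates Spark settings into generated Python: the notebook cell shrinks to two lines, and the dev configuration lives in one importable place.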
