diff --git a/examples/user_0@gmail.com/demo.ipynb b/examples/user_0@gmail.com/demo.ipynb
index 9fb949e..286c381 100644
--- a/examples/user_0@gmail.com/demo.ipynb
+++ b/examples/user_0@gmail.com/demo.ipynb
@@ -1,8 +1,19 @@
 {
  "cells": [
   {
-   "cell_type": "code",
+   "cell_type": "markdown",
    "isExecuted": true,
+   "metadata": {},
+   "source": [
+    "# Demo Notebook\n",
+    "\n",
+    "- This is just a demo notebook\n",
+    "- For testing only"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "isExecuted": false,
    "lastExecutionResult": "success",
    "lastExecutionTime": "2024-12-10 03:27:50",
    "metadata": {},
@@ -64,102 +75,6 @@
     "\n",
     "spark\n"
    ]
-  },
-  {
-   "cell_type": "markdown",
-   "isExecuted": true,
-   "metadata": {},
-   "source": [
-    "# Demo Notebook\n",
-    "\n",
-    "- This is just a demo notebook\n",
-    "- For testing only"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "isExecuted": false,
-   "lastExecutionResult": "success",
-   "lastExecutionTime": "2024-08-04 15:29:17",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/html": [
-       "\n",
-       "<div>\n",
-       "  <div>Spark Session Information</div>\n",
-       "  <div>Application ID: app-20240804152430-0000</div>\n",
-       "  <div>Spark UI: http://localhost:18080/history/app-20240804152430-0000</div>\n",
-       "</div>\n",
-       "    "
-      ],
-      "text/plain": [
-       "Custom Spark Session (App ID: app-20240804152430-0000) - UI: http://66eef2d0ade3:4040"
-      ]
-     },
-     "execution_count": 4,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# SparkSession is already defined in `spark` variable\n",
-    "spark"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "isExecuted": false,
-   "metadata": {},
-   "outputs": [
-    {
-     "ename": "NameError",
-     "evalue": "name 'a' is not defined",
-     "output_type": "error",
-     "traceback": [
-      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
-      "\u001b[0;31mNameError\u001b[0m                                 Traceback (most recent call last)",
-      "Cell \u001b[0;32mIn[1], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[43ma\u001b[49m \u001b[38;5;241m+\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m2233666777888\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n",
-      "\u001b[0;31mNameError\u001b[0m: name 'a' is not defined"
-     ]
-    }
-   ],
-   "source": [
-    "print(a + \"2233666777888\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "isExecuted": false,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "66\n",
-      "77\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(66)\n",
-    "print(77)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "isExecuted": false,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "spark.stop()"
-   ]
   }
  ],
  "metadata": {
diff --git a/webapp/src/models/SparkModel.js b/webapp/src/models/SparkModel.js
index bbce577..e34c85c 100644
--- a/webapp/src/models/SparkModel.js
+++ b/webapp/src/models/SparkModel.js
@@ -62,25 +62,9 @@ class SparkModel {
     // Create a cell with Spark initialization code that uses the config
     const sparkInitCode = `
-from pyspark.sql import SparkSession
-
-spark = SparkSession.builder\\
-    .appName("${sparkAppId}")\\
-    .master("spark://spark-master:7077")\\
-    .config("spark.jars.packages", "io.delta:delta-spark_2.12:3.0.0")\\
-    .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")\\
-    .config("spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog")\\
-    .config("spark.eventLog.enabled", "true")\\
-    .config("spark.eventLog.dir", "/opt/data/spark-events")\\
-    .config("spark.history.fs.logDirectory", "/opt/data/spark-events")\\
-    .config("spark.sql.warehouse.dir", "/opt/data/spark-warehouse")\\
-    .config("spark.executor.memory", "${sparkConfig['spark.executor.memory']}")\\
-    .config("spark.executor.cores", ${sparkConfig['spark.executor.cores']})\\
-    .config("spark.executor.instances", ${sparkConfig['spark.executor.instances']})\\
-    .config("spark.driver.memory", "${sparkConfig['spark.driver.memory']}")\\
-    .config("spark.driver.cores", ${sparkConfig['spark.driver.cores']})\\
-    .getOrCreate()
+from startup import create_spark_dev
+spark = create_spark_dev()
 
 spark
 `;