From 4bf2fbe3c3a3068c01b7f2cd3e5053c34f0b5f43 Mon Sep 17 00:00:00 2001 From: cccs-eric Date: Wed, 1 Nov 2023 08:00:38 -0400 Subject: [PATCH 1/7] GH deploy build --- README.md | 4 ++++ azure-pipelines.yml | 6 ++++++ 2 files changed, 10 insertions(+) diff --git a/README.md b/README.md index a01550b..100ee82 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,9 @@ # jupyterlab-sql-editor +## User Guide + +A user guide is hosted [here](https://cybercentrecanada.github.io/jupyterlab-sql-editor/). + ## Installation Follow the installation instructions in [CONTRIBUTING](./CONTRIBUTING.md) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 89d6259..74e3155 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -83,3 +83,9 @@ stages: pip install twine python -m twine upload --skip-existing -r "cccs-pypi" --config-file $(PYPIRC_PATH) $(Pipeline.Workspace)/dist/* --verbose displayName: 'Publish artifact to PyPI.' + - job: PublishDocumentationToGithub + dependsOn: BuildPythonArtifact + steps: + - script: | + mkdocs gh-deploy --force + displayName: 'Publish documentation site to GitHub.' From 1862f2a9fb4679417b70bfa10a76167356cc2dec Mon Sep 17 00:00:00 2001 From: cccs-eric Date: Wed, 1 Nov 2023 10:21:12 -0400 Subject: [PATCH 2/7] Add missing mkdocs dependency --- azure-pipelines.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 74e3155..907a7c7 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -87,5 +87,6 @@ stages: dependsOn: BuildPythonArtifact steps: - script: | + pip install mkdocs mkdocs gh-deploy --force displayName: 'Publish documentation site to GitHub.' 
From 42432affc0df7521b75599f0ae785a00f77bbf79 Mon Sep 17 00:00:00 2001 From: cccs-eric Date: Wed, 1 Nov 2023 15:40:32 -0400 Subject: [PATCH 3/7] Fix build --- azure-pipelines.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 907a7c7..06f63f9 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -86,6 +86,8 @@ stages: - job: PublishDocumentationToGithub dependsOn: BuildPythonArtifact steps: + - checkout: self + persistCredentials: true - script: | pip install mkdocs mkdocs gh-deploy --force From bc6bc19275211169cef7917941601b478d87cccb Mon Sep 17 00:00:00 2001 From: cccs-eric Date: Wed, 1 Nov 2023 18:17:39 -0400 Subject: [PATCH 4/7] Fix build --- docs/index.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.md b/docs/index.md index d689ef4..f6c1a8c 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,6 +1,6 @@ # jupyterlab-sql-editor -A JupyterLab extension providing the following features via `%%sparksql` and `%%trino` magics: +A JupyterLab extension providing the following features via `%sparksql` and `%trino` magics: - SQL formatter - Automatic extraction of database schemas From 48130bf26a8e8508fd8288f9be6e01b2a6b0a346 Mon Sep 17 00:00:00 2001 From: cccs-eric Date: Thu, 2 Nov 2023 16:16:18 -0400 Subject: [PATCH 5/7] Add notebook examples --- {example => docs/example}/JinjaTemplate.ipynb | 0 {example => docs/example}/Spark.ipynb | 0 {example => docs/example}/SparkDataframe.ipynb | 0 .../example}/SparkSQLEscapeControlChars.ipynb | 0 {example => docs/example}/SparkSyntaxDemo.ipynb | 14 +++++++++++--- .../example}/SupersetJinjaTestHarness.ipynb | 0 {example => docs/example}/Trino.ipynb | 0 {example => docs/example}/contacts.json | 0 {example => docs/example}/conversations.json | 0 mkdocs.yml | 11 ++++++++--- setup.py | 2 ++ 11 files changed, 21 insertions(+), 6 deletions(-) rename {example => docs/example}/JinjaTemplate.ipynb (100%) rename {example => docs/example}/Spark.ipynb 
(100%) rename {example => docs/example}/SparkDataframe.ipynb (100%) rename {example => docs/example}/SparkSQLEscapeControlChars.ipynb (100%) rename {example => docs/example}/SparkSyntaxDemo.ipynb (90%) rename {example => docs/example}/SupersetJinjaTestHarness.ipynb (100%) rename {example => docs/example}/Trino.ipynb (100%) rename {example => docs/example}/contacts.json (100%) rename {example => docs/example}/conversations.json (100%) diff --git a/example/JinjaTemplate.ipynb b/docs/example/JinjaTemplate.ipynb similarity index 100% rename from example/JinjaTemplate.ipynb rename to docs/example/JinjaTemplate.ipynb diff --git a/example/Spark.ipynb b/docs/example/Spark.ipynb similarity index 100% rename from example/Spark.ipynb rename to docs/example/Spark.ipynb diff --git a/example/SparkDataframe.ipynb b/docs/example/SparkDataframe.ipynb similarity index 100% rename from example/SparkDataframe.ipynb rename to docs/example/SparkDataframe.ipynb diff --git a/example/SparkSQLEscapeControlChars.ipynb b/docs/example/SparkSQLEscapeControlChars.ipynb similarity index 100% rename from example/SparkSQLEscapeControlChars.ipynb rename to docs/example/SparkSQLEscapeControlChars.ipynb diff --git a/example/SparkSyntaxDemo.ipynb b/docs/example/SparkSyntaxDemo.ipynb similarity index 90% rename from example/SparkSyntaxDemo.ipynb rename to docs/example/SparkSyntaxDemo.ipynb index f29e704..e46363b 100644 --- a/example/SparkSyntaxDemo.ipynb +++ b/docs/example/SparkSyntaxDemo.ipynb @@ -1,5 +1,13 @@ { "cells": [ + { + "cell_type": "markdown", + "id": "2eba0bf1", + "metadata": {}, + "source": [ + "# Spark Syntax Demo Notebook" + ] + }, { "cell_type": "code", "execution_count": null, @@ -20,7 +28,7 @@ "metadata": {}, "outputs": [], "source": [ - "%sparksql -d df SELECT * from student where x=1" + "%sparksql -d df SELECT * from student where x=1\n" ] }, { @@ -30,7 +38,7 @@ "metadata": {}, "outputs": [], "source": [ - "%sparksql --dataframe df -c --eager -v MY_VIEW --limit 12 -f adir/out.json -t 
60 SELECT * from student -- line magic using no argument options like --eager" + "%sparksql --dataframe df -c --eager -v MY_VIEW --limit 12 -f adir/out.json -t 60 SELECT * from student -- line magic using no argument options like --eager\n" ] }, { @@ -76,7 +84,7 @@ "# back to python\n", "print(sql)\n", "\n", - "spark.sql(sql).show()" + "spark.sql(sql).show()\n" ] }, { diff --git a/example/SupersetJinjaTestHarness.ipynb b/docs/example/SupersetJinjaTestHarness.ipynb similarity index 100% rename from example/SupersetJinjaTestHarness.ipynb rename to docs/example/SupersetJinjaTestHarness.ipynb diff --git a/example/Trino.ipynb b/docs/example/Trino.ipynb similarity index 100% rename from example/Trino.ipynb rename to docs/example/Trino.ipynb diff --git a/example/contacts.json b/docs/example/contacts.json similarity index 100% rename from example/contacts.json rename to docs/example/contacts.json diff --git a/example/conversations.json b/docs/example/conversations.json similarity index 100% rename from example/conversations.json rename to docs/example/conversations.json diff --git a/mkdocs.yml b/mkdocs.yml index 1f0afa7..09f1059 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -3,7 +3,12 @@ site_url: https://cybercentrecanada.github.io/jupyterlab-sql-editor site_dir: site nav: - Home: index.md - - Sparksql: sparksql.md - - Trino: trino.md + - sparksql magic: sparksql.md + - trino magic: trino.md + - Notebook: example/SparkSyntaxDemo.ipynb theme: - name: readthedocs + name: material +plugins: + - search + - mkdocs-jupyter: + execute: false diff --git a/setup.py b/setup.py index 1db3109..eaa2c18 100644 --- a/setup.py +++ b/setup.py @@ -56,6 +56,8 @@ "sqlparse", "trino", "mkdocs", + "mkdocs-material", + "mkdocs-jupyter", ], zip_safe=False, include_package_data=True, From ac4c22765c69c5671f5bd35d076fa911405fbcd5 Mon Sep 17 00:00:00 2001 From: cccs-eric Date: Fri, 3 Nov 2023 09:35:17 -0400 Subject: [PATCH 6/7] Add notebooks examples --- ...rk.ipynb => SparkConfigurationUsage.ipynb} 
| 56 +++++++++---------- docs/example/SparkDataframe.ipynb | 24 +++----- docs/example/SparkSQLEscapeControlChars.ipynb | 20 +++---- docs/example/SparkSyntaxDemo.ipynb | 10 +--- docs/example/SupersetJinjaTestHarness.ipynb | 20 +++++-- ...no.ipynb => TrinoConfigurationUsage.ipynb} | 42 +++++++------- ...emplate.ipynb => TrinoJinjaTemplate.ipynb} | 24 ++++---- docs/sparksql.md | 3 - docs/trino.md | 3 + mkdocs.yml | 15 ++++- 10 files changed, 109 insertions(+), 108 deletions(-) rename docs/example/{Spark.ipynb => SparkConfigurationUsage.ipynb} (98%) rename docs/example/{Trino.ipynb => TrinoConfigurationUsage.ipynb} (97%) rename docs/example/{JinjaTemplate.ipynb => TrinoJinjaTemplate.ipynb} (99%) diff --git a/docs/example/Spark.ipynb b/docs/example/SparkConfigurationUsage.ipynb similarity index 98% rename from docs/example/Spark.ipynb rename to docs/example/SparkConfigurationUsage.ipynb index b7abb0e..c314d14 100644 --- a/docs/example/Spark.ipynb +++ b/docs/example/SparkConfigurationUsage.ipynb @@ -1,5 +1,13 @@ { "cells": [ + { + "cell_type": "markdown", + "id": "c9b06840", + "metadata": {}, + "source": [ + "# Configuration and Usage" + ] + }, { "cell_type": "code", "execution_count": 1, @@ -15,7 +23,7 @@ }, "outputs": [], "source": [ - "from pyspark.sql import SparkSession" + "from pyspark.sql import SparkSession\n" ] }, { @@ -63,7 +71,7 @@ "outputs": [], "source": [ "from IPython.core.interactiveshell import InteractiveShell\n", - "InteractiveShell.ast_node_interactivity = 'all'" + "InteractiveShell.ast_node_interactivity = 'all'\n" ] }, { @@ -82,7 +90,7 @@ }, "outputs": [], "source": [ - "%load_ext jupyterlab_sql_editor.ipython_magic.sparksql" + "%load_ext jupyterlab_sql_editor.ipython_magic.sparksql\n" ] }, { @@ -101,7 +109,7 @@ "outputs": [], "source": [ "%config SparkSql.cacheTTL=3600\n", - "%config SparkSql.outputFile=\"/tmp/sparkdb.schema.json\"" + "%config SparkSql.outputFile=\"/tmp/sparkdb.schema.json\"\n" ] }, { @@ -143,7 +151,7 @@ "source": [ "df = 
spark.read.json(\"file:/path/to/contacts.json\")\n", "df.createOrReplaceTempView(\"CONTACTS_TABLE\")\n", - "df.printSchema()" + "df.printSchema()\n" ] }, { @@ -179,7 +187,7 @@ "source": [ "df = spark.read.json(\"file:/path/to/conversations.json\")\n", "df.createOrReplaceTempView(\"MESSAGES_TABLE\")\n", - "df.printSchema()" + "df.printSchema()\n" ] }, { @@ -207,7 +215,7 @@ } ], "source": [ - "%sparksql --refresh all" + "%sparksql --refresh all\n" ] }, { @@ -280,7 +288,7 @@ } ], "source": [ - "%sparksql SHOW TABLES" + "%sparksql SHOW TABLES\n" ] }, { @@ -563,7 +571,7 @@ "SELECT\n", " *\n", "FROM\n", - " contacts_table AS con" + " contacts_table AS con\n" ] }, { @@ -601,7 +609,7 @@ "source": [ "%%sparksql --view the_exploded_table --output skip\n", "SELECT\n", - " *, \n", + " *,\n", " explode(con.phoneNumbers) as phoneNumber\n", "FROM\n", " contacts_table AS con\n" @@ -678,7 +686,7 @@ } ], "source": [ - "%sparksql SHOW TABLES" + "%sparksql SHOW TABLES\n" ] }, { @@ -1073,7 +1081,7 @@ "source": [ "%%sparksql --dataframe the_exploded_dataframe --output skip\n", "SELECT\n", - " *, \n", + " *,\n", " explode(con.phoneNumbers) as phoneNumber\n", "FROM\n", " contacts_table AS con\n" @@ -1116,7 +1124,7 @@ } ], "source": [ - "the_exploded_dataframe.select('phoneNumber').show()" + "the_exploded_dataframe.select('phoneNumber').show()\n" ] }, { @@ -1173,7 +1181,7 @@ " contacts_table AS con\n", "--end-sparksql\n", "'''\n", - "print(sql)" + "print(sql)\n" ] }, { @@ -1187,13 +1195,13 @@ "sql = '''\n", "--start-sparksql\n", "SELECT\n", - " *, \n", + " *,\n", " explode(con.phoneNumbers) as phoneNumber\n", "FROM\n", " contacts_table AS con\n", "--end-sparksql\n", "'''\n", - "print(sql)" + "print(sql)\n" ] }, { @@ -1225,7 +1233,7 @@ } ], "source": [ - "spark.sql(sql).show()" + "spark.sql(sql).show()\n" ] }, { @@ -1290,7 +1298,7 @@ } ], "source": [ - "%%sparksql?" 
+ "%%sparksql?\n" ] }, { @@ -1389,7 +1397,7 @@ " TRANSFORM(SEQUENCE(1, 512), x -> rand()) AS data -- array of 512 floats\n", "FROM\n", "RANGE\n", - " (1, 400000, 1, 100) \n", + " (1, 400000, 1, 100)\n", "UNION\n", "SELECT\n", " id,\n", @@ -1401,7 +1409,7 @@ " TRANSFORM(SEQUENCE(1, 512), x -> rand()) AS data -- array of 512 floats\n", "FROM\n", "RANGE\n", - " (1, 40000, 1, 100) \n" + " (1, 40000, 1, 100)\n" ] }, { @@ -1484,14 +1492,6 @@ " contacts_table AS con\n", " INNER JOIN messages_table AS mes ON mes.`first Name` = con.`first Name`\n" ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "39fbee24-f69b-474a-903d-bf38d170ee0d", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/docs/example/SparkDataframe.ipynb b/docs/example/SparkDataframe.ipynb index d42214a..fadc161 100644 --- a/docs/example/SparkDataframe.ipynb +++ b/docs/example/SparkDataframe.ipynb @@ -13,7 +13,7 @@ "import ipywidgets as widgets\n", "out = widgets.Output()\n", "with out:\n", - " spark = SparkSession.builder.getOrCreate()" + " spark = SparkSession.builder.getOrCreate()\n" ] }, { @@ -37,7 +37,7 @@ ], "source": [ "df = spark.sql(\"SELECT id, uuid() FROM RANGE (1, 1000)\")\n", - "df" + "df\n" ] }, { @@ -51,7 +51,7 @@ "source": [ "from jupyterlab_sql_editor.ipython.sparkdf import register_display\n", "from jupyterlab_sql_editor.outputters.outputters import _display_results\n", - "register_display()" + "register_display()\n" ] }, { @@ -114,7 +114,7 @@ "source": [ "# change default display behaviour\n", "df = spark.sql(\"SELECT id, uuid() FROM RANGE (1, 1000)\")\n", - "df" + "df\n" ] }, { @@ -126,7 +126,7 @@ }, "outputs": [], "source": [ - "pdf = df.limit(1).toPandas()" + "pdf = df.limit(1).toPandas()\n" ] }, { @@ -156,7 +156,7 @@ ], "source": [ "# _display_results lets you configure the output\n", - "_display_results(pdf, output=\"html\", show_nonprinting=False)" + "_display_results(pdf, output=\"html\", show_nonprinting=False)\n" ] }, { @@ -181,7 
+181,7 @@ } ], "source": [ - "_display_results(pdf, output=\"text\")" + "_display_results(pdf, output=\"text\")\n" ] }, { @@ -210,16 +210,8 @@ ], "source": [ "df = spark.read.json(\"file:/path/to/contacts.json\")\n", - "_display_results(pdf, output=\"json\")" + "_display_results(pdf, output=\"json\")\n" ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1447b3e6-955b-4269-bc04-6395a9673036", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/docs/example/SparkSQLEscapeControlChars.ipynb b/docs/example/SparkSQLEscapeControlChars.ipynb index cb9c483..60662e9 100644 --- a/docs/example/SparkSQLEscapeControlChars.ipynb +++ b/docs/example/SparkSQLEscapeControlChars.ipynb @@ -1,5 +1,13 @@ { "cells": [ + { + "cell_type": "markdown", + "id": "8ba62d82", + "metadata": {}, + "source": [ + "# Escaping Control Characters" + ] + }, { "cell_type": "code", "execution_count": null, @@ -13,7 +21,7 @@ "\n", "spark = SparkSession.builder.getOrCreate()\n", "\n", - "%load_ext jupyterlab_sql_editor.ipython_magic.sparksql" + "%load_ext jupyterlab_sql_editor.ipython_magic.sparksql\n" ] }, { @@ -53,7 +61,7 @@ " '\\\\\\\\t' AS two_backslash_and_t,\n", " '\\\\\\\\\\t' AS two_backslash_and_tab\n", "--end-sparksql\n", - "''').show()" + "''').show()\n" ] }, { @@ -191,14 +199,6 @@ " '\\\\\\\\t' AS two_backslash_and_t,\n", " '\\\\\\\\\\t' AS two_backslash_and_tab\n" ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "383afdca", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/docs/example/SparkSyntaxDemo.ipynb b/docs/example/SparkSyntaxDemo.ipynb index e46363b..086f582 100644 --- a/docs/example/SparkSyntaxDemo.ipynb +++ b/docs/example/SparkSyntaxDemo.ipynb @@ -5,7 +5,7 @@ "id": "2eba0bf1", "metadata": {}, "source": [ - "# Spark Syntax Demo Notebook" + "# Spark Syntax Demo" ] }, { @@ -86,14 +86,6 @@ "\n", "spark.sql(sql).show()\n" ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": 
"cb776ce4", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/docs/example/SupersetJinjaTestHarness.ipynb b/docs/example/SupersetJinjaTestHarness.ipynb index 73bb4ad..d7f067c 100644 --- a/docs/example/SupersetJinjaTestHarness.ipynb +++ b/docs/example/SupersetJinjaTestHarness.ipynb @@ -1,5 +1,13 @@ { "cells": [ + { + "cell_type": "markdown", + "id": "0a9b898a", + "metadata": {}, + "source": [ + "# Superset Tests Harness using Jinja" + ] + }, { "cell_type": "code", "execution_count": 2, @@ -76,10 +84,10 @@ ], "source": [ "%%trino --limit 3 --output grid\n", - "SELECT \n", + "SELECT\n", " *\n", "FROM\n", - " tpch.tiny.orders" + " tpch.tiny.orders\n" ] }, { @@ -101,7 +109,7 @@ "def filter_values(column: str, default=None, remove_filter: bool = False):\n", " return VALUE_LIST\n", "\n", - "VALUE_LIST = ['Clerk#00000036', 'Clerk#000000779']" + "VALUE_LIST = ['Clerk#00000036', 'Clerk#000000779']\n" ] }, { @@ -310,7 +318,7 @@ ], "source": [ "%%trino --limit 1 --jinja --output sql\n", - "SELECT \n", + "SELECT\n", " *\n", "FROM\n", " tpch.tiny.orders\n", @@ -367,7 +375,7 @@ " if len(values) > 0:\n", " quoted_values = [quote_value(v) for v in values]\n", " return \",\".join(quoted_values)\n", - " return None" + " return None\n" ] }, { @@ -577,7 +585,7 @@ ], "source": [ "%%trino --limit 1 --jinja --output sql\n", - "SELECT \n", + "SELECT\n", " *\n", "FROM\n", " tpch.tiny.orders\n", diff --git a/docs/example/Trino.ipynb b/docs/example/TrinoConfigurationUsage.ipynb similarity index 97% rename from docs/example/Trino.ipynb rename to docs/example/TrinoConfigurationUsage.ipynb index 66fd4c4..5c1f2d0 100644 --- a/docs/example/Trino.ipynb +++ b/docs/example/TrinoConfigurationUsage.ipynb @@ -1,11 +1,19 @@ { "cells": [ + { + "cell_type": "markdown", + "id": "09e9cd2c", + "metadata": {}, + "source": [ + "# Configuration and Usage" + ] + }, { "cell_type": "markdown", "id": "7d7cef9a-4185-4374-8a0e-5af2139bde7a", "metadata": {}, "source": [ - " Normally 
IPython only displays the output of the last statement. However it can be handy to run multiple sql magics in a single cell and see the output of each execution. Setting `ast_node_interactivity` to `all` will enable that.\n" + "Normally IPython only displays the output of the last statement. However it can be handy to run multiple sql magics in a single cell and see the output of each execution. Setting `ast_node_interactivity` to `all` will enable that.\n" ] }, { @@ -17,7 +25,7 @@ "source": [ "# Display all cell outputs in notebook\n", "from IPython.core.interactiveshell import InteractiveShell\n", - "InteractiveShell.ast_node_interactivity = 'all'" + "InteractiveShell.ast_node_interactivity = 'all'\n" ] }, { @@ -27,7 +35,7 @@ "metadata": {}, "outputs": [], "source": [ - "%load_ext jupyterlab_sql_editor.ipython_magic.trino" + "%load_ext jupyterlab_sql_editor.ipython_magic.trino\n" ] }, { @@ -59,14 +67,15 @@ "tags": [] }, "source": [ - " In production environment you will want to pass in an authentiction\n", + "In production environment you will want to pass in an authentiction\n", "\n", "```python\n", " import trino\n", " %config Trino.auth=trino.auth.BasicAuthentication(\"principal id\", \"password\")\n", " %config Trino.user=None\n", - "```\n", - " See https://github.com/trinodb/trino-python-client/blob/master/trino/auth.py for more details\n" + "``` \n", + "\n", + "See https://github.com/trinodb/trino-python-client/blob/master/trino/auth.py for more details" ] }, { @@ -85,7 +94,7 @@ } ], "source": [ - "%trino --refresh all" + "%trino --refresh all\n" ] }, { @@ -118,7 +127,7 @@ } ], "source": [ - "%trino SELECT 'hello'" + "%trino SELECT 'hello'\n" ] }, { @@ -130,7 +139,7 @@ }, "outputs": [], "source": [ - "#%trino SHOW CATALOGS" + "#%trino SHOW CATALOGS\n" ] }, { @@ -417,7 +426,7 @@ "source": [ "%%trino --catalog tpch --schema sf1000\n", "\n", - "SELECT * FROM lineitem" + "SELECT * FROM lineitem\n" ] }, { @@ -521,8 +530,7 @@ " ARRAY[1, null, 4] as e,\n", " 
ARRAY[ARRAY[1,2],ARRAY[5,4]] as f,\n", " CAST(ROW(1,23,456) as ROW(k1 INT, k2 INT, k3 INT)) as g,\n", - " CAST(ROW(1,'abc',true,null) as ROW(k1 INT, k2 VARCHAR, k3 BOOLEAN, k4 VARCHAR)) as h\n", - " \n" + " CAST(ROW(1,'abc',true,null) as ROW(k1 INT, k2 VARCHAR, k3 BOOLEAN, k4 VARCHAR)) as h\n" ] }, { @@ -581,16 +589,8 @@ } ], "source": [ - "%%trino?" + "%%trino?\n" ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "83ba187a-1449-4ab5-8e8c-270c08e78433", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/docs/example/JinjaTemplate.ipynb b/docs/example/TrinoJinjaTemplate.ipynb similarity index 99% rename from docs/example/JinjaTemplate.ipynb rename to docs/example/TrinoJinjaTemplate.ipynb index 9c68906..e0f7fd5 100644 --- a/docs/example/JinjaTemplate.ipynb +++ b/docs/example/TrinoJinjaTemplate.ipynb @@ -1,5 +1,13 @@ { "cells": [ + { + "cell_type": "markdown", + "id": "35b6c709", + "metadata": {}, + "source": [ + "# Jinja Templating with trino" + ] + }, { "cell_type": "code", "execution_count": 1, @@ -31,7 +39,7 @@ "metadata": {}, "outputs": [], "source": [ - "table_name = \"tpch.tiny.orders\"" + "table_name = \"tpch.tiny.orders\"\n" ] }, { @@ -401,7 +409,7 @@ "outputs": [], "source": [ "def get_filters():\n", - " return 1" + " return 1\n" ] }, { @@ -459,7 +467,7 @@ ], "source": [ "%%trino --limit 1 --output grid\n", - "SELECT \n", + "SELECT\n", " *\n", "FROM (\n", " SELECT\n", @@ -467,7 +475,7 @@ " *\n", " FROM\n", " tpch.tiny.orders\n", - ")" + ")\n" ] }, { @@ -723,14 +731,6 @@ } ], "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "37f57243-2f37-43b5-8ffe-a6a109e495cc", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/docs/sparksql.md b/docs/sparksql.md index 3deb655..7461f6d 100644 --- a/docs/sparksql.md +++ b/docs/sparksql.md @@ -57,9 +57,6 @@ print(sql) ## Capture your Spark query as a Dataframe or a temporary view ![](img/args.png) -## Use jinja 
templating to create re-usable SQL -![](img/jinja.png) - ## Usage Parameter usage example: diff --git a/docs/trino.md b/docs/trino.md index b8731ac..b44c931 100644 --- a/docs/trino.md +++ b/docs/trino.md @@ -15,6 +15,9 @@ A JupyterLab extension providing the following features via `%%sparksql` and `%% - cell magic - Python strings +## Use jinja templating to create re-usable SQL +![](img/jinja.png) + ## Usage Parameter usage example: diff --git a/mkdocs.yml b/mkdocs.yml index 09f1059..37a65ba 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -3,9 +3,18 @@ site_url: https://cybercentrecanada.github.io/jupyterlab-sql-editor site_dir: site nav: - Home: index.md - - sparksql magic: sparksql.md - - trino magic: trino.md - - Notebook: example/SparkSyntaxDemo.ipynb + - 'sparksql magic': + - Usage: sparksql.md + - Examples: + - example/SparkConfigurationUsage.ipynb + - example/SparkSyntaxDemo.ipynb + - example/SparkSQLEscapeControlChars.ipynb + - 'trino magic': + - Usage: trino.md + - Examples: + - example/TrinoConfigurationUsage.ipynb + - example/TrinoJinjaTemplate.ipynb + - example/SupersetJinjaTestHarness.ipynb theme: name: material plugins: From 9c23c033766f5d14b0f9d6f2b805f31a0d396dac Mon Sep 17 00:00:00 2001 From: cccs-eric Date: Fri, 3 Nov 2023 14:03:35 -0400 Subject: [PATCH 7/7] Add notebooks examples --- docs/example/SparkConfigurationUsage.ipynb | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/docs/example/SparkConfigurationUsage.ipynb b/docs/example/SparkConfigurationUsage.ipynb index c314d14..ad9a84c 100644 --- a/docs/example/SparkConfigurationUsage.ipynb +++ b/docs/example/SparkConfigurationUsage.ipynb @@ -52,7 +52,7 @@ "id": "5fe87250-6936-4de6-bacf-0490a5812516", "metadata": {}, "source": [ - " Normally IPython only displays the output of the last statement. However it can be handy to run multiple sql magics in a single cell and see the output of each execution. 
Setting `ast_node_interactivity` to `all` will enable that.\n" + "Normally IPython only displays the output of the last statement. However it can be handy to run multiple sql magics in a single cell and see the output of each execution. Setting `ast_node_interactivity` to `all` will enable that.\n" ] }, { @@ -579,7 +579,12 @@ "id": "07b3ac2f-3750-4c24-84e7-27c61d3a131d", "metadata": {}, "source": [ - "# Create a temporary view with the --view option" + "# Efficient query result reuse across cells\n", + "\n", + "A popular use-case is about capturing the output of a cell and using it later in another query. This can be done using a view. Here are the steps required to create a view and then referencing it \n", + "via `sparksql`.\n", + "\n", + "## Create a temporary view with the --view option" ] }, { @@ -694,7 +699,7 @@ "id": "4f145195-e8c9-4771-a4ae-0ffbe479234c", "metadata": {}, "source": [ - "# Use temporary view in subsequent queries with autocomplet suggestions" + "## Use temporary view in subsequent queries with autocomplete suggestions" ] }, {