generated from databricks-industry-solutions/industry-solutions-blueprints
commit a01a311 (0 parents)
Showing 27 changed files with 85,735 additions and 0 deletions.
@@ -0,0 +1,46 @@
name: AWS integration test PR

on:
  pull_request:

jobs:
  run-databricks-notebook:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v2
      - name: Run a databricks notebook
        uses: databricks/run-notebook@v0
        with:
          local-notebook-path: RUNME.py
          git-commit: ${{ github.event.pull_request.head.sha }}
          databricks-host: https://e2-demo-west.cloud.databricks.com
          databricks-token: ${{ secrets.DEPLOYMENT_TARGET_TOKEN_AWS }}
          new-cluster-json: >
            {
              "num_workers": 0,
              "spark_version": "10.4.x-scala2.12",
              "node_type_id": "i3.xlarge",
              "aws_attributes": {
                "availability": "ON_DEMAND"
              },
              "spark_conf": {
                "spark.master": "local[*, 4]",
                "spark.databricks.cluster.profile": "singleNode"
              },
              "custom_tags": {
                "ResourceClass": "SingleNode"
              }
            }
          notebook-params-json: >
            {
              "run_job": "True"
            }
          access-control-list-json: >
            [
              {
                "group_name": "users",
                "permission_level": "CAN_VIEW"
              }
            ]
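The new-cluster-json, notebook-params-json, and access-control-list-json values above are JSON payloads carried inside YAML folded scalars, so a stray comma or unbalanced brace only surfaces when the action runs. A minimal pre-push check, illustrative only and not part of this commit, is to parse the payload locally:

```python
# Illustrative pre-push check (not part of the commit): parse the JSON
# payload that the workflow embeds in a YAML folded scalar.
import json

new_cluster_json = """
{
  "num_workers": 0,
  "spark_version": "10.4.x-scala2.12",
  "node_type_id": "i3.xlarge",
  "aws_attributes": {"availability": "ON_DEMAND"},
  "spark_conf": {
    "spark.master": "local[*, 4]",
    "spark.databricks.cluster.profile": "singleNode"
  },
  "custom_tags": {"ResourceClass": "SingleNode"}
}
"""

spec = json.loads(new_cluster_json)  # raises json.JSONDecodeError on a typo

# These three settings together are what make this a single-node cluster:
# zero workers, the singleNode profile, and the SingleNode resource tag.
assert spec["num_workers"] == 0
assert spec["spark_conf"]["spark.databricks.cluster.profile"] == "singleNode"
assert spec["custom_tags"]["ResourceClass"] == "SingleNode"
print("cluster spec parses cleanly")
```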
@@ -0,0 +1,49 @@
name: AWS integration test push

on:
  workflow_dispatch:
  push:
    branches:
      - main
      - web-sync

jobs:
  run-databricks-notebook:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v2
      - name: Run a databricks notebook
        uses: databricks/run-notebook@v0
        with:
          local-notebook-path: RUNME.py
          git-commit: ${{ github.sha }}
          databricks-host: https://e2-demo-west.cloud.databricks.com
          databricks-token: ${{ secrets.DEPLOYMENT_TARGET_TOKEN_AWS }}
          new-cluster-json: >
            {
              "num_workers": 0,
              "spark_version": "10.4.x-scala2.12",
              "node_type_id": "i3.xlarge",
              "aws_attributes": {
                "availability": "ON_DEMAND"
              },
              "spark_conf": {
                "spark.master": "local[*, 4]",
                "spark.databricks.cluster.profile": "singleNode"
              },
              "custom_tags": {
                "ResourceClass": "SingleNode"
              }
            }
          notebook-params-json: >
            {
              "run_job": "True"
            }
          access-control-list-json: >
            [
              {
                "group_name": "users",
                "permission_level": "CAN_VIEW"
              }
            ]
@@ -0,0 +1,45 @@
name: GCP integration test PR

on:
  pull_request:

jobs:
  run-databricks-notebook:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v2
      - name: Run a databricks notebook
        uses: databricks/run-notebook@v0
        with:
          local-notebook-path: RUNME.py
          git-commit: ${{ github.event.pull_request.head.sha }}
          databricks-host: https://416411475796958.8.gcp.databricks.com
          databricks-token: ${{ secrets.DEPLOYMENT_TARGET_TOKEN_GCP }}
          new-cluster-json: >
            {
              "num_workers": 0,
              "spark_version": "10.4.x-scala2.12",
              "node_type_id": "n1-highmem-4",
              "gcp_attributes": {
                "availability": "ON_DEMAND_GCP"
              },
              "spark_conf": {
                "spark.master": "local[*, 4]",
                "spark.databricks.cluster.profile": "singleNode"
              },
              "custom_tags": {
                "ResourceClass": "SingleNode"
              }
            }
          notebook-params-json: >
            {
              "run_job": "True"
            }
          access-control-list-json: >
            [
              {
                "group_name": "users",
                "permission_level": "CAN_VIEW"
              }
            ]
@@ -0,0 +1,49 @@
name: GCP integration test push

on:
  workflow_dispatch:
  push:
    branches:
      - main
      - web-sync

jobs:
  run-databricks-notebook:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v2
      - name: Run a databricks notebook
        uses: databricks/run-notebook@v0
        with:
          local-notebook-path: RUNME.py
          git-commit: ${{ github.sha }}
          databricks-host: https://416411475796958.8.gcp.databricks.com
          databricks-token: ${{ secrets.DEPLOYMENT_TARGET_TOKEN_GCP }}
          new-cluster-json: >
            {
              "num_workers": 0,
              "spark_version": "10.4.x-scala2.12",
              "node_type_id": "n1-highmem-4",
              "gcp_attributes": {
                "availability": "ON_DEMAND_GCP"
              },
              "spark_conf": {
                "spark.master": "local[*, 4]",
                "spark.databricks.cluster.profile": "singleNode"
              },
              "custom_tags": {
                "ResourceClass": "SingleNode"
              }
            }
          notebook-params-json: >
            {
              "run_job": "True"
            }
          access-control-list-json: >
            [
              {
                "group_name": "users",
                "permission_level": "CAN_VIEW"
              }
            ]
@@ -0,0 +1,45 @@
name: MSA integration test PR
on:
  pull_request:

jobs:
  run-databricks-notebook:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v2
      - name: Run a databricks notebook
        uses: databricks/run-notebook@v0
        with:
          local-notebook-path: RUNME.py
          git-commit: ${{ github.event.pull_request.head.sha }}
          databricks-host: https://adb-984752964297111.11.azuredatabricks.net
          databricks-token: ${{ secrets.DEPLOYMENT_TARGET_TOKEN_MSA }}
          new-cluster-json: >
            {
              "num_workers": 0,
              "spark_version": "10.4.x-scala2.12",
              "node_type_id": "Standard_DS3_v2",
              "azure_attributes": {
                "availability": "ON_DEMAND_AZURE"
              },
              "spark_conf": {
                "spark.master": "local[*, 4]",
                "spark.databricks.cluster.profile": "singleNode"
              },
              "custom_tags": {
                "ResourceClass": "SingleNode"
              }
            }
          notebook-params-json: >
            {
              "run_job": "True"
            }
          access-control-list-json: >
            [
              {
                "group_name": "users",
                "permission_level": "CAN_VIEW"
              }
            ]
@@ -0,0 +1,48 @@
name: MSA integration test push
on:
  workflow_dispatch:
  push:
    branches:
      - main
      - web-sync

jobs:
  run-databricks-notebook:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v2
      - name: Run a databricks notebook
        uses: databricks/run-notebook@v0
        with:
          local-notebook-path: RUNME.py
          git-commit: ${{ github.sha }}
          databricks-host: https://adb-984752964297111.11.azuredatabricks.net
          databricks-token: ${{ secrets.DEPLOYMENT_TARGET_TOKEN_MSA }}
          new-cluster-json: >
            {
              "num_workers": 0,
              "spark_version": "10.4.x-scala2.12",
              "node_type_id": "Standard_D3_v2",
              "azure_attributes": {
                "availability": "ON_DEMAND_AZURE"
              },
              "spark_conf": {
                "spark.master": "local[*, 4]",
                "spark.databricks.cluster.profile": "singleNode"
              },
              "custom_tags": {
                "ResourceClass": "SingleNode"
              }
            }
          notebook-params-json: >
            {
              "run_job": "True"
            }
          access-control-list-json: >
            [
              {
                "group_name": "users",
                "permission_level": "CAN_VIEW"
              }
            ]
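The six integration-test workflows above are one template instantiated per cloud. They differ only in trigger (pull_request versus push to main/web-sync), commit reference (the PR head SHA versus github.sha), workspace host, token secret, node type, and the cloud-specific attributes block; the Azure pair also differs on node type (Standard_DS3_v2 for PR, Standard_D3_v2 for push). A hypothetical generator, not in this repository, makes the shared shape explicit:

```python
# Hypothetical helper (not in this repo): emit the per-cloud cluster spec
# shared by the six integration-test workflows. Only the node type and the
# cloud attribute block vary; the rest is the same single-node spec.
import json

CLOUDS = {
    "aws":   ("i3.xlarge",       "aws_attributes",   "ON_DEMAND"),
    "gcp":   ("n1-highmem-4",    "gcp_attributes",   "ON_DEMAND_GCP"),
    "azure": ("Standard_DS3_v2", "azure_attributes", "ON_DEMAND_AZURE"),
}

def cluster_json(cloud: str) -> str:
    node_type, attr_key, availability = CLOUDS[cloud]
    spec = {
        "num_workers": 0,
        "spark_version": "10.4.x-scala2.12",
        "node_type_id": node_type,
        attr_key: {"availability": availability},
        "spark_conf": {
            "spark.master": "local[*, 4]",
            "spark.databricks.cluster.profile": "singleNode",
        },
        "custom_tags": {"ResourceClass": "SingleNode"},
    }
    return json.dumps(spec, indent=2)

if __name__ == "__main__":
    print(cluster_json("gcp"))
```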
@@ -0,0 +1,85 @@
name: publish

env:
  DB_PROFILES: ${{ secrets.DB_PROFILES }}

on:
  workflow_dispatch:
    inputs:
      db_profile:
        type: string
        description: 'Databricks environment to publish HTML from'
        default: 'FIELD'
      db_path:
        type: string
        description: 'Repository path on databricks environment'
        required: true
      split_markdown:
        description: 'Splitting HTML by markdown'
        type: choice
        required: true
        default: 'false'
        options:
          - 'true'
          - 'false'

permissions:
  contents: read
  pages: write
  id-token: write

concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  release:
    runs-on: html_publisher
    steps:

      - name: Checkout project
        uses: actions/checkout@v2

      - name: Set up Python
        uses: actions/setup-python@v1
        with:
          python-version: "3.9"

      - name: Install dependencies
        run: |
          pip install git+https://github.com/databricks-industry-solutions/industry-solutions-release

      - name: Package solution accelerator
        run: |
          import os
          import configparser
          import io
          from databricks.solutions import Accelerator

          config = configparser.ConfigParser()
          config.read_file(io.StringIO(os.environ['DB_PROFILES']))
          if '${{ github.event.inputs.db_profile }}' not in config.sections():
              raise Exception('Provided DB_PROFILE is not supported')
          config = config['${{ github.event.inputs.db_profile }}']
          split_markdown = '${{ github.event.inputs.split_markdown }}' == 'true'
          Accelerator(
              db_host=config['host'],
              db_token=config['token'],
              db_path='${{ github.event.inputs.db_path }}',
              db_name='${{ github.event.repository.name }}',
              markdown=split_markdown,
          ).release()
        shell: python

      - name: Upload artifact
        uses: actions/upload-pages-artifact@v3
        with:
          path: 'site'

      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
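The "Package solution accelerator" step reads the DB_PROFILES secret as an INI document via configparser and looks up host and token under the section named by the db_profile input. The commit does not show the secret's contents; the sketch below assumes a plausible shape, with placeholder host and token values, and mirrors the workflow's own parsing logic:

```python
# Assumed shape of the DB_PROFILES secret (not shown in the commit): an
# INI document with one section per Databricks environment, parsed the
# same way the workflow's inline Python step parses it.
import configparser
import io

DB_PROFILES = """\
[FIELD]
host = https://my-workspace.cloud.databricks.com
token = dapi0000000000000000
"""  # placeholder host and token

config = configparser.ConfigParser()
config.read_file(io.StringIO(DB_PROFILES))

db_profile = "FIELD"  # mirrors the workflow's db_profile input default
if db_profile not in config.sections():
    raise Exception("Provided DB_PROFILE is not supported")

profile = config[db_profile]
# These two values are what the workflow hands to
# Accelerator(db_host=..., db_token=...).
print(profile["host"])
print(profile["token"][:4] + "...")
```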