
Commit a01a311

removing history

zavoraad committed Aug 9, 2024 (initial commit: 0 parents)
Showing 27 changed files with 85,735 additions and 0 deletions.
46 changes: 46 additions & 0 deletions .github/workflows/integration-test-aws-pr.yml
@@ -0,0 +1,46 @@
name: AWS integration test PR

on:
  pull_request:

jobs:
  run-databricks-notebook:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v2
      - name: Run a databricks notebook
        uses: databricks/run-notebook@v0
        with:
          local-notebook-path: RUNME.py
          git-commit: ${{ github.event.pull_request.head.sha }}
          databricks-host: https://e2-demo-west.cloud.databricks.com
          databricks-token: ${{ secrets.DEPLOYMENT_TARGET_TOKEN_AWS }}
          new-cluster-json: >
            {
              "num_workers": 0,
              "spark_version": "10.4.x-scala2.12",
              "node_type_id": "i3.xlarge",
              "aws_attributes": {
                "availability": "ON_DEMAND"
              },
              "spark_conf": {
                "spark.master": "local[*, 4]",
                "spark.databricks.cluster.profile": "singleNode"
              },
              "custom_tags": {
                "ResourceClass": "SingleNode"
              }
            }
          notebook-params-json: >
            {
              "run_job": "True"
            }
          access-control-list-json: >
            [
              {
                "group_name": "users",
                "permission_level": "CAN_VIEW"
              }
            ]
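Editor's note on the cluster spec: all six integration workflows in this commit launch a Databricks single-node cluster, signalled by the combination of "num_workers": 0, the singleNode cluster profile, a local Spark master, and the ResourceClass: SingleNode tag. An annotated YAML rendering of the same payload follows; the comments are editorial and not part of the workflow.

    # Annotated equivalent of the new-cluster-json payload above
    num_workers: 0                    # no executors; the driver runs all work
    spark_version: 10.4.x-scala2.12   # Databricks Runtime 10.4 LTS
    node_type_id: i3.xlarge           # cloud-specific; differs per workflow
    aws_attributes:
      availability: ON_DEMAND         # on-demand VMs avoid spot interruptions in CI
    spark_conf:
      spark.master: "local[*, 4]"     # local Spark on the driver, up to 4 task failures
      spark.databricks.cluster.profile: singleNode
    custom_tags:
      ResourceClass: SingleNode       # tag Databricks requires on single-node clusters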
49 changes: 49 additions & 0 deletions .github/workflows/integration-test-aws-push.yml
@@ -0,0 +1,49 @@
name: AWS integration test push

on:
  workflow_dispatch:
  push:
    branches:
      - main
      - web-sync

jobs:
  run-databricks-notebook:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v2
      - name: Run a databricks notebook
        uses: databricks/run-notebook@v0
        with:
          local-notebook-path: RUNME.py
          git-commit: ${{ github.sha }}
          databricks-host: https://e2-demo-west.cloud.databricks.com
          databricks-token: ${{ secrets.DEPLOYMENT_TARGET_TOKEN_AWS }}
          new-cluster-json: >
            {
              "num_workers": 0,
              "spark_version": "10.4.x-scala2.12",
              "node_type_id": "i3.xlarge",
              "aws_attributes": {
                "availability": "ON_DEMAND"
              },
              "spark_conf": {
                "spark.master": "local[*, 4]",
                "spark.databricks.cluster.profile": "singleNode"
              },
              "custom_tags": {
                "ResourceClass": "SingleNode"
              }
            }
          notebook-params-json: >
            {
              "run_job": "True"
            }
          access-control-list-json: >
            [
              {
                "group_name": "users",
                "permission_level": "CAN_VIEW"
              }
            ]
45 changes: 45 additions & 0 deletions .github/workflows/integration-test-gcp-pr.yml
@@ -0,0 +1,45 @@
name: GCP integration test PR

on:
  pull_request:

jobs:
  run-databricks-notebook:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v2
      - name: Run a databricks notebook
        uses: databricks/run-notebook@v0
        with:
          local-notebook-path: RUNME.py
          git-commit: ${{ github.event.pull_request.head.sha }}
          databricks-host: https://416411475796958.8.gcp.databricks.com
          databricks-token: ${{ secrets.DEPLOYMENT_TARGET_TOKEN_GCP }}
          new-cluster-json: >
            {
              "num_workers": 0,
              "spark_version": "10.4.x-scala2.12",
              "node_type_id": "n1-highmem-4",
              "gcp_attributes": {
                "availability": "ON_DEMAND_GCP"
              },
              "spark_conf": {
                "spark.master": "local[*, 4]",
                "spark.databricks.cluster.profile": "singleNode"
              },
              "custom_tags": {
                "ResourceClass": "SingleNode"
              }
            }
          notebook-params-json: >
            {
              "run_job": "True"
            }
          access-control-list-json: >
            [
              {
                "group_name": "users",
                "permission_level": "CAN_VIEW"
              }
            ]
49 changes: 49 additions & 0 deletions .github/workflows/integration-test-gcp-push.yml
@@ -0,0 +1,49 @@
name: GCP integration test push

on:
  workflow_dispatch:
  push:
    branches:
      - main
      - web-sync

jobs:
  run-databricks-notebook:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v2
      - name: Run a databricks notebook
        uses: databricks/run-notebook@v0
        with:
          local-notebook-path: RUNME.py
          git-commit: ${{ github.sha }}
          databricks-host: https://416411475796958.8.gcp.databricks.com
          databricks-token: ${{ secrets.DEPLOYMENT_TARGET_TOKEN_GCP }}
          new-cluster-json: >
            {
              "num_workers": 0,
              "spark_version": "10.4.x-scala2.12",
              "node_type_id": "n1-highmem-4",
              "gcp_attributes": {
                "availability": "ON_DEMAND_GCP"
              },
              "spark_conf": {
                "spark.master": "local[*, 4]",
                "spark.databricks.cluster.profile": "singleNode"
              },
              "custom_tags": {
                "ResourceClass": "SingleNode"
              }
            }
          notebook-params-json: >
            {
              "run_job": "True"
            }
          access-control-list-json: >
            [
              {
                "group_name": "users",
                "permission_level": "CAN_VIEW"
              }
            ]
45 changes: 45 additions & 0 deletions .github/workflows/integration-test-msa-pr.yml
@@ -0,0 +1,45 @@
name: MSA integration test PR
on:
  pull_request:

jobs:
  run-databricks-notebook:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v2
      - name: Run a databricks notebook
        uses: databricks/run-notebook@v0
        with:
          local-notebook-path: RUNME.py
          git-commit: ${{ github.event.pull_request.head.sha }}
          databricks-host: https://adb-984752964297111.11.azuredatabricks.net
          databricks-token: ${{ secrets.DEPLOYMENT_TARGET_TOKEN_MSA }}
          new-cluster-json: >
            {
              "num_workers": 0,
              "spark_version": "10.4.x-scala2.12",
              "node_type_id": "Standard_DS3_v2",
              "azure_attributes": {
                "availability": "ON_DEMAND_AZURE"
              },
              "spark_conf": {
                "spark.master": "local[*, 4]",
                "spark.databricks.cluster.profile": "singleNode"
              },
              "custom_tags": {
                "ResourceClass": "SingleNode"
              }
            }
          notebook-params-json: >
            {
              "run_job": "True"
            }
          access-control-list-json: >
            [
              {
                "group_name": "users",
                "permission_level": "CAN_VIEW"
              }
            ]
48 changes: 48 additions & 0 deletions .github/workflows/integration-test-msa-push.yml
@@ -0,0 +1,48 @@
name: MSA integration test push
on:
  workflow_dispatch:
  push:
    branches:
      - main
      - web-sync

jobs:
  run-databricks-notebook:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v2
      - name: Run a databricks notebook
        uses: databricks/run-notebook@v0
        with:
          local-notebook-path: RUNME.py
          git-commit: ${{ github.sha }}
          databricks-host: https://adb-984752964297111.11.azuredatabricks.net
          databricks-token: ${{ secrets.DEPLOYMENT_TARGET_TOKEN_MSA }}
          new-cluster-json: >
            {
              "num_workers": 0,
              "spark_version": "10.4.x-scala2.12",
              "node_type_id": "Standard_D3_v2",
              "azure_attributes": {
                "availability": "ON_DEMAND_AZURE"
              },
              "spark_conf": {
                "spark.master": "local[*, 4]",
                "spark.databricks.cluster.profile": "singleNode"
              },
              "custom_tags": {
                "ResourceClass": "SingleNode"
              }
            }
          notebook-params-json: >
            {
              "run_job": "True"
            }
          access-control-list-json: >
            [
              {
                "group_name": "users",
                "permission_level": "CAN_VIEW"
              }
            ]
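Editor's note: the six integration workflows above differ only in trigger, workspace host, token secret, node type, and the per-cloud attributes block. A hypothetical matrix-based consolidation of the three PR workflows is sketched below; it is not part of this commit, and the cluster JSON is omitted because node_type_id and the aws_/gcp_/azure_attributes block would still need to vary per cloud.

    name: integration test PR (all clouds)

    on:
      pull_request:

    jobs:
      run-databricks-notebook:
        runs-on: ubuntu-latest
        strategy:
          matrix:
            include:
              - host: https://e2-demo-west.cloud.databricks.com
                token_secret: DEPLOYMENT_TARGET_TOKEN_AWS
              - host: https://416411475796958.8.gcp.databricks.com
                token_secret: DEPLOYMENT_TARGET_TOKEN_GCP
              - host: https://adb-984752964297111.11.azuredatabricks.net
                token_secret: DEPLOYMENT_TARGET_TOKEN_MSA
        steps:
          - name: Checkout repo
            uses: actions/checkout@v2
          - name: Run a databricks notebook
            uses: databricks/run-notebook@v0
            with:
              local-notebook-path: RUNME.py
              git-commit: ${{ github.event.pull_request.head.sha }}
              databricks-host: ${{ matrix.host }}
              databricks-token: ${{ secrets[matrix.token_secret] }}
              # new-cluster-json omitted: it still varies per cloud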
85 changes: 85 additions & 0 deletions .github/workflows/publish.yaml
@@ -0,0 +1,85 @@
name: publish

env:
  DB_PROFILES: ${{ secrets.DB_PROFILES }}

on:
  workflow_dispatch:
    inputs:
      db_profile:
        type: string
        description: 'Databricks environment to publish HTML from'
        default: 'FIELD'
      db_path:
        type: string
        description: 'Repository path on databricks environment'
        required: true
      split_markdown:
        description: 'Splitting HTML by markdown'
        type: choice
        required: true
        default: 'false'
        options:
          - 'true'
          - 'false'

permissions:
  contents: read
  pages: write
  id-token: write

concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  release:
    runs-on: html_publisher
    steps:

      - name: Checkout project
        uses: actions/checkout@v2

      - name: Set up Python
        uses: actions/setup-python@v1
        with:
          python-version: "3.9"

      - name: Install dependencies
        run: |
          pip install git+https://github.com/databricks-industry-solutions/industry-solutions-release
      - name: Package solution accelerator
        run: |
          import os
          import configparser
          import io
          from databricks.solutions import Accelerator
          config = configparser.ConfigParser()
          config.read_file(io.StringIO(os.environ['DB_PROFILES']))
          if '${{ github.event.inputs.db_profile }}' not in config.sections():
              raise Exception('Provided DB_PROFILE is not supported')
          config = config['${{ github.event.inputs.db_profile }}']
          split_markdown = '${{ github.event.inputs.split_markdown }}' == 'true'
          Accelerator(
              db_host=config['host'],
              db_token=config['token'],
              db_path='${{ github.event.inputs.db_path }}',
              db_name='${{ github.event.repository.name }}',
              markdown=split_markdown,
          ).release()
        shell: python

      - name: Upload artifact
        uses: actions/upload-pages-artifact@v3
        with:
          path: 'site'

      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
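Editor's note: the inline script in the "Package solution accelerator" step can be reproduced outside CI. A minimal local sketch follows, assuming the DB_PROFILES environment variable holds the same INI-format profiles as the repository secret; the profile name, repo path, and repo name below are placeholders, not values from this commit.

    import configparser
    import io
    import os

    # Same package the workflow installs from GitHub
    from databricks.solutions import Accelerator

    # DB_PROFILES holds INI-style profiles, mirroring the workflow's secret
    config = configparser.ConfigParser()
    config.read_file(io.StringIO(os.environ['DB_PROFILES']))

    profile = 'FIELD'  # placeholder; matches the workflow's default input
    if profile not in config.sections():
        raise Exception('Provided DB_PROFILE is not supported')

    section = config[profile]
    Accelerator(
        db_host=section['host'],
        db_token=section['token'],
        db_path='/Repos/someone@example.com/accelerator',  # placeholder db_path
        db_name='accelerator',                             # placeholder repo name
        markdown=False,                                    # split_markdown input
    ).release()  # renders HTML into ./site, which the workflow uploads to Pages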
