diff --git a/labs.yml b/labs.yml
index 0522a38c1..bbce48a93 100644
--- a/labs.yml
+++ b/labs.yml
@@ -55,6 +55,20 @@ commands:
   - name: configure-database-profiler
     description: Configure database profiler

+  - name: create-profiler-dashboard
+    description: (Experimental) Upload the profiler results as a Databricks dashboard.
+    flags:
+      - name: extract-file
+        description: Path to the profiler extract file
+      - name: source-tech
+        description: Name of the source system technology that was profiled
+      - name: volume-path
+        description: Unity Catalog volume to upload the profiler extract to
+      - name: catalog-name
+        description: (Optional) Name of the catalog that the extract data will be uploaded to
+      - name: schema-name
+        description: (Optional) Name of the schema that the extract tables will be uploaded to
+
   - name: install-transpile
     description: Install & optionally configure 'transpile' dependencies
     flags:
diff --git a/src/databricks/labs/lakebridge/cli.py b/src/databricks/labs/lakebridge/cli.py
index 2b370b82e..ac6d80b8c 100644
--- a/src/databricks/labs/lakebridge/cli.py
+++ b/src/databricks/labs/lakebridge/cli.py
@@ -827,6 +827,23 @@ def analyze(
     logger.debug(f"User: {ctx.current_user}")


+@lakebridge.command()
+def create_profiler_dashboard(
+    *,
+    w: WorkspaceClient,
+    extract_file: str,
+    source_tech: str,
+    volume_path: str,
+    catalog_name: str,
+    schema_name: str,
+) -> None:
+    """Deploys a profiler summary as a Databricks dashboard"""
+    ctx = ApplicationContext(w)
+    ctx.add_user_agent_extra("cmd", "create-profiler-dashboard")
+    ctx.dashboard_manager.upload_duckdb_to_uc_volume(extract_file, volume_path)
+    ctx.dashboard_manager.create_profiler_summary_dashboard(source_tech, catalog_name, schema_name)
+
+
 if __name__ == "__main__":
     app = lakebridge
     logger = app.get_logger()
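
Usage note (not part of the diff): assuming the usual `databricks labs <project> <command>` entry point used by Databricks labs projects, the new command registered above would be invoked roughly as follows. The flag names come from the labs.yml change; all flag values are illustrative placeholders, not taken from this change.

    databricks labs lakebridge create-profiler-dashboard \
      --extract-file /tmp/profiler_extract.duckdb \
      --source-tech mssql \
      --volume-path /Volumes/main/profiler/extracts \
      --catalog-name main \
      --schema-name profiler_runs

Per the cli.py addition, such an invocation would upload the local extract file to the given Unity Catalog volume and then build the profiler summary dashboard from it; catalog-name and schema-name are described as optional in labs.yml.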