diff --git a/src/_nebari/constants.py b/src/_nebari/constants.py
index 6e57519fee..6e7ab3880a 100644
--- a/src/_nebari/constants.py
+++ b/src/_nebari/constants.py
@@ -25,7 +25,6 @@
 # DOCS
-DO_ENV_DOCS = "https://www.nebari.dev/docs/how-tos/nebari-do"
 AZURE_ENV_DOCS = "https://www.nebari.dev/docs/how-tos/nebari-azure"
 AWS_ENV_DOCS = "https://www.nebari.dev/docs/how-tos/nebari-aws"
 GCP_ENV_DOCS = "https://www.nebari.dev/docs/how-tos/nebari-gcp"
@@ -34,4 +33,3 @@
 AWS_DEFAULT_REGION = "us-east-1"
 AZURE_DEFAULT_REGION = "Central US"
 GCP_DEFAULT_REGION = "us-central1"
-DO_DEFAULT_REGION = "nyc3"
diff --git a/src/_nebari/initialize.py b/src/_nebari/initialize.py
index 7745df2a98..4b41f2c5a1 100644
--- a/src/_nebari/initialize.py
+++ b/src/_nebari/initialize.py
@@ -11,18 +11,12 @@
 from _nebari import constants
 from _nebari.provider import git
 from _nebari.provider.cicd import github
-from _nebari.provider.cloud import (
-    amazon_web_services,
-    azure_cloud,
-    digital_ocean,
-    google_cloud,
-)
+from _nebari.provider.cloud import amazon_web_services, azure_cloud, google_cloud
 from _nebari.provider.oauth.auth0 import create_client
 from _nebari.stages.bootstrap import CiEnum
 from _nebari.stages.infrastructure import (
     DEFAULT_AWS_NODE_GROUPS,
     DEFAULT_AZURE_NODE_GROUPS,
-    DEFAULT_DO_NODE_GROUPS,
     DEFAULT_GCP_NODE_GROUPS,
     node_groups_to_dict,
 )
@@ -117,22 +111,7 @@ def render_config(
         ),
     }

-    if cloud_provider == ProviderEnum.do:
-        do_region = region or constants.DO_DEFAULT_REGION
-        do_kubernetes_versions = kubernetes_version or get_latest_kubernetes_version(
-            digital_ocean.kubernetes_versions()
-        )
-        config["digital_ocean"] = {
-            "kubernetes_version": do_kubernetes_versions,
-            "region": do_region,
-            "node_groups": node_groups_to_dict(DEFAULT_DO_NODE_GROUPS),
-        }
-
-        config["theme"]["jupyterhub"][
-            "hub_subtitle"
-        ] = f"{WELCOME_HEADER_TEXT} on Digital Ocean"
-
-    elif cloud_provider == ProviderEnum.gcp:
+    if cloud_provider == ProviderEnum.gcp:
         gcp_region = region or constants.GCP_DEFAULT_REGION
         gcp_kubernetes_version = kubernetes_version or get_latest_kubernetes_version(
             google_cloud.kubernetes_versions(gcp_region)
@@ -245,16 +224,7 @@ def github_auto_provision(config: pydantic.BaseModel, owner: str, repo: str):
     try:
         # Secrets
-        if config.provider == ProviderEnum.do:
-            for name in {
-                "AWS_ACCESS_KEY_ID",
-                "AWS_SECRET_ACCESS_KEY",
-                "SPACES_ACCESS_KEY_ID",
-                "SPACES_SECRET_ACCESS_KEY",
-                "DIGITALOCEAN_TOKEN",
-            }:
-                github.update_secret(owner, repo, name, os.environ[name])
-        elif config.provider == ProviderEnum.aws:
+        if config.provider == ProviderEnum.aws:
             for name in {
                 "AWS_ACCESS_KEY_ID",
                 "AWS_SECRET_ACCESS_KEY",
diff --git a/src/_nebari/provider/cicd/github.py b/src/_nebari/provider/cicd/github.py
index d091d1d027..92d3b853e9 100644
--- a/src/_nebari/provider/cicd/github.py
+++ b/src/_nebari/provider/cicd/github.py
@@ -117,12 +117,6 @@ def gha_env_vars(config: schema.Main):
         env_vars["ARM_CLIENT_SECRET"] = "${{ secrets.ARM_CLIENT_SECRET }}"
         env_vars["ARM_SUBSCRIPTION_ID"] = "${{ secrets.ARM_SUBSCRIPTION_ID }}"
         env_vars["ARM_TENANT_ID"] = "${{ secrets.ARM_TENANT_ID }}"
-    elif config.provider == schema.ProviderEnum.do:
-        env_vars["AWS_ACCESS_KEY_ID"] = "${{ secrets.AWS_ACCESS_KEY_ID }}"
-        env_vars["AWS_SECRET_ACCESS_KEY"] = "${{ secrets.AWS_SECRET_ACCESS_KEY }}"
-        env_vars["SPACES_ACCESS_KEY_ID"] = "${{ secrets.SPACES_ACCESS_KEY_ID }}"
-        env_vars["SPACES_SECRET_ACCESS_KEY"] = "${{ secrets.SPACES_SECRET_ACCESS_KEY }}"
-        env_vars["DIGITALOCEAN_TOKEN"] = "${{ secrets.DIGITALOCEAN_TOKEN }}"
     elif config.provider == schema.ProviderEnum.gcp:
         env_vars["GOOGLE_CREDENTIALS"] = "${{ secrets.GOOGLE_CREDENTIALS }}"
         env_vars["PROJECT_ID"] = "${{ secrets.PROJECT_ID }}"
diff --git a/src/_nebari/provider/cloud/amazon_web_services.py b/src/_nebari/provider/cloud/amazon_web_services.py
index 68dfcb133c..dee4df891c 100644
--- a/src/_nebari/provider/cloud/amazon_web_services.py
+++ b/src/_nebari/provider/cloud/amazon_web_services.py
@@ -24,25 +24,19 @@ def check_credentials() -> None:

 @functools.lru_cache()
 def aws_session(
-    region: Optional[str] = None, digitalocean_region: Optional[str] = None
+    region: Optional[str] = None,
 ) -> boto3.Session:
     """Create a boto3 session."""
-    if digitalocean_region:
-        aws_access_key_id = os.environ["SPACES_ACCESS_KEY_ID"]
-        aws_secret_access_key = os.environ["SPACES_SECRET_ACCESS_KEY"]
-        region = digitalocean_region
-        aws_session_token = None
-    else:
-        check_credentials()
-        aws_access_key_id = os.environ["AWS_ACCESS_KEY_ID"]
-        aws_secret_access_key = os.environ["AWS_SECRET_ACCESS_KEY"]
-        aws_session_token = os.environ.get("AWS_SESSION_TOKEN")
-
-    if not region:
-        raise ValueError(
-            "Please specify `region` in the nebari-config.yaml or if initializing the nebari-config, set the region via the "
-            "`--region` flag or via the AWS_DEFAULT_REGION environment variable.\n"
-        )
+    check_credentials()
+    aws_access_key_id = os.environ["AWS_ACCESS_KEY_ID"]
+    aws_secret_access_key = os.environ["AWS_SECRET_ACCESS_KEY"]
+    aws_session_token = os.environ.get("AWS_SESSION_TOKEN")
+
+    if not region:
+        raise ValueError(
+            "Please specify `region` in the nebari-config.yaml or if initializing the nebari-config, set the region via the "
+            "`--region` flag or via the AWS_DEFAULT_REGION environment variable.\n"
+        )

     return boto3.Session(
         region_name=region,
@@ -712,21 +706,17 @@ def aws_delete_s3_objects(
     bucket_name: str,
     endpoint: Optional[str] = None,
     region: Optional[str] = None,
-    digitalocean_region: Optional[str] = None,
 ):
     """
     Delete all objects in the S3 bucket.

-    NOTE: This method is shared with Digital Ocean as their "Spaces" is S3 compatible and uses the same API.
-
     Parameters:
         bucket_name (str): S3 bucket name
-        endpoint (str): S3 endpoint URL (required for Digital Ocean spaces)
+        endpoint (str): S3 endpoint URL
         region (str): AWS region
-        digitalocean_region (str): Digital Ocean region
     """
-    session = aws_session(region=region, digitalocean_region=digitalocean_region)
+    session = aws_session(region=region)
     s3 = session.client("s3", endpoint_url=endpoint)

     try:
@@ -779,22 +769,18 @@ def aws_delete_s3_bucket(
     bucket_name: str,
     endpoint: Optional[str] = None,
     region: Optional[str] = None,
-    digitalocean_region: Optional[str] = None,
 ):
     """
     Delete S3 bucket.

-    NOTE: This method is shared with Digital Ocean as their "Spaces" is S3 compatible and uses the same API.
-
     Parameters:
         bucket_name (str): S3 bucket name
-        endpoint (str): S3 endpoint URL (required for Digital Ocean spaces)
+        endpoint (str): S3 endpoint URL
         region (str): AWS region
-        digitalocean_region (str): Digital Ocean region
     """
-    aws_delete_s3_objects(bucket_name, endpoint, region, digitalocean_region)
+    aws_delete_s3_objects(bucket_name, endpoint, region)

-    session = aws_session(region=region, digitalocean_region=digitalocean_region)
+    session = aws_session(region=region)
     s3 = session.client("s3", endpoint_url=endpoint)

     try:
diff --git a/src/_nebari/provider/cloud/digital_ocean.py b/src/_nebari/provider/cloud/digital_ocean.py
deleted file mode 100644
index 3e4a507be6..0000000000
--- a/src/_nebari/provider/cloud/digital_ocean.py
+++ /dev/null
@@ -1,131 +0,0 @@
-import functools
-import os
-import tempfile
-import typing
-
-import kubernetes.client
-import kubernetes.config
-import requests
-
-from _nebari.constants import DO_ENV_DOCS
-from _nebari.provider.cloud.amazon_web_services import aws_delete_s3_bucket
-from _nebari.provider.cloud.commons import filter_by_highest_supported_k8s_version
-from _nebari.utils import check_environment_variables, set_do_environment
-from nebari import schema
-
-
-def check_credentials() -> None:
-    required_variables = {
-        "DIGITALOCEAN_TOKEN",
-        "SPACES_ACCESS_KEY_ID",
-        "SPACES_SECRET_ACCESS_KEY",
-    }
-    check_environment_variables(required_variables, DO_ENV_DOCS)
-
-
-def digital_ocean_request(url, method="GET", json=None):
-    BASE_DIGITALOCEAN_URL = "https://api.digitalocean.com/v2/"
-
-    for name in {"DIGITALOCEAN_TOKEN"}:
-        if name not in os.environ:
-            raise ValueError(
-                f"Digital Ocean api requests require environment variable={name} defined"
-            )
-
-    headers = {"Authorization": f'Bearer {os.environ["DIGITALOCEAN_TOKEN"]}'}
-
-    method_map = {
-        "GET": requests.get,
-        "DELETE": requests.delete,
-    }
-
-    response = method_map[method](
-        f"{BASE_DIGITALOCEAN_URL}{url}", headers=headers, json=json
-    )
-    response.raise_for_status()
-    return response
-
-
-@functools.lru_cache()
-def _kubernetes_options():
-    return digital_ocean_request("kubernetes/options").json()
-
-
-def instances():
-    return _kubernetes_options()["options"]["sizes"]
-
-
-def regions():
-    return _kubernetes_options()["options"]["regions"]
-
-
-def kubernetes_versions() -> typing.List[str]:
-    """Return list of available kubernetes supported by cloud provider.
-
-    Sorted from oldest to latest."""
-    supported_kubernetes_versions = sorted(
-        [_["slug"].split("-")[0] for _ in _kubernetes_options()["options"]["versions"]]
-    )
-    filtered_versions = filter_by_highest_supported_k8s_version(
-        supported_kubernetes_versions
-    )
-    return [f"{v}-do.0" for v in filtered_versions]
-
-
-def digital_ocean_get_cluster_id(cluster_name):
-    clusters = digital_ocean_request("kubernetes/clusters").json()[
-        "kubernetes_clusters"
-    ]
-
-    cluster_id = None
-    for cluster in clusters:
-        if cluster["name"] == cluster_name:
-            cluster_id = cluster["id"]
-            break
-
-    return cluster_id
-
-
-def digital_ocean_get_kubeconfig(cluster_id: str):
-    kubeconfig_content = digital_ocean_request(
-        f"kubernetes/clusters/{cluster_id}/kubeconfig"
-    ).content
-
-    with tempfile.NamedTemporaryFile(delete=False) as temp_kubeconfig:
-        temp_kubeconfig.write(kubeconfig_content)
-
-    return temp_kubeconfig.name
-
-
-def digital_ocean_delete_kubernetes_cluster(cluster_name: str):
-    cluster_id = digital_ocean_get_cluster_id(cluster_name)
-    digital_ocean_request(f"kubernetes/clusters/{cluster_id}", method="DELETE")
-
-
-def digital_ocean_cleanup(config: schema.Main):
-    """Delete all Digital Ocean resources created by Nebari."""
-
-    name = config.project_name
-    namespace = config.namespace
-
-    cluster_name = f"{name}-{namespace}"
-    tf_state_bucket = f"{cluster_name}-terraform-state"
-    do_spaces_endpoint = "https://nyc3.digitaloceanspaces.com"
-
-    cluster_id = digital_ocean_get_cluster_id(cluster_name)
-    if cluster_id is None:
-        return
-
-    kubernetes.config.load_kube_config(digital_ocean_get_kubeconfig(cluster_id))
-    api = kubernetes.client.CoreV1Api()
-
-    labels = {"component": "singleuser-server", "app": "jupyterhub"}
-
-    api.delete_collection_namespaced_pod(
-        namespace=namespace,
-        label_selector=",".join([f"{k}={v}" for k, v in labels.items()]),
-    )
-
-    set_do_environment()
-    aws_delete_s3_bucket(
-        tf_state_bucket, digitalocean=True, endpoint=do_spaces_endpoint
-    )
-    digital_ocean_delete_kubernetes_cluster(cluster_name)
diff --git a/src/_nebari/stages/infrastructure/__init__.py b/src/_nebari/stages/infrastructure/__init__.py
index eeba113c34..976f15a571 100644
--- a/src/_nebari/stages/infrastructure/__init__.py
+++ b/src/_nebari/stages/infrastructure/__init__.py
@@ -12,12 +12,7 @@
 from _nebari import constants
 from _nebari.provider import terraform
-from _nebari.provider.cloud import (
-    amazon_web_services,
-    azure_cloud,
-    digital_ocean,
-    google_cloud,
-)
+from _nebari.provider.cloud import amazon_web_services, azure_cloud, google_cloud
 from _nebari.stages.base import NebariTerraformStage
 from _nebari.stages.kubernetes_services import SharedFsEnum
 from _nebari.stages.tf_objects import NebariTerraformState
@@ -43,22 +38,6 @@ class ExistingInputVars(schema.Base):
     kube_context: str


-class DigitalOceanNodeGroup(schema.Base):
-    instance: str
-    min_nodes: int
-    max_nodes: int
-
-
-class DigitalOceanInputVars(schema.Base):
-    name: str
-    environment: str
-    region: str
-    tags: List[str]
-    kubernetes_version: str
-    node_groups: Dict[str, DigitalOceanNodeGroup]
-    kubeconfig_filename: str = get_kubeconfig_filename()
-
-
 class GCPNodeGroupInputVars(schema.Base):
     name: str
     instance_type: str
@@ -224,11 +203,6 @@ def _calculate_node_groups(config: schema.Main):
             group: {"key": "azure-node-pool", "value": group}
             for group in ["general", "user", "worker"]
         }
-    elif config.provider == schema.ProviderEnum.do:
-        return {
-            group: {"key": "doks.digitalocean.com/node-pool", "value": group}
-            for group in ["general", "user", "worker"]
"user", "worker"] - } elif config.provider == schema.ProviderEnum.existing: return config.existing.model_dump()["node_selectors"] else: @@ -267,67 +241,6 @@ class KeyValueDict(schema.Base): value: str -class DigitalOceanNodeGroup(schema.Base): - """Representation of a node group with Digital Ocean - - - Kubernetes limits: https://docs.digitalocean.com/products/kubernetes/details/limits/ - - Available instance types: https://slugs.do-api.dev/ - """ - - instance: str - min_nodes: Annotated[int, Field(ge=1)] = 1 - max_nodes: Annotated[int, Field(ge=1)] = 1 - - -DEFAULT_DO_NODE_GROUPS = { - "general": DigitalOceanNodeGroup(instance="g-8vcpu-32gb", min_nodes=1, max_nodes=1), - "user": DigitalOceanNodeGroup(instance="g-4vcpu-16gb", min_nodes=1, max_nodes=5), - "worker": DigitalOceanNodeGroup(instance="g-4vcpu-16gb", min_nodes=1, max_nodes=5), -} - - -class DigitalOceanProvider(schema.Base): - region: str - kubernetes_version: Optional[str] = None - # Digital Ocean image slugs are listed here https://slugs.do-api.dev/ - node_groups: Dict[str, DigitalOceanNodeGroup] = DEFAULT_DO_NODE_GROUPS - tags: Optional[List[str]] = [] - - @model_validator(mode="before") - @classmethod - def _check_input(cls, data: Any) -> Any: - digital_ocean.check_credentials() - - # check if region is valid - available_regions = set(_["slug"] for _ in digital_ocean.regions()) - if data["region"] not in available_regions: - raise ValueError( - f"Digital Ocean region={data['region']} is not one of {available_regions}" - ) - - # check if kubernetes version is valid - available_kubernetes_versions = digital_ocean.kubernetes_versions() - if len(available_kubernetes_versions) == 0: - raise ValueError( - "Request to Digital Ocean for available Kubernetes versions failed." - ) - if data["kubernetes_version"] is None: - data["kubernetes_version"] = available_kubernetes_versions[-1] - elif data["kubernetes_version"] not in available_kubernetes_versions: - raise ValueError( - f"\nInvalid `kubernetes-version` provided: {data['kubernetes_version']}.\nPlease select from one of the following supported Kubernetes versions: {available_kubernetes_versions} or omit flag to use latest Kubernetes version available." 
-            )
-
-        available_instances = {_["slug"] for _ in digital_ocean.instances()}
-        if "node_groups" in data:
-            for _, node_group in data["node_groups"].items():
-                if node_group["instance"] not in available_instances:
-                    raise ValueError(
-                        f"Digital Ocean instance {node_group.instance} not one of available instance types={available_instances}"
-                    )
-        return data
-
-
 class GCPIPAllocationPolicy(schema.Base):
     cluster_secondary_range_name: str
     services_secondary_range_name: str
@@ -644,7 +557,6 @@ class ExistingProvider(schema.Base):
     schema.ProviderEnum.gcp: GoogleCloudPlatformProvider,
     schema.ProviderEnum.aws: AmazonWebServicesProvider,
     schema.ProviderEnum.azure: AzureProvider,
-    schema.ProviderEnum.do: DigitalOceanProvider,
 }

 provider_enum_name_map: Dict[schema.ProviderEnum, str] = {
@@ -653,7 +565,6 @@ class ExistingProvider(schema.Base):
     schema.ProviderEnum.gcp: "google_cloud_platform",
     schema.ProviderEnum.aws: "amazon_web_services",
     schema.ProviderEnum.azure: "azure",
-    schema.ProviderEnum.do: "digital_ocean",
 }

 provider_name_abbreviation_map: Dict[str, str] = {
@@ -664,7 +575,6 @@ class ExistingProvider(schema.Base):
     schema.ProviderEnum.gcp: node_groups_to_dict(DEFAULT_GCP_NODE_GROUPS),
     schema.ProviderEnum.aws: node_groups_to_dict(DEFAULT_AWS_NODE_GROUPS),
     schema.ProviderEnum.azure: node_groups_to_dict(DEFAULT_AZURE_NODE_GROUPS),
-    schema.ProviderEnum.do: node_groups_to_dict(DEFAULT_DO_NODE_GROUPS),
 }


@@ -674,7 +584,6 @@ class InputSchema(schema.Base):
     google_cloud_platform: Optional[GoogleCloudPlatformProvider] = None
     amazon_web_services: Optional[AmazonWebServicesProvider] = None
     azure: Optional[AzureProvider] = None
-    digital_ocean: Optional[DigitalOceanProvider] = None

     @model_validator(mode="before")
     @classmethod
@@ -692,7 +601,7 @@ def check_provider(cls, data: Any) -> Any:
                 # so we need to check for it explicitly here, and set the `pre` to True
                 # TODO: this is a workaround, check if there is a better way to do this in Pydantic v2
                 raise ValueError(
-                    f"'{provider}' is not a valid enumeration member; permitted: local, existing, do, aws, gcp, azure"
+                    f"'{provider}' is not a valid enumeration member; permitted: local, existing, aws, gcp, azure"
                 )
         else:
             set_providers = [
@@ -799,10 +708,6 @@ def tf_objects(self) -> List[Dict]:
                 ),
                 NebariTerraformState(self.name, self.config),
             ]
-        elif self.config.provider == schema.ProviderEnum.do:
-            return [
-                NebariTerraformState(self.name, self.config),
-            ]
         elif self.config.provider == schema.ProviderEnum.azure:
             return [
                 NebariTerraformState(self.name, self.config),
@@ -826,15 +731,6 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]):
             return ExistingInputVars(
                 kube_context=self.config.existing.kube_context
             ).model_dump()
-        elif self.config.provider == schema.ProviderEnum.do:
-            return DigitalOceanInputVars(
-                name=self.config.escaped_project_name,
-                environment=self.config.namespace,
-                region=self.config.digital_ocean.region,
-                tags=self.config.digital_ocean.tags,
-                kubernetes_version=self.config.digital_ocean.kubernetes_version,
-                node_groups=self.config.digital_ocean.node_groups,
-            ).model_dump()
         elif self.config.provider == schema.ProviderEnum.gcp:
             return GCPInputVars(
                 name=self.config.escaped_project_name,
diff --git a/src/_nebari/stages/infrastructure/template/do/main.tf b/src/_nebari/stages/infrastructure/template/do/main.tf
deleted file mode 100644
index 30a7aa2966..0000000000
--- a/src/_nebari/stages/infrastructure/template/do/main.tf
+++ /dev/null
@@ -1,25 +0,0 @@
-module "kubernetes" {
-  source = "./modules/kubernetes"
-
"${var.name}-${var.environment}" - - region = var.region - kubernetes_version = var.kubernetes_version - - node_groups = [ - for name, config in var.node_groups : { - name = name - auto_scale = true - size = config.instance - min_nodes = config.min_nodes - max_nodes = config.max_nodes - } - ] - - tags = concat([ - "provision::terraform", - "project::${var.name}", - "namespace::${var.environment}", - "owner::nebari", - ], var.tags) -} diff --git a/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/locals.tf b/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/locals.tf deleted file mode 100644 index d88a874c5c..0000000000 --- a/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/locals.tf +++ /dev/null @@ -1,5 +0,0 @@ -locals { - master_node_group = var.node_groups[0] - - additional_node_groups = slice(var.node_groups, 1, length(var.node_groups)) -} diff --git a/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/main.tf b/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/main.tf deleted file mode 100644 index 0d1ce76a35..0000000000 --- a/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/main.tf +++ /dev/null @@ -1,35 +0,0 @@ -resource "digitalocean_kubernetes_cluster" "main" { - name = var.name - region = var.region - - # Grab the latest from `doctl kubernetes options versions` - version = var.kubernetes_version - - node_pool { - name = local.master_node_group.name - # List available regions `doctl kubernetes options sizes` - size = lookup(local.master_node_group, "size", "s-1vcpu-2gb") - node_count = lookup(local.master_node_group, "node_count", 1) - } - - tags = var.tags -} - -resource "digitalocean_kubernetes_node_pool" "main" { - count = length(local.additional_node_groups) - - cluster_id = digitalocean_kubernetes_cluster.main.id - - name = local.additional_node_groups[count.index].name - size = lookup(local.additional_node_groups[count.index], "size", "s-1vcpu-2gb") - - auto_scale = lookup(local.additional_node_groups[count.index], "auto_scale", true) - min_nodes = lookup(local.additional_node_groups[count.index], "min_nodes", 1) - max_nodes = lookup(local.additional_node_groups[count.index], "max_nodes", 1) - - labels = { - "nebari.dev/node_group" : local.additional_node_groups[count.index].name - } - - tags = var.tags -} diff --git a/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/outputs.tf b/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/outputs.tf deleted file mode 100644 index e2e1c2c6be..0000000000 --- a/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/outputs.tf +++ /dev/null @@ -1,16 +0,0 @@ -output "credentials" { - description = "Credentials needs to connect to kubernetes instance" - value = { - endpoint = digitalocean_kubernetes_cluster.main.endpoint - token = digitalocean_kubernetes_cluster.main.kube_config[0].token - cluster_ca_certificate = base64decode( - digitalocean_kubernetes_cluster.main.kube_config[0].cluster_ca_certificate - ) - } -} - - -output "kubeconfig" { - description = "Kubeconfig for connecting to kubernetes cluster" - value = digitalocean_kubernetes_cluster.main.kube_config.0.raw_config -} diff --git a/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/variables.tf b/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/variables.tf deleted file mode 100644 index 67843a7820..0000000000 --- a/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/variables.tf +++ /dev/null @@ -1,29 +0,0 
@@ -1,29 +0,0 @@
-variable "name" {
-  description = "Prefix name to assign to digital ocean kubernetes cluster"
-  type        = string
-}
-
-variable "tags" {
-  description = "Additional tags to apply to each kubernetes resource"
-  type        = set(string)
-  default     = []
-}
-
-# `doctl kubernetes options regions`
-variable "region" {
-  description = "Region to deploy digital ocean kubernetes resource"
-  type        = string
-  default     = "nyc1"
-}
-
-# `doctl kubernetes options versions`
-variable "kubernetes_version" {
-  description = "Version of digital ocean kubernetes resource"
-  type        = string
-  default     = "1.18.8-do.0"
-}
-
-variable "node_groups" {
-  description = "List of node groups to include in digital ocean kubernetes cluster"
-  type        = list(map(any))
-}
diff --git a/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/versions.tf b/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/versions.tf
deleted file mode 100644
index b320a102dd..0000000000
--- a/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/versions.tf
+++ /dev/null
@@ -1,9 +0,0 @@
-terraform {
-  required_providers {
-    digitalocean = {
-      source  = "digitalocean/digitalocean"
-      version = "2.29.0"
-    }
-  }
-  required_version = ">= 1.0"
-}
diff --git a/src/_nebari/stages/infrastructure/template/do/modules/registry/main.tf b/src/_nebari/stages/infrastructure/template/do/modules/registry/main.tf
deleted file mode 100644
index 14e6896030..0000000000
--- a/src/_nebari/stages/infrastructure/template/do/modules/registry/main.tf
+++ /dev/null
@@ -1,4 +0,0 @@
-resource "digitalocean_container_registry" "registry" {
-  name                   = var.name
-  subscription_tier_slug = "starter"
-}
diff --git a/src/_nebari/stages/infrastructure/template/do/modules/registry/variable.tf b/src/_nebari/stages/infrastructure/template/do/modules/registry/variable.tf
deleted file mode 100644
index fce96bef08..0000000000
--- a/src/_nebari/stages/infrastructure/template/do/modules/registry/variable.tf
+++ /dev/null
@@ -1,4 +0,0 @@
-variable "name" {
-  description = "Prefix name to git container registry"
-  type        = string
-}
diff --git a/src/_nebari/stages/infrastructure/template/do/modules/registry/versions.tf b/src/_nebari/stages/infrastructure/template/do/modules/registry/versions.tf
deleted file mode 100644
index b320a102dd..0000000000
--- a/src/_nebari/stages/infrastructure/template/do/modules/registry/versions.tf
+++ /dev/null
@@ -1,9 +0,0 @@
-terraform {
-  required_providers {
-    digitalocean = {
-      source  = "digitalocean/digitalocean"
-      version = "2.29.0"
-    }
-  }
-  required_version = ">= 1.0"
-}
diff --git a/src/_nebari/stages/infrastructure/template/do/outputs.tf b/src/_nebari/stages/infrastructure/template/do/outputs.tf
deleted file mode 100644
index 53aae17634..0000000000
--- a/src/_nebari/stages/infrastructure/template/do/outputs.tf
+++ /dev/null
@@ -1,21 +0,0 @@
-output "kubernetes_credentials" {
-  description = "Parameters needed to connect to kubernetes cluster"
-  sensitive   = true
-  value = {
-    host                   = module.kubernetes.credentials.endpoint
-    cluster_ca_certificate = module.kubernetes.credentials.cluster_ca_certificate
-    token                  = module.kubernetes.credentials.token
-  }
-}
-
-resource "local_file" "kubeconfig" {
-  count = var.kubeconfig_filename != null ? 1 : 0
-
-  content  = module.kubernetes.kubeconfig
-  filename = var.kubeconfig_filename
-}
-
-output "kubeconfig_filename" {
-  description = "filename for nebari kubeconfig"
-  value       = var.kubeconfig_filename
-}
diff --git a/src/_nebari/stages/infrastructure/template/do/providers.tf b/src/_nebari/stages/infrastructure/template/do/providers.tf
deleted file mode 100644
index a877aca363..0000000000
--- a/src/_nebari/stages/infrastructure/template/do/providers.tf
+++ /dev/null
@@ -1,3 +0,0 @@
-provider "digitalocean" {
-
-}
diff --git a/src/_nebari/stages/infrastructure/template/do/variables.tf b/src/_nebari/stages/infrastructure/template/do/variables.tf
deleted file mode 100644
index b31a1ab039..0000000000
--- a/src/_nebari/stages/infrastructure/template/do/variables.tf
+++ /dev/null
@@ -1,40 +0,0 @@
-variable "name" {
-  description = "Prefix name to assign to nebari resources"
-  type        = string
-}
-
-variable "environment" {
-  description = "Environment to create Kubernetes resources"
-  type        = string
-}
-
-variable "region" {
-  description = "DigitalOcean region"
-  type        = string
-}
-
-variable "tags" {
-  description = "DigitalOcean tags to assign to resources"
-  type        = list(string)
-  default     = []
-}
-
-variable "kubernetes_version" {
-  description = "DigitalOcean kubernetes version"
-  type        = string
-}
-
-variable "node_groups" {
-  description = "DigitalOcean node groups"
-  type = map(object({
-    instance  = string
-    min_nodes = number
-    max_nodes = number
-  }))
-}
-
-variable "kubeconfig_filename" {
-  description = "Kubernetes kubeconfig written to filesystem"
-  type        = string
-  default     = null
-}
diff --git a/src/_nebari/stages/infrastructure/template/do/versions.tf b/src/_nebari/stages/infrastructure/template/do/versions.tf
deleted file mode 100644
index b320a102dd..0000000000
--- a/src/_nebari/stages/infrastructure/template/do/versions.tf
+++ /dev/null
@@ -1,9 +0,0 @@
-terraform {
-  required_providers {
-    digitalocean = {
-      source  = "digitalocean/digitalocean"
-      version = "2.29.0"
-    }
-  }
-  required_version = ">= 1.0"
-}
diff --git a/src/_nebari/stages/kubernetes_ingress/__init__.py b/src/_nebari/stages/kubernetes_ingress/__init__.py
index ea5f8fa335..df70e12b1e 100644
--- a/src/_nebari/stages/kubernetes_ingress/__init__.py
+++ b/src/_nebari/stages/kubernetes_ingress/__init__.py
@@ -43,7 +43,6 @@ def provision_ingress_dns(
     record_name = ".".join(record_name)
     zone_name = ".".join(zone_name)
     if config.provider in {
-        schema.ProviderEnum.do,
         schema.ProviderEnum.gcp,
         schema.ProviderEnum.azure,
     }:
diff --git a/src/_nebari/stages/terraform_state/__init__.py b/src/_nebari/stages/terraform_state/__init__.py
index e0f643ed3d..690e556b0b 100644
--- a/src/_nebari/stages/terraform_state/__init__.py
+++ b/src/_nebari/stages/terraform_state/__init__.py
@@ -22,12 +22,6 @@
 from nebari.hookspecs import NebariStage, hookimpl


-class DigitalOceanInputVars(schema.Base):
-    name: str
-    namespace: str
-    region: str
-
-
 class GCPInputVars(schema.Base):
     name: str
     namespace: str
@@ -117,14 +111,7 @@ def stage_prefix(self):
         return pathlib.Path("stages") / self.name / self.config.provider.value

     def state_imports(self) -> List[Tuple[str, str]]:
-        if self.config.provider == schema.ProviderEnum.do:
-            return [
-                (
-                    "module.terraform-state.module.spaces.digitalocean_spaces_bucket.main",
-                    f"{self.config.digital_ocean.region},{self.config.project_name}-{self.config.namespace}-terraform-state",
-                )
-            ]
-        elif self.config.provider == schema.ProviderEnum.gcp:
+        if self.config.provider == schema.ProviderEnum.gcp:
             return [
                 (
"module.terraform-state.module.gcs.google_storage_bucket.static-site", @@ -191,13 +178,7 @@ def tf_objects(self) -> List[Dict]: return resources def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): - if self.config.provider == schema.ProviderEnum.do: - return DigitalOceanInputVars( - name=self.config.project_name, - namespace=self.config.namespace, - region=self.config.digital_ocean.region, - ).model_dump() - elif self.config.provider == schema.ProviderEnum.gcp: + if self.config.provider == schema.ProviderEnum.gcp: return GCPInputVars( name=self.config.project_name, namespace=self.config.namespace, @@ -240,15 +221,6 @@ def deploy( # terraform show command, inside check_immutable_fields with super().deploy(stage_outputs, disable_prompt, terraform_init=False): env_mapping = {} - # DigitalOcean terraform remote state using Spaces Bucket - # assumes aws credentials thus we set them to match spaces credentials - if self.config.provider == schema.ProviderEnum.do: - env_mapping.update( - { - "AWS_ACCESS_KEY_ID": os.environ["SPACES_ACCESS_KEY_ID"], - "AWS_SECRET_ACCESS_KEY": os.environ["SPACES_SECRET_ACCESS_KEY"], - } - ) with modified_environ(**env_mapping): yield @@ -310,15 +282,6 @@ def destroy( ): with super().destroy(stage_outputs, status): env_mapping = {} - # DigitalOcean terraform remote state using Spaces Bucket - # assumes aws credentials thus we set them to match spaces credentials - if self.config.provider == schema.ProviderEnum.do: - env_mapping.update( - { - "AWS_ACCESS_KEY_ID": os.environ["SPACES_ACCESS_KEY_ID"], - "AWS_SECRET_ACCESS_KEY": os.environ["SPACES_SECRET_ACCESS_KEY"], - } - ) with modified_environ(**env_mapping): yield diff --git a/src/_nebari/stages/terraform_state/template/do/main.tf b/src/_nebari/stages/terraform_state/template/do/main.tf deleted file mode 100644 index a6db74f74d..0000000000 --- a/src/_nebari/stages/terraform_state/template/do/main.tf +++ /dev/null @@ -1,35 +0,0 @@ -variable "name" { - description = "Prefix name to assign to Nebari resources" - type = string -} - -variable "namespace" { - description = "Namespace to create Kubernetes resources" - type = string -} - -variable "region" { - description = "Region for Digital Ocean deployment" - type = string -} - -provider "digitalocean" { - -} - -module "terraform-state" { - source = "./modules/terraform-state" - - name = "${var.name}-${var.namespace}" - region = var.region -} - -terraform { - required_providers { - digitalocean = { - source = "digitalocean/digitalocean" - version = "2.29.0" - } - } - required_version = ">= 1.0" -} diff --git a/src/_nebari/stages/terraform_state/template/do/modules/spaces/main.tf b/src/_nebari/stages/terraform_state/template/do/modules/spaces/main.tf deleted file mode 100644 index fc2d34c604..0000000000 --- a/src/_nebari/stages/terraform_state/template/do/modules/spaces/main.tf +++ /dev/null @@ -1,12 +0,0 @@ -resource "digitalocean_spaces_bucket" "main" { - name = var.name - region = var.region - - force_destroy = var.force_destroy - - acl = (var.public ? 
"public-read" : "private") - - versioning { - enabled = false - } -} diff --git a/src/_nebari/stages/terraform_state/template/do/modules/spaces/variables.tf b/src/_nebari/stages/terraform_state/template/do/modules/spaces/variables.tf deleted file mode 100644 index db24a3dce5..0000000000 --- a/src/_nebari/stages/terraform_state/template/do/modules/spaces/variables.tf +++ /dev/null @@ -1,21 +0,0 @@ -variable "name" { - description = "Prefix name for bucket resource" - type = string -} - -variable "region" { - description = "Region for Digital Ocean bucket" - type = string -} - -variable "force_destroy" { - description = "force_destroy all bucket contents when bucket is deleted" - type = bool - default = false -} - -variable "public" { - description = "Digital Ocean s3 bucket is exposed publicly" - type = bool - default = false -} diff --git a/src/_nebari/stages/terraform_state/template/do/modules/spaces/versions.tf b/src/_nebari/stages/terraform_state/template/do/modules/spaces/versions.tf deleted file mode 100644 index b320a102dd..0000000000 --- a/src/_nebari/stages/terraform_state/template/do/modules/spaces/versions.tf +++ /dev/null @@ -1,9 +0,0 @@ -terraform { - required_providers { - digitalocean = { - source = "digitalocean/digitalocean" - version = "2.29.0" - } - } - required_version = ">= 1.0" -} diff --git a/src/_nebari/stages/terraform_state/template/do/modules/terraform-state/main.tf b/src/_nebari/stages/terraform_state/template/do/modules/terraform-state/main.tf deleted file mode 100644 index e3445f362d..0000000000 --- a/src/_nebari/stages/terraform_state/template/do/modules/terraform-state/main.tf +++ /dev/null @@ -1,9 +0,0 @@ -module "spaces" { - source = "../spaces" - - name = "${var.name}-terraform-state" - region = var.region - public = false - - force_destroy = true -} diff --git a/src/_nebari/stages/terraform_state/template/do/modules/terraform-state/variables.tf b/src/_nebari/stages/terraform_state/template/do/modules/terraform-state/variables.tf deleted file mode 100644 index 8010647d39..0000000000 --- a/src/_nebari/stages/terraform_state/template/do/modules/terraform-state/variables.tf +++ /dev/null @@ -1,9 +0,0 @@ -variable "name" { - description = "Prefix name for terraform state" - type = string -} - -variable "region" { - description = "Region for terraform state" - type = string -} diff --git a/src/_nebari/stages/terraform_state/template/do/modules/terraform-state/versions.tf b/src/_nebari/stages/terraform_state/template/do/modules/terraform-state/versions.tf deleted file mode 100644 index b320a102dd..0000000000 --- a/src/_nebari/stages/terraform_state/template/do/modules/terraform-state/versions.tf +++ /dev/null @@ -1,9 +0,0 @@ -terraform { - required_providers { - digitalocean = { - source = "digitalocean/digitalocean" - version = "2.29.0" - } - } - required_version = ">= 1.0" -} diff --git a/src/_nebari/stages/tf_objects.py b/src/_nebari/stages/tf_objects.py index 04c6d434aa..c593eee4b0 100644 --- a/src/_nebari/stages/tf_objects.py +++ b/src/_nebari/stages/tf_objects.py @@ -69,16 +69,6 @@ def NebariTerraformState(directory: str, nebari_config: schema.Main): bucket=f"{nebari_config.escaped_project_name}-{nebari_config.namespace}-terraform-state", prefix=f"terraform/{nebari_config.escaped_project_name}/{directory}", ) - elif nebari_config.provider == "do": - return TerraformBackend( - "s3", - endpoint=f"{nebari_config.digital_ocean.region}.digitaloceanspaces.com", - region="us-west-1", # fake aws region required by terraform - 
-            bucket=f"{nebari_config.escaped_project_name}-{nebari_config.namespace}-terraform-state",
-            key=f"terraform/{nebari_config.escaped_project_name}-{nebari_config.namespace}/{directory}.tfstate",
-            skip_credentials_validation=True,
-            skip_metadata_api_check=True,
-        )
     elif nebari_config.provider == "azure":
         return TerraformBackend(
             "azurerm",
diff --git a/src/_nebari/subcommands/init.py b/src/_nebari/subcommands/init.py
index 0b5f36bc7e..e794841ea7 100644
--- a/src/_nebari/subcommands/init.py
+++ b/src/_nebari/subcommands/init.py
@@ -13,16 +13,10 @@
 from _nebari.constants import (
     AWS_DEFAULT_REGION,
     AZURE_DEFAULT_REGION,
-    DO_DEFAULT_REGION,
     GCP_DEFAULT_REGION,
 )
 from _nebari.initialize import render_config
-from _nebari.provider.cloud import (
-    amazon_web_services,
-    azure_cloud,
-    digital_ocean,
-    google_cloud,
-)
+from _nebari.provider.cloud import amazon_web_services, azure_cloud, google_cloud
 from _nebari.stages.bootstrap import CiEnum
 from _nebari.stages.kubernetes_keycloak import AuthenticationEnum
 from _nebari.stages.terraform_state import TerraformStateEnum
@@ -44,18 +38,13 @@
 CREATE_GCP_CREDS = (
     "https://cloud.google.com/iam/docs/creating-managing-service-accounts"
 )
-CREATE_DO_CREDS = (
-    "https://docs.digitalocean.com/reference/api/create-personal-access-token"
-)
 CREATE_AZURE_CREDS = "https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/guides/service_principal_client_secret#creating-a-service-principal-in-the-azure-portal"
 CREATE_AUTH0_CREDS = "https://auth0.com/docs/get-started/auth0-overview/create-applications/machine-to-machine-apps"
 CREATE_GITHUB_OAUTH_CREDS = "https://docs.github.com/en/developers/apps/building-oauth-apps/creating-an-oauth-app"
 AWS_REGIONS = "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-regions-availability-zones.html#concepts-regions"
 GCP_REGIONS = "https://cloud.google.com/compute/docs/regions-zones"
 AZURE_REGIONS = "https://azure.microsoft.com/en-us/explore/global-infrastructure/geographies/#overview"
-DO_REGIONS = (
-    "https://docs.digitalocean.com/products/platform/availability-matrix/#regions"
-)
+

 # links to Nebari docs
 DOCS_HOME = "https://nebari.dev/docs/"
@@ -78,7 +67,6 @@
 CLOUD_PROVIDER_FULL_NAME = {
     "Local": ProviderEnum.local.name,
     "Existing": ProviderEnum.existing.name,
-    "Digital Ocean": ProviderEnum.do.name,
     "Amazon Web Services": ProviderEnum.aws.name,
     "Google Cloud Platform": ProviderEnum.gcp.name,
     "Microsoft Azure": ProviderEnum.azure.name,
@@ -120,8 +108,6 @@ def get_region_docs(cloud_provider: str):
         return GCP_REGIONS
     elif cloud_provider == ProviderEnum.azure.value.lower():
         return AZURE_REGIONS
-    elif cloud_provider == ProviderEnum.do.value.lower():
-        return DO_REGIONS


 def handle_init(inputs: InitInputs, config_schema: BaseModel):
@@ -312,36 +298,6 @@ def check_cloud_provider_creds(cloud_provider: ProviderEnum, disable_prompt: boo
             hide_input=True,
         )

-    # DO
-    elif cloud_provider == ProviderEnum.do.value.lower() and (
-        not os.environ.get("DIGITALOCEAN_TOKEN")
-        or not os.environ.get("SPACES_ACCESS_KEY_ID")
-        or not os.environ.get("SPACES_SECRET_ACCESS_KEY")
-    ):
-        rich.print(
-            MISSING_CREDS_TEMPLATE.format(
-                provider="Digital Ocean", link_to_docs=CREATE_DO_CREDS
-            )
-        )
-
-        os.environ["DIGITALOCEAN_TOKEN"] = typer.prompt(
-            "Paste your DIGITALOCEAN_TOKEN",
-            hide_input=True,
-        )
-        os.environ["SPACES_ACCESS_KEY_ID"] = typer.prompt(
-            "Paste your SPACES_ACCESS_KEY_ID",
-            hide_input=True,
-        )
-        os.environ["SPACES_SECRET_ACCESS_KEY"] = typer.prompt(
-            "Paste your SPACES_SECRET_ACCESS_KEY",
-            hide_input=True,
-        )
-        # Set spaces credentials. Spaces are API compatible with s3
-        # Setting spaces credentials to AWS credentials allows us to
-        # reuse s3 code
-        os.environ["AWS_ACCESS_KEY_ID"] = os.getenv("SPACES_ACCESS_KEY_ID")
-        os.environ["AWS_SECRET_ACCESS_KEY"] = os.getenv("SPACES_SECRET_ACCESS_KEY")
-
     # AZURE
     elif cloud_provider == ProviderEnum.azure.value.lower() and (
         not os.environ.get("ARM_CLIENT_ID")
@@ -421,20 +377,6 @@ def check_cloud_provider_kubernetes_version(
             raise ValueError(
                 f"Invalid Kubernetes version `{kubernetes_version}`. Please refer to the GCP docs for a list of valid versions: {versions}"
             )
-    elif cloud_provider == ProviderEnum.do.value.lower():
-        versions = digital_ocean.kubernetes_versions()
-
-        if not kubernetes_version or kubernetes_version == LATEST:
-            kubernetes_version = get_latest_kubernetes_version(versions)
-            rich.print(
-                DEFAULT_KUBERNETES_VERSION_MSG.format(
-                    kubernetes_version=kubernetes_version
-                )
-            )
-        if kubernetes_version not in versions:
-            raise ValueError(
-                f"Invalid Kubernetes version `{kubernetes_version}`. Please refer to the DO docs for a list of valid versions: {versions}"
-            )
     return kubernetes_version
@@ -464,15 +406,7 @@ def check_cloud_provider_region(region: str, cloud_provider: str) -> str:
             raise ValueError(
                 f"Invalid region `{region}`. Please refer to the GCP docs for a list of valid regions: {GCP_REGIONS}"
             )
-    elif cloud_provider == ProviderEnum.do.value.lower():
-        if not region:
-            region = DO_DEFAULT_REGION
-            rich.print(DEFAULT_REGION_MSG.format(region=region))
-        if region not in set(_["slug"] for _ in digital_ocean.regions()):
-            raise ValueError(
-                f"Invalid region `{region}`. Please refer to the DO docs for a list of valid regions: {DO_REGIONS}"
-            )
     return region
@@ -598,10 +532,10 @@ def init(
             cloud_provider, disable_prompt
         )

-    # Digital Ocean deprecation warning -- Nebari 2024.7.1
-    if inputs.cloud_provider == ProviderEnum.do.value.lower():
+    # DigitalOcean is no longer supported
+    if inputs.cloud_provider == "do":
         rich.print(
-            ":warning: Digital Ocean support is being deprecated and support will be removed in the future. :warning:\n"
+            ":warning: DigitalOcean is no longer supported. To use Nebari on DigitalOcean, deploy to an existing Kubernetes cluster and select the `existing` provider. :warning:\n"
         )

     inputs.region = check_cloud_provider_region(region, inputs.cloud_provider)
diff --git a/src/_nebari/upgrade.py b/src/_nebari/upgrade.py
index 41f6bb2304..b29bec03c8 100644
--- a/src/_nebari/upgrade.py
+++ b/src/_nebari/upgrade.py
@@ -1198,7 +1198,7 @@ class Upgrade_2024_7_1(UpgradeStep):
     def _version_specific_upgrade(
         self, config, start_version, config_filename: Path, *args, **kwargs
     ):
-        if config.get("provider", "") == ProviderEnum.do.value:
+        if config.get("provider", "") == "do":
             rich.print("\n ⚠️ Deprecation Warning ⚠️")
             rich.print(
                 "-> Digital Ocean support is currently being deprecated and will be removed in a future release.",
@@ -1246,7 +1246,7 @@ def _version_specific_upgrade(
                 ),
             )
             rich.print("")
-        elif config.get("provider", "") == ProviderEnum.do.value:
+        elif config.get("provider", "") == "do":
             rich.print("\n ⚠️ Deprecation Warning ⚠️")
             rich.print(
                 "-> Digital Ocean support is currently being deprecated and will be removed in a future release.",
diff --git a/src/_nebari/utils.py b/src/_nebari/utils.py
index 5f0877666a..f3d62f353d 100644
--- a/src/_nebari/utils.py
+++ b/src/_nebari/utils.py
@@ -286,11 +286,6 @@ def random_secure_string(
     return "".join(secrets.choice(chars) for i in range(length))


-def set_do_environment():
-    os.environ["AWS_ACCESS_KEY_ID"] = os.environ["SPACES_ACCESS_KEY_ID"]
-    os.environ["AWS_SECRET_ACCESS_KEY"] = os.environ["SPACES_SECRET_ACCESS_KEY"]
-
-
 def set_docker_image_tag() -> str:
     """Set docker image tag for `jupyterlab`, `jupyterhub`, and `dask-worker`."""
     return os.environ.get("NEBARI_IMAGE_TAG", constants.DEFAULT_NEBARI_IMAGE_TAG)
@@ -348,7 +343,6 @@ def get_provider_config_block_name(provider):
     PROVIDER_CONFIG_NAMES = {
         "aws": "amazon_web_services",
         "azure": "azure",
-        "do": "digital_ocean",
         "gcp": "google_cloud_platform",
     }
diff --git a/src/nebari/schema.py b/src/nebari/schema.py
index 6a809842d7..b45af521be 100644
--- a/src/nebari/schema.py
+++ b/src/nebari/schema.py
@@ -35,7 +35,6 @@ class Base(pydantic.BaseModel):
 class ProviderEnum(str, enum.Enum):
     local = "local"
     existing = "existing"
-    do = "do"
     aws = "aws"
     gcp = "gcp"
     azure = "azure"
diff --git a/tests/tests_integration/README.md b/tests/tests_integration/README.md
index 759a70a594..79c037a390 100644
--- a/tests/tests_integration/README.md
+++ b/tests/tests_integration/README.md
@@ -3,26 +3,6 @@
 These tests are designed to test things on Nebari deployed
 on cloud.

-
-## Digital Ocean
-
-```bash
-DIGITALOCEAN_TOKEN
-NEBARI_K8S_VERSION
-SPACES_ACCESS_KEY_ID
-SPACES_SECRET_ACCESS_KEY
-CLOUDFLARE_TOKEN
-```
-
-Assuming you're in the `tests_integration` directory, run:
-
-```bash
-pytest -vvv -s --cloud do
-```
-
-This will deploy on Nebari on Digital Ocean, run tests on the deployment
-and then teardown the cluster.
-
 ## Amazon Web Services

 ```bash
diff --git a/tests/tests_integration/conftest.py b/tests/tests_integration/conftest.py
index 4a64fd4274..b4b7a9af79 100644
--- a/tests/tests_integration/conftest.py
+++ b/tests/tests_integration/conftest.py
@@ -7,5 +7,5 @@
 # argparse under-the-hood
 def pytest_addoption(parser):
     parser.addoption(
-        "--cloud", action="store", help="Cloud to deploy on: aws/do/gcp/azure"
+        "--cloud", action="store", help="Cloud to deploy on: aws/gcp/azure"
     )
diff --git a/tests/tests_integration/deployment_fixtures.py b/tests/tests_integration/deployment_fixtures.py
index f5752d4c24..4ece916667 100644
--- a/tests/tests_integration/deployment_fixtures.py
+++ b/tests/tests_integration/deployment_fixtures.py
@@ -16,10 +16,8 @@
 from _nebari.destroy import destroy_configuration
 from _nebari.provider.cloud.amazon_web_services import aws_cleanup
 from _nebari.provider.cloud.azure_cloud import azure_cleanup
-from _nebari.provider.cloud.digital_ocean import digital_ocean_cleanup
 from _nebari.provider.cloud.google_cloud import gcp_cleanup
 from _nebari.render import render_template
-from _nebari.utils import set_do_environment
 from nebari import schema
 from tests.common.config_mod_utils import add_gpu_config, add_preemptible_node_group
 from tests.tests_unit.utils import render_config_partial
@@ -98,10 +96,7 @@ def _cleanup_nebari(config: schema.Main):

     cloud_provider = config.provider

-    if cloud_provider == schema.ProviderEnum.do.value.lower():
-        logger.info("Forcefully clean up Digital Ocean resources")
-        digital_ocean_cleanup(config)
-    elif cloud_provider == schema.ProviderEnum.aws.lower():
+    if cloud_provider == schema.ProviderEnum.aws.lower():
         logger.info("Forcefully clean up AWS resources")
         aws_cleanup(config)
     elif cloud_provider == schema.ProviderEnum.gcp.lower():
@@ -119,9 +114,6 @@ def deploy(request):
     cloud = request.config.getoption("--cloud")

     # initialize
-    if cloud == "do":
-        set_do_environment()
-
     deployment_dir = _get_or_create_deployment_directory(cloud)
     config = render_config_partial(
         project_name=deployment_dir.name,
diff --git a/tests/tests_unit/cli_validate/do.happy.yaml b/tests/tests_unit/cli_validate/do.happy.yaml
deleted file mode 100644
index 0ec2a7b528..0000000000
--- a/tests/tests_unit/cli_validate/do.happy.yaml
+++ /dev/null
@@ -1,25 +0,0 @@
-provider: do
-namespace: dev
-nebari_version: 2023.7.2.dev23+g53d17964.d20230824
-project_name: test
-domain: test.example.com
-ci_cd:
-  type: none
-terraform_state:
-  type: local
-security:
-  keycloak:
-    initial_root_password: m1s25vc4k43dxbk5jaxubxcq39n4vmjq
-  authentication:
-    type: password
-theme:
-  jupyterhub:
-    hub_title: Nebari - test
-    welcome: Welcome! Learn about Nebari's features and configurations in the
-      documentation. If you have any questions or feedback, reach the team on
-      Nebari's support
-      forums.
-    hub_subtitle: Your open source data science platform, hosted on Azure
-certificate:
-  type: lets-encrypt
-  acme_email: test@example.com
diff --git a/tests/tests_unit/qhub-config-yaml-files-for-upgrade/qhub-config-aws-310-customauth.yaml b/tests/tests_unit/qhub-config-yaml-files-for-upgrade/qhub-config-aws-310-customauth.yaml
index f68e36a002..28877bf1bc 100644
--- a/tests/tests_unit/qhub-config-yaml-files-for-upgrade/qhub-config-aws-310-customauth.yaml
+++ b/tests/tests_unit/qhub-config-yaml-files-for-upgrade/qhub-config-aws-310-customauth.yaml
@@ -32,7 +32,7 @@ storage:
 theme:
   jupyterhub:
     hub_title: Nebari - do-pytest
-    hub_subtitle: Autoscaling Compute Environment on Digital Ocean
+    hub_subtitle: Autoscaling Compute Environment on AWS
     welcome: Welcome to do.nebari.dev. It is maintained by Quansight staff. The hub's
       configuration is stored in a github repository based on https://github.com/Quansight/nebari/.
diff --git a/tests/tests_unit/qhub-config-yaml-files-for-upgrade/qhub-config-aws-310.yaml b/tests/tests_unit/qhub-config-yaml-files-for-upgrade/qhub-config-aws-310.yaml
index 69eca528a1..874de58b61 100644
--- a/tests/tests_unit/qhub-config-yaml-files-for-upgrade/qhub-config-aws-310.yaml
+++ b/tests/tests_unit/qhub-config-yaml-files-for-upgrade/qhub-config-aws-310.yaml
@@ -29,7 +29,7 @@ storage:
 theme:
   jupyterhub:
     hub_title: Nebari - do-pytest
-    hub_subtitle: Autoscaling Compute Environment on Digital Ocean
+    hub_subtitle: Autoscaling Compute Environment on AWS
     welcome: Welcome to do.nebari.dev. It is maintained by Quansight staff. The hub's
       configuration is stored in a github repository based on https://github.com/Quansight/nebari/.
diff --git a/tests/tests_unit/test_cli_upgrade.py b/tests/tests_unit/test_cli_upgrade.py
index aa79838bee..8cbac5d573 100644
--- a/tests/tests_unit/test_cli_upgrade.py
+++ b/tests/tests_unit/test_cli_upgrade.py
@@ -18,13 +18,11 @@
     "aws": ["1.20"],
     "azure": ["1.20"],
     "gcp": ["1.20"],
-    "do": ["1.21.5-do.0"],
 }
 MOCK_CLOUD_REGIONS = {
     "aws": ["us-east-1"],
     "azure": [AZURE_DEFAULT_REGION],
     "gcp": ["us-central1"],
-    "do": ["nyc3"],
 }


@@ -106,7 +104,7 @@ def test_cli_upgrade_2023_4_1_to_2023_5_1(monkeypatch: pytest.MonkeyPatch):

 @pytest.mark.parametrize(
     "provider",
-    ["aws", "azure", "do", "gcp"],
+    ["aws", "azure", "gcp"],
 )
 def test_cli_upgrade_2023_5_1_to_2023_7_1(
     monkeypatch: pytest.MonkeyPatch, provider: str
@@ -434,9 +432,6 @@ def test_cli_upgrade_to_2023_10_1_cdsdashboard_removed(monkeypatch: pytest.Monke
         ("azure", "compatible"),
         ("azure", "incompatible"),
         ("azure", "invalid"),
-        ("do", "compatible"),
-        ("do", "incompatible"),
-        ("do", "invalid"),
         ("gcp", "compatible"),
         ("gcp", "incompatible"),
         ("gcp", "invalid"),
@@ -452,11 +447,6 @@ def test_cli_upgrade_to_2023_10_1_kubernetes_validations(
     kubernetes_configs = {
         "aws": {"incompatible": "1.19", "compatible": "1.26", "invalid": "badname"},
         "azure": {"incompatible": "1.23", "compatible": "1.26", "invalid": "badname"},
-        "do": {
-            "incompatible": "1.19.2-do.3",
-            "compatible": "1.26.0-do.custom",
-            "invalid": "badname",
-        },
         "gcp": {"incompatible": "1.23", "compatible": "1.26", "invalid": "badname"},
     }
diff --git a/tests/tests_unit/test_links.py b/tests/tests_unit/test_links.py
index a393391ce9..6e8529149e 100644
--- a/tests/tests_unit/test_links.py
+++ b/tests/tests_unit/test_links.py
@@ -1,10 +1,9 @@
 import pytest
 import requests

-from _nebari.constants import AWS_ENV_DOCS, AZURE_ENV_DOCS, DO_ENV_DOCS, GCP_ENV_DOCS
+from _nebari.constants import AWS_ENV_DOCS, AZURE_ENV_DOCS, GCP_ENV_DOCS
 LINKS_TO_TEST = [
-    DO_ENV_DOCS,
     AWS_ENV_DOCS,
     GCP_ENV_DOCS,
     AZURE_ENV_DOCS,
diff --git a/tests/tests_unit/test_schema.py b/tests/tests_unit/test_schema.py
index b88a32eec7..5c21aef8d6 100644
--- a/tests/tests_unit/test_schema.py
+++ b/tests/tests_unit/test_schema.py
@@ -62,12 +62,11 @@ def test_render_schema(nebari_config):
             "fake",
             pytest.raises(
                 ValueError,
-                match="'fake' is not a valid enumeration member; permitted: local, existing, do, aws, gcp, azure",
+                match="'fake' is not a valid enumeration member; permitted: local, existing, aws, gcp, azure",
             ),
         ),
         ("aws", nullcontext()),
         ("gcp", nullcontext()),
-        ("do", nullcontext()),
         ("azure", nullcontext()),
         ("existing", nullcontext()),
         ("local", nullcontext()),
diff --git a/tests/tests_unit/utils.py b/tests/tests_unit/utils.py
index 82dffdcd3c..eddc66f52f 100644
--- a/tests/tests_unit/utils.py
+++ b/tests/tests_unit/utils.py
@@ -15,7 +15,6 @@
 )

 INIT_INPUTS = [
     # project, namespace, domain, cloud_provider, ci_provider, auth_provider
-    ("pytestdo", "dev", "do.nebari.dev", "do", "github-actions", "github"),
     ("pytestaws", "dev", "aws.nebari.dev", "aws", "github-actions", "github"),
     ("pytestgcp", "dev", "gcp.nebari.dev", "gcp", "github-actions", "github"),
     ("pytestazure", "dev", "azure.nebari.dev", "azure", "github-actions", "github"),
diff --git a/tests/utils.py b/tests/utils.py
index 82dffdcd3c..eddc66f52f 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -15,7 +15,6 @@
 )

 INIT_INPUTS = [
     # project, namespace, domain, cloud_provider, ci_provider, auth_provider
-    ("pytestdo", "dev", "do.nebari.dev", "do", "github-actions", "github"),
    ("pytestaws", "dev", "aws.nebari.dev", "aws", "github-actions", "github"),
     ("pytestgcp", "dev", "gcp.nebari.dev", "gcp", "github-actions", "github"),
     ("pytestazure", "dev", "azure.nebari.dev", "azure", "github-actions", "github"),
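Migration note for DigitalOcean users: with the `do` provider removed, the remaining path (as the new warning in `src/_nebari/subcommands/init.py` states) is to provision a Kubernetes cluster yourself and point Nebari at it via the `existing` provider, which this PR leaves intact. Below is a minimal sketch of a `nebari-config.yaml` fragment, not an officially documented migration: the `kube_context` name is a hypothetical placeholder that must match a context in your own kubeconfig, and the node-pool values must match your cluster. The `doks.digitalocean.com/node-pool` label key is the one the removed `_calculate_node_groups` branch used for DOKS nodes.

```yaml
# Sketch only -- assumes a self-provisioned DOKS cluster reachable via kubeconfig.
provider: existing
existing:
  # hypothetical kubeconfig context pointing at your DOKS cluster
  kube_context: do-nebari-dev
  # map Nebari's node groups onto labels carried by your nodes;
  # DOKS applies doks.digitalocean.com/node-pool to every node
  node_selectors:
    general:
      key: doks.digitalocean.com/node-pool
      value: general
    user:
      key: doks.digitalocean.com/node-pool
      value: user
    worker:
      key: doks.digitalocean.com/node-pool
      value: worker
```

With this in place, `nebari deploy` skips cloud provisioning entirely and installs into the referenced cluster, so none of the removed DigitalOcean Terraform templates or Spaces-backed state handling is needed.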