diff --git a/.github/workflows/e2e-suite.yml b/.github/workflows/e2e-suite.yml index c2cc93845..eae65d1ab 100644 --- a/.github/workflows/e2e-suite.yml +++ b/.github/workflows/e2e-suite.yml @@ -294,7 +294,7 @@ jobs: steps: - name: Notify Slack id: main_message - uses: slackapi/slack-github-action@v2.0.0 + uses: slackapi/slack-github-action@v2.1.0 with: method: chat.postMessage token: ${{ secrets.SLACK_BOT_TOKEN }} @@ -326,7 +326,7 @@ jobs: - name: Test summary thread if: success() - uses: slackapi/slack-github-action@v2.0.0 + uses: slackapi/slack-github-action@v2.1.0 with: method: chat.postMessage token: ${{ secrets.SLACK_BOT_TOKEN }} diff --git a/.github/workflows/nightly-smoke-tests.yml b/.github/workflows/nightly-smoke-tests.yml index 372de28c4..638b84f8d 100644 --- a/.github/workflows/nightly-smoke-tests.yml +++ b/.github/workflows/nightly-smoke-tests.yml @@ -46,7 +46,7 @@ jobs: - name: Notify Slack if: always() && github.repository == 'linode/linode-cli' # Run even if integration tests fail and only on main repository - uses: slackapi/slack-github-action@v2.0.0 + uses: slackapi/slack-github-action@v2.1.0 with: method: chat.postMessage token: ${{ secrets.SLACK_BOT_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 3dae5132f..23880e0fd 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,7 +11,7 @@ jobs: steps: - name: Notify Slack - Main Message id: main_message - uses: slackapi/slack-github-action@v2.0.0 + uses: slackapi/slack-github-action@v2.1.0 with: method: chat.postMessage token: ${{ secrets.SLACK_BOT_TOKEN }} @@ -67,7 +67,7 @@ jobs: result-encoding: string - name: Build and push to DockerHub - uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # pin@v6.16.0 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # pin@v6.18.0 with: context: . 
file: Dockerfile diff --git a/linodecli/api_request.py b/linodecli/api_request.py index 3e1bc6361..1d66207ad 100644 --- a/linodecli/api_request.py +++ b/linodecli/api_request.py @@ -23,6 +23,7 @@ ExplicitNullValue, OpenAPIOperation, ) +from .baked.util import get_path_segments from .helpers import handle_url_overrides if TYPE_CHECKING: @@ -364,13 +365,15 @@ def _build_request_body( if v is None or k in param_names: continue + path_segments = get_path_segments(k) + cur = expanded_json - for part in k.split(".")[:-1]: + for part in path_segments[:-1]: if part not in cur: cur[part] = {} cur = cur[part] - cur[k.split(".")[-1]] = v + cur[path_segments[-1]] = v return json.dumps(_traverse_request_body(expanded_json)) diff --git a/linodecli/baked/operation.py b/linodecli/baked/operation.py index 6a427c0f2..cf93279a1 100644 --- a/linodecli/baked/operation.py +++ b/linodecli/baked/operation.py @@ -25,6 +25,7 @@ OpenAPIRequestArg, ) from linodecli.baked.response import OpenAPIResponse +from linodecli.baked.util import unescape_arg_segment from linodecli.exit_codes import ExitCodes from linodecli.output.output_handler import OutputHandler from linodecli.overrides import OUTPUT_OVERRIDES @@ -649,6 +650,9 @@ def _add_args_post_put( if arg.read_only: continue + arg_name_unescaped = unescape_arg_segment(arg.name) + arg_path_unescaped = unescape_arg_segment(arg.path) + arg_type = ( arg.item_type if arg.datatype == "array" else arg.datatype ) @@ -660,15 +664,17 @@ def _add_args_post_put( if arg.datatype == "array": # special handling for input arrays parser.add_argument( - "--" + arg.path, - metavar=arg.name, + "--" + arg_path_unescaped, + dest=arg.path, + metavar=arg_name_unescaped, action=ArrayAction, type=arg_type_handler, ) elif arg.is_child: parser.add_argument( - "--" + arg.path, - metavar=arg.name, + "--" + arg_path_unescaped, + dest=arg.path, + metavar=arg_name_unescaped, action=ListArgumentAction, type=arg_type_handler, ) @@ -677,7 +683,7 @@ def _add_args_post_put( if 
arg.datatype == "string" and arg.format == "password": # special case - password input parser.add_argument( - "--" + arg.path, + "--" + arg_path_unescaped, nargs="?", action=PasswordPromptAction, ) @@ -687,15 +693,17 @@ def _add_args_post_put( "ssl-key", ): parser.add_argument( - "--" + arg.path, - metavar=arg.name, + "--" + arg_path_unescaped, + dest=arg.path, + metavar=arg_name_unescaped, action=OptionalFromFileAction, type=arg_type_handler, ) else: parser.add_argument( - "--" + arg.path, - metavar=arg.name, + "--" + arg_path_unescaped, + dest=arg.path, + metavar=arg_name_unescaped, type=arg_type_handler, ) diff --git a/linodecli/baked/request.py b/linodecli/baked/request.py index e429895ac..1f3f0af4e 100644 --- a/linodecli/baked/request.py +++ b/linodecli/baked/request.py @@ -9,7 +9,10 @@ from linodecli.baked.parsing import simplify_description from linodecli.baked.response import OpenAPIResponse -from linodecli.baked.util import _aggregate_schema_properties +from linodecli.baked.util import ( + _aggregate_schema_properties, + escape_arg_segment, +) class OpenAPIRequestArg: @@ -152,6 +155,8 @@ def _parse_request_model( return args for k, v in properties.items(): + k = escape_arg_segment(k) + # Handle nested objects which aren't read-only and have properties if ( v.type == "object" diff --git a/linodecli/baked/util.py b/linodecli/baked/util.py index fbf9744ae..f229adb10 100644 --- a/linodecli/baked/util.py +++ b/linodecli/baked/util.py @@ -2,8 +2,9 @@ Provides various utility functions for use in baking logic. """ +import re from collections import defaultdict -from typing import Any, Dict, Set, Tuple +from typing import Any, Dict, List, Set, Tuple from openapi3.schemas import Schema @@ -51,3 +52,58 @@ def _handle_schema(_schema: Schema): # We only want to mark fields that are required by ALL subschema as required set(key for key, count in required.items() if count == schema_count), ) + + +ESCAPED_PATH_DELIMITER_PATTERN = re.compile(r"(? 
str: + """ + Escapes periods in a segment by prefixing them with a backslash. + + :param segment: The input string segment to escape. + :return: The escaped segment with periods replaced by '\\.'. + """ + return segment.replace(".", "\\.") + + +def unescape_arg_segment(segment: str) -> str: + """ + Reverses the escaping of periods in a segment, turning '\\.' back into '.'. + + :param segment: The input string segment to unescape. + :return: The unescaped segment with '\\.' replaced by '.'. + """ + return segment.replace("\\.", ".") + + +def get_path_segments(path: str) -> List[str]: + """ + Splits a path string into segments using a delimiter pattern, + and unescapes any escaped delimiters in the resulting segments. + + :param path: The full path string to split and unescape. + :return: A list of unescaped path segments. + """ + return [ + unescape_arg_segment(seg) + for seg in ESCAPED_PATH_DELIMITER_PATTERN.split(path) + ] + + +def get_terminal_keys(data: Dict[str, Any]) -> List[str]: + """ + Recursively retrieves all terminal (non-dict) keys from a nested dictionary. + + :param data: The input dictionary, possibly nested. + :return: A list of all terminal keys (keys whose values are not dictionaries). + """ + ret = [] + + for k, v in data.items(): + if isinstance(v, dict): + ret.extend(get_terminal_keys(v)) # recurse into nested dicts + else: + ret.append(k) # terminal key + + return ret diff --git a/linodecli/output/output_handler.py b/linodecli/output/output_handler.py index 2960887c2..cf1b0c332 100644 --- a/linodecli/output/output_handler.py +++ b/linodecli/output/output_handler.py @@ -16,6 +16,7 @@ from rich.table import Column, Table from linodecli.baked.response import OpenAPIResponse, OpenAPIResponseAttr +from linodecli.baked.util import get_terminal_keys class OutputMode(Enum): @@ -328,15 +329,30 @@ def _json_output(self, header, data, to): Prints data in JSON format """ # Special handling for JSON headers. 
- # We're only interested in the last part of the column name. - header = [v.split(".")[-1] for v in header] + # We're only interested in the last part of the column name, unless the last + # part is a dotted key. If the last part is a dotted key, include the entire dotted key. content = [] if len(data) and isinstance(data[0], dict): # we got delimited json in + parsed_header = [] + terminal_keys = get_terminal_keys(data[0]) + + for v in header: + parts = v.split(".") + if ( + len(parts) >= 2 + and ".".join([parts[-2], parts[-1]]) in terminal_keys + ): + parsed_header.append(".".join([parts[-2], parts[-1]])) + else: + parsed_header.append(parts[-1]) + # parse down to the value we display for row in data: - content.append(self._select_json_elements(header, row)) + content.append(self._select_json_elements(parsed_header, row)) else: # this is a list + header = [v.split(".")[-1] for v in header] + for row in data: content.append(dict(zip(header, row))) diff --git a/linodecli/overrides.py b/linodecli/overrides.py index 245190cd6..dff913877 100644 --- a/linodecli/overrides.py +++ b/linodecli/overrides.py @@ -47,6 +47,30 @@ def inner(func): return inner +@output_override("databases", "mysql-config-view", OutputMode.table) +def handle_databases_mysql_config_view( + operation, output_handler, json_data +) -> bool: + # pylint: disable=unused-argument + """ + Override the output of 'linode-cli databases mysql-config-view' + to properly display the mysql engine config. + """ + return databases_mysql_config_view_output(json_data) + + +@output_override("databases", "postgres-config-view", OutputMode.table) +def handle_databases_postgres_config_view( + operation, output_handler, json_data +) -> bool: + # pylint: disable=unused-argument + """ + Override the output of 'linode-cli databases postgres-config-view' + to properly display the postgresql engine config. 
+ """ + return databases_postgres_config_view_output(json_data) + + @output_override("domains", "zone-file", OutputMode.delimited) def handle_domains_zone_file(operation, output_handler, json_data) -> bool: # pylint: disable=unused-argument @@ -291,3 +315,97 @@ def pg_view_output(json_data) -> bool: console.print(output) return False + + +def add_param_row(output, param_name, param_data): + """ + Construct and add a row to the output table for DB Config view overrides. + """ + param_type = str(param_data.get("type", "")) + example = str(param_data.get("example", "")) + minimum = str(param_data.get("minimum", "")) + maximum = str(param_data.get("maximum", "")) + min_length = str(param_data.get("minLength", "")) + max_length = str(param_data.get("maxLength", "")) + pattern = str(param_data.get("pattern", "")) + requires_restart = "YES" if param_data.get("requires_restart") else "NO" + description = param_data.get("description", "") + + output.add_row( + param_name, + param_type, + example, + minimum, + maximum, + min_length, + max_length, + pattern, + requires_restart, + Align(description, align="left"), + ) + + +def databases_mysql_config_view_output(json_data) -> bool: + """ + Parse and format the MySQL configuration output table. 
+ """ + output = Table(header_style="bold", show_lines=True) + + output.add_column("Parameter", style="bold") + output.add_column("Type", justify="center") + output.add_column("Example", justify="center") + output.add_column("Min", justify="center") + output.add_column("Max", justify="center") + output.add_column("Min Length", justify="center") + output.add_column("Max Length", justify="center") + output.add_column("Pattern", justify="center") + output.add_column("Requires Restart", justify="center") + output.add_column("Description", style="dim") + + for field, params in json_data.items(): + if field in ["binlog_retention_period"]: + add_param_row(output, field, params) + else: + for key, val in params.items(): + param_name = f"{field}.{key}" + add_param_row(output, param_name, val) + + console = Console() + console.print(output) + + return False + + +def databases_postgres_config_view_output(json_data) -> bool: + """ + Parse and format the PostgreSQL configuration output table. + """ + output = Table(header_style="bold", show_lines=True) + + output.add_column("Parameter", style="bold") + output.add_column("Type", justify="center") + output.add_column("Example", justify="center") + output.add_column("Min", justify="center") + output.add_column("Max", justify="center") + output.add_column("Min Length", justify="center") + output.add_column("Max Length", justify="center") + output.add_column("Pattern", justify="center") + output.add_column("Requires Restart", justify="center") + output.add_column("Description", style="dim") + + for field, params in json_data.items(): + if field in [ + "pg_stat_monitor_enable", + "shared_buffers_percentage", + "work_mem", + ]: + add_param_row(output, field, params) + else: + for key, val in params.items(): + param_name = f"{field}.{key}" + add_param_row(output, param_name, val) + + console = Console() + console.print(output) + + return False diff --git a/tests/integration/database/test_database_engine_config.py 
b/tests/integration/database/test_database_engine_config.py new file mode 100644 index 000000000..3694e3726 --- /dev/null +++ b/tests/integration/database/test_database_engine_config.py @@ -0,0 +1,773 @@ +import json + +import pytest + +from tests.integration.helpers import ( + delete_target_id, + exec_test_command, + get_random_text, +) + +BASE_CMD = ["linode-cli", "databases"] + + +def get_expected_keys_pg_engine_config(): + # Basic checks for pg config keys + return [ + "autovacuum_analyze_scale_factor", + "autovacuum_analyze_threshold", + "autovacuum_max_workers", + "autovacuum_naptime", + "autovacuum_vacuum_cost_delay", + "autovacuum_vacuum_cost_limit", + "autovacuum_vacuum_scale_factor", + "autovacuum_vacuum_threshold", + "bgwriter_delay", + "bgwriter_flush_after", + "bgwriter_lru_maxpages", + "bgwriter_lru_multiplier", + "deadlock_timeout", + "default_toast_compression", + "idle_in_transaction_session_timeout", + "jit", + "max_files_per_process", + "max_locks_per_transaction", + "max_logical_replication_workers", + "max_parallel_workers", + "max_parallel_workers_per_gather", + "max_pred_locks_per_transaction", + "max_replication_slots", + "max_slot_wal_keep_size", + "max_stack_depth", + "max_standby_archive_delay", + "max_standby_streaming_delay", + "max_wal_senders", + "max_worker_processes", + "password_encryption", + "temp_file_limit", + "timezone", + "track_activity_query_size", + "track_commit_timestamp", + "track_functions", + "track_io_timing", + "wal_sender_timeout", + "wal_writer_delay", + ] + + +@pytest.fixture(scope="session") +def postgresql_db_engine_config(linode_cloud_firewall): + label = get_random_text(5) + "-postgresql-db" + database = json.loads( + exec_test_command( + BASE_CMD + + [ + "postgresql-create", + "--engine", + "postgresql/16", + "--label", + label, + "--region", + "us-east", + "--type", + "g6-standard-2", + "--allow_list", + "172.232.164.239", + "--cluster_size", + "3", + "--ssl_connection", + "true", + 
"--engine_config.pg.autovacuum_analyze_scale_factor", + "1", + "--engine_config.pg.autovacuum_analyze_threshold", + "2147483647", + "--engine_config.pg.autovacuum_max_workers", + "20", + "--engine_config.pg.autovacuum_naptime", + "86400", + "--engine_config.pg.autovacuum_vacuum_cost_delay", + "100", + "--engine_config.pg.autovacuum_vacuum_cost_limit", + "10000", + "--engine_config.pg.autovacuum_vacuum_scale_factor", + "1", + "--engine_config.pg.autovacuum_vacuum_threshold", + "2147483647", + "--engine_config.pg.bgwriter_delay", + "200", + "--engine_config.pg.bgwriter_flush_after", + "512", + "--engine_config.pg.bgwriter_lru_maxpages", + "100", + "--engine_config.pg.bgwriter_lru_multiplier", + "2.5", + "--engine_config.pg.deadlock_timeout", + "1000", + "--engine_config.pg.default_toast_compression", + "lz4", + "--engine_config.pg.idle_in_transaction_session_timeout", + "604800000", + "--engine_config.pg.jit", + "true", + "--engine_config.pg.max_files_per_process", + "1024", + "--engine_config.pg.max_locks_per_transaction", + "1024", + "--engine_config.pg.max_logical_replication_workers", + "64", + "--engine_config.pg.max_parallel_workers", + "96", + "--engine_config.pg.max_parallel_workers_per_gather", + "96", + "--engine_config.pg.max_pred_locks_per_transaction", + "5120", + "--engine_config.pg.max_replication_slots", + "64", + "--engine_config.pg.max_slot_wal_keep_size", + "1000000", + "--engine_config.pg.max_stack_depth", + "2097152", + "--engine_config.pg.max_standby_archive_delay", + "1", + "--engine_config.pg.max_standby_streaming_delay", + "10", + "--engine_config.pg.max_wal_senders", + "20", + "--engine_config.pg.max_worker_processes", + "96", + "--engine_config.pg.password_encryption", + "scram-sha-256", + "--engine_config.pg.pg_partman_bgw.interval", + "3600", + "--engine_config.pg.pg_partman_bgw.role", + "pg_partman_bgw", + "--engine_config.pg.pg_stat_monitor.pgsm_enable_query_plan", + "true", + "--engine_config.pg.pg_stat_monitor.pgsm_max_buckets", + 
"10", + "--engine_config.pg.pg_stat_statements.track", + "top", + "--engine_config.pg.temp_file_limit", + "5000000", + "--engine_config.pg.timezone", + "Europe/Helsinki", + "--engine_config.pg.track_activity_query_size", + "1024", + "--engine_config.pg.track_commit_timestamp", + "on", + "--engine_config.pg.track_functions", + "none", + "--engine_config.pg.track_io_timing", + "off", + "--engine_config.pg.wal_sender_timeout", + "60000", + "--engine_config.pg.wal_writer_delay", + "200", + "--engine_config.pg_stat_monitor_enable", + "true", + "--engine_config.pglookout.max_failover_replication_time_lag", + "10", + "--json", + ] + ) + .stdout.decode() + .rstrip() + )[0] + + yield database["id"] + + delete_target_id("databases", str(database["id"]), "postgresql-delete") + + +# POSTGRESQL +def test_postgresql_engine_config_view(): + pg_configs = json.loads( + exec_test_command( + BASE_CMD + + [ + "postgres-config-view", + "--json", + ] + ) + .stdout.decode() + .rstrip() + ) + + assert "pg" in pg_configs[0] + pg_config = pg_configs[0]["pg"] + + assert "autovacuum_analyze_scale_factor" in pg_config + assert pg_config["autovacuum_analyze_scale_factor"]["type"] == "number" + assert pg_config["autovacuum_analyze_scale_factor"]["minimum"] == 0.0 + assert pg_config["autovacuum_analyze_scale_factor"]["maximum"] == 1.0 + + assert "autovacuum_analyze_threshold" in pg_config + assert pg_config["autovacuum_analyze_threshold"]["type"] == "integer" + assert pg_config["autovacuum_analyze_threshold"]["minimum"] == 0 + assert pg_config["autovacuum_analyze_threshold"]["maximum"] == 2147483647 + + assert "autovacuum_max_workers" in pg_config + assert pg_config["autovacuum_max_workers"]["type"] == "integer" + assert pg_config["autovacuum_max_workers"]["minimum"] == 1 + assert pg_config["autovacuum_max_workers"]["maximum"] == 20 + assert pg_config["autovacuum_max_workers"]["requires_restart"] is True + + assert "bgwriter_delay" in pg_config + assert pg_config["bgwriter_delay"]["type"] == 
"integer" + assert pg_config["bgwriter_delay"]["minimum"] == 10 + assert pg_config["bgwriter_delay"]["maximum"] == 10000 + + assert "default_toast_compression" in pg_config + assert pg_config["default_toast_compression"]["type"] == "string" + assert "lz4" in pg_config["default_toast_compression"]["enum"] + assert "pglz" in pg_config["default_toast_compression"]["enum"] + + assert "jit" in pg_config + assert pg_config["jit"]["type"] == "boolean" + + assert "max_files_per_process" in pg_config + assert pg_config["max_files_per_process"]["type"] == "integer" + assert pg_config["max_files_per_process"]["requires_restart"] is True + + assert "max_logical_replication_workers" in pg_config + assert pg_config["max_logical_replication_workers"]["type"] == "integer" + assert ( + pg_config["max_logical_replication_workers"]["requires_restart"] is True + ) + + assert "password_encryption" in pg_config + assert pg_config["password_encryption"]["type"] == "string" + assert "md5" in pg_config["password_encryption"]["enum"] + assert "scram-sha-256" in pg_config["password_encryption"]["enum"] + + assert "pg_partman_bgw.interval" in pg_config + assert pg_config["pg_partman_bgw.interval"]["type"] == "integer" + assert pg_config["pg_partman_bgw.interval"]["minimum"] == 3600 + + assert "pg_partman_bgw.role" in pg_config + assert pg_config["pg_partman_bgw.role"]["type"] == "string" + assert pg_config["pg_partman_bgw.role"]["maxLength"] == 64 + + assert "pg_stat_monitor.pgsm_enable_query_plan" in pg_config + assert ( + pg_config["pg_stat_monitor.pgsm_enable_query_plan"]["type"] == "boolean" + ) + assert ( + pg_config["pg_stat_monitor.pgsm_enable_query_plan"]["requires_restart"] + is True + ) + + +def test_postgresql_list_with_engine_config(postgresql_db_engine_config): + postgres_db_id = postgresql_db_engine_config + + postgres_dbs = json.loads( + exec_test_command( + BASE_CMD + + [ + "postgresql-list", + "--json", + ] + ) + .stdout.decode() + .rstrip() + ) + + # Find the DB with the 
matching ID + matching_db = next( + (db for db in postgres_dbs if db["id"] == postgres_db_id), None + ) + assert ( + matching_db is not None + ), f"PostgreSQL DB with id {postgres_db_id} not found in postgresql-list" + + engine_config = matching_db["engine_config"] + assert "pg" in engine_config + pg_config = engine_config["pg"] + + expected_pg_keys = get_expected_keys_pg_engine_config() + + for key in expected_pg_keys: + assert key in pg_config + + assert "pg_stat_monitor_enable" in engine_config + assert isinstance(engine_config["pg_stat_monitor_enable"], bool) + + pglookout = engine_config["pglookout"] + assert isinstance(pglookout, dict) + assert "max_failover_replication_time_lag" in pglookout + assert isinstance(pglookout["max_failover_replication_time_lag"], int) + + +def test_postgresql_db_engine_config_view(postgresql_db_engine_config): + postgres_db_id = postgresql_db_engine_config + postgres_db = json.loads( + exec_test_command( + BASE_CMD + + [ + "postgresql-view", + str(postgres_db_id), + "--json", + ] + ) + .stdout.decode() + .rstrip() + ) + + db = postgres_db[0] + + engine_config = db["engine_config"] + assert "pg" in engine_config + pg_config = engine_config["pg"] + + expected_pg_keys = get_expected_keys_pg_engine_config() + + for key in expected_pg_keys: + assert key in pg_config + + +def test_postgresql_db_engine_config_update(postgresql_db_engine_config): + postgres_db_id = postgresql_db_engine_config + updated_db = json.loads( + exec_test_command( + BASE_CMD + + [ + "postgresql-update", + str(postgres_db_id), + "--engine_config.pg.autovacuum_analyze_scale_factor", + "1", + "--engine_config.pg.autovacuum_analyze_threshold", + "2147483647", + "--engine_config.pg.autovacuum_max_workers", + "15", + "--engine_config.pg.autovacuum_naptime", + "86400", + "--engine_config.pg.autovacuum_vacuum_cost_delay", + "100", + "--engine_config.pg.autovacuum_vacuum_cost_limit", + "10000", + "--engine_config.pg.autovacuum_vacuum_scale_factor", + "1", + 
"--engine_config.pg.autovacuum_vacuum_threshold", + "2147483647", + "--engine_config.pg.bgwriter_delay", + "200", + "--engine_config.pg.bgwriter_flush_after", + "512", + "--engine_config.pg.bgwriter_lru_maxpages", + "100", + "--engine_config.pg.bgwriter_lru_multiplier", + "3.5", + "--engine_config.pg.deadlock_timeout", + "1000", + "--engine_config.pg.default_toast_compression", + "lz4", + "--engine_config.pg.idle_in_transaction_session_timeout", + "604800000", + "--engine_config.pg.jit", + "true", + "--engine_config.pg.max_files_per_process", + "1024", + "--engine_config.pg.max_locks_per_transaction", + "1024", + "--engine_config.pg.max_logical_replication_workers", + "64", + "--engine_config.pg.max_parallel_workers", + "96", + "--engine_config.pg.max_parallel_workers_per_gather", + "96", + "--engine_config.pg.max_pred_locks_per_transaction", + "5120", + "--engine_config.pg.max_replication_slots", + "64", + "--engine_config.pg.max_slot_wal_keep_size", + "1000000", + "--engine_config.pg.max_stack_depth", + "2097152", + "--engine_config.pg.max_standby_archive_delay", + "2", + "--engine_config.pg.max_standby_streaming_delay", + "10", + "--engine_config.pg.max_wal_senders", + "20", + "--engine_config.pg.max_worker_processes", + "96", + "--engine_config.pg.password_encryption", + "scram-sha-256", + "--engine_config.pg.pg_partman_bgw.interval", + "7200", + "--engine_config.pg.pg_partman_bgw.role", + "pg_partman_bgw", + "--engine_config.pg.pg_stat_monitor.pgsm_enable_query_plan", + "true", + "--engine_config.pg.pg_stat_monitor.pgsm_max_buckets", + "10", + "--engine_config.pg.pg_stat_statements.track", + "top", + "--engine_config.pg.temp_file_limit", + "5000000", + "--engine_config.pg.timezone", + "Europe/Helsinki", + "--engine_config.pg.track_activity_query_size", + "1024", + "--engine_config.pg.track_commit_timestamp", + "on", + "--engine_config.pg.track_functions", + "none", + "--engine_config.pg.track_io_timing", + "off", + "--engine_config.pg.wal_sender_timeout", + 
"60000", + "--engine_config.pg.wal_writer_delay", + "200", + "--engine_config.pg_stat_monitor_enable", + "true", + "--engine_config.pglookout.max_failover_replication_time_lag", + "10", + "--json", + ] + ) + .stdout.decode() + .rstrip() + ) + + db = updated_db[0] + + engine_config = db["engine_config"] + assert "pg" in engine_config + pg_config = engine_config["pg"] + + expected_pg_keys = get_expected_keys_pg_engine_config() + + for key in expected_pg_keys: + assert key in pg_config + + assert pg_config["autovacuum_max_workers"] == 15 + assert pg_config["bgwriter_lru_multiplier"] == 3.5 + assert pg_config["pg_partman_bgw.interval"] == 7200 + + +@pytest.fixture(scope="session") +def mysql_db_engine_config(linode_cloud_firewall): + label = get_random_text(5) + "-mysql-db" + database = json.loads( + exec_test_command( + BASE_CMD + + [ + "mysql-create", + "--engine", + "mysql/8", + "--label", + label, + "--region", + "us-east", + "--type", + "g6-nanode-1", + "--ssl_connection", + "true", + "--engine_config.binlog_retention_period", + "86400", + "--engine_config.mysql.connect_timeout", + "10", + "--engine_config.mysql.default_time_zone", + "SYSTEM", + "--engine_config.mysql.group_concat_max_len", + "1024", + "--engine_config.mysql.information_schema_stats_expiry", + "900", + "--engine_config.mysql.innodb_change_buffer_max_size", + "25", + "--engine_config.mysql.innodb_flush_neighbors", + "1", + "--engine_config.mysql.innodb_ft_min_token_size", + "3", + "--engine_config.mysql.innodb_ft_server_stopword_table", + "mydb/stopword", + "--engine_config.mysql.innodb_lock_wait_timeout", + "50", + "--engine_config.mysql.innodb_log_buffer_size", + "16777216", + "--engine_config.mysql.innodb_online_alter_log_max_size", + "134217728", + "--engine_config.mysql.innodb_read_io_threads", + "4", + "--engine_config.mysql.innodb_rollback_on_timeout", + "true", + "--engine_config.mysql.innodb_thread_concurrency", + "8", + "--engine_config.mysql.innodb_write_io_threads", + "4", + 
"--engine_config.mysql.interactive_timeout", + "300", + "--engine_config.mysql.internal_tmp_mem_storage_engine", + "TempTable", + "--engine_config.mysql.max_allowed_packet", + "67108864", + "--engine_config.mysql.max_heap_table_size", + "16777216", + "--engine_config.mysql.net_buffer_length", + "8192", + "--engine_config.mysql.net_read_timeout", + "30", + "--engine_config.mysql.net_write_timeout", + "60", + "--engine_config.mysql.sql_mode", + "TRADITIONAL", + "--engine_config.mysql.sql_require_primary_key", + "true", + "--engine_config.mysql.tmp_table_size", + "16777216", + "--engine_config.mysql.wait_timeout", + "28800", + "--json", + ] + ) + .stdout.decode() + .rstrip() + )[0] + + yield database["id"] + + delete_target_id("databases", str(database["id"]), "mysql-delete") + + +# MYSQL + + +def test_mysql_engine_config_view(): + mysql_config = json.loads( + exec_test_command( + BASE_CMD + + [ + "mysql-config-view", + "--json", + ] + ) + .stdout.decode() + .rstrip() + ) + + assert "mysql" in mysql_config[0] + assert "binlog_retention_period" in mysql_config[0] + + binlog_retention = mysql_config[0]["binlog_retention_period"] + assert binlog_retention["type"] == "integer" + assert binlog_retention["minimum"] == 600 + assert binlog_retention["maximum"] == 86400 + assert binlog_retention["requires_restart"] is False + + mysql_settings = mysql_config[0]["mysql"] + assert "innodb_read_io_threads" in mysql_settings + read_io = mysql_settings["innodb_read_io_threads"] + assert read_io["requires_restart"] is True + assert read_io["type"] == "integer" + assert read_io["minimum"] == 1 + assert read_io["maximum"] == 64 + + assert "internal_tmp_mem_storage_engine" in mysql_settings + engine = mysql_settings["internal_tmp_mem_storage_engine"] + assert engine["type"] == "string" + assert "enum" in engine + assert set(engine["enum"]) == {"TempTable", "MEMORY"} + + +def test_mysql_list_with_engine_config(mysql_db_engine_config): + mysql_db_id = mysql_db_engine_config + + mysql_dbs 
= json.loads( + exec_test_command( + BASE_CMD + + [ + "mysql-list", + "--json", + ] + ) + .stdout.decode() + .rstrip() + ) + + # Find the DB with the matching ID + matching_db = next( + (db for db in mysql_dbs if db["id"] == mysql_db_id), None + ) + assert ( + matching_db is not None + ), f"MySQL DB with id {mysql_db_id} not found in mysql-list" + + config = matching_db["engine_config"] + mysql_config = config["mysql"] + + assert config["binlog_retention_period"] == 86400 + assert mysql_config["connect_timeout"] == 10 + assert mysql_config["default_time_zone"] == "SYSTEM" + assert mysql_config["group_concat_max_len"] == 1024 + assert mysql_config["information_schema_stats_expiry"] == 900 + assert mysql_config["innodb_change_buffer_max_size"] == 25 + assert mysql_config["innodb_flush_neighbors"] == 1 + assert mysql_config["innodb_ft_min_token_size"] == 3 + assert mysql_config["innodb_ft_server_stopword_table"] == "mydb/stopword" + assert mysql_config["innodb_lock_wait_timeout"] == 50 + assert mysql_config["innodb_log_buffer_size"] == 16777216 + assert mysql_config["innodb_online_alter_log_max_size"] == 134217728 + assert mysql_config["innodb_read_io_threads"] == 4 + assert mysql_config["innodb_rollback_on_timeout"] is True + assert mysql_config["innodb_thread_concurrency"] == 8 + assert mysql_config["innodb_write_io_threads"] == 4 + assert mysql_config["interactive_timeout"] == 300 + assert mysql_config["internal_tmp_mem_storage_engine"] == "TempTable" + assert mysql_config["max_allowed_packet"] == 67108864 + assert mysql_config["max_heap_table_size"] == 16777216 + assert mysql_config["net_buffer_length"] == 8192 + assert mysql_config["net_read_timeout"] == 30 + assert mysql_config["net_write_timeout"] == 60 + assert mysql_config["sql_mode"] == "TRADITIONAL" + assert mysql_config["sql_require_primary_key"] is True + assert mysql_config["tmp_table_size"] == 16777216 + assert mysql_config["wait_timeout"] == 28800 + + +def 
test_mysql_db_view_with_engine_config(mysql_db_engine_config): + mysql_db_id = mysql_db_engine_config + + mysql_db = json.loads( + exec_test_command( + BASE_CMD + + [ + "mysql-view", + str(mysql_db_id), + "--json", + ] + ) + .stdout.decode() + .rstrip() + ) + + db = mysql_db[0] + + assert db["engine"] == "mysql" + assert db["status"] in ["active", "provisioning"] + assert db["ssl_connection"] is True + assert db["platform"] == "rdbms-default" + assert db["label"].endswith("mysql-db") + assert db["type"].startswith("g6-") + engine_config = db.get("engine_config") + assert engine_config["binlog_retention_period"] == 86400 + mysql_config = engine_config.get("mysql") + assert mysql_config["innodb_lock_wait_timeout"] == 50 + assert mysql_config["sql_mode"] == "TRADITIONAL" + assert mysql_config["sql_require_primary_key"] is True + assert mysql_config["innodb_thread_concurrency"] == 8 + assert mysql_config["innodb_read_io_threads"] == 4 + assert mysql_config["group_concat_max_len"] == 1024 + assert "primary" in db["hosts"], "Expected 'primary' in hosts" + + +def test_mysql_db_engine_config_update(mysql_db_engine_config): + mysql_db_id = mysql_db_engine_config + + mysql_db = json.loads( + exec_test_command( + BASE_CMD + + [ + "mysql-update", + str(mysql_db_id), + "--engine_config.binlog_retention_period", + "86400", + "--engine_config.mysql.connect_timeout", + "15", + "--engine_config.mysql.default_time_zone", + "SYSTEM", + "--engine_config.mysql.group_concat_max_len", + "1024", + "--engine_config.mysql.information_schema_stats_expiry", + "1000", + "--engine_config.mysql.innodb_change_buffer_max_size", + "25", + "--engine_config.mysql.innodb_flush_neighbors", + "1", + "--engine_config.mysql.innodb_ft_min_token_size", + "3", + "--engine_config.mysql.innodb_ft_server_stopword_table", + "mydb/stopword-updated", + "--engine_config.mysql.innodb_lock_wait_timeout", + "50", + "--engine_config.mysql.innodb_log_buffer_size", + "16777216", + 
"--engine_config.mysql.innodb_online_alter_log_max_size", + "134217728", + "--engine_config.mysql.innodb_read_io_threads", + "4", + "--engine_config.mysql.innodb_rollback_on_timeout", + "true", + "--engine_config.mysql.innodb_thread_concurrency", + "8", + "--engine_config.mysql.innodb_write_io_threads", + "4", + "--engine_config.mysql.interactive_timeout", + "300", + "--engine_config.mysql.internal_tmp_mem_storage_engine", + "TempTable", + "--engine_config.mysql.max_allowed_packet", + "67108864", + "--engine_config.mysql.max_heap_table_size", + "16777216", + "--engine_config.mysql.net_buffer_length", + "8192", + "--engine_config.mysql.net_read_timeout", + "30", + "--engine_config.mysql.net_write_timeout", + "60", + "--engine_config.mysql.sql_mode", + "STRICT_ALL_TABLES", + "--engine_config.mysql.sql_require_primary_key", + "true", + "--engine_config.mysql.tmp_table_size", + "16777216", + "--engine_config.mysql.wait_timeout", + "28800", + "--json", + ] + ) + .stdout.decode() + .rstrip() + )[0] + + # Assertions for updated values + assert mysql_db["engine_config"]["binlog_retention_period"] == 86400 + mysql = mysql_db["engine_config"]["mysql"] + assert mysql["connect_timeout"] == 15 + assert mysql["default_time_zone"] == "SYSTEM" + assert mysql["group_concat_max_len"] == 1024 + assert mysql["information_schema_stats_expiry"] == 1000 + assert mysql["innodb_change_buffer_max_size"] == 25 + assert mysql["innodb_flush_neighbors"] == 1 + assert mysql["innodb_ft_min_token_size"] == 3 + assert mysql["innodb_ft_server_stopword_table"] == "mydb/stopword-updated" + assert mysql["innodb_lock_wait_timeout"] == 50 + assert mysql["innodb_log_buffer_size"] == 16777216 + assert mysql["innodb_online_alter_log_max_size"] == 134217728 + assert mysql["innodb_read_io_threads"] == 4 + assert mysql["innodb_rollback_on_timeout"] is True + assert mysql["innodb_thread_concurrency"] == 8 + assert mysql["innodb_write_io_threads"] == 4 + assert mysql["interactive_timeout"] == 300 + assert 
mysql["internal_tmp_mem_storage_engine"] == "TempTable" + assert mysql["max_allowed_packet"] == 67108864 + assert mysql["max_heap_table_size"] == 16777216 + assert mysql["net_buffer_length"] == 8192 + assert mysql["net_read_timeout"] == 30 + assert mysql["net_write_timeout"] == 60 + assert mysql["sql_mode"] == "STRICT_ALL_TABLES" + assert mysql["sql_require_primary_key"] is True + assert mysql["tmp_table_size"] == 16777216 + assert mysql["wait_timeout"] == 28800 + + # Assertions for values that should not change + assert mysql_db["label"].endswith("mysql-db") + assert mysql_db["region"] == "us-east" + assert mysql_db["cluster_size"] == 1 + assert mysql_db["engine"] == "mysql" + assert mysql_db["version"].startswith("8") + assert mysql_db["type"] == "g6-nanode-1" + assert mysql_db["ssl_connection"] is True + assert mysql_db["status"] in ["active", "provisioning"] diff --git a/tests/integration/linodes/test_interfaces.py b/tests/integration/linodes/test_interfaces.py index 28f522fae..7adf67472 100644 --- a/tests/integration/linodes/test_interfaces.py +++ b/tests/integration/linodes/test_interfaces.py @@ -1,5 +1,4 @@ import json -import time from typing import Any, Dict import pytest @@ -8,14 +7,10 @@ from tests.integration.helpers import delete_target_id, exec_test_command from tests.integration.linodes.helpers_linodes import ( BASE_CMD, - DEFAULT_LABEL, DEFAULT_RANDOM_PASS, DEFAULT_TEST_IMAGE, ) -timestamp = str(time.time_ns()) -linode_label = DEFAULT_LABEL + timestamp - @pytest.fixture def linode_with_vpc_interface_as_json(linode_cloud_firewall): diff --git a/tests/integration/obj/test_obj_quota.py b/tests/integration/obj/test_obj_quota.py index 665804004..308216c64 100644 --- a/tests/integration/obj/test_obj_quota.py +++ b/tests/integration/obj/test_obj_quota.py @@ -7,7 +7,7 @@ def get_quota_id(): response = ( - exec_test_command(CLI_CMD + ["object-storage-quotas", "--json"]) + exec_test_command(CLI_CMD + ["get-object-storage-quotas", "--json"]) .stdout.decode() 
.rstrip() ) @@ -22,7 +22,7 @@ def get_quota_id(): def test_obj_quotas_list(): response = ( - exec_test_command(CLI_CMD + ["object-storage-quotas", "--json"]) + exec_test_command(CLI_CMD + ["get-object-storage-quotas", "--json"]) .stdout.decode() .rstrip() ) @@ -49,7 +49,7 @@ def test_obj_quota_view(): response = ( exec_test_command( - CLI_CMD + ["object-storage-quota-view", quota_id, "--json"] + CLI_CMD + ["get-object-storage-quota", quota_id, "--json"] ) .stdout.decode() .rstrip() @@ -81,7 +81,7 @@ def test_obj_quota_usage_view(): response = ( exec_test_command( - CLI_CMD + ["object-storage-quota-usage-view", quota_id, "--json"] + CLI_CMD + ["get-object-storage-quota-usage", quota_id, "--json"] ) .stdout.decode() .rstrip()