From 492910343297bf2090dca58849ac909220ac965b Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 27 Nov 2024 06:56:21 +0000 Subject: [PATCH] feat: add the `EmptyFilter` type to the Data API v1alpha feat: add the `empty_filter` field to the `Filter` type feat: add `sampling_metadatas` field to the `ResponseMetaData` type PiperOrigin-RevId: 700547721 Source-Link: https://github.com/googleapis/googleapis/commit/f4d17434a39c05450ffd47b49425afb409fb185c Source-Link: https://github.com/googleapis/googleapis-gen/commit/7fe58852753e4e564a85d7cb9b6d05757555b9d0 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWFuYWx5dGljcy1kYXRhLy5Pd2xCb3QueWFtbCIsImgiOiI3ZmU1ODg1Mjc1M2U0ZTU2NGE4NWQ3Y2I5YjZkMDU3NTc1NTViOWQwIn0= --- .../google-analytics-data/v1alpha/.coveragerc | 13 + .../google-analytics-data/v1alpha/.flake8 | 33 + .../google-analytics-data/v1alpha/MANIFEST.in | 2 + .../google-analytics-data/v1alpha/README.rst | 49 + .../v1alpha/docs/_static/custom.css | 3 + .../v1alpha/docs/conf.py | 376 + .../data_v1alpha/alpha_analytics_data.rst | 10 + .../v1alpha/docs/data_v1alpha/services_.rst | 6 + .../v1alpha/docs/data_v1alpha/types_.rst | 6 + .../v1alpha/docs/index.rst | 7 + .../v1alpha/google/analytics/data/__init__.py | 237 + .../google/analytics/data/gapic_version.py | 16 + .../v1alpha/google/analytics/data/py.typed | 2 + .../google/analytics/data_v1alpha/__init__.py | 238 + .../data_v1alpha/gapic_metadata.json | 238 + .../analytics/data_v1alpha/gapic_version.py | 16 + .../google/analytics/data_v1alpha/py.typed | 2 + .../data_v1alpha/services/__init__.py | 15 + .../services/alpha_analytics_data/__init__.py | 22 + .../alpha_analytics_data/async_client.py | 1966 +++ .../services/alpha_analytics_data/client.py | 2314 +++ .../services/alpha_analytics_data/pagers.py | 432 + .../transports/README.rst | 9 + .../transports/__init__.py | 38 + .../alpha_analytics_data/transports/base.py | 347 + .../alpha_analytics_data/transports/grpc.py | 791 ++ .../transports/grpc_asyncio.py | 877 ++ 
.../alpha_analytics_data/transports/rest.py | 1729 +++ .../transports/rest_base.py | 672 + .../analytics/data_v1alpha/types/__init__.py | 232 + .../data_v1alpha/types/analytics_data_api.py | 1725 +++ .../analytics/data_v1alpha/types/data.py | 3210 +++++ .../google-analytics-data/v1alpha/mypy.ini | 3 + .../google-analytics-data/v1alpha/noxfile.py | 280 + ...alytics_data_create_audience_list_async.py | 60 + ...nalytics_data_create_audience_list_sync.py | 60 + ...ta_create_recurring_audience_list_async.py | 56 + ...ata_create_recurring_audience_list_sync.py | 56 + ...analytics_data_create_report_task_async.py | 56 + ..._analytics_data_create_report_task_sync.py | 56 + ..._analytics_data_get_audience_list_async.py | 52 + ...a_analytics_data_get_audience_list_sync.py | 52 + ...data_get_property_quotas_snapshot_async.py | 52 + ..._data_get_property_quotas_snapshot_sync.py | 52 + ..._data_get_recurring_audience_list_async.py | 52 + ...s_data_get_recurring_audience_list_sync.py | 52 + ...ha_analytics_data_get_report_task_async.py | 52 + ...pha_analytics_data_get_report_task_sync.py | 52 + ...nalytics_data_list_audience_lists_async.py | 53 + ...analytics_data_list_audience_lists_sync.py | 53 + ...ata_list_recurring_audience_lists_async.py | 53 + ...data_list_recurring_audience_lists_sync.py | 53 + ..._analytics_data_list_report_tasks_async.py | 53 + ...a_analytics_data_list_report_tasks_sync.py | 53 + ...nalytics_data_query_audience_list_async.py | 52 + ...analytics_data_query_audience_list_sync.py | 52 + ..._analytics_data_query_report_task_async.py | 52 + ...a_analytics_data_query_report_task_sync.py | 52 + ..._analytics_data_run_funnel_report_async.py | 51 + ...a_analytics_data_run_funnel_report_sync.py | 51 + ...s_data_sheet_export_audience_list_async.py | 52 + ...cs_data_sheet_export_audience_list_sync.py | 52 + ...etadata_google.analytics.data.v1alpha.json | 2285 +++ .../scripts/fixup_data_v1alpha_keywords.py | 189 + .../google-analytics-data/v1alpha/setup.py | 98 + 
.../v1alpha/testing/constraints-3.10.txt | 6 + .../v1alpha/testing/constraints-3.11.txt | 6 + .../v1alpha/testing/constraints-3.12.txt | 6 + .../v1alpha/testing/constraints-3.13.txt | 6 + .../v1alpha/testing/constraints-3.7.txt | 10 + .../v1alpha/testing/constraints-3.8.txt | 6 + .../v1alpha/testing/constraints-3.9.txt | 6 + .../v1alpha/tests/__init__.py | 16 + .../v1alpha/tests/unit/__init__.py | 16 + .../v1alpha/tests/unit/gapic/__init__.py | 16 + .../tests/unit/gapic/data_v1alpha/__init__.py | 16 + .../data_v1alpha/test_alpha_analytics_data.py | 11588 ++++++++++++++++ .../google-analytics-data/v1beta/.coveragerc | 13 + .../google-analytics-data/v1beta/.flake8 | 33 + .../google-analytics-data/v1beta/MANIFEST.in | 2 + .../google-analytics-data/v1beta/README.rst | 49 + .../v1beta/docs/_static/custom.css | 3 + .../google-analytics-data/v1beta/docs/conf.py | 376 + .../docs/data_v1beta/beta_analytics_data.rst | 10 + .../v1beta/docs/data_v1beta/services_.rst | 6 + .../v1beta/docs/data_v1beta/types_.rst | 6 + .../v1beta/docs/index.rst | 7 + .../v1beta/google/analytics/data/__init__.py | 149 + .../google/analytics/data/gapic_version.py | 16 + .../v1beta/google/analytics/data/py.typed | 2 + .../google/analytics/data_v1beta/__init__.py | 150 + .../analytics/data_v1beta/gapic_metadata.json | 193 + .../analytics/data_v1beta/gapic_version.py | 16 + .../google/analytics/data_v1beta/py.typed | 2 + .../data_v1beta/services/__init__.py | 15 + .../services/beta_analytics_data/__init__.py | 22 + .../beta_analytics_data/async_client.py | 1439 ++ .../services/beta_analytics_data/client.py | 1777 +++ .../services/beta_analytics_data/pagers.py | 162 + .../beta_analytics_data/transports/README.rst | 9 + .../transports/__init__.py | 38 + .../beta_analytics_data/transports/base.py | 302 + .../beta_analytics_data/transports/grpc.py | 672 + .../transports/grpc_asyncio.py | 743 + .../beta_analytics_data/transports/rest.py | 1404 ++ .../transports/rest_base.py | 531 + 
.../analytics/data_v1beta/types/__init__.py | 144 + .../data_v1beta/types/analytics_data_api.py | 1439 ++ .../analytics/data_v1beta/types/data.py | 2052 +++ .../google-analytics-data/v1beta/mypy.ini | 3 + .../google-analytics-data/v1beta/noxfile.py | 280 + ...tics_data_batch_run_pivot_reports_async.py | 51 + ...ytics_data_batch_run_pivot_reports_sync.py | 51 + ..._analytics_data_batch_run_reports_async.py | 51 + ...a_analytics_data_batch_run_reports_sync.py | 51 + ...nalytics_data_check_compatibility_async.py | 51 + ...analytics_data_check_compatibility_sync.py | 51 + ...ytics_data_create_audience_export_async.py | 60 + ...lytics_data_create_audience_export_sync.py | 60 + ...nalytics_data_get_audience_export_async.py | 52 + ...analytics_data_get_audience_export_sync.py | 52 + ..._beta_analytics_data_get_metadata_async.py | 52 + ...d_beta_analytics_data_get_metadata_sync.py | 52 + ...lytics_data_list_audience_exports_async.py | 53 + ...alytics_data_list_audience_exports_sync.py | 53 + ...lytics_data_query_audience_export_async.py | 52 + ...alytics_data_query_audience_export_sync.py | 52 + ...a_analytics_data_run_pivot_report_async.py | 51 + ...ta_analytics_data_run_pivot_report_sync.py | 51 + ...nalytics_data_run_realtime_report_async.py | 51 + ...analytics_data_run_realtime_report_sync.py | 51 + ...ed_beta_analytics_data_run_report_async.py | 51 + ...ted_beta_analytics_data_run_report_sync.py | 51 + ...metadata_google.analytics.data.v1beta.json | 1746 +++ .../scripts/fixup_data_v1beta_keywords.py | 186 + .../google-analytics-data/v1beta/setup.py | 98 + .../v1beta/testing/constraints-3.10.txt | 6 + .../v1beta/testing/constraints-3.11.txt | 6 + .../v1beta/testing/constraints-3.12.txt | 6 + .../v1beta/testing/constraints-3.13.txt | 6 + .../v1beta/testing/constraints-3.7.txt | 10 + .../v1beta/testing/constraints-3.8.txt | 6 + .../v1beta/testing/constraints-3.9.txt | 6 + .../v1beta/tests/__init__.py | 16 + .../v1beta/tests/unit/__init__.py | 16 + 
.../v1beta/tests/unit/gapic/__init__.py | 16 + .../tests/unit/gapic/data_v1beta/__init__.py | 16 + .../data_v1beta/test_beta_analytics_data.py | 7777 +++++++++++ 148 files changed, 54810 insertions(+) create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/.coveragerc create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/.flake8 create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/MANIFEST.in create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/README.rst create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/docs/_static/custom.css create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/docs/conf.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/docs/data_v1alpha/alpha_analytics_data.rst create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/docs/data_v1alpha/services_.rst create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/docs/data_v1alpha/types_.rst create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/docs/index.rst create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data/gapic_version.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data/py.typed create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/gapic_metadata.json create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/gapic_version.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/py.typed create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/__init__.py create mode 100644 
owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/pagers.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/README.rst create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest_base.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/types/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/types/analytics_data_api.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/types/data.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/mypy.ini create mode 100644 
owl-bot-staging/google-analytics-data/v1alpha/noxfile.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_audience_list_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_audience_list_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_recurring_audience_list_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_recurring_audience_list_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_report_task_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_report_task_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_audience_list_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_audience_list_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_recurring_audience_list_async.py 
create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_recurring_audience_list_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_report_task_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_report_task_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_audience_lists_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_audience_lists_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_recurring_audience_lists_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_recurring_audience_lists_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_report_tasks_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_report_tasks_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_query_audience_list_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_query_audience_list_sync.py create mode 100644 
owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_query_report_task_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_query_report_task_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_run_funnel_report_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_run_funnel_report_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/scripts/fixup_data_v1alpha_keywords.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/setup.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.9.txt 
create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/tests/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/tests/unit/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/tests/unit/gapic/data_v1alpha/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1alpha/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/.coveragerc create mode 100644 owl-bot-staging/google-analytics-data/v1beta/.flake8 create mode 100644 owl-bot-staging/google-analytics-data/v1beta/MANIFEST.in create mode 100644 owl-bot-staging/google-analytics-data/v1beta/README.rst create mode 100644 owl-bot-staging/google-analytics-data/v1beta/docs/_static/custom.css create mode 100644 owl-bot-staging/google-analytics-data/v1beta/docs/conf.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/docs/data_v1beta/beta_analytics_data.rst create mode 100644 owl-bot-staging/google-analytics-data/v1beta/docs/data_v1beta/services_.rst create mode 100644 owl-bot-staging/google-analytics-data/v1beta/docs/data_v1beta/types_.rst create mode 100644 owl-bot-staging/google-analytics-data/v1beta/docs/index.rst create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data/gapic_version.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data/py.typed create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/gapic_metadata.json create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/gapic_version.py create mode 100644 
owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/py.typed create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/client.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/pagers.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/transports/README.rst create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/transports/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/transports/base.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/transports/rest.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/transports/rest_base.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/types/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/types/analytics_data_api.py create mode 100644 
owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/types/data.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/mypy.ini create mode 100644 owl-bot-staging/google-analytics-data/v1beta/noxfile.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_batch_run_pivot_reports_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_batch_run_pivot_reports_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_batch_run_reports_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_batch_run_reports_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_check_compatibility_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_check_compatibility_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_sync.py create mode 100644 
owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_pivot_report_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_pivot_report_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_realtime_report_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_realtime_report_sync.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_report_async.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_report_sync.py create mode 
100644 owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json create mode 100644 owl-bot-staging/google-analytics-data/v1beta/scripts/fixup_data_v1beta_keywords.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/setup.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/google-analytics-data/v1beta/tests/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/tests/unit/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/tests/unit/gapic/data_v1beta/__init__.py create mode 100644 owl-bot-staging/google-analytics-data/v1beta/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py diff --git a/owl-bot-staging/google-analytics-data/v1alpha/.coveragerc b/owl-bot-staging/google-analytics-data/v1alpha/.coveragerc new file mode 100644 index 000000000000..d50b66c60b09 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/analytics/data/__init__.py + google/analytics/data/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git 
a/owl-bot-staging/google-analytics-data/v1alpha/.flake8 b/owl-bot-staging/google-analytics-data/v1alpha/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/google-analytics-data/v1alpha/MANIFEST.in b/owl-bot-staging/google-analytics-data/v1alpha/MANIFEST.in new file mode 100644 index 000000000000..938f3f33c2f9 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/analytics/data *.py +recursive-include google/analytics/data_v1alpha *.py diff --git a/owl-bot-staging/google-analytics-data/v1alpha/README.rst b/owl-bot-staging/google-analytics-data/v1alpha/README.rst new file mode 100644 index 000000000000..d62497b892c1 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Analytics Data API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Analytics Data API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. 
code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-analytics-data/v1alpha/docs/_static/custom.css b/owl-bot-staging/google-analytics-data/v1alpha/docs/_static/custom.css new file mode 100644 index 000000000000..06423be0b592 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/owl-bot-staging/google-analytics-data/v1alpha/docs/conf.py b/owl-bot-staging/google-analytics-data/v1alpha/docs/conf.py new file mode 100644 index 000000000000..d7245576d5f6 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-analytics-data documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. 
+ +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-analytics-data" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. 
+release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. 
For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Analytics Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. 
+# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. 
+htmlhelp_basename = "google-analytics-data-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-analytics-data.tex", + u"google-analytics-data Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + ( + root_doc, + "google-analytics-data", + u"Google Analytics Data Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-analytics-data", + u"google-analytics-data Documentation", + author, + "google-analytics-data", + "GAPIC library for Google Analytics Data API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/google-analytics-data/v1alpha/docs/data_v1alpha/alpha_analytics_data.rst b/owl-bot-staging/google-analytics-data/v1alpha/docs/data_v1alpha/alpha_analytics_data.rst new file mode 100644 index 000000000000..76209b2d9dd0 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/docs/data_v1alpha/alpha_analytics_data.rst @@ -0,0 +1,10 @@ +AlphaAnalyticsData +------------------------------------ + +.. automodule:: google.analytics.data_v1alpha.services.alpha_analytics_data + :members: + :inherited-members: + +.. 
automodule:: google.analytics.data_v1alpha.services.alpha_analytics_data.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-analytics-data/v1alpha/docs/data_v1alpha/services_.rst b/owl-bot-staging/google-analytics-data/v1alpha/docs/data_v1alpha/services_.rst new file mode 100644 index 000000000000..607505d6bd7f --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/docs/data_v1alpha/services_.rst @@ -0,0 +1,6 @@ +Services for Google Analytics Data v1alpha API +============================================== +.. toctree:: + :maxdepth: 2 + + alpha_analytics_data diff --git a/owl-bot-staging/google-analytics-data/v1alpha/docs/data_v1alpha/types_.rst b/owl-bot-staging/google-analytics-data/v1alpha/docs/data_v1alpha/types_.rst new file mode 100644 index 000000000000..c0378d5ba6e3 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/docs/data_v1alpha/types_.rst @@ -0,0 +1,6 @@ +Types for Google Analytics Data v1alpha API +=========================================== + +.. automodule:: google.analytics.data_v1alpha.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/google-analytics-data/v1alpha/docs/index.rst b/owl-bot-staging/google-analytics-data/v1alpha/docs/index.rst new file mode 100644 index 000000000000..8d73b383c1d1 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + data_v1alpha/services_ + data_v1alpha/types_ diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data/__init__.py b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data/__init__.py new file mode 100644 index 000000000000..529b35d84f0a --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data/__init__.py @@ -0,0 +1,237 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.analytics.data import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.analytics.data_v1alpha.services.alpha_analytics_data.client import AlphaAnalyticsDataClient +from google.analytics.data_v1alpha.services.alpha_analytics_data.async_client import AlphaAnalyticsDataAsyncClient + +from google.analytics.data_v1alpha.types.analytics_data_api import AudienceDimension +from google.analytics.data_v1alpha.types.analytics_data_api import AudienceDimensionValue +from google.analytics.data_v1alpha.types.analytics_data_api import AudienceList +from google.analytics.data_v1alpha.types.analytics_data_api import AudienceListMetadata +from google.analytics.data_v1alpha.types.analytics_data_api import AudienceRow +from google.analytics.data_v1alpha.types.analytics_data_api import CreateAudienceListRequest +from google.analytics.data_v1alpha.types.analytics_data_api import CreateRecurringAudienceListRequest +from google.analytics.data_v1alpha.types.analytics_data_api import CreateReportTaskRequest +from google.analytics.data_v1alpha.types.analytics_data_api import GetAudienceListRequest +from google.analytics.data_v1alpha.types.analytics_data_api import GetPropertyQuotasSnapshotRequest +from google.analytics.data_v1alpha.types.analytics_data_api import GetRecurringAudienceListRequest +from google.analytics.data_v1alpha.types.analytics_data_api import GetReportTaskRequest +from google.analytics.data_v1alpha.types.analytics_data_api import ListAudienceListsRequest +from google.analytics.data_v1alpha.types.analytics_data_api import ListAudienceListsResponse +from google.analytics.data_v1alpha.types.analytics_data_api import ListRecurringAudienceListsRequest +from google.analytics.data_v1alpha.types.analytics_data_api import ListRecurringAudienceListsResponse +from google.analytics.data_v1alpha.types.analytics_data_api import ListReportTasksRequest +from google.analytics.data_v1alpha.types.analytics_data_api import 
ListReportTasksResponse +from google.analytics.data_v1alpha.types.analytics_data_api import PropertyQuotasSnapshot +from google.analytics.data_v1alpha.types.analytics_data_api import QueryAudienceListRequest +from google.analytics.data_v1alpha.types.analytics_data_api import QueryAudienceListResponse +from google.analytics.data_v1alpha.types.analytics_data_api import QueryReportTaskRequest +from google.analytics.data_v1alpha.types.analytics_data_api import QueryReportTaskResponse +from google.analytics.data_v1alpha.types.analytics_data_api import RecurringAudienceList +from google.analytics.data_v1alpha.types.analytics_data_api import ReportTask +from google.analytics.data_v1alpha.types.analytics_data_api import ReportTaskMetadata +from google.analytics.data_v1alpha.types.analytics_data_api import RunFunnelReportRequest +from google.analytics.data_v1alpha.types.analytics_data_api import RunFunnelReportResponse +from google.analytics.data_v1alpha.types.analytics_data_api import SheetExportAudienceListRequest +from google.analytics.data_v1alpha.types.analytics_data_api import SheetExportAudienceListResponse +from google.analytics.data_v1alpha.types.analytics_data_api import WebhookNotification +from google.analytics.data_v1alpha.types.data import BetweenFilter +from google.analytics.data_v1alpha.types.data import Cohort +from google.analytics.data_v1alpha.types.data import CohortReportSettings +from google.analytics.data_v1alpha.types.data import CohortSpec +from google.analytics.data_v1alpha.types.data import CohortsRange +from google.analytics.data_v1alpha.types.data import DateRange +from google.analytics.data_v1alpha.types.data import Dimension +from google.analytics.data_v1alpha.types.data import DimensionExpression +from google.analytics.data_v1alpha.types.data import DimensionHeader +from google.analytics.data_v1alpha.types.data import DimensionValue +from google.analytics.data_v1alpha.types.data import EmptyFilter +from 
google.analytics.data_v1alpha.types.data import EventSegment +from google.analytics.data_v1alpha.types.data import EventSegmentConditionGroup +from google.analytics.data_v1alpha.types.data import EventSegmentCriteria +from google.analytics.data_v1alpha.types.data import EventSegmentExclusion +from google.analytics.data_v1alpha.types.data import Filter +from google.analytics.data_v1alpha.types.data import FilterExpression +from google.analytics.data_v1alpha.types.data import FilterExpressionList +from google.analytics.data_v1alpha.types.data import Funnel +from google.analytics.data_v1alpha.types.data import FunnelBreakdown +from google.analytics.data_v1alpha.types.data import FunnelEventFilter +from google.analytics.data_v1alpha.types.data import FunnelFieldFilter +from google.analytics.data_v1alpha.types.data import FunnelFilterExpression +from google.analytics.data_v1alpha.types.data import FunnelFilterExpressionList +from google.analytics.data_v1alpha.types.data import FunnelNextAction +from google.analytics.data_v1alpha.types.data import FunnelParameterFilter +from google.analytics.data_v1alpha.types.data import FunnelParameterFilterExpression +from google.analytics.data_v1alpha.types.data import FunnelParameterFilterExpressionList +from google.analytics.data_v1alpha.types.data import FunnelResponseMetadata +from google.analytics.data_v1alpha.types.data import FunnelStep +from google.analytics.data_v1alpha.types.data import FunnelSubReport +from google.analytics.data_v1alpha.types.data import InListFilter +from google.analytics.data_v1alpha.types.data import Metric +from google.analytics.data_v1alpha.types.data import MetricHeader +from google.analytics.data_v1alpha.types.data import MetricValue +from google.analytics.data_v1alpha.types.data import NumericFilter +from google.analytics.data_v1alpha.types.data import NumericValue +from google.analytics.data_v1alpha.types.data import OrderBy +from google.analytics.data_v1alpha.types.data import PropertyQuota +from 
google.analytics.data_v1alpha.types.data import QuotaStatus +from google.analytics.data_v1alpha.types.data import ResponseMetaData +from google.analytics.data_v1alpha.types.data import Row +from google.analytics.data_v1alpha.types.data import SamplingMetadata +from google.analytics.data_v1alpha.types.data import Segment +from google.analytics.data_v1alpha.types.data import SegmentEventFilter +from google.analytics.data_v1alpha.types.data import SegmentFilter +from google.analytics.data_v1alpha.types.data import SegmentFilterExpression +from google.analytics.data_v1alpha.types.data import SegmentFilterExpressionList +from google.analytics.data_v1alpha.types.data import SegmentFilterScoping +from google.analytics.data_v1alpha.types.data import SegmentParameterFilter +from google.analytics.data_v1alpha.types.data import SegmentParameterFilterExpression +from google.analytics.data_v1alpha.types.data import SegmentParameterFilterExpressionList +from google.analytics.data_v1alpha.types.data import SegmentParameterFilterScoping +from google.analytics.data_v1alpha.types.data import SessionSegment +from google.analytics.data_v1alpha.types.data import SessionSegmentConditionGroup +from google.analytics.data_v1alpha.types.data import SessionSegmentCriteria +from google.analytics.data_v1alpha.types.data import SessionSegmentExclusion +from google.analytics.data_v1alpha.types.data import StringFilter +from google.analytics.data_v1alpha.types.data import UserSegment +from google.analytics.data_v1alpha.types.data import UserSegmentConditionGroup +from google.analytics.data_v1alpha.types.data import UserSegmentCriteria +from google.analytics.data_v1alpha.types.data import UserSegmentExclusion +from google.analytics.data_v1alpha.types.data import UserSegmentSequenceGroup +from google.analytics.data_v1alpha.types.data import UserSequenceStep +from google.analytics.data_v1alpha.types.data import EventCriteriaScoping +from google.analytics.data_v1alpha.types.data import 
EventExclusionDuration +from google.analytics.data_v1alpha.types.data import MetricAggregation +from google.analytics.data_v1alpha.types.data import MetricType +from google.analytics.data_v1alpha.types.data import RestrictedMetricType +from google.analytics.data_v1alpha.types.data import SamplingLevel +from google.analytics.data_v1alpha.types.data import SessionCriteriaScoping +from google.analytics.data_v1alpha.types.data import SessionExclusionDuration +from google.analytics.data_v1alpha.types.data import UserCriteriaScoping +from google.analytics.data_v1alpha.types.data import UserExclusionDuration + +__all__ = ('AlphaAnalyticsDataClient', + 'AlphaAnalyticsDataAsyncClient', + 'AudienceDimension', + 'AudienceDimensionValue', + 'AudienceList', + 'AudienceListMetadata', + 'AudienceRow', + 'CreateAudienceListRequest', + 'CreateRecurringAudienceListRequest', + 'CreateReportTaskRequest', + 'GetAudienceListRequest', + 'GetPropertyQuotasSnapshotRequest', + 'GetRecurringAudienceListRequest', + 'GetReportTaskRequest', + 'ListAudienceListsRequest', + 'ListAudienceListsResponse', + 'ListRecurringAudienceListsRequest', + 'ListRecurringAudienceListsResponse', + 'ListReportTasksRequest', + 'ListReportTasksResponse', + 'PropertyQuotasSnapshot', + 'QueryAudienceListRequest', + 'QueryAudienceListResponse', + 'QueryReportTaskRequest', + 'QueryReportTaskResponse', + 'RecurringAudienceList', + 'ReportTask', + 'ReportTaskMetadata', + 'RunFunnelReportRequest', + 'RunFunnelReportResponse', + 'SheetExportAudienceListRequest', + 'SheetExportAudienceListResponse', + 'WebhookNotification', + 'BetweenFilter', + 'Cohort', + 'CohortReportSettings', + 'CohortSpec', + 'CohortsRange', + 'DateRange', + 'Dimension', + 'DimensionExpression', + 'DimensionHeader', + 'DimensionValue', + 'EmptyFilter', + 'EventSegment', + 'EventSegmentConditionGroup', + 'EventSegmentCriteria', + 'EventSegmentExclusion', + 'Filter', + 'FilterExpression', + 'FilterExpressionList', + 'Funnel', + 'FunnelBreakdown', + 
'FunnelEventFilter', + 'FunnelFieldFilter', + 'FunnelFilterExpression', + 'FunnelFilterExpressionList', + 'FunnelNextAction', + 'FunnelParameterFilter', + 'FunnelParameterFilterExpression', + 'FunnelParameterFilterExpressionList', + 'FunnelResponseMetadata', + 'FunnelStep', + 'FunnelSubReport', + 'InListFilter', + 'Metric', + 'MetricHeader', + 'MetricValue', + 'NumericFilter', + 'NumericValue', + 'OrderBy', + 'PropertyQuota', + 'QuotaStatus', + 'ResponseMetaData', + 'Row', + 'SamplingMetadata', + 'Segment', + 'SegmentEventFilter', + 'SegmentFilter', + 'SegmentFilterExpression', + 'SegmentFilterExpressionList', + 'SegmentFilterScoping', + 'SegmentParameterFilter', + 'SegmentParameterFilterExpression', + 'SegmentParameterFilterExpressionList', + 'SegmentParameterFilterScoping', + 'SessionSegment', + 'SessionSegmentConditionGroup', + 'SessionSegmentCriteria', + 'SessionSegmentExclusion', + 'StringFilter', + 'UserSegment', + 'UserSegmentConditionGroup', + 'UserSegmentCriteria', + 'UserSegmentExclusion', + 'UserSegmentSequenceGroup', + 'UserSequenceStep', + 'EventCriteriaScoping', + 'EventExclusionDuration', + 'MetricAggregation', + 'MetricType', + 'RestrictedMetricType', + 'SamplingLevel', + 'SessionCriteriaScoping', + 'SessionExclusionDuration', + 'UserCriteriaScoping', + 'UserExclusionDuration', +) diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data/gapic_version.py b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data/py.typed b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data/py.typed new file mode 100644 index 000000000000..1d549e500f60 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-analytics-data package uses inline types. diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/__init__.py b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/__init__.py new file mode 100644 index 000000000000..42c19987e5a0 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/__init__.py @@ -0,0 +1,238 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.analytics.data_v1alpha import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.alpha_analytics_data import AlphaAnalyticsDataClient +from .services.alpha_analytics_data import AlphaAnalyticsDataAsyncClient + +from .types.analytics_data_api import AudienceDimension +from .types.analytics_data_api import AudienceDimensionValue +from .types.analytics_data_api import AudienceList +from .types.analytics_data_api import AudienceListMetadata +from .types.analytics_data_api import AudienceRow +from .types.analytics_data_api import CreateAudienceListRequest +from .types.analytics_data_api import CreateRecurringAudienceListRequest +from .types.analytics_data_api import CreateReportTaskRequest +from .types.analytics_data_api import GetAudienceListRequest +from .types.analytics_data_api import GetPropertyQuotasSnapshotRequest +from .types.analytics_data_api import GetRecurringAudienceListRequest +from .types.analytics_data_api import GetReportTaskRequest +from .types.analytics_data_api import ListAudienceListsRequest +from .types.analytics_data_api import ListAudienceListsResponse +from .types.analytics_data_api import ListRecurringAudienceListsRequest +from .types.analytics_data_api import ListRecurringAudienceListsResponse +from .types.analytics_data_api import ListReportTasksRequest +from .types.analytics_data_api import ListReportTasksResponse +from .types.analytics_data_api import PropertyQuotasSnapshot +from .types.analytics_data_api import QueryAudienceListRequest +from .types.analytics_data_api import QueryAudienceListResponse +from .types.analytics_data_api import QueryReportTaskRequest +from .types.analytics_data_api import QueryReportTaskResponse +from .types.analytics_data_api import RecurringAudienceList +from .types.analytics_data_api import ReportTask +from .types.analytics_data_api import ReportTaskMetadata +from .types.analytics_data_api import RunFunnelReportRequest +from 
.types.analytics_data_api import RunFunnelReportResponse +from .types.analytics_data_api import SheetExportAudienceListRequest +from .types.analytics_data_api import SheetExportAudienceListResponse +from .types.analytics_data_api import WebhookNotification +from .types.data import BetweenFilter +from .types.data import Cohort +from .types.data import CohortReportSettings +from .types.data import CohortSpec +from .types.data import CohortsRange +from .types.data import DateRange +from .types.data import Dimension +from .types.data import DimensionExpression +from .types.data import DimensionHeader +from .types.data import DimensionValue +from .types.data import EmptyFilter +from .types.data import EventSegment +from .types.data import EventSegmentConditionGroup +from .types.data import EventSegmentCriteria +from .types.data import EventSegmentExclusion +from .types.data import Filter +from .types.data import FilterExpression +from .types.data import FilterExpressionList +from .types.data import Funnel +from .types.data import FunnelBreakdown +from .types.data import FunnelEventFilter +from .types.data import FunnelFieldFilter +from .types.data import FunnelFilterExpression +from .types.data import FunnelFilterExpressionList +from .types.data import FunnelNextAction +from .types.data import FunnelParameterFilter +from .types.data import FunnelParameterFilterExpression +from .types.data import FunnelParameterFilterExpressionList +from .types.data import FunnelResponseMetadata +from .types.data import FunnelStep +from .types.data import FunnelSubReport +from .types.data import InListFilter +from .types.data import Metric +from .types.data import MetricHeader +from .types.data import MetricValue +from .types.data import NumericFilter +from .types.data import NumericValue +from .types.data import OrderBy +from .types.data import PropertyQuota +from .types.data import QuotaStatus +from .types.data import ResponseMetaData +from .types.data import Row +from .types.data 
import SamplingMetadata +from .types.data import Segment +from .types.data import SegmentEventFilter +from .types.data import SegmentFilter +from .types.data import SegmentFilterExpression +from .types.data import SegmentFilterExpressionList +from .types.data import SegmentFilterScoping +from .types.data import SegmentParameterFilter +from .types.data import SegmentParameterFilterExpression +from .types.data import SegmentParameterFilterExpressionList +from .types.data import SegmentParameterFilterScoping +from .types.data import SessionSegment +from .types.data import SessionSegmentConditionGroup +from .types.data import SessionSegmentCriteria +from .types.data import SessionSegmentExclusion +from .types.data import StringFilter +from .types.data import UserSegment +from .types.data import UserSegmentConditionGroup +from .types.data import UserSegmentCriteria +from .types.data import UserSegmentExclusion +from .types.data import UserSegmentSequenceGroup +from .types.data import UserSequenceStep +from .types.data import EventCriteriaScoping +from .types.data import EventExclusionDuration +from .types.data import MetricAggregation +from .types.data import MetricType +from .types.data import RestrictedMetricType +from .types.data import SamplingLevel +from .types.data import SessionCriteriaScoping +from .types.data import SessionExclusionDuration +from .types.data import UserCriteriaScoping +from .types.data import UserExclusionDuration + +__all__ = ( + 'AlphaAnalyticsDataAsyncClient', +'AlphaAnalyticsDataClient', +'AudienceDimension', +'AudienceDimensionValue', +'AudienceList', +'AudienceListMetadata', +'AudienceRow', +'BetweenFilter', +'Cohort', +'CohortReportSettings', +'CohortSpec', +'CohortsRange', +'CreateAudienceListRequest', +'CreateRecurringAudienceListRequest', +'CreateReportTaskRequest', +'DateRange', +'Dimension', +'DimensionExpression', +'DimensionHeader', +'DimensionValue', +'EmptyFilter', +'EventCriteriaScoping', +'EventExclusionDuration', 
+'EventSegment', +'EventSegmentConditionGroup', +'EventSegmentCriteria', +'EventSegmentExclusion', +'Filter', +'FilterExpression', +'FilterExpressionList', +'Funnel', +'FunnelBreakdown', +'FunnelEventFilter', +'FunnelFieldFilter', +'FunnelFilterExpression', +'FunnelFilterExpressionList', +'FunnelNextAction', +'FunnelParameterFilter', +'FunnelParameterFilterExpression', +'FunnelParameterFilterExpressionList', +'FunnelResponseMetadata', +'FunnelStep', +'FunnelSubReport', +'GetAudienceListRequest', +'GetPropertyQuotasSnapshotRequest', +'GetRecurringAudienceListRequest', +'GetReportTaskRequest', +'InListFilter', +'ListAudienceListsRequest', +'ListAudienceListsResponse', +'ListRecurringAudienceListsRequest', +'ListRecurringAudienceListsResponse', +'ListReportTasksRequest', +'ListReportTasksResponse', +'Metric', +'MetricAggregation', +'MetricHeader', +'MetricType', +'MetricValue', +'NumericFilter', +'NumericValue', +'OrderBy', +'PropertyQuota', +'PropertyQuotasSnapshot', +'QueryAudienceListRequest', +'QueryAudienceListResponse', +'QueryReportTaskRequest', +'QueryReportTaskResponse', +'QuotaStatus', +'RecurringAudienceList', +'ReportTask', +'ReportTaskMetadata', +'ResponseMetaData', +'RestrictedMetricType', +'Row', +'RunFunnelReportRequest', +'RunFunnelReportResponse', +'SamplingLevel', +'SamplingMetadata', +'Segment', +'SegmentEventFilter', +'SegmentFilter', +'SegmentFilterExpression', +'SegmentFilterExpressionList', +'SegmentFilterScoping', +'SegmentParameterFilter', +'SegmentParameterFilterExpression', +'SegmentParameterFilterExpressionList', +'SegmentParameterFilterScoping', +'SessionCriteriaScoping', +'SessionExclusionDuration', +'SessionSegment', +'SessionSegmentConditionGroup', +'SessionSegmentCriteria', +'SessionSegmentExclusion', +'SheetExportAudienceListRequest', +'SheetExportAudienceListResponse', +'StringFilter', +'UserCriteriaScoping', +'UserExclusionDuration', +'UserSegment', +'UserSegmentConditionGroup', +'UserSegmentCriteria', +'UserSegmentExclusion', 
+'UserSegmentSequenceGroup', +'UserSequenceStep', +'WebhookNotification', +) diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/gapic_metadata.json b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/gapic_metadata.json new file mode 100644 index 000000000000..bb6c0b6f462d --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/gapic_metadata.json @@ -0,0 +1,238 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.analytics.data_v1alpha", + "protoPackage": "google.analytics.data.v1alpha", + "schema": "1.0", + "services": { + "AlphaAnalyticsData": { + "clients": { + "grpc": { + "libraryClient": "AlphaAnalyticsDataClient", + "rpcs": { + "CreateAudienceList": { + "methods": [ + "create_audience_list" + ] + }, + "CreateRecurringAudienceList": { + "methods": [ + "create_recurring_audience_list" + ] + }, + "CreateReportTask": { + "methods": [ + "create_report_task" + ] + }, + "GetAudienceList": { + "methods": [ + "get_audience_list" + ] + }, + "GetPropertyQuotasSnapshot": { + "methods": [ + "get_property_quotas_snapshot" + ] + }, + "GetRecurringAudienceList": { + "methods": [ + "get_recurring_audience_list" + ] + }, + "GetReportTask": { + "methods": [ + "get_report_task" + ] + }, + "ListAudienceLists": { + "methods": [ + "list_audience_lists" + ] + }, + "ListRecurringAudienceLists": { + "methods": [ + "list_recurring_audience_lists" + ] + }, + "ListReportTasks": { + "methods": [ + "list_report_tasks" + ] + }, + "QueryAudienceList": { + "methods": [ + "query_audience_list" + ] + }, + "QueryReportTask": { + "methods": [ + "query_report_task" + ] + }, + "RunFunnelReport": { + "methods": [ + "run_funnel_report" + ] + }, + "SheetExportAudienceList": { + "methods": [ + "sheet_export_audience_list" + ] + } + } + }, + "grpc-async": { + "libraryClient": 
"AlphaAnalyticsDataAsyncClient", + "rpcs": { + "CreateAudienceList": { + "methods": [ + "create_audience_list" + ] + }, + "CreateRecurringAudienceList": { + "methods": [ + "create_recurring_audience_list" + ] + }, + "CreateReportTask": { + "methods": [ + "create_report_task" + ] + }, + "GetAudienceList": { + "methods": [ + "get_audience_list" + ] + }, + "GetPropertyQuotasSnapshot": { + "methods": [ + "get_property_quotas_snapshot" + ] + }, + "GetRecurringAudienceList": { + "methods": [ + "get_recurring_audience_list" + ] + }, + "GetReportTask": { + "methods": [ + "get_report_task" + ] + }, + "ListAudienceLists": { + "methods": [ + "list_audience_lists" + ] + }, + "ListRecurringAudienceLists": { + "methods": [ + "list_recurring_audience_lists" + ] + }, + "ListReportTasks": { + "methods": [ + "list_report_tasks" + ] + }, + "QueryAudienceList": { + "methods": [ + "query_audience_list" + ] + }, + "QueryReportTask": { + "methods": [ + "query_report_task" + ] + }, + "RunFunnelReport": { + "methods": [ + "run_funnel_report" + ] + }, + "SheetExportAudienceList": { + "methods": [ + "sheet_export_audience_list" + ] + } + } + }, + "rest": { + "libraryClient": "AlphaAnalyticsDataClient", + "rpcs": { + "CreateAudienceList": { + "methods": [ + "create_audience_list" + ] + }, + "CreateRecurringAudienceList": { + "methods": [ + "create_recurring_audience_list" + ] + }, + "CreateReportTask": { + "methods": [ + "create_report_task" + ] + }, + "GetAudienceList": { + "methods": [ + "get_audience_list" + ] + }, + "GetPropertyQuotasSnapshot": { + "methods": [ + "get_property_quotas_snapshot" + ] + }, + "GetRecurringAudienceList": { + "methods": [ + "get_recurring_audience_list" + ] + }, + "GetReportTask": { + "methods": [ + "get_report_task" + ] + }, + "ListAudienceLists": { + "methods": [ + "list_audience_lists" + ] + }, + "ListRecurringAudienceLists": { + "methods": [ + "list_recurring_audience_lists" + ] + }, + "ListReportTasks": { + "methods": [ + "list_report_tasks" + ] + }, + 
"QueryAudienceList": { + "methods": [ + "query_audience_list" + ] + }, + "QueryReportTask": { + "methods": [ + "query_report_task" + ] + }, + "RunFunnelReport": { + "methods": [ + "run_funnel_report" + ] + }, + "SheetExportAudienceList": { + "methods": [ + "sheet_export_audience_list" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/gapic_version.py b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/py.typed b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/py.typed new file mode 100644 index 000000000000..1d549e500f60 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-analytics-data package uses inline types. 
diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/__init__.py b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/__init__.py b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/__init__.py new file mode 100644 index 000000000000..af9aed4a1b3f --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import AlphaAnalyticsDataClient +from .async_client import AlphaAnalyticsDataAsyncClient + +__all__ = ( + 'AlphaAnalyticsDataClient', + 'AlphaAnalyticsDataAsyncClient', +) diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py new file mode 100644 index 000000000000..895a58adfe86 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py @@ -0,0 +1,1966 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.analytics.data_v1alpha import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.analytics.data_v1alpha.services.alpha_analytics_data import pagers +from google.analytics.data_v1alpha.types import analytics_data_api +from google.analytics.data_v1alpha.types import data +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import AlphaAnalyticsDataTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import AlphaAnalyticsDataGrpcAsyncIOTransport +from .client import AlphaAnalyticsDataClient + + +class AlphaAnalyticsDataAsyncClient: + """Google Analytics reporting data service.""" + + _client: AlphaAnalyticsDataClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = AlphaAnalyticsDataClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AlphaAnalyticsDataClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = AlphaAnalyticsDataClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = AlphaAnalyticsDataClient._DEFAULT_UNIVERSE + + audience_list_path = staticmethod(AlphaAnalyticsDataClient.audience_list_path) + parse_audience_list_path = staticmethod(AlphaAnalyticsDataClient.parse_audience_list_path) + property_quotas_snapshot_path = staticmethod(AlphaAnalyticsDataClient.property_quotas_snapshot_path) + parse_property_quotas_snapshot_path = staticmethod(AlphaAnalyticsDataClient.parse_property_quotas_snapshot_path) + recurring_audience_list_path = staticmethod(AlphaAnalyticsDataClient.recurring_audience_list_path) + parse_recurring_audience_list_path = staticmethod(AlphaAnalyticsDataClient.parse_recurring_audience_list_path) + report_task_path = staticmethod(AlphaAnalyticsDataClient.report_task_path) + parse_report_task_path = staticmethod(AlphaAnalyticsDataClient.parse_report_task_path) + common_billing_account_path = staticmethod(AlphaAnalyticsDataClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(AlphaAnalyticsDataClient.parse_common_billing_account_path) + common_folder_path = staticmethod(AlphaAnalyticsDataClient.common_folder_path) + parse_common_folder_path = staticmethod(AlphaAnalyticsDataClient.parse_common_folder_path) + common_organization_path = staticmethod(AlphaAnalyticsDataClient.common_organization_path) + parse_common_organization_path = staticmethod(AlphaAnalyticsDataClient.parse_common_organization_path) + common_project_path = staticmethod(AlphaAnalyticsDataClient.common_project_path) + parse_common_project_path = staticmethod(AlphaAnalyticsDataClient.parse_common_project_path) + common_location_path = staticmethod(AlphaAnalyticsDataClient.common_location_path) + parse_common_location_path = staticmethod(AlphaAnalyticsDataClient.parse_common_location_path) 
+ + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AlphaAnalyticsDataAsyncClient: The constructed client. + """ + return AlphaAnalyticsDataClient.from_service_account_info.__func__(AlphaAnalyticsDataAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AlphaAnalyticsDataAsyncClient: The constructed client. + """ + return AlphaAnalyticsDataClient.from_service_account_file.__func__(AlphaAnalyticsDataAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return AlphaAnalyticsDataClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> AlphaAnalyticsDataTransport: + """Returns the transport used by the client instance. + + Returns: + AlphaAnalyticsDataTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = AlphaAnalyticsDataClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, AlphaAnalyticsDataTransport, Callable[..., AlphaAnalyticsDataTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the alpha analytics data async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,AlphaAnalyticsDataTransport,Callable[..., AlphaAnalyticsDataTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AlphaAnalyticsDataTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = AlphaAnalyticsDataClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def run_funnel_report(self, + request: Optional[Union[analytics_data_api.RunFunnelReportRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.RunFunnelReportResponse: + r"""Returns a customized funnel report of your Google Analytics + event data. The data returned from the API is as a table with + columns for the requested dimensions and metrics. + + Funnel exploration lets you visualize the steps your users take + to complete a task and quickly see how well they are succeeding + or failing at each step. For example, how do prospects become + shoppers and then become buyers? How do one time buyers become + repeat buyers? 
With this information, you can improve + inefficient or abandoned customer journeys. To learn more, see + `GA4 Funnel + Explorations `__. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Data API Funnel Reporting + Feedback `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_run_funnel_report(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.RunFunnelReportRequest( + ) + + # Make the request + response = await client.run_funnel_report(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.RunFunnelReportRequest, dict]]): + The request object. The request for a funnel report. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.RunFunnelReportResponse: + The funnel report response contains + two sub reports. The two sub reports are + different combinations of dimensions and + metrics. + + """ + # Create or coerce a protobuf request object. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.RunFunnelReportRequest): + request = analytics_data_api.RunFunnelReportRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.run_funnel_report] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("property", request.property), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_audience_list(self, + request: Optional[Union[analytics_data_api.CreateAudienceListRequest, dict]] = None, + *, + parent: Optional[str] = None, + audience_list: Optional[analytics_data_api.AudienceList] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an audience list for later retrieval. This method + quickly returns the audience list's resource name and initiates + a long running asynchronous request to form an audience list. To + list the users in an audience list, first create the audience + list through this method and then send the audience resource + name to the ``QueryAudienceList`` method. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + An audience list is a snapshot of the users currently in the + audience at the time of audience list creation. 
Creating + audience lists for one audience on different days will return + different results as users enter and exit the audience. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + Audience lists contain the users in each audience. + + This method is available at beta stability at + `audienceExports.create `__. + To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_create_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + audience_list = data_v1alpha.AudienceList() + audience_list.audience = "audience_value" + + request = data_v1alpha.CreateAudienceListRequest( + parent="parent_value", + audience_list=audience_list, + ) + + # Make the request + operation = client.create_audience_list(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.CreateAudienceListRequest, dict]]): + The request object. A request to create a new audience + list. + parent (:class:`str`): + Required. The parent resource where this audience list + will be created. 
Format: ``properties/{property}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + audience_list (:class:`google.analytics.data_v1alpha.types.AudienceList`): + Required. The audience list to + create. + + This corresponds to the ``audience_list`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.analytics.data_v1alpha.types.AudienceList` An audience list is a list of users in an audience at the time of the list's + creation. One audience may have multiple audience + lists created for different days. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, audience_list]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.CreateAudienceListRequest): + request = analytics_data_api.CreateAudienceListRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if audience_list is not None: + request.audience_list = audience_list + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_audience_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + analytics_data_api.AudienceList, + metadata_type=analytics_data_api.AudienceListMetadata, + ) + + # Done; return the response. + return response + + async def query_audience_list(self, + request: Optional[Union[analytics_data_api.QueryAudienceListRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.QueryAudienceListResponse: + r"""Retrieves an audience list of users. After creating an audience, + the users are not immediately available for listing. First, a + request to ``CreateAudienceList`` is necessary to create an + audience list of users, and then second, this method is used to + retrieve the users in the audience list. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. 
+ + This method is available at beta stability at + `audienceExports.query `__. + To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_query_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.QueryAudienceListRequest( + name="name_value", + ) + + # Make the request + response = await client.query_audience_list(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.QueryAudienceListRequest, dict]]): + The request object. A request to list users in an + audience list. + name (:class:`str`): + Required. The name of the audience list to retrieve + users from. Format: + ``properties/{property}/audienceLists/{audience_list}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.QueryAudienceListResponse: + A list of users in an audience list. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.QueryAudienceListRequest): + request = analytics_data_api.QueryAudienceListRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.query_audience_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def sheet_export_audience_list(self, + request: Optional[Union[analytics_data_api.SheetExportAudienceListRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.SheetExportAudienceListResponse: + r"""Exports an audience list of users to a Google Sheet. After + creating an audience, the users are not immediately available + for listing. 
First, a request to ``CreateAudienceList`` is + necessary to create an audience list of users, and then second, + this method is used to export those users in the audience list + to a Google Sheet. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_sheet_export_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.SheetExportAudienceListRequest( + name="name_value", + ) + + # Make the request + response = await client.sheet_export_audience_list(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.SheetExportAudienceListRequest, dict]]): + The request object. A request to export users in an + audience list to a Google Sheet. + name (:class:`str`): + Required. The name of the audience list to retrieve + users from. 
Format: + ``properties/{property}/audienceLists/{audience_list}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.SheetExportAudienceListResponse: + The created Google Sheet with the + list of users in an audience list. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.SheetExportAudienceListRequest): + request = analytics_data_api.SheetExportAudienceListRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.sheet_export_audience_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_audience_list(self, + request: Optional[Union[analytics_data_api.GetAudienceListRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.AudienceList: + r"""Gets configuration metadata about a specific audience list. This + method can be used to understand an audience list after it has + been created. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + This method is available at beta stability at + `audienceExports.get `__. + To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_get_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.GetAudienceListRequest( + name="name_value", + ) + + # Make the request + response = await client.get_audience_list(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.GetAudienceListRequest, dict]]): + The request object. A request to retrieve configuration + metadata about a specific audience list. + name (:class:`str`): + Required. 
The audience list resource name. Format: + ``properties/{property}/audienceLists/{audience_list}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.AudienceList: + An audience list is a list of users + in an audience at the time of the list's + creation. One audience may have multiple + audience lists created for different + days. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetAudienceListRequest): + request = analytics_data_api.GetAudienceListRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_audience_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_audience_lists(self, + request: Optional[Union[analytics_data_api.ListAudienceListsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAudienceListsAsyncPager: + r"""Lists all audience lists for a property. This method can be used + for you to find and reuse existing audience lists rather than + creating unnecessary new audience lists. The same audience can + have multiple audience lists that represent the list of users + that were in an audience on different days. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + This method is available at beta stability at + `audienceExports.list `__. + To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_list_audience_lists(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.ListAudienceListsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_audience_lists(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.ListAudienceListsRequest, dict]]): + The request object. A request to list all audience lists + for a property. + parent (:class:`str`): + Required. All audience lists for this property will be + listed in the response. Format: + ``properties/{property}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.services.alpha_analytics_data.pagers.ListAudienceListsAsyncPager: + A list of all audience lists for a + property. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.ListAudienceListsRequest): + request = analytics_data_api.ListAudienceListsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_audience_lists] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAudienceListsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_recurring_audience_list(self, + request: Optional[Union[analytics_data_api.CreateRecurringAudienceListRequest, dict]] = None, + *, + parent: Optional[str] = None, + recurring_audience_list: Optional[analytics_data_api.RecurringAudienceList] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.RecurringAudienceList: + r"""Creates a recurring audience list. Recurring audience lists + produces new audience lists each day. Audience lists are users + in an audience at the time of the list's creation. + + A recurring audience list ensures that you have audience list + based on the most recent data available for use each day. If you + manually create audience list, you don't know when an audience + list based on an additional day's data is available. This + recurring audience list automates the creation of an audience + list when an additional day's data is available. You will + consume fewer quota tokens by using recurring audience list + versus manually creating audience list at various times of day + trying to guess when an additional day's data is ready. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_create_recurring_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + recurring_audience_list = data_v1alpha.RecurringAudienceList() + recurring_audience_list.audience = "audience_value" + + request = data_v1alpha.CreateRecurringAudienceListRequest( + parent="parent_value", + recurring_audience_list=recurring_audience_list, + ) + + # Make the request + response = await client.create_recurring_audience_list(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.CreateRecurringAudienceListRequest, dict]]): + The request object. A request to create a new recurring + audience list. + parent (:class:`str`): + Required. The parent resource where this recurring + audience list will be created. Format: + ``properties/{property}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + recurring_audience_list (:class:`google.analytics.data_v1alpha.types.RecurringAudienceList`): + Required. The recurring audience list + to create. + + This corresponds to the ``recurring_audience_list`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.RecurringAudienceList: + A recurring audience list produces + new audience lists each day. 
Audience + lists are users in an audience at the + time of the list's creation. A recurring + audience list ensures that you have + audience list based on the most recent + data available for use each day. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, recurring_audience_list]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.CreateRecurringAudienceListRequest): + request = analytics_data_api.CreateRecurringAudienceListRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if recurring_audience_list is not None: + request.recurring_audience_list = recurring_audience_list + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_recurring_audience_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_recurring_audience_list(self, + request: Optional[Union[analytics_data_api.GetRecurringAudienceListRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.RecurringAudienceList: + r"""Gets configuration metadata about a specific recurring audience + list. This method can be used to understand a recurring audience + list's state after it has been created. For example, a recurring + audience list resource will generate audience list instances for + each day, and this method can be used to get the resource name + of the most recent audience list instance. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_get_recurring_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.GetRecurringAudienceListRequest( + name="name_value", + ) + + # Make the request + response = await client.get_recurring_audience_list(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.GetRecurringAudienceListRequest, dict]]): + The request object. A request to retrieve configuration + metadata about a specific recurring + audience list. + name (:class:`str`): + Required. The recurring audience list resource name. + Format: + ``properties/{property}/recurringAudienceLists/{recurring_audience_list}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.RecurringAudienceList: + A recurring audience list produces + new audience lists each day. Audience + lists are users in an audience at the + time of the list's creation. A recurring + audience list ensures that you have + audience list based on the most recent + data available for use each day. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetRecurringAudienceListRequest): + request = analytics_data_api.GetRecurringAudienceListRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_recurring_audience_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_recurring_audience_lists(self, + request: Optional[Union[analytics_data_api.ListRecurringAudienceListsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRecurringAudienceListsAsyncPager: + r"""Lists all recurring audience lists for a property. This method + can be used for you to find and reuse existing recurring + audience lists rather than creating unnecessary new recurring + audience lists. 
The same audience can have multiple recurring + audience lists that represent different dimension combinations; + for example, just the dimension ``deviceId`` or both the + dimensions ``deviceId`` and ``userId``. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_list_recurring_audience_lists(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.ListRecurringAudienceListsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_recurring_audience_lists(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.ListRecurringAudienceListsRequest, dict]]): + The request object. A request to list all recurring + audience lists for a property. + parent (:class:`str`): + Required. All recurring audience lists for this property + will be listed in the response. Format: + ``properties/{property}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.services.alpha_analytics_data.pagers.ListRecurringAudienceListsAsyncPager: + A list of all recurring audience + lists for a property. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.ListRecurringAudienceListsRequest): + request = analytics_data_api.ListRecurringAudienceListsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_recurring_audience_lists] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListRecurringAudienceListsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_property_quotas_snapshot(self, + request: Optional[Union[analytics_data_api.GetPropertyQuotasSnapshotRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.PropertyQuotasSnapshot: + r"""Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = await client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest, dict]]): + The request object. A request to return the + PropertyQuotasSnapshot for a given + category. + name (:class:`str`): + Required. Quotas from this property will be listed in + the response. 
Format: + ``properties/{property}/propertyQuotasSnapshot`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.PropertyQuotasSnapshot: + Current state of all Property Quotas + organized by quota category. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetPropertyQuotasSnapshotRequest): + request = analytics_data_api.GetPropertyQuotasSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_property_quotas_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_report_task(self, + request: Optional[Union[analytics_data_api.CreateReportTaskRequest, dict]] = None, + *, + parent: Optional[str] = None, + report_task: Optional[analytics_data_api.ReportTask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Initiates the creation of a report task. This method + quickly returns a report task and initiates a long + running asynchronous request to form a customized report + of your Google Analytics event data. + + A report task will be retained and available for + querying for 72 hours after it has been created. + + A report task created by one user can be listed and + queried by all users who have access to the property. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_create_report_task(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.CreateReportTaskRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_report_task(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.CreateReportTaskRequest, dict]]): + The request object. A request to create a report task. + parent (:class:`str`): + Required. The parent resource where this report task + will be created. Format: ``properties/{propertyId}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + report_task (:class:`google.analytics.data_v1alpha.types.ReportTask`): + Required. The report task + configuration to create. + + This corresponds to the ``report_task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.analytics.data_v1alpha.types.ReportTask` + A specific report task configuration. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, report_task]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.CreateReportTaskRequest): + request = analytics_data_api.CreateReportTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if report_task is not None: + request.report_task = report_task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_report_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + analytics_data_api.ReportTask, + metadata_type=analytics_data_api.ReportTaskMetadata, + ) + + # Done; return the response. 
+ return response + + async def query_report_task(self, + request: Optional[Union[analytics_data_api.QueryReportTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.QueryReportTaskResponse: + r"""Retrieves a report task's content. After requesting the + ``CreateReportTask``, you are able to retrieve the report + content once the report is ACTIVE. This method will return an + error if the report task's state is not ``ACTIVE``. A query + response will return the tabular row & column values of the + report. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_query_report_task(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.QueryReportTaskRequest( + name="name_value", + ) + + # Make the request + response = await client.query_report_task(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.QueryReportTaskRequest, dict]]): + The request object. A request to fetch the report content + for a report task. + name (:class:`str`): + Required. The report source name. Format: + ``properties/{property}/reportTasks/{report}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.QueryReportTaskResponse: + The report content corresponding to a + report task. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.QueryReportTaskRequest): + request = analytics_data_api.QueryReportTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.query_report_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_report_task(self, + request: Optional[Union[analytics_data_api.GetReportTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.ReportTask: + r"""Gets report metadata about a specific report task. + After creating a report task, use this method to check + its processing state or inspect its report definition. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_get_report_task(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.GetReportTaskRequest( + name="name_value", + ) + + # Make the request + response = await client.get_report_task(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.GetReportTaskRequest, dict]]): + The request object. A request to retrieve configuration + metadata about a specific report task. + name (:class:`str`): + Required. The report task resource name. Format: + ``properties/{property}/reportTasks/{report_task}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.ReportTask: + A specific report task configuration. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetReportTaskRequest): + request = analytics_data_api.GetReportTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_report_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_report_tasks(self, + request: Optional[Union[analytics_data_api.ListReportTasksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListReportTasksAsyncPager: + r"""Lists all report tasks for a property. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_list_report_tasks(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.ListReportTasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_report_tasks(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.ListReportTasksRequest, dict]]): + The request object. A request to list all report tasks + for a property. + parent (:class:`str`): + Required. All report tasks for this property will be + listed in the response. Format: + ``properties/{property}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.analytics.data_v1alpha.services.alpha_analytics_data.pagers.ListReportTasksAsyncPager: + A list of all report tasks for a + property. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.ListReportTasksRequest): + request = analytics_data_api.ListReportTasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_report_tasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListReportTasksAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "AlphaAnalyticsDataAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "AlphaAnalyticsDataAsyncClient", +) diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py new file mode 100644 index 000000000000..7475ca12cac8 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py @@ -0,0 +1,2314 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.analytics.data_v1alpha import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.analytics.data_v1alpha.services.alpha_analytics_data import pagers +from google.analytics.data_v1alpha.types import analytics_data_api +from google.analytics.data_v1alpha.types import data +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import AlphaAnalyticsDataTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import AlphaAnalyticsDataGrpcTransport +from .transports.grpc_asyncio import AlphaAnalyticsDataGrpcAsyncIOTransport +from .transports.rest import AlphaAnalyticsDataRestTransport + + +class AlphaAnalyticsDataClientMeta(type): + """Metaclass for the AlphaAnalyticsData client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[AlphaAnalyticsDataTransport]]
+    _transport_registry["grpc"] = AlphaAnalyticsDataGrpcTransport
+    _transport_registry["grpc_asyncio"] = AlphaAnalyticsDataGrpcAsyncIOTransport
+    _transport_registry["rest"] = AlphaAnalyticsDataRestTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[AlphaAnalyticsDataTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class AlphaAnalyticsDataClient(metaclass=AlphaAnalyticsDataClientMeta):
+    """Google Analytics reporting data service."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+ DEFAULT_ENDPOINT = "analyticsdata.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "analyticsdata.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AlphaAnalyticsDataClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AlphaAnalyticsDataClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AlphaAnalyticsDataTransport: + """Returns the transport used by the client instance. + + Returns: + AlphaAnalyticsDataTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def audience_list_path(property: str,audience_list: str,) -> str:
+        """Returns a fully-qualified audience_list string."""
+        return "properties/{property}/audienceLists/{audience_list}".format(property=property, audience_list=audience_list, )
+
+    @staticmethod
+    def parse_audience_list_path(path: str) -> Dict[str,str]:
+        """Parses a audience_list path into its component segments."""
+        m = re.match(r"^properties/(?P<property>.+?)/audienceLists/(?P<audience_list>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def property_quotas_snapshot_path(property: str,) -> str:
+        """Returns a fully-qualified property_quotas_snapshot string."""
+        return "properties/{property}/propertyQuotasSnapshot".format(property=property, )
+
+    @staticmethod
+    def parse_property_quotas_snapshot_path(path: str) -> Dict[str,str]:
+        """Parses a property_quotas_snapshot path into its component segments."""
+        m = re.match(r"^properties/(?P<property>.+?)/propertyQuotasSnapshot$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def recurring_audience_list_path(property: str,recurring_audience_list: str,) -> str:
+        """Returns a fully-qualified recurring_audience_list string."""
+        return "properties/{property}/recurringAudienceLists/{recurring_audience_list}".format(property=property, recurring_audience_list=recurring_audience_list, )
+
+    @staticmethod
+    def parse_recurring_audience_list_path(path: str) -> Dict[str,str]:
+        """Parses a recurring_audience_list path into its component segments."""
+        m = re.match(r"^properties/(?P<property>.+?)/recurringAudienceLists/(?P<recurring_audience_list>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def report_task_path(property: str,report_task: str,) -> str:
+        """Returns a fully-qualified report_task string."""
+        return "properties/{property}/reportTasks/{report_task}".format(property=property, report_task=report_task, )
+
+    @staticmethod
+    def parse_report_task_path(path: str) -> Dict[str,str]:
+        """Parses a report_task
 path into its component segments."""
+        m = re.match(r"^properties/(?P<property>.+?)/reportTasks/(?P<report_task>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return
 "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` if provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated.
Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. 
+ """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = AlphaAnalyticsDataClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = AlphaAnalyticsDataClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = AlphaAnalyticsDataClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = AlphaAnalyticsDataClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. 
    @property
    def api_endpoint(self):
        """Return the API endpoint used by the client instance.

        Returns:
            str: The API endpoint used by the client instance.
        """
        return self._api_endpoint

    @property
    def universe_domain(self) -> str:
        """Return the universe domain used by the client instance.

        Returns:
            str: The universe domain used by the client instance.
        """
        return self._universe_domain

    def __init__(self, *,
            credentials: Optional[ga_credentials.Credentials] = None,
            transport: Optional[Union[str, AlphaAnalyticsDataTransport, Callable[..., AlphaAnalyticsDataTransport]]] = None,
            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiates the alpha analytics data client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Optional[Union[str,AlphaAnalyticsDataTransport,Callable[..., AlphaAnalyticsDataTransport]]]):
                The transport to use, or a Callable that constructs and returns a new transport.
                If a Callable is given, it will be called with the same set of initialization
                arguments as used in the AlphaAnalyticsDataTransport constructor.
                If set to None, a transport is chosen automatically.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
                Custom options for the client.

                1. The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client when ``transport`` is
                not explicitly provided. Only if this property is not set and
                ``transport`` was not explicitly provided, the endpoint is
                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
                variable, which have one of the following values:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto-switch to the
                default mTLS endpoint if client certificate is present; this is
                the default value).

                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide a client certificate for mTLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.

                3. The ``universe_domain`` property can be used to override the
                default "googleapis.com" universe. Note that the ``api_endpoint``
                property still takes precedence; and ``universe_domain`` is
                currently not supported for mTLS.

            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # Accept either a ClientOptions instance, a plain dict, or None;
        # normalize all three to a ClientOptions instance.
        self._client_options = client_options
        if isinstance(self._client_options, dict):
            self._client_options = client_options_lib.from_dict(self._client_options)
        if self._client_options is None:
            self._client_options = client_options_lib.ClientOptions()
        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)

        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)

        # Environment configuration is read once here and combined with the
        # client options to derive the cert source and universe domain.
        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = AlphaAnalyticsDataClient._read_environment_variables()
        self._client_cert_source = AlphaAnalyticsDataClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
        self._universe_domain = AlphaAnalyticsDataClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
        self._api_endpoint = None # updated below, depending on `transport`

        # Initialize the universe domain validation.
        self._is_universe_domain_valid = False

        # An API key (from client options) and explicit credentials are two
        # competing auth mechanisms; refuse ambiguous configuration.
        api_key_value = getattr(self._client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError("client_options.api_key and credentials are mutually exclusive")

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        transport_provided = isinstance(transport, AlphaAnalyticsDataTransport)
        if transport_provided:
            # transport is a AlphaAnalyticsDataTransport instance.
            # A ready-made transport already carries its own credentials and
            # scopes; supplying them again here would be ignored, so reject it.
            if credentials or self._client_options.credentials_file or api_key_value:
                raise ValueError("When providing a transport instance, "
                                 "provide its credentials directly.")
            if self._client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = cast(AlphaAnalyticsDataTransport, transport)
            self._api_endpoint = self._transport.host

        # The host of an explicitly-provided transport takes precedence;
        # otherwise resolve the endpoint from options, cert source, universe
        # domain and the mTLS policy.
        self._api_endpoint = (self._api_endpoint or
            AlphaAnalyticsDataClient._get_api_endpoint(
                self._client_options.api_endpoint,
                self._client_cert_source,
                self._universe_domain,
                self._use_mtls_endpoint))

        if not transport_provided:
            import google.auth._default  # type: ignore

            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
                credentials = google.auth._default.get_api_key_credentials(api_key_value)

            # `transport` may be a transport class name (str), None (pick the
            # default class), or a callable that builds a transport.
            transport_init: Union[Type[AlphaAnalyticsDataTransport], Callable[..., AlphaAnalyticsDataTransport]] = (
                AlphaAnalyticsDataClient.get_transport_class(transport)
                if isinstance(transport, str) or transport is None
                else cast(Callable[..., AlphaAnalyticsDataTransport], transport)
            )
            # initialize with the provided callable or the passed in class
            self._transport = transport_init(
                credentials=credentials,
                credentials_file=self._client_options.credentials_file,
                host=self._api_endpoint,
                scopes=self._client_options.scopes,
                client_cert_source_for_mtls=self._client_cert_source,
                quota_project_id=self._client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=self._client_options.api_audience,
            )
For example, how do prospects become + shoppers and then become buyers? How do one time buyers become + repeat buyers? With this information, you can improve + inefficient or abandoned customer journeys. To learn more, see + `GA4 Funnel + Explorations `__. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Data API Funnel Reporting + Feedback `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_run_funnel_report(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.RunFunnelReportRequest( + ) + + # Make the request + response = client.run_funnel_report(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.RunFunnelReportRequest, dict]): + The request object. The request for a funnel report. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.RunFunnelReportResponse: + The funnel report response contains + two sub reports. The two sub reports are + different combinations of dimensions and + metrics. + + """ + # Create or coerce a protobuf request object. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.RunFunnelReportRequest): + request = analytics_data_api.RunFunnelReportRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_funnel_report] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("property", request.property), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_audience_list(self, + request: Optional[Union[analytics_data_api.CreateAudienceListRequest, dict]] = None, + *, + parent: Optional[str] = None, + audience_list: Optional[analytics_data_api.AudienceList] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates an audience list for later retrieval. This method + quickly returns the audience list's resource name and initiates + a long running asynchronous request to form an audience list. To + list the users in an audience list, first create the audience + list through this method and then send the audience resource + name to the ``QueryAudienceList`` method. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + An audience list is a snapshot of the users currently in the + audience at the time of audience list creation. 
Creating + audience lists for one audience on different days will return + different results as users enter and exit the audience. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + Audience lists contain the users in each audience. + + This method is available at beta stability at + `audienceExports.create `__. + To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_create_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + audience_list = data_v1alpha.AudienceList() + audience_list.audience = "audience_value" + + request = data_v1alpha.CreateAudienceListRequest( + parent="parent_value", + audience_list=audience_list, + ) + + # Make the request + operation = client.create_audience_list(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.CreateAudienceListRequest, dict]): + The request object. A request to create a new audience + list. + parent (str): + Required. The parent resource where this audience list + will be created. 
Format: ``properties/{property}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + audience_list (google.analytics.data_v1alpha.types.AudienceList): + Required. The audience list to + create. + + This corresponds to the ``audience_list`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.analytics.data_v1alpha.types.AudienceList` An audience list is a list of users in an audience at the time of the list's + creation. One audience may have multiple audience + lists created for different days. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, audience_list]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.CreateAudienceListRequest): + request = analytics_data_api.CreateAudienceListRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if audience_list is not None: + request.audience_list = audience_list + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_audience_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + analytics_data_api.AudienceList, + metadata_type=analytics_data_api.AudienceListMetadata, + ) + + # Done; return the response. + return response + + def query_audience_list(self, + request: Optional[Union[analytics_data_api.QueryAudienceListRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.QueryAudienceListResponse: + r"""Retrieves an audience list of users. After creating an audience, + the users are not immediately available for listing. First, a + request to ``CreateAudienceList`` is necessary to create an + audience list of users, and then second, this method is used to + retrieve the users in the audience list. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. 
+ + This method is available at beta stability at + `audienceExports.query `__. + To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_query_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.QueryAudienceListRequest( + name="name_value", + ) + + # Make the request + response = client.query_audience_list(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.QueryAudienceListRequest, dict]): + The request object. A request to list users in an + audience list. + name (str): + Required. The name of the audience list to retrieve + users from. Format: + ``properties/{property}/audienceLists/{audience_list}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.QueryAudienceListResponse: + A list of users in an audience list. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.QueryAudienceListRequest): + request = analytics_data_api.QueryAudienceListRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.query_audience_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def sheet_export_audience_list(self, + request: Optional[Union[analytics_data_api.SheetExportAudienceListRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.SheetExportAudienceListResponse: + r"""Exports an audience list of users to a Google Sheet. After + creating an audience, the users are not immediately available + for listing. First, a request to ``CreateAudienceList`` is + necessary to create an audience list of users, and then second, + this method is used to export those users in the audience list + to a Google Sheet. 
+ + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_sheet_export_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.SheetExportAudienceListRequest( + name="name_value", + ) + + # Make the request + response = client.sheet_export_audience_list(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.SheetExportAudienceListRequest, dict]): + The request object. A request to export users in an + audience list to a Google Sheet. + name (str): + Required. The name of the audience list to retrieve + users from. Format: + ``properties/{property}/audienceLists/{audience_list}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.SheetExportAudienceListResponse: + The created Google Sheet with the + list of users in an audience list. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.SheetExportAudienceListRequest): + request = analytics_data_api.SheetExportAudienceListRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.sheet_export_audience_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_audience_list(self, + request: Optional[Union[analytics_data_api.GetAudienceListRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.AudienceList: + r"""Gets configuration metadata about a specific audience list. This + method can be used to understand an audience list after it has + been created. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + This method is available at beta stability at + `audienceExports.get `__. + To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_get_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.GetAudienceListRequest( + name="name_value", + ) + + # Make the request + response = client.get_audience_list(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.GetAudienceListRequest, dict]): + The request object. A request to retrieve configuration + metadata about a specific audience list. + name (str): + Required. The audience list resource name. 
Format: + ``properties/{property}/audienceLists/{audience_list}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.AudienceList: + An audience list is a list of users + in an audience at the time of the list's + creation. One audience may have multiple + audience lists created for different + days. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetAudienceListRequest): + request = analytics_data_api.GetAudienceListRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_audience_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_audience_lists(self, + request: Optional[Union[analytics_data_api.ListAudienceListsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAudienceListsPager: + r"""Lists all audience lists for a property. This method can be used + for you to find and reuse existing audience lists rather than + creating unnecessary new audience lists. The same audience can + have multiple audience lists that represent the list of users + that were in an audience on different days. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + This method is available at beta stability at + `audienceExports.list `__. + To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_list_audience_lists(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.ListAudienceListsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_audience_lists(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.ListAudienceListsRequest, dict]): + The request object. A request to list all audience lists + for a property. + parent (str): + Required. All audience lists for this property will be + listed in the response. Format: + ``properties/{property}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.services.alpha_analytics_data.pagers.ListAudienceListsPager: + A list of all audience lists for a + property. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.ListAudienceListsRequest): + request = analytics_data_api.ListAudienceListsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_audience_lists] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAudienceListsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_recurring_audience_list(self, + request: Optional[Union[analytics_data_api.CreateRecurringAudienceListRequest, dict]] = None, + *, + parent: Optional[str] = None, + recurring_audience_list: Optional[analytics_data_api.RecurringAudienceList] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.RecurringAudienceList: + r"""Creates a recurring audience list. Recurring audience lists + produces new audience lists each day. Audience lists are users + in an audience at the time of the list's creation. + + A recurring audience list ensures that you have audience list + based on the most recent data available for use each day. If you + manually create audience list, you don't know when an audience + list based on an additional day's data is available. This + recurring audience list automates the creation of an audience + list when an additional day's data is available. You will + consume fewer quota tokens by using recurring audience list + versus manually creating audience list at various times of day + trying to guess when an additional day's data is ready. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_create_recurring_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + recurring_audience_list = data_v1alpha.RecurringAudienceList() + recurring_audience_list.audience = "audience_value" + + request = data_v1alpha.CreateRecurringAudienceListRequest( + parent="parent_value", + recurring_audience_list=recurring_audience_list, + ) + + # Make the request + response = client.create_recurring_audience_list(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.CreateRecurringAudienceListRequest, dict]): + The request object. A request to create a new recurring + audience list. + parent (str): + Required. The parent resource where this recurring + audience list will be created. Format: + ``properties/{property}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + recurring_audience_list (google.analytics.data_v1alpha.types.RecurringAudienceList): + Required. The recurring audience list + to create. + + This corresponds to the ``recurring_audience_list`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.RecurringAudienceList: + A recurring audience list produces + new audience lists each day. Audience + lists are users in an audience at the + time of the list's creation. 
A recurring + audience list ensures that you have + audience list based on the most recent + data available for use each day. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, recurring_audience_list]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.CreateRecurringAudienceListRequest): + request = analytics_data_api.CreateRecurringAudienceListRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if recurring_audience_list is not None: + request.recurring_audience_list = recurring_audience_list + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_recurring_audience_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_recurring_audience_list(self, + request: Optional[Union[analytics_data_api.GetRecurringAudienceListRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.RecurringAudienceList: + r"""Gets configuration metadata about a specific recurring audience + list. This method can be used to understand a recurring audience + list's state after it has been created. For example, a recurring + audience list resource will generate audience list instances for + each day, and this method can be used to get the resource name + of the most recent audience list instance. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_get_recurring_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.GetRecurringAudienceListRequest( + name="name_value", + ) + + # Make the request + response = client.get_recurring_audience_list(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.GetRecurringAudienceListRequest, dict]): + The request object. 
A request to retrieve configuration + metadata about a specific recurring + audience list. + name (str): + Required. The recurring audience list resource name. + Format: + ``properties/{property}/recurringAudienceLists/{recurring_audience_list}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.RecurringAudienceList: + A recurring audience list produces + new audience lists each day. Audience + lists are users in an audience at the + time of the list's creation. A recurring + audience list ensures that you have + audience list based on the most recent + data available for use each day. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetRecurringAudienceListRequest): + request = analytics_data_api.GetRecurringAudienceListRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_recurring_audience_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_recurring_audience_lists(self, + request: Optional[Union[analytics_data_api.ListRecurringAudienceListsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRecurringAudienceListsPager: + r"""Lists all recurring audience lists for a property. This method + can be used for you to find and reuse existing recurring + audience lists rather than creating unnecessary new recurring + audience lists. The same audience can have multiple recurring + audience lists that represent different dimension combinations; + for example, just the dimension ``deviceId`` or both the + dimensions ``deviceId`` and ``userId``. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_list_recurring_audience_lists(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.ListRecurringAudienceListsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_recurring_audience_lists(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.ListRecurringAudienceListsRequest, dict]): + The request object. A request to list all recurring + audience lists for a property. + parent (str): + Required. All recurring audience lists for this property + will be listed in the response. Format: + ``properties/{property}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.services.alpha_analytics_data.pagers.ListRecurringAudienceListsPager: + A list of all recurring audience + lists for a property. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.ListRecurringAudienceListsRequest): + request = analytics_data_api.ListRecurringAudienceListsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_recurring_audience_lists] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListRecurringAudienceListsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_property_quotas_snapshot(self, + request: Optional[Union[analytics_data_api.GetPropertyQuotasSnapshotRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.PropertyQuotasSnapshot: + r"""Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest, dict]): + The request object. A request to return the + PropertyQuotasSnapshot for a given + category. + name (str): + Required. Quotas from this property will be listed in + the response. Format: + ``properties/{property}/propertyQuotasSnapshot`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.PropertyQuotasSnapshot: + Current state of all Property Quotas + organized by quota category. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetPropertyQuotasSnapshotRequest): + request = analytics_data_api.GetPropertyQuotasSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_property_quotas_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_report_task(self, + request: Optional[Union[analytics_data_api.CreateReportTaskRequest, dict]] = None, + *, + parent: Optional[str] = None, + report_task: Optional[analytics_data_api.ReportTask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Initiates the creation of a report task. This method + quickly returns a report task and initiates a long + running asynchronous request to form a customized report + of your Google Analytics event data. + + A report task will be retained and available for + querying for 72 hours after it has been created. + + A report task created by one user can be listed and + queried by all users who have access to the property. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_create_report_task(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.CreateReportTaskRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_report_task(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.CreateReportTaskRequest, dict]): + The request object. A request to create a report task. + parent (str): + Required. The parent resource where this report task + will be created. 
Format: ``properties/{propertyId}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + report_task (google.analytics.data_v1alpha.types.ReportTask): + Required. The report task + configuration to create. + + This corresponds to the ``report_task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.analytics.data_v1alpha.types.ReportTask` + A specific report task configuration. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, report_task]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.CreateReportTaskRequest): + request = analytics_data_api.CreateReportTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if report_task is not None: + request.report_task = report_task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_report_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + analytics_data_api.ReportTask, + metadata_type=analytics_data_api.ReportTaskMetadata, + ) + + # Done; return the response. + return response + + def query_report_task(self, + request: Optional[Union[analytics_data_api.QueryReportTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.QueryReportTaskResponse: + r"""Retrieves a report task's content. After requesting the + ``CreateReportTask``, you are able to retrieve the report + content once the report is ACTIVE. This method will return an + error if the report task's state is not ``ACTIVE``. A query + response will return the tabular row & column values of the + report. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_query_report_task(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.QueryReportTaskRequest( + name="name_value", + ) + + # Make the request + response = client.query_report_task(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.QueryReportTaskRequest, dict]): + The request object. A request to fetch the report content + for a report task. + name (str): + Required. The report source name. Format: + ``properties/{property}/reportTasks/{report}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.QueryReportTaskResponse: + The report content corresponding to a + report task. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, analytics_data_api.QueryReportTaskRequest): + request = analytics_data_api.QueryReportTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.query_report_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_report_task(self, + request: Optional[Union[analytics_data_api.GetReportTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.ReportTask: + r"""Gets report metadata about a specific report task. + After creating a report task, use this method to check + its processing state or inspect its report definition. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_get_report_task(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.GetReportTaskRequest( + name="name_value", + ) + + # Make the request + response = client.get_report_task(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.GetReportTaskRequest, dict]): + The request object. A request to retrieve configuration + metadata about a specific report task. + name (str): + Required. The report task resource name. Format: + ``properties/{property}/reportTasks/{report_task}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.ReportTask: + A specific report task configuration. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, analytics_data_api.GetReportTaskRequest): + request = analytics_data_api.GetReportTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_report_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_report_tasks(self, + request: Optional[Union[analytics_data_api.ListReportTasksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListReportTasksPager: + r"""Lists all report tasks for a property. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_list_report_tasks(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.ListReportTasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_report_tasks(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.ListReportTasksRequest, dict]): + The request object. A request to list all report tasks + for a property. + parent (str): + Required. All report tasks for this property will be + listed in the response. Format: + ``properties/{property}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.services.alpha_analytics_data.pagers.ListReportTasksPager: + A list of all report tasks for a + property. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.ListReportTasksRequest): + request = analytics_data_api.ListReportTasksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_report_tasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListReportTasksPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "AlphaAnalyticsDataClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "AlphaAnalyticsDataClient", +) diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/pagers.py b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/pagers.py new file mode 100644 index 000000000000..879c1af9a6c6 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/pagers.py @@ -0,0 +1,432 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.analytics.data_v1alpha.types import analytics_data_api + + +class ListAudienceListsPager: + """A pager for iterating through ``list_audience_lists`` requests. + + This class thinly wraps an initial + :class:`google.analytics.data_v1alpha.types.ListAudienceListsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``audience_lists`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAudienceLists`` requests and continue to iterate + through the ``audience_lists`` field on the + corresponding responses. + + All the usual :class:`google.analytics.data_v1alpha.types.ListAudienceListsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., analytics_data_api.ListAudienceListsResponse], + request: analytics_data_api.ListAudienceListsRequest, + response: analytics_data_api.ListAudienceListsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.analytics.data_v1alpha.types.ListAudienceListsRequest): + The initial request object. + response (google.analytics.data_v1alpha.types.ListAudienceListsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = analytics_data_api.ListAudienceListsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[analytics_data_api.ListAudienceListsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[analytics_data_api.AudienceList]: + for page in self.pages: + yield from page.audience_lists + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListAudienceListsAsyncPager: + """A pager for iterating through ``list_audience_lists`` requests. + + This class thinly wraps an initial + :class:`google.analytics.data_v1alpha.types.ListAudienceListsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``audience_lists`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAudienceLists`` requests and continue to iterate + through the ``audience_lists`` field on the + corresponding responses. + + All the usual :class:`google.analytics.data_v1alpha.types.ListAudienceListsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[analytics_data_api.ListAudienceListsResponse]], + request: analytics_data_api.ListAudienceListsRequest, + response: analytics_data_api.ListAudienceListsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.analytics.data_v1alpha.types.ListAudienceListsRequest): + The initial request object. + response (google.analytics.data_v1alpha.types.ListAudienceListsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = analytics_data_api.ListAudienceListsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[analytics_data_api.ListAudienceListsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[analytics_data_api.AudienceList]: + async def async_generator(): + async for page in self.pages: + for response in page.audience_lists: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListRecurringAudienceListsPager: + """A pager for iterating through ``list_recurring_audience_lists`` requests. + + This class thinly wraps an initial + :class:`google.analytics.data_v1alpha.types.ListRecurringAudienceListsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``recurring_audience_lists`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListRecurringAudienceLists`` requests and continue to iterate + through the ``recurring_audience_lists`` field on the + corresponding responses. + + All the usual :class:`google.analytics.data_v1alpha.types.ListRecurringAudienceListsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., analytics_data_api.ListRecurringAudienceListsResponse], + request: analytics_data_api.ListRecurringAudienceListsRequest, + response: analytics_data_api.ListRecurringAudienceListsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.analytics.data_v1alpha.types.ListRecurringAudienceListsRequest): + The initial request object. + response (google.analytics.data_v1alpha.types.ListRecurringAudienceListsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = analytics_data_api.ListRecurringAudienceListsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[analytics_data_api.ListRecurringAudienceListsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[analytics_data_api.RecurringAudienceList]: + for page in self.pages: + yield from page.recurring_audience_lists + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListRecurringAudienceListsAsyncPager: + """A pager for iterating through ``list_recurring_audience_lists`` requests. + + This class thinly wraps an initial + :class:`google.analytics.data_v1alpha.types.ListRecurringAudienceListsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``recurring_audience_lists`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListRecurringAudienceLists`` requests and continue to iterate + through the ``recurring_audience_lists`` field on the + corresponding responses. + + All the usual :class:`google.analytics.data_v1alpha.types.ListRecurringAudienceListsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[analytics_data_api.ListRecurringAudienceListsResponse]], + request: analytics_data_api.ListRecurringAudienceListsRequest, + response: analytics_data_api.ListRecurringAudienceListsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.analytics.data_v1alpha.types.ListRecurringAudienceListsRequest): + The initial request object. + response (google.analytics.data_v1alpha.types.ListRecurringAudienceListsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = analytics_data_api.ListRecurringAudienceListsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[analytics_data_api.ListRecurringAudienceListsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[analytics_data_api.RecurringAudienceList]: + async def async_generator(): + async for page in self.pages: + for response in page.recurring_audience_lists: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListReportTasksPager: + """A pager for iterating through ``list_report_tasks`` requests. + + This class thinly wraps an initial + :class:`google.analytics.data_v1alpha.types.ListReportTasksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``report_tasks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListReportTasks`` requests and continue to iterate + through the ``report_tasks`` field on the + corresponding responses. + + All the usual :class:`google.analytics.data_v1alpha.types.ListReportTasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., analytics_data_api.ListReportTasksResponse], + request: analytics_data_api.ListReportTasksRequest, + response: analytics_data_api.ListReportTasksResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.analytics.data_v1alpha.types.ListReportTasksRequest): + The initial request object. + response (google.analytics.data_v1alpha.types.ListReportTasksResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = analytics_data_api.ListReportTasksRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[analytics_data_api.ListReportTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[analytics_data_api.ReportTask]: + for page in self.pages: + yield from page.report_tasks + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListReportTasksAsyncPager: + """A pager for iterating through ``list_report_tasks`` requests. 
+ + This class thinly wraps an initial + :class:`google.analytics.data_v1alpha.types.ListReportTasksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``report_tasks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListReportTasks`` requests and continue to iterate + through the ``report_tasks`` field on the + corresponding responses. + + All the usual :class:`google.analytics.data_v1alpha.types.ListReportTasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[analytics_data_api.ListReportTasksResponse]], + request: analytics_data_api.ListReportTasksRequest, + response: analytics_data_api.ListReportTasksResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.analytics.data_v1alpha.types.ListReportTasksRequest): + The initial request object. + response (google.analytics.data_v1alpha.types.ListReportTasksResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = analytics_data_api.ListReportTasksRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[analytics_data_api.ListReportTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[analytics_data_api.ReportTask]: + async def async_generator(): + async for page in self.pages: + for response in page.report_tasks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/README.rst b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/README.rst new file mode 100644 index 000000000000..aada6b24865f --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`AlphaAnalyticsDataTransport` is the ABC for all transports. +- public child `AlphaAnalyticsDataGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `AlphaAnalyticsDataGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseAlphaAnalyticsDataRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). 
+- public child `AlphaAnalyticsDataRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/__init__.py b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/__init__.py new file mode 100644 index 000000000000..73e9224a3be5 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AlphaAnalyticsDataTransport +from .grpc import AlphaAnalyticsDataGrpcTransport +from .grpc_asyncio import AlphaAnalyticsDataGrpcAsyncIOTransport +from .rest import AlphaAnalyticsDataRestTransport +from .rest import AlphaAnalyticsDataRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[AlphaAnalyticsDataTransport]] +_transport_registry['grpc'] = AlphaAnalyticsDataGrpcTransport +_transport_registry['grpc_asyncio'] = AlphaAnalyticsDataGrpcAsyncIOTransport +_transport_registry['rest'] = AlphaAnalyticsDataRestTransport + +__all__ = ( + 'AlphaAnalyticsDataTransport', + 'AlphaAnalyticsDataGrpcTransport', + 'AlphaAnalyticsDataGrpcAsyncIOTransport', + 'AlphaAnalyticsDataRestTransport', + 'AlphaAnalyticsDataRestInterceptor', +) diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py new file mode 100644 index 000000000000..e939d9c1266f --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py @@ -0,0 +1,347 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.analytics.data_v1alpha import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.analytics.data_v1alpha.types import analytics_data_api +from google.longrunning import operations_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class AlphaAnalyticsDataTransport(abc.ABC): + """Abstract transport class for AlphaAnalyticsData.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/analytics', + 'https://www.googleapis.com/auth/analytics.readonly', + 'https://www.googleapis.com/auth/drive', + 'https://www.googleapis.com/auth/drive.file', + 'https://www.googleapis.com/auth/spreadsheets', + ) + + DEFAULT_HOST: str = 'analyticsdata.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'analyticsdata.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.run_funnel_report: gapic_v1.method.wrap_method( + self.run_funnel_report, + default_timeout=60.0, + client_info=client_info, + ), + self.create_audience_list: gapic_v1.method.wrap_method( + self.create_audience_list, + default_timeout=None, + client_info=client_info, + ), + self.query_audience_list: gapic_v1.method.wrap_method( + self.query_audience_list, + default_timeout=None, + client_info=client_info, + ), + self.sheet_export_audience_list: gapic_v1.method.wrap_method( + self.sheet_export_audience_list, + default_timeout=None, + client_info=client_info, + ), + self.get_audience_list: gapic_v1.method.wrap_method( + self.get_audience_list, + default_timeout=None, + client_info=client_info, + ), + self.list_audience_lists: gapic_v1.method.wrap_method( + self.list_audience_lists, + default_timeout=None, + client_info=client_info, + ), + self.create_recurring_audience_list: gapic_v1.method.wrap_method( + self.create_recurring_audience_list, + default_timeout=None, + client_info=client_info, + ), + self.get_recurring_audience_list: gapic_v1.method.wrap_method( + self.get_recurring_audience_list, + default_timeout=None, + client_info=client_info, + ), + self.list_recurring_audience_lists: gapic_v1.method.wrap_method( + self.list_recurring_audience_lists, + default_timeout=None, + client_info=client_info, + ), + self.get_property_quotas_snapshot: gapic_v1.method.wrap_method( + 
self.get_property_quotas_snapshot, + default_timeout=None, + client_info=client_info, + ), + self.create_report_task: gapic_v1.method.wrap_method( + self.create_report_task, + default_timeout=None, + client_info=client_info, + ), + self.query_report_task: gapic_v1.method.wrap_method( + self.query_report_task, + default_timeout=None, + client_info=client_info, + ), + self.get_report_task: gapic_v1.method.wrap_method( + self.get_report_task, + default_timeout=None, + client_info=client_info, + ), + self.list_report_tasks: gapic_v1.method.wrap_method( + self.list_report_tasks, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def run_funnel_report(self) -> Callable[ + [analytics_data_api.RunFunnelReportRequest], + Union[ + analytics_data_api.RunFunnelReportResponse, + Awaitable[analytics_data_api.RunFunnelReportResponse] + ]]: + raise NotImplementedError() + + @property + def create_audience_list(self) -> Callable[ + [analytics_data_api.CreateAudienceListRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def query_audience_list(self) -> Callable[ + [analytics_data_api.QueryAudienceListRequest], + Union[ + analytics_data_api.QueryAudienceListResponse, + Awaitable[analytics_data_api.QueryAudienceListResponse] + ]]: + raise NotImplementedError() + + @property + def sheet_export_audience_list(self) -> Callable[ + [analytics_data_api.SheetExportAudienceListRequest], + Union[ + analytics_data_api.SheetExportAudienceListResponse, + 
            Awaitable[analytics_data_api.SheetExportAudienceListResponse]
        ]]:
        raise NotImplementedError()

    # NOTE: each property below is an abstract RPC hook.  This base class
    # raises NotImplementedError; concrete transports (e.g. the gRPC
    # transport defined later in this package) override the property to
    # return a channel-bound callable.  The Union return type covers both
    # the synchronous result and the Awaitable produced by async transports.
    @property
    def get_audience_list(self) -> Callable[
            [analytics_data_api.GetAudienceListRequest],
            Union[
                analytics_data_api.AudienceList,
                Awaitable[analytics_data_api.AudienceList]
            ]]:
        raise NotImplementedError()

    @property
    def list_audience_lists(self) -> Callable[
            [analytics_data_api.ListAudienceListsRequest],
            Union[
                analytics_data_api.ListAudienceListsResponse,
                Awaitable[analytics_data_api.ListAudienceListsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def create_recurring_audience_list(self) -> Callable[
            [analytics_data_api.CreateRecurringAudienceListRequest],
            Union[
                analytics_data_api.RecurringAudienceList,
                Awaitable[analytics_data_api.RecurringAudienceList]
            ]]:
        raise NotImplementedError()

    @property
    def get_recurring_audience_list(self) -> Callable[
            [analytics_data_api.GetRecurringAudienceListRequest],
            Union[
                analytics_data_api.RecurringAudienceList,
                Awaitable[analytics_data_api.RecurringAudienceList]
            ]]:
        raise NotImplementedError()

    @property
    def list_recurring_audience_lists(self) -> Callable[
            [analytics_data_api.ListRecurringAudienceListsRequest],
            Union[
                analytics_data_api.ListRecurringAudienceListsResponse,
                Awaitable[analytics_data_api.ListRecurringAudienceListsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def get_property_quotas_snapshot(self) -> Callable[
            [analytics_data_api.GetPropertyQuotasSnapshotRequest],
            Union[
                analytics_data_api.PropertyQuotasSnapshot,
                Awaitable[analytics_data_api.PropertyQuotasSnapshot]
            ]]:
        raise NotImplementedError()

    # create_report_task is a long-running operation, so the callable
    # resolves to a google.longrunning Operation rather than a typed
    # response message.
    @property
    def create_report_task(self) -> Callable[
            [analytics_data_api.CreateReportTaskRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def query_report_task(self) -> Callable[
[analytics_data_api.QueryReportTaskRequest], + Union[ + analytics_data_api.QueryReportTaskResponse, + Awaitable[analytics_data_api.QueryReportTaskResponse] + ]]: + raise NotImplementedError() + + @property + def get_report_task(self) -> Callable[ + [analytics_data_api.GetReportTaskRequest], + Union[ + analytics_data_api.ReportTask, + Awaitable[analytics_data_api.ReportTask] + ]]: + raise NotImplementedError() + + @property + def list_report_tasks(self) -> Callable[ + [analytics_data_api.ListReportTasksRequest], + Union[ + analytics_data_api.ListReportTasksResponse, + Awaitable[analytics_data_api.ListReportTasksResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'AlphaAnalyticsDataTransport', +) diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py new file mode 100644 index 000000000000..19fac9a3a7a4 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py @@ -0,0 +1,791 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.analytics.data_v1alpha.types import analytics_data_api +from google.longrunning import operations_pb2 # type: ignore +from .base import AlphaAnalyticsDataTransport, DEFAULT_CLIENT_INFO + + +class AlphaAnalyticsDataGrpcTransport(AlphaAnalyticsDataTransport): + """gRPC backend transport for AlphaAnalyticsData. + + Google Analytics reporting data service. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'analyticsdata.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'analyticsdata.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. 
It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'analyticsdata.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. 
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            grpc.Channel: A gRPC channel object.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
              and ``credentials_file`` are passed.
        """

        # Delegate channel construction to api_core's helper, which applies
        # the class-level default scopes and default host when the caller
        # supplies none.
        return grpc_helpers.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs
        )

    @property
    def grpc_channel(self) -> grpc.Channel:
        """Return the channel designed to connect to this service.
        """
        # Set in __init__: either the caller-supplied channel or the one
        # built through create_channel().
        return self._grpc_channel

    @property
    def operations_client(self) -> operations_v1.OperationsClient:
        """Create the client designed to process long-running operations.

        This property caches on the instance; repeated calls return the same
        client.
        """
        # Quick check: Only create a new client if we do not already have one.
        if self._operations_client is None:
            self._operations_client = operations_v1.OperationsClient(
                self.grpc_channel
            )

        # Return the client from cache.
+ return self._operations_client + + @property + def run_funnel_report(self) -> Callable[ + [analytics_data_api.RunFunnelReportRequest], + analytics_data_api.RunFunnelReportResponse]: + r"""Return a callable for the run funnel report method over gRPC. + + Returns a customized funnel report of your Google Analytics + event data. The data returned from the API is as a table with + columns for the requested dimensions and metrics. + + Funnel exploration lets you visualize the steps your users take + to complete a task and quickly see how well they are succeeding + or failing at each step. For example, how do prospects become + shoppers and then become buyers? How do one time buyers become + repeat buyers? With this information, you can improve + inefficient or abandoned customer journeys. To learn more, see + `GA4 Funnel + Explorations `__. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Data API Funnel Reporting + Feedback `__. + + Returns: + Callable[[~.RunFunnelReportRequest], + ~.RunFunnelReportResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'run_funnel_report' not in self._stubs: + self._stubs['run_funnel_report'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/RunFunnelReport', + request_serializer=analytics_data_api.RunFunnelReportRequest.serialize, + response_deserializer=analytics_data_api.RunFunnelReportResponse.deserialize, + ) + return self._stubs['run_funnel_report'] + + @property + def create_audience_list(self) -> Callable[ + [analytics_data_api.CreateAudienceListRequest], + operations_pb2.Operation]: + r"""Return a callable for the create audience list method over gRPC. + + Creates an audience list for later retrieval. This method + quickly returns the audience list's resource name and initiates + a long running asynchronous request to form an audience list. To + list the users in an audience list, first create the audience + list through this method and then send the audience resource + name to the ``QueryAudienceList`` method. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + An audience list is a snapshot of the users currently in the + audience at the time of audience list creation. Creating + audience lists for one audience on different days will return + different results as users enter and exit the audience. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + Audience lists contain the users in each audience. + + This method is available at beta stability at + `audienceExports.create `__. + To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + Returns: + Callable[[~.CreateAudienceListRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_audience_list' not in self._stubs: + self._stubs['create_audience_list'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/CreateAudienceList', + request_serializer=analytics_data_api.CreateAudienceListRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_audience_list'] + + @property + def query_audience_list(self) -> Callable[ + [analytics_data_api.QueryAudienceListRequest], + analytics_data_api.QueryAudienceListResponse]: + r"""Return a callable for the query audience list method over gRPC. + + Retrieves an audience list of users. After creating an audience, + the users are not immediately available for listing. First, a + request to ``CreateAudienceList`` is necessary to create an + audience list of users, and then second, this method is used to + retrieve the users in the audience list. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + + This method is available at beta stability at + `audienceExports.query `__. + To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + Returns: + Callable[[~.QueryAudienceListRequest], + ~.QueryAudienceListResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'query_audience_list' not in self._stubs: + self._stubs['query_audience_list'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/QueryAudienceList', + request_serializer=analytics_data_api.QueryAudienceListRequest.serialize, + response_deserializer=analytics_data_api.QueryAudienceListResponse.deserialize, + ) + return self._stubs['query_audience_list'] + + @property + def sheet_export_audience_list(self) -> Callable[ + [analytics_data_api.SheetExportAudienceListRequest], + analytics_data_api.SheetExportAudienceListResponse]: + r"""Return a callable for the sheet export audience list method over gRPC. + + Exports an audience list of users to a Google Sheet. After + creating an audience, the users are not immediately available + for listing. First, a request to ``CreateAudienceList`` is + necessary to create an audience list of users, and then second, + this method is used to export those users in the audience list + to a Google Sheet. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + Returns: + Callable[[~.SheetExportAudienceListRequest], + ~.SheetExportAudienceListResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'sheet_export_audience_list' not in self._stubs: + self._stubs['sheet_export_audience_list'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/SheetExportAudienceList', + request_serializer=analytics_data_api.SheetExportAudienceListRequest.serialize, + response_deserializer=analytics_data_api.SheetExportAudienceListResponse.deserialize, + ) + return self._stubs['sheet_export_audience_list'] + + @property + def get_audience_list(self) -> Callable[ + [analytics_data_api.GetAudienceListRequest], + analytics_data_api.AudienceList]: + r"""Return a callable for the get audience list method over gRPC. + + Gets configuration metadata about a specific audience list. This + method can be used to understand an audience list after it has + been created. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + This method is available at beta stability at + `audienceExports.get `__. + To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + Returns: + Callable[[~.GetAudienceListRequest], + ~.AudienceList]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_audience_list' not in self._stubs: + self._stubs['get_audience_list'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/GetAudienceList', + request_serializer=analytics_data_api.GetAudienceListRequest.serialize, + response_deserializer=analytics_data_api.AudienceList.deserialize, + ) + return self._stubs['get_audience_list'] + + @property + def list_audience_lists(self) -> Callable[ + [analytics_data_api.ListAudienceListsRequest], + analytics_data_api.ListAudienceListsResponse]: + r"""Return a callable for the list audience lists method over gRPC. + + Lists all audience lists for a property. This method can be used + for you to find and reuse existing audience lists rather than + creating unnecessary new audience lists. The same audience can + have multiple audience lists that represent the list of users + that were in an audience on different days. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + This method is available at beta stability at + `audienceExports.list `__. + To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + Returns: + Callable[[~.ListAudienceListsRequest], + ~.ListAudienceListsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_audience_lists' not in self._stubs: + self._stubs['list_audience_lists'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/ListAudienceLists', + request_serializer=analytics_data_api.ListAudienceListsRequest.serialize, + response_deserializer=analytics_data_api.ListAudienceListsResponse.deserialize, + ) + return self._stubs['list_audience_lists'] + + @property + def create_recurring_audience_list(self) -> Callable[ + [analytics_data_api.CreateRecurringAudienceListRequest], + analytics_data_api.RecurringAudienceList]: + r"""Return a callable for the create recurring audience list method over gRPC. + + Creates a recurring audience list. Recurring audience lists + produces new audience lists each day. Audience lists are users + in an audience at the time of the list's creation. + + A recurring audience list ensures that you have audience list + based on the most recent data available for use each day. If you + manually create audience list, you don't know when an audience + list based on an additional day's data is available. This + recurring audience list automates the creation of an audience + list when an additional day's data is available. You will + consume fewer quota tokens by using recurring audience list + versus manually creating audience list at various times of day + trying to guess when an additional day's data is ready. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + Returns: + Callable[[~.CreateRecurringAudienceListRequest], + ~.RecurringAudienceList]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_recurring_audience_list' not in self._stubs: + self._stubs['create_recurring_audience_list'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/CreateRecurringAudienceList', + request_serializer=analytics_data_api.CreateRecurringAudienceListRequest.serialize, + response_deserializer=analytics_data_api.RecurringAudienceList.deserialize, + ) + return self._stubs['create_recurring_audience_list'] + + @property + def get_recurring_audience_list(self) -> Callable[ + [analytics_data_api.GetRecurringAudienceListRequest], + analytics_data_api.RecurringAudienceList]: + r"""Return a callable for the get recurring audience list method over gRPC. + + Gets configuration metadata about a specific recurring audience + list. This method can be used to understand a recurring audience + list's state after it has been created. For example, a recurring + audience list resource will generate audience list instances for + each day, and this method can be used to get the resource name + of the most recent audience list instance. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + Returns: + Callable[[~.GetRecurringAudienceListRequest], + ~.RecurringAudienceList]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_recurring_audience_list' not in self._stubs: + self._stubs['get_recurring_audience_list'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/GetRecurringAudienceList', + request_serializer=analytics_data_api.GetRecurringAudienceListRequest.serialize, + response_deserializer=analytics_data_api.RecurringAudienceList.deserialize, + ) + return self._stubs['get_recurring_audience_list'] + + @property + def list_recurring_audience_lists(self) -> Callable[ + [analytics_data_api.ListRecurringAudienceListsRequest], + analytics_data_api.ListRecurringAudienceListsResponse]: + r"""Return a callable for the list recurring audience lists method over gRPC. + + Lists all recurring audience lists for a property. This method + can be used for you to find and reuse existing recurring + audience lists rather than creating unnecessary new recurring + audience lists. The same audience can have multiple recurring + audience lists that represent different dimension combinations; + for example, just the dimension ``deviceId`` or both the + dimensions ``deviceId`` and ``userId``. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + Returns: + Callable[[~.ListRecurringAudienceListsRequest], + ~.ListRecurringAudienceListsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
        if 'list_recurring_audience_lists' not in self._stubs:
            self._stubs['list_recurring_audience_lists'] = self.grpc_channel.unary_unary(
                '/google.analytics.data.v1alpha.AlphaAnalyticsData/ListRecurringAudienceLists',
                request_serializer=analytics_data_api.ListRecurringAudienceListsRequest.serialize,
                response_deserializer=analytics_data_api.ListRecurringAudienceListsResponse.deserialize,
            )
        return self._stubs['list_recurring_audience_lists']

    @property
    def get_property_quotas_snapshot(self) -> Callable[
            [analytics_data_api.GetPropertyQuotasSnapshotRequest],
            analytics_data_api.PropertyQuotasSnapshot]:
        r"""Return a callable for the get property quotas snapshot method over gRPC.

        Get all property quotas organized by quota category
        for a given property. This will charge 1 property quota
        from the category with the most quota.

        Returns:
            Callable[[~.GetPropertyQuotasSnapshotRequest],
                ~.PropertyQuotasSnapshot]:
            A function that, when called, will call the underlying RPC
            on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        # The stub is cached in self._stubs so repeated property reads reuse
        # a single channel-bound callable.
        if 'get_property_quotas_snapshot' not in self._stubs:
            self._stubs['get_property_quotas_snapshot'] = self.grpc_channel.unary_unary(
                '/google.analytics.data.v1alpha.AlphaAnalyticsData/GetPropertyQuotasSnapshot',
                request_serializer=analytics_data_api.GetPropertyQuotasSnapshotRequest.serialize,
                response_deserializer=analytics_data_api.PropertyQuotasSnapshot.deserialize,
            )
        return self._stubs['get_property_quotas_snapshot']

    # create_report_task deserializes into a long-running Operation
    # (FromString) rather than a typed response message.
    @property
    def create_report_task(self) -> Callable[
            [analytics_data_api.CreateReportTaskRequest],
            operations_pb2.Operation]:
        r"""Return a callable for the create report task method over gRPC.

        Initiates the creation of a report task.
This method + quickly returns a report task and initiates a long + running asynchronous request to form a customized report + of your Google Analytics event data. + + A report task will be retained and available for + querying for 72 hours after it has been created. + + A report task created by one user can be listed and + queried by all users who have access to the property. + + Returns: + Callable[[~.CreateReportTaskRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_report_task' not in self._stubs: + self._stubs['create_report_task'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/CreateReportTask', + request_serializer=analytics_data_api.CreateReportTaskRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_report_task'] + + @property + def query_report_task(self) -> Callable[ + [analytics_data_api.QueryReportTaskRequest], + analytics_data_api.QueryReportTaskResponse]: + r"""Return a callable for the query report task method over gRPC. + + Retrieves a report task's content. After requesting the + ``CreateReportTask``, you are able to retrieve the report + content once the report is ACTIVE. This method will return an + error if the report task's state is not ``ACTIVE``. A query + response will return the tabular row & column values of the + report. + + Returns: + Callable[[~.QueryReportTaskRequest], + ~.QueryReportTaskResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'query_report_task' not in self._stubs: + self._stubs['query_report_task'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/QueryReportTask', + request_serializer=analytics_data_api.QueryReportTaskRequest.serialize, + response_deserializer=analytics_data_api.QueryReportTaskResponse.deserialize, + ) + return self._stubs['query_report_task'] + + @property + def get_report_task(self) -> Callable[ + [analytics_data_api.GetReportTaskRequest], + analytics_data_api.ReportTask]: + r"""Return a callable for the get report task method over gRPC. + + Gets report metadata about a specific report task. + After creating a report task, use this method to check + its processing state or inspect its report definition. + + Returns: + Callable[[~.GetReportTaskRequest], + ~.ReportTask]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_report_task' not in self._stubs: + self._stubs['get_report_task'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/GetReportTask', + request_serializer=analytics_data_api.GetReportTaskRequest.serialize, + response_deserializer=analytics_data_api.ReportTask.deserialize, + ) + return self._stubs['get_report_task'] + + @property + def list_report_tasks(self) -> Callable[ + [analytics_data_api.ListReportTasksRequest], + analytics_data_api.ListReportTasksResponse]: + r"""Return a callable for the list report tasks method over gRPC. + + Lists all report tasks for a property. + + Returns: + Callable[[~.ListReportTasksRequest], + ~.ListReportTasksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_report_tasks' not in self._stubs: + self._stubs['list_report_tasks'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/ListReportTasks', + request_serializer=analytics_data_api.ListReportTasksRequest.serialize, + response_deserializer=analytics_data_api.ListReportTasksResponse.deserialize, + ) + return self._stubs['list_report_tasks'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'AlphaAnalyticsDataGrpcTransport', +) diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py new file mode 100644 index 000000000000..f17ee7d9abd8 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py @@ -0,0 +1,877 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.analytics.data_v1alpha.types import analytics_data_api +from google.longrunning import operations_pb2 # type: ignore +from .base import AlphaAnalyticsDataTransport, DEFAULT_CLIENT_INFO +from .grpc import AlphaAnalyticsDataGrpcTransport + + +class AlphaAnalyticsDataGrpcAsyncIOTransport(AlphaAnalyticsDataTransport): + """gRPC AsyncIO backend transport for AlphaAnalyticsData. + + Google Analytics reporting data service. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'analyticsdata.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'analyticsdata.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'analyticsdata.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def run_funnel_report(self) -> Callable[ + [analytics_data_api.RunFunnelReportRequest], + Awaitable[analytics_data_api.RunFunnelReportResponse]]: + r"""Return a callable for the run funnel report method over gRPC. + + Returns a customized funnel report of your Google Analytics + event data. The data returned from the API is as a table with + columns for the requested dimensions and metrics. + + Funnel exploration lets you visualize the steps your users take + to complete a task and quickly see how well they are succeeding + or failing at each step. For example, how do prospects become + shoppers and then become buyers? How do one time buyers become + repeat buyers? With this information, you can improve + inefficient or abandoned customer journeys. To learn more, see + `GA4 Funnel + Explorations `__. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Data API Funnel Reporting + Feedback `__. + + Returns: + Callable[[~.RunFunnelReportRequest], + Awaitable[~.RunFunnelReportResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'run_funnel_report' not in self._stubs: + self._stubs['run_funnel_report'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/RunFunnelReport', + request_serializer=analytics_data_api.RunFunnelReportRequest.serialize, + response_deserializer=analytics_data_api.RunFunnelReportResponse.deserialize, + ) + return self._stubs['run_funnel_report'] + + @property + def create_audience_list(self) -> Callable[ + [analytics_data_api.CreateAudienceListRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create audience list method over gRPC. + + Creates an audience list for later retrieval. This method + quickly returns the audience list's resource name and initiates + a long running asynchronous request to form an audience list. To + list the users in an audience list, first create the audience + list through this method and then send the audience resource + name to the ``QueryAudienceList`` method. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + An audience list is a snapshot of the users currently in the + audience at the time of audience list creation. Creating + audience lists for one audience on different days will return + different results as users enter and exit the audience. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + Audience lists contain the users in each audience. + + This method is available at beta stability at + `audienceExports.create `__. + To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + Returns: + Callable[[~.CreateAudienceListRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_audience_list' not in self._stubs: + self._stubs['create_audience_list'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/CreateAudienceList', + request_serializer=analytics_data_api.CreateAudienceListRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_audience_list'] + + @property + def query_audience_list(self) -> Callable[ + [analytics_data_api.QueryAudienceListRequest], + Awaitable[analytics_data_api.QueryAudienceListResponse]]: + r"""Return a callable for the query audience list method over gRPC. + + Retrieves an audience list of users. After creating an audience, + the users are not immediately available for listing. First, a + request to ``CreateAudienceList`` is necessary to create an + audience list of users, and then second, this method is used to + retrieve the users in the audience list. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + + This method is available at beta stability at + `audienceExports.query `__. + To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + Returns: + Callable[[~.QueryAudienceListRequest], + Awaitable[~.QueryAudienceListResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'query_audience_list' not in self._stubs: + self._stubs['query_audience_list'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/QueryAudienceList', + request_serializer=analytics_data_api.QueryAudienceListRequest.serialize, + response_deserializer=analytics_data_api.QueryAudienceListResponse.deserialize, + ) + return self._stubs['query_audience_list'] + + @property + def sheet_export_audience_list(self) -> Callable[ + [analytics_data_api.SheetExportAudienceListRequest], + Awaitable[analytics_data_api.SheetExportAudienceListResponse]]: + r"""Return a callable for the sheet export audience list method over gRPC. + + Exports an audience list of users to a Google Sheet. After + creating an audience, the users are not immediately available + for listing. First, a request to ``CreateAudienceList`` is + necessary to create an audience list of users, and then second, + this method is used to export those users in the audience list + to a Google Sheet. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + Returns: + Callable[[~.SheetExportAudienceListRequest], + Awaitable[~.SheetExportAudienceListResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'sheet_export_audience_list' not in self._stubs: + self._stubs['sheet_export_audience_list'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/SheetExportAudienceList', + request_serializer=analytics_data_api.SheetExportAudienceListRequest.serialize, + response_deserializer=analytics_data_api.SheetExportAudienceListResponse.deserialize, + ) + return self._stubs['sheet_export_audience_list'] + + @property + def get_audience_list(self) -> Callable[ + [analytics_data_api.GetAudienceListRequest], + Awaitable[analytics_data_api.AudienceList]]: + r"""Return a callable for the get audience list method over gRPC. + + Gets configuration metadata about a specific audience list. This + method can be used to understand an audience list after it has + been created. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + This method is available at beta stability at + `audienceExports.get `__. + To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + Returns: + Callable[[~.GetAudienceListRequest], + Awaitable[~.AudienceList]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_audience_list' not in self._stubs: + self._stubs['get_audience_list'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/GetAudienceList', + request_serializer=analytics_data_api.GetAudienceListRequest.serialize, + response_deserializer=analytics_data_api.AudienceList.deserialize, + ) + return self._stubs['get_audience_list'] + + @property + def list_audience_lists(self) -> Callable[ + [analytics_data_api.ListAudienceListsRequest], + Awaitable[analytics_data_api.ListAudienceListsResponse]]: + r"""Return a callable for the list audience lists method over gRPC. + + Lists all audience lists for a property. This method can be used + for you to find and reuse existing audience lists rather than + creating unnecessary new audience lists. The same audience can + have multiple audience lists that represent the list of users + that were in an audience on different days. + + See `Creating an Audience + List `__ + for an introduction to Audience Lists with examples. + + This method is available at beta stability at + `audienceExports.list `__. + To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + Returns: + Callable[[~.ListAudienceListsRequest], + Awaitable[~.ListAudienceListsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_audience_lists' not in self._stubs: + self._stubs['list_audience_lists'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/ListAudienceLists', + request_serializer=analytics_data_api.ListAudienceListsRequest.serialize, + response_deserializer=analytics_data_api.ListAudienceListsResponse.deserialize, + ) + return self._stubs['list_audience_lists'] + + @property + def create_recurring_audience_list(self) -> Callable[ + [analytics_data_api.CreateRecurringAudienceListRequest], + Awaitable[analytics_data_api.RecurringAudienceList]]: + r"""Return a callable for the create recurring audience list method over gRPC. + + Creates a recurring audience list. Recurring audience lists + produces new audience lists each day. Audience lists are users + in an audience at the time of the list's creation. + + A recurring audience list ensures that you have audience list + based on the most recent data available for use each day. If you + manually create audience list, you don't know when an audience + list based on an additional day's data is available. This + recurring audience list automates the creation of an audience + list when an additional day's data is available. You will + consume fewer quota tokens by using recurring audience list + versus manually creating audience list at various times of day + trying to guess when an additional day's data is ready. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + Returns: + Callable[[~.CreateRecurringAudienceListRequest], + Awaitable[~.RecurringAudienceList]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_recurring_audience_list' not in self._stubs: + self._stubs['create_recurring_audience_list'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/CreateRecurringAudienceList', + request_serializer=analytics_data_api.CreateRecurringAudienceListRequest.serialize, + response_deserializer=analytics_data_api.RecurringAudienceList.deserialize, + ) + return self._stubs['create_recurring_audience_list'] + + @property + def get_recurring_audience_list(self) -> Callable[ + [analytics_data_api.GetRecurringAudienceListRequest], + Awaitable[analytics_data_api.RecurringAudienceList]]: + r"""Return a callable for the get recurring audience list method over gRPC. + + Gets configuration metadata about a specific recurring audience + list. This method can be used to understand a recurring audience + list's state after it has been created. For example, a recurring + audience list resource will generate audience list instances for + each day, and this method can be used to get the resource name + of the most recent audience list instance. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + Returns: + Callable[[~.GetRecurringAudienceListRequest], + Awaitable[~.RecurringAudienceList]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_recurring_audience_list' not in self._stubs: + self._stubs['get_recurring_audience_list'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/GetRecurringAudienceList', + request_serializer=analytics_data_api.GetRecurringAudienceListRequest.serialize, + response_deserializer=analytics_data_api.RecurringAudienceList.deserialize, + ) + return self._stubs['get_recurring_audience_list'] + + @property + def list_recurring_audience_lists(self) -> Callable[ + [analytics_data_api.ListRecurringAudienceListsRequest], + Awaitable[analytics_data_api.ListRecurringAudienceListsResponse]]: + r"""Return a callable for the list recurring audience lists method over gRPC. + + Lists all recurring audience lists for a property. This method + can be used for you to find and reuse existing recurring + audience lists rather than creating unnecessary new recurring + audience lists. The same audience can have multiple recurring + audience lists that represent different dimension combinations; + for example, just the dimension ``deviceId`` or both the + dimensions ``deviceId`` and ``userId``. + + This method is introduced at alpha stability with the intention + of gathering feedback on syntax and capabilities before entering + beta. To give your feedback on this API, complete the `Google + Analytics Audience Export API + Feedback `__ form. + + Returns: + Callable[[~.ListRecurringAudienceListsRequest], + Awaitable[~.ListRecurringAudienceListsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_recurring_audience_lists' not in self._stubs: + self._stubs['list_recurring_audience_lists'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/ListRecurringAudienceLists', + request_serializer=analytics_data_api.ListRecurringAudienceListsRequest.serialize, + response_deserializer=analytics_data_api.ListRecurringAudienceListsResponse.deserialize, + ) + return self._stubs['list_recurring_audience_lists'] + + @property + def get_property_quotas_snapshot(self) -> Callable[ + [analytics_data_api.GetPropertyQuotasSnapshotRequest], + Awaitable[analytics_data_api.PropertyQuotasSnapshot]]: + r"""Return a callable for the get property quotas snapshot method over gRPC. + + Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + Returns: + Callable[[~.GetPropertyQuotasSnapshotRequest], + Awaitable[~.PropertyQuotasSnapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_property_quotas_snapshot' not in self._stubs: + self._stubs['get_property_quotas_snapshot'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/GetPropertyQuotasSnapshot', + request_serializer=analytics_data_api.GetPropertyQuotasSnapshotRequest.serialize, + response_deserializer=analytics_data_api.PropertyQuotasSnapshot.deserialize, + ) + return self._stubs['get_property_quotas_snapshot'] + + @property + def create_report_task(self) -> Callable[ + [analytics_data_api.CreateReportTaskRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create report task method over gRPC. + + Initiates the creation of a report task. 
This method + quickly returns a report task and initiates a long + running asynchronous request to form a customized report + of your Google Analytics event data. + + A report task will be retained and available for + querying for 72 hours after it has been created. + + A report task created by one user can be listed and + queried by all users who have access to the property. + + Returns: + Callable[[~.CreateReportTaskRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_report_task' not in self._stubs: + self._stubs['create_report_task'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/CreateReportTask', + request_serializer=analytics_data_api.CreateReportTaskRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_report_task'] + + @property + def query_report_task(self) -> Callable[ + [analytics_data_api.QueryReportTaskRequest], + Awaitable[analytics_data_api.QueryReportTaskResponse]]: + r"""Return a callable for the query report task method over gRPC. + + Retrieves a report task's content. After requesting the + ``CreateReportTask``, you are able to retrieve the report + content once the report is ACTIVE. This method will return an + error if the report task's state is not ``ACTIVE``. A query + response will return the tabular row & column values of the + report. + + Returns: + Callable[[~.QueryReportTaskRequest], + Awaitable[~.QueryReportTaskResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'query_report_task' not in self._stubs: + self._stubs['query_report_task'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/QueryReportTask', + request_serializer=analytics_data_api.QueryReportTaskRequest.serialize, + response_deserializer=analytics_data_api.QueryReportTaskResponse.deserialize, + ) + return self._stubs['query_report_task'] + + @property + def get_report_task(self) -> Callable[ + [analytics_data_api.GetReportTaskRequest], + Awaitable[analytics_data_api.ReportTask]]: + r"""Return a callable for the get report task method over gRPC. + + Gets report metadata about a specific report task. + After creating a report task, use this method to check + its processing state or inspect its report definition. + + Returns: + Callable[[~.GetReportTaskRequest], + Awaitable[~.ReportTask]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_report_task' not in self._stubs: + self._stubs['get_report_task'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/GetReportTask', + request_serializer=analytics_data_api.GetReportTaskRequest.serialize, + response_deserializer=analytics_data_api.ReportTask.deserialize, + ) + return self._stubs['get_report_task'] + + @property + def list_report_tasks(self) -> Callable[ + [analytics_data_api.ListReportTasksRequest], + Awaitable[analytics_data_api.ListReportTasksResponse]]: + r"""Return a callable for the list report tasks method over gRPC. + + Lists all report tasks for a property. 
+ + Returns: + Callable[[~.ListReportTasksRequest], + Awaitable[~.ListReportTasksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_report_tasks' not in self._stubs: + self._stubs['list_report_tasks'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1alpha.AlphaAnalyticsData/ListReportTasks', + request_serializer=analytics_data_api.ListReportTasksRequest.serialize, + response_deserializer=analytics_data_api.ListReportTasksResponse.deserialize, + ) + return self._stubs['list_report_tasks'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.run_funnel_report: self._wrap_method( + self.run_funnel_report, + default_timeout=60.0, + client_info=client_info, + ), + self.create_audience_list: self._wrap_method( + self.create_audience_list, + default_timeout=None, + client_info=client_info, + ), + self.query_audience_list: self._wrap_method( + self.query_audience_list, + default_timeout=None, + client_info=client_info, + ), + self.sheet_export_audience_list: self._wrap_method( + self.sheet_export_audience_list, + default_timeout=None, + client_info=client_info, + ), + self.get_audience_list: self._wrap_method( + self.get_audience_list, + default_timeout=None, + client_info=client_info, + ), + self.list_audience_lists: self._wrap_method( + self.list_audience_lists, + default_timeout=None, + client_info=client_info, + ), + self.create_recurring_audience_list: self._wrap_method( + self.create_recurring_audience_list, + default_timeout=None, + client_info=client_info, + ), + self.get_recurring_audience_list: self._wrap_method( + self.get_recurring_audience_list, + default_timeout=None, + 
client_info=client_info, + ), + self.list_recurring_audience_lists: self._wrap_method( + self.list_recurring_audience_lists, + default_timeout=None, + client_info=client_info, + ), + self.get_property_quotas_snapshot: self._wrap_method( + self.get_property_quotas_snapshot, + default_timeout=None, + client_info=client_info, + ), + self.create_report_task: self._wrap_method( + self.create_report_task, + default_timeout=None, + client_info=client_info, + ), + self.query_report_task: self._wrap_method( + self.query_report_task, + default_timeout=None, + client_info=client_info, + ), + self.get_report_task: self._wrap_method( + self.get_report_task, + default_timeout=None, + client_info=client_info, + ), + self.list_report_tasks: self._wrap_method( + self.list_report_tasks, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + +__all__ = ( + 'AlphaAnalyticsDataGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py new file mode 100644 index 000000000000..49078b1236d1 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py @@ -0,0 +1,1729 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.api_core import operations_v1 + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.analytics.data_v1alpha.types import analytics_data_api +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseAlphaAnalyticsDataRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class AlphaAnalyticsDataRestInterceptor: + """Interceptor for AlphaAnalyticsData. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AlphaAnalyticsDataRestTransport. + + .. code-block:: python + class MyCustomAlphaAnalyticsDataInterceptor(AlphaAnalyticsDataRestInterceptor): + def pre_create_audience_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_audience_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_recurring_audience_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_recurring_audience_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_report_task(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_report_task(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_audience_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_audience_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_property_quotas_snapshot(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_property_quotas_snapshot(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_recurring_audience_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_recurring_audience_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_report_task(self, request, metadata): 
+ logging.log(f"Received request: {request}") + return request, metadata + + def post_get_report_task(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_audience_lists(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_audience_lists(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_recurring_audience_lists(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_recurring_audience_lists(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_report_tasks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_report_tasks(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_query_audience_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_query_audience_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_query_report_task(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_query_report_task(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_run_funnel_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_funnel_report(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_sheet_export_audience_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_sheet_export_audience_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = 
AlphaAnalyticsDataRestTransport(interceptor=MyCustomAlphaAnalyticsDataInterceptor()) + client = AlphaAnalyticsDataClient(transport=transport) + + + """ + def pre_create_audience_list(self, request: analytics_data_api.CreateAudienceListRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.CreateAudienceListRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_audience_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. + """ + return request, metadata + + def post_create_audience_list(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_audience_list + + Override in a subclass to manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_create_recurring_audience_list(self, request: analytics_data_api.CreateRecurringAudienceListRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.CreateRecurringAudienceListRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_recurring_audience_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. + """ + return request, metadata + + def post_create_recurring_audience_list(self, response: analytics_data_api.RecurringAudienceList) -> analytics_data_api.RecurringAudienceList: + """Post-rpc interceptor for create_recurring_audience_list + + Override in a subclass to manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. 
+ """ + return response + + def pre_create_report_task(self, request: analytics_data_api.CreateReportTaskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.CreateReportTaskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_report_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. + """ + return request, metadata + + def post_create_report_task(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_report_task + + Override in a subclass to manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_get_audience_list(self, request: analytics_data_api.GetAudienceListRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.GetAudienceListRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_audience_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. + """ + return request, metadata + + def post_get_audience_list(self, response: analytics_data_api.AudienceList) -> analytics_data_api.AudienceList: + """Post-rpc interceptor for get_audience_list + + Override in a subclass to manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_get_property_quotas_snapshot(self, request: analytics_data_api.GetPropertyQuotasSnapshotRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.GetPropertyQuotasSnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_property_quotas_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. 
+ """ + return request, metadata + + def post_get_property_quotas_snapshot(self, response: analytics_data_api.PropertyQuotasSnapshot) -> analytics_data_api.PropertyQuotasSnapshot: + """Post-rpc interceptor for get_property_quotas_snapshot + + Override in a subclass to manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_get_recurring_audience_list(self, request: analytics_data_api.GetRecurringAudienceListRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.GetRecurringAudienceListRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_recurring_audience_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. + """ + return request, metadata + + def post_get_recurring_audience_list(self, response: analytics_data_api.RecurringAudienceList) -> analytics_data_api.RecurringAudienceList: + """Post-rpc interceptor for get_recurring_audience_list + + Override in a subclass to manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_get_report_task(self, request: analytics_data_api.GetReportTaskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.GetReportTaskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_report_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. + """ + return request, metadata + + def post_get_report_task(self, response: analytics_data_api.ReportTask) -> analytics_data_api.ReportTask: + """Post-rpc interceptor for get_report_task + + Override in a subclass to manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. 
+ """ + return response + + def pre_list_audience_lists(self, request: analytics_data_api.ListAudienceListsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.ListAudienceListsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_audience_lists + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. + """ + return request, metadata + + def post_list_audience_lists(self, response: analytics_data_api.ListAudienceListsResponse) -> analytics_data_api.ListAudienceListsResponse: + """Post-rpc interceptor for list_audience_lists + + Override in a subclass to manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_list_recurring_audience_lists(self, request: analytics_data_api.ListRecurringAudienceListsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.ListRecurringAudienceListsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_recurring_audience_lists + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. + """ + return request, metadata + + def post_list_recurring_audience_lists(self, response: analytics_data_api.ListRecurringAudienceListsResponse) -> analytics_data_api.ListRecurringAudienceListsResponse: + """Post-rpc interceptor for list_recurring_audience_lists + + Override in a subclass to manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. 
+ """ + return response + + def pre_list_report_tasks(self, request: analytics_data_api.ListReportTasksRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.ListReportTasksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_report_tasks + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. + """ + return request, metadata + + def post_list_report_tasks(self, response: analytics_data_api.ListReportTasksResponse) -> analytics_data_api.ListReportTasksResponse: + """Post-rpc interceptor for list_report_tasks + + Override in a subclass to manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_query_audience_list(self, request: analytics_data_api.QueryAudienceListRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.QueryAudienceListRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for query_audience_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. + """ + return request, metadata + + def post_query_audience_list(self, response: analytics_data_api.QueryAudienceListResponse) -> analytics_data_api.QueryAudienceListResponse: + """Post-rpc interceptor for query_audience_list + + Override in a subclass to manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_query_report_task(self, request: analytics_data_api.QueryReportTaskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.QueryReportTaskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for query_report_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. 
+ """ + return request, metadata + + def post_query_report_task(self, response: analytics_data_api.QueryReportTaskResponse) -> analytics_data_api.QueryReportTaskResponse: + """Post-rpc interceptor for query_report_task + + Override in a subclass to manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_run_funnel_report(self, request: analytics_data_api.RunFunnelReportRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.RunFunnelReportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_funnel_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. + """ + return request, metadata + + def post_run_funnel_report(self, response: analytics_data_api.RunFunnelReportResponse) -> analytics_data_api.RunFunnelReportResponse: + """Post-rpc interceptor for run_funnel_report + + Override in a subclass to manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_sheet_export_audience_list(self, request: analytics_data_api.SheetExportAudienceListRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.SheetExportAudienceListRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for sheet_export_audience_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. + """ + return request, metadata + + def post_sheet_export_audience_list(self, response: analytics_data_api.SheetExportAudienceListResponse) -> analytics_data_api.SheetExportAudienceListResponse: + """Post-rpc interceptor for sheet_export_audience_list + + Override in a subclass to manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class AlphaAnalyticsDataRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AlphaAnalyticsDataRestInterceptor + + +class AlphaAnalyticsDataRestTransport(_BaseAlphaAnalyticsDataRestTransport): + """REST backend synchronous transport for AlphaAnalyticsData. + + Google Analytics reporting data service. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'analyticsdata.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[AlphaAnalyticsDataRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'analyticsdata.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or AlphaAnalyticsDataRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1alpha") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _CreateAudienceList(_BaseAlphaAnalyticsDataRestTransport._BaseCreateAudienceList, AlphaAnalyticsDataRestStub): + def __hash__(self): + return hash("AlphaAnalyticsDataRestTransport.CreateAudienceList") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: analytics_data_api.CreateAudienceListRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create audience list method over HTTP. + + Args: + request (~.analytics_data_api.CreateAudienceListRequest): + The request object. A request to create a new audience + list. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseAlphaAnalyticsDataRestTransport._BaseCreateAudienceList._get_http_options() + request, metadata = self._interceptor.pre_create_audience_list(request, metadata) + transcoded_request = _BaseAlphaAnalyticsDataRestTransport._BaseCreateAudienceList._get_transcoded_request(http_options, request) + + body = _BaseAlphaAnalyticsDataRestTransport._BaseCreateAudienceList._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseAlphaAnalyticsDataRestTransport._BaseCreateAudienceList._get_query_params_json(transcoded_request) + + # Send the request + response = AlphaAnalyticsDataRestTransport._CreateAudienceList._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_audience_list(resp) + return resp + + class _CreateRecurringAudienceList(_BaseAlphaAnalyticsDataRestTransport._BaseCreateRecurringAudienceList, AlphaAnalyticsDataRestStub): + def __hash__(self): + return hash("AlphaAnalyticsDataRestTransport.CreateRecurringAudienceList") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: analytics_data_api.CreateRecurringAudienceListRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.RecurringAudienceList: + r"""Call the create recurring audience + list method over HTTP. + + Args: + request (~.analytics_data_api.CreateRecurringAudienceListRequest): + The request object. A request to create a new recurring + audience list. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.RecurringAudienceList: + A recurring audience list produces + new audience lists each day. Audience + lists are users in an audience at the + time of the list's creation. 
A recurring + audience list ensures that you have + an audience list based on the most recent + data available for use each day. + + """ + + http_options = _BaseAlphaAnalyticsDataRestTransport._BaseCreateRecurringAudienceList._get_http_options() + request, metadata = self._interceptor.pre_create_recurring_audience_list(request, metadata) + transcoded_request = _BaseAlphaAnalyticsDataRestTransport._BaseCreateRecurringAudienceList._get_transcoded_request(http_options, request) + + body = _BaseAlphaAnalyticsDataRestTransport._BaseCreateRecurringAudienceList._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseAlphaAnalyticsDataRestTransport._BaseCreateRecurringAudienceList._get_query_params_json(transcoded_request) + + # Send the request + response = AlphaAnalyticsDataRestTransport._CreateRecurringAudienceList._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.RecurringAudienceList() + pb_resp = analytics_data_api.RecurringAudienceList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_recurring_audience_list(resp) + return resp + + class _CreateReportTask(_BaseAlphaAnalyticsDataRestTransport._BaseCreateReportTask, AlphaAnalyticsDataRestStub): + def __hash__(self): + return hash("AlphaAnalyticsDataRestTransport.CreateReportTask") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: analytics_data_api.CreateReportTaskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create report task method over HTTP. + + Args: + request (~.analytics_data_api.CreateReportTaskRequest): + The request object. A request to create a report task. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseAlphaAnalyticsDataRestTransport._BaseCreateReportTask._get_http_options() + request, metadata = self._interceptor.pre_create_report_task(request, metadata) + transcoded_request = _BaseAlphaAnalyticsDataRestTransport._BaseCreateReportTask._get_transcoded_request(http_options, request) + + body = _BaseAlphaAnalyticsDataRestTransport._BaseCreateReportTask._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseAlphaAnalyticsDataRestTransport._BaseCreateReportTask._get_query_params_json(transcoded_request) + + # Send the request + response = AlphaAnalyticsDataRestTransport._CreateReportTask._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_report_task(resp) + return resp + + class _GetAudienceList(_BaseAlphaAnalyticsDataRestTransport._BaseGetAudienceList, AlphaAnalyticsDataRestStub): + def __hash__(self): + return hash("AlphaAnalyticsDataRestTransport.GetAudienceList") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: analytics_data_api.GetAudienceListRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + 
timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.AudienceList: + r"""Call the get audience list method over HTTP. + + Args: + request (~.analytics_data_api.GetAudienceListRequest): + The request object. A request to retrieve configuration + metadata about a specific audience list. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.AudienceList: + An audience list is a list of users + in an audience at the time of the list's + creation. One audience may have multiple + audience lists created for different + days. + + """ + + http_options = _BaseAlphaAnalyticsDataRestTransport._BaseGetAudienceList._get_http_options() + request, metadata = self._interceptor.pre_get_audience_list(request, metadata) + transcoded_request = _BaseAlphaAnalyticsDataRestTransport._BaseGetAudienceList._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseAlphaAnalyticsDataRestTransport._BaseGetAudienceList._get_query_params_json(transcoded_request) + + # Send the request + response = AlphaAnalyticsDataRestTransport._GetAudienceList._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.AudienceList() + pb_resp = analytics_data_api.AudienceList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_audience_list(resp) + return resp + + class _GetPropertyQuotasSnapshot(_BaseAlphaAnalyticsDataRestTransport._BaseGetPropertyQuotasSnapshot, AlphaAnalyticsDataRestStub): + def __hash__(self): + return hash("AlphaAnalyticsDataRestTransport.GetPropertyQuotasSnapshot") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: analytics_data_api.GetPropertyQuotasSnapshotRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.PropertyQuotasSnapshot: + r"""Call the get property quotas + snapshot method over HTTP. + + Args: + request (~.analytics_data_api.GetPropertyQuotasSnapshotRequest): + The request object. A request to return the + PropertyQuotasSnapshot for a given + category. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.PropertyQuotasSnapshot: + Current state of all Property Quotas + organized by quota category. 
+ + """ + + http_options = _BaseAlphaAnalyticsDataRestTransport._BaseGetPropertyQuotasSnapshot._get_http_options() + request, metadata = self._interceptor.pre_get_property_quotas_snapshot(request, metadata) + transcoded_request = _BaseAlphaAnalyticsDataRestTransport._BaseGetPropertyQuotasSnapshot._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseAlphaAnalyticsDataRestTransport._BaseGetPropertyQuotasSnapshot._get_query_params_json(transcoded_request) + + # Send the request + response = AlphaAnalyticsDataRestTransport._GetPropertyQuotasSnapshot._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.PropertyQuotasSnapshot() + pb_resp = analytics_data_api.PropertyQuotasSnapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_property_quotas_snapshot(resp) + return resp + + class _GetRecurringAudienceList(_BaseAlphaAnalyticsDataRestTransport._BaseGetRecurringAudienceList, AlphaAnalyticsDataRestStub): + def __hash__(self): + return hash("AlphaAnalyticsDataRestTransport.GetRecurringAudienceList") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: analytics_data_api.GetRecurringAudienceListRequest, *, 
+ retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.RecurringAudienceList: + r"""Call the get recurring audience + list method over HTTP. + + Args: + request (~.analytics_data_api.GetRecurringAudienceListRequest): + The request object. A request to retrieve configuration + metadata about a specific recurring + audience list. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.RecurringAudienceList: + A recurring audience list produces + new audience lists each day. Audience + lists are users in an audience at the + time of the list's creation. A recurring + audience list ensures that you have + an audience list based on the most recent + data available for use each day. + + """ + + http_options = _BaseAlphaAnalyticsDataRestTransport._BaseGetRecurringAudienceList._get_http_options() + request, metadata = self._interceptor.pre_get_recurring_audience_list(request, metadata) + transcoded_request = _BaseAlphaAnalyticsDataRestTransport._BaseGetRecurringAudienceList._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseAlphaAnalyticsDataRestTransport._BaseGetRecurringAudienceList._get_query_params_json(transcoded_request) + + # Send the request + response = AlphaAnalyticsDataRestTransport._GetRecurringAudienceList._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.RecurringAudienceList() + pb_resp = analytics_data_api.RecurringAudienceList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_recurring_audience_list(resp) + return resp + + class _GetReportTask(_BaseAlphaAnalyticsDataRestTransport._BaseGetReportTask, AlphaAnalyticsDataRestStub): + def __hash__(self): + return hash("AlphaAnalyticsDataRestTransport.GetReportTask") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: analytics_data_api.GetReportTaskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.ReportTask: + r"""Call the get report task method over HTTP. + + Args: + request (~.analytics_data_api.GetReportTaskRequest): + The request object. A request to retrieve configuration + metadata about a specific report task. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.ReportTask: + A specific report task configuration. 
+ """ + + http_options = _BaseAlphaAnalyticsDataRestTransport._BaseGetReportTask._get_http_options() + request, metadata = self._interceptor.pre_get_report_task(request, metadata) + transcoded_request = _BaseAlphaAnalyticsDataRestTransport._BaseGetReportTask._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseAlphaAnalyticsDataRestTransport._BaseGetReportTask._get_query_params_json(transcoded_request) + + # Send the request + response = AlphaAnalyticsDataRestTransport._GetReportTask._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.ReportTask() + pb_resp = analytics_data_api.ReportTask.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_report_task(resp) + return resp + + class _ListAudienceLists(_BaseAlphaAnalyticsDataRestTransport._BaseListAudienceLists, AlphaAnalyticsDataRestStub): + def __hash__(self): + return hash("AlphaAnalyticsDataRestTransport.ListAudienceLists") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: analytics_data_api.ListAudienceListsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) 
-> analytics_data_api.ListAudienceListsResponse: + r"""Call the list audience lists method over HTTP. + + Args: + request (~.analytics_data_api.ListAudienceListsRequest): + The request object. A request to list all audience lists + for a property. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.ListAudienceListsResponse: + A list of all audience lists for a + property. + + """ + + http_options = _BaseAlphaAnalyticsDataRestTransport._BaseListAudienceLists._get_http_options() + request, metadata = self._interceptor.pre_list_audience_lists(request, metadata) + transcoded_request = _BaseAlphaAnalyticsDataRestTransport._BaseListAudienceLists._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseAlphaAnalyticsDataRestTransport._BaseListAudienceLists._get_query_params_json(transcoded_request) + + # Send the request + response = AlphaAnalyticsDataRestTransport._ListAudienceLists._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.ListAudienceListsResponse() + pb_resp = analytics_data_api.ListAudienceListsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_audience_lists(resp) + return resp + + class _ListRecurringAudienceLists(_BaseAlphaAnalyticsDataRestTransport._BaseListRecurringAudienceLists, AlphaAnalyticsDataRestStub): + def __hash__(self): + return hash("AlphaAnalyticsDataRestTransport.ListRecurringAudienceLists") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: analytics_data_api.ListRecurringAudienceListsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.ListRecurringAudienceListsResponse: + r"""Call the list recurring audience + lists method over HTTP. + + Args: + request (~.analytics_data_api.ListRecurringAudienceListsRequest): + The request object. A request to list all recurring + audience lists for a property. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.analytics_data_api.ListRecurringAudienceListsResponse: + A list of all recurring audience + lists for a property. + + """ + + http_options = _BaseAlphaAnalyticsDataRestTransport._BaseListRecurringAudienceLists._get_http_options() + request, metadata = self._interceptor.pre_list_recurring_audience_lists(request, metadata) + transcoded_request = _BaseAlphaAnalyticsDataRestTransport._BaseListRecurringAudienceLists._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseAlphaAnalyticsDataRestTransport._BaseListRecurringAudienceLists._get_query_params_json(transcoded_request) + + # Send the request + response = AlphaAnalyticsDataRestTransport._ListRecurringAudienceLists._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.ListRecurringAudienceListsResponse() + pb_resp = analytics_data_api.ListRecurringAudienceListsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_recurring_audience_lists(resp) + return resp + + class _ListReportTasks(_BaseAlphaAnalyticsDataRestTransport._BaseListReportTasks, AlphaAnalyticsDataRestStub): + def __hash__(self): + return hash("AlphaAnalyticsDataRestTransport.ListReportTasks") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: analytics_data_api.ListReportTasksRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.ListReportTasksResponse: + r"""Call the list report tasks method over HTTP. + + Args: + request (~.analytics_data_api.ListReportTasksRequest): + The request object. A request to list all report tasks + for a property. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.ListReportTasksResponse: + A list of all report tasks for a + property. + + """ + + http_options = _BaseAlphaAnalyticsDataRestTransport._BaseListReportTasks._get_http_options() + request, metadata = self._interceptor.pre_list_report_tasks(request, metadata) + transcoded_request = _BaseAlphaAnalyticsDataRestTransport._BaseListReportTasks._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseAlphaAnalyticsDataRestTransport._BaseListReportTasks._get_query_params_json(transcoded_request) + + # Send the request + response = AlphaAnalyticsDataRestTransport._ListReportTasks._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.ListReportTasksResponse() + pb_resp = analytics_data_api.ListReportTasksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_report_tasks(resp) + return resp + + class _QueryAudienceList(_BaseAlphaAnalyticsDataRestTransport._BaseQueryAudienceList, AlphaAnalyticsDataRestStub): + def __hash__(self): + return hash("AlphaAnalyticsDataRestTransport.QueryAudienceList") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: analytics_data_api.QueryAudienceListRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.QueryAudienceListResponse: + r"""Call the query audience list method over HTTP. + + Args: + request (~.analytics_data_api.QueryAudienceListRequest): + The request object. A request to list users in an + audience list. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.QueryAudienceListResponse: + A list of users in an audience list. 
+ """ + + http_options = _BaseAlphaAnalyticsDataRestTransport._BaseQueryAudienceList._get_http_options() + request, metadata = self._interceptor.pre_query_audience_list(request, metadata) + transcoded_request = _BaseAlphaAnalyticsDataRestTransport._BaseQueryAudienceList._get_transcoded_request(http_options, request) + + body = _BaseAlphaAnalyticsDataRestTransport._BaseQueryAudienceList._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseAlphaAnalyticsDataRestTransport._BaseQueryAudienceList._get_query_params_json(transcoded_request) + + # Send the request + response = AlphaAnalyticsDataRestTransport._QueryAudienceList._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.QueryAudienceListResponse() + pb_resp = analytics_data_api.QueryAudienceListResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_query_audience_list(resp) + return resp + + class _QueryReportTask(_BaseAlphaAnalyticsDataRestTransport._BaseQueryReportTask, AlphaAnalyticsDataRestStub): + def __hash__(self): + return hash("AlphaAnalyticsDataRestTransport.QueryReportTask") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + 
request: analytics_data_api.QueryReportTaskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.QueryReportTaskResponse: + r"""Call the query report task method over HTTP. + + Args: + request (~.analytics_data_api.QueryReportTaskRequest): + The request object. A request to fetch the report content + for a report task. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.QueryReportTaskResponse: + The report content corresponding to a + report task. + + """ + + http_options = _BaseAlphaAnalyticsDataRestTransport._BaseQueryReportTask._get_http_options() + request, metadata = self._interceptor.pre_query_report_task(request, metadata) + transcoded_request = _BaseAlphaAnalyticsDataRestTransport._BaseQueryReportTask._get_transcoded_request(http_options, request) + + body = _BaseAlphaAnalyticsDataRestTransport._BaseQueryReportTask._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseAlphaAnalyticsDataRestTransport._BaseQueryReportTask._get_query_params_json(transcoded_request) + + # Send the request + response = AlphaAnalyticsDataRestTransport._QueryReportTask._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.QueryReportTaskResponse() + pb_resp = analytics_data_api.QueryReportTaskResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_query_report_task(resp) + return resp + + class _RunFunnelReport(_BaseAlphaAnalyticsDataRestTransport._BaseRunFunnelReport, AlphaAnalyticsDataRestStub): + def __hash__(self): + return hash("AlphaAnalyticsDataRestTransport.RunFunnelReport") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: analytics_data_api.RunFunnelReportRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.RunFunnelReportResponse: + r"""Call the run funnel report method over HTTP. + + Args: + request (~.analytics_data_api.RunFunnelReportRequest): + The request object. The request for a funnel report. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.RunFunnelReportResponse: + The funnel report response contains + two sub reports. The two sub reports are + different combinations of dimensions and + metrics. 
+ + """ + + http_options = _BaseAlphaAnalyticsDataRestTransport._BaseRunFunnelReport._get_http_options() + request, metadata = self._interceptor.pre_run_funnel_report(request, metadata) + transcoded_request = _BaseAlphaAnalyticsDataRestTransport._BaseRunFunnelReport._get_transcoded_request(http_options, request) + + body = _BaseAlphaAnalyticsDataRestTransport._BaseRunFunnelReport._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseAlphaAnalyticsDataRestTransport._BaseRunFunnelReport._get_query_params_json(transcoded_request) + + # Send the request + response = AlphaAnalyticsDataRestTransport._RunFunnelReport._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.RunFunnelReportResponse() + pb_resp = analytics_data_api.RunFunnelReportResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_funnel_report(resp) + return resp + + class _SheetExportAudienceList(_BaseAlphaAnalyticsDataRestTransport._BaseSheetExportAudienceList, AlphaAnalyticsDataRestStub): + def __hash__(self): + return hash("AlphaAnalyticsDataRestTransport.SheetExportAudienceList") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def 
__call__(self, + request: analytics_data_api.SheetExportAudienceListRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.SheetExportAudienceListResponse: + r"""Call the sheet export audience + list method over HTTP. + + Args: + request (~.analytics_data_api.SheetExportAudienceListRequest): + The request object. A request to export users in an + audience list to a Google Sheet. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.SheetExportAudienceListResponse: + The created Google Sheet with the + list of users in an audience list. + + """ + + http_options = _BaseAlphaAnalyticsDataRestTransport._BaseSheetExportAudienceList._get_http_options() + request, metadata = self._interceptor.pre_sheet_export_audience_list(request, metadata) + transcoded_request = _BaseAlphaAnalyticsDataRestTransport._BaseSheetExportAudienceList._get_transcoded_request(http_options, request) + + body = _BaseAlphaAnalyticsDataRestTransport._BaseSheetExportAudienceList._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseAlphaAnalyticsDataRestTransport._BaseSheetExportAudienceList._get_query_params_json(transcoded_request) + + # Send the request + response = AlphaAnalyticsDataRestTransport._SheetExportAudienceList._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.SheetExportAudienceListResponse() + pb_resp = analytics_data_api.SheetExportAudienceListResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_sheet_export_audience_list(resp) + return resp + + @property + def create_audience_list(self) -> Callable[ + [analytics_data_api.CreateAudienceListRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAudienceList(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_recurring_audience_list(self) -> Callable[ + [analytics_data_api.CreateRecurringAudienceListRequest], + analytics_data_api.RecurringAudienceList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateRecurringAudienceList(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_report_task(self) -> Callable[ + [analytics_data_api.CreateReportTaskRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateReportTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_audience_list(self) -> Callable[ + [analytics_data_api.GetAudienceListRequest], + analytics_data_api.AudienceList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetAudienceList(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_property_quotas_snapshot(self) -> Callable[ + [analytics_data_api.GetPropertyQuotasSnapshotRequest], + analytics_data_api.PropertyQuotasSnapshot]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetPropertyQuotasSnapshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_recurring_audience_list(self) -> Callable[ + [analytics_data_api.GetRecurringAudienceListRequest], + analytics_data_api.RecurringAudienceList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetRecurringAudienceList(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_report_task(self) -> Callable[ + [analytics_data_api.GetReportTaskRequest], + analytics_data_api.ReportTask]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetReportTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_audience_lists(self) -> Callable[ + [analytics_data_api.ListAudienceListsRequest], + analytics_data_api.ListAudienceListsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListAudienceLists(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_recurring_audience_lists(self) -> Callable[ + [analytics_data_api.ListRecurringAudienceListsRequest], + analytics_data_api.ListRecurringAudienceListsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListRecurringAudienceLists(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_report_tasks(self) -> Callable[ + [analytics_data_api.ListReportTasksRequest], + analytics_data_api.ListReportTasksResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListReportTasks(self._session, self._host, self._interceptor) # type: ignore + + @property + def query_audience_list(self) -> Callable[ + [analytics_data_api.QueryAudienceListRequest], + analytics_data_api.QueryAudienceListResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._QueryAudienceList(self._session, self._host, self._interceptor) # type: ignore + + @property + def query_report_task(self) -> Callable[ + [analytics_data_api.QueryReportTaskRequest], + analytics_data_api.QueryReportTaskResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._QueryReportTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_funnel_report(self) -> Callable[ + [analytics_data_api.RunFunnelReportRequest], + analytics_data_api.RunFunnelReportResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RunFunnelReport(self._session, self._host, self._interceptor) # type: ignore + + @property + def sheet_export_audience_list(self) -> Callable[ + [analytics_data_api.SheetExportAudienceListRequest], + analytics_data_api.SheetExportAudienceListResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SheetExportAudienceList(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'AlphaAnalyticsDataRestTransport', +) diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest_base.py b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest_base.py new file mode 100644 index 000000000000..e82e34d0d46d --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest_base.py @@ -0,0 +1,672 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from .base import AlphaAnalyticsDataTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.analytics.data_v1alpha.types import analytics_data_api +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseAlphaAnalyticsDataRestTransport(AlphaAnalyticsDataTransport): + """Base REST backend transport for AlphaAnalyticsData. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'analyticsdata.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'analyticsdata.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+        url_scheme: the protocol scheme for the API endpoint.  Normally
+                 "https", but for testing or local servers,
+                 "http" can be specified.
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+
+    class _BaseCreateAudienceList:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1alpha/{parent=properties/*}/audienceLists',
+                'body': 'audience_list',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = analytics_data_api.CreateAudienceListRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            
query_params.update(_BaseAlphaAnalyticsDataRestTransport._BaseCreateAudienceList._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateRecurringAudienceList: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1alpha/{parent=properties/*}/recurringAudienceLists', + 'body': 'recurring_audience_list', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.CreateRecurringAudienceListRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseAlphaAnalyticsDataRestTransport._BaseCreateRecurringAudienceList._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateReportTask: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1alpha/{parent=properties/*}/reportTasks', + 'body': 'report_task', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.CreateReportTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseAlphaAnalyticsDataRestTransport._BaseCreateReportTask._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetAudienceList: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1alpha/{name=properties/*/audienceLists/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.GetAudienceListRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def 
_get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseAlphaAnalyticsDataRestTransport._BaseGetAudienceList._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetPropertyQuotasSnapshot: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1alpha/{name=properties/*/propertyQuotasSnapshot}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.GetPropertyQuotasSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseAlphaAnalyticsDataRestTransport._BaseGetPropertyQuotasSnapshot._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetRecurringAudienceList: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def 
_get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1alpha/{name=properties/*/recurringAudienceLists/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.GetRecurringAudienceListRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseAlphaAnalyticsDataRestTransport._BaseGetRecurringAudienceList._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetReportTask: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1alpha/{name=properties/*/reportTasks/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.GetReportTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseAlphaAnalyticsDataRestTransport._BaseGetReportTask._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + 
return query_params + + class _BaseListAudienceLists: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1alpha/{parent=properties/*}/audienceLists', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.ListAudienceListsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseAlphaAnalyticsDataRestTransport._BaseListAudienceLists._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListRecurringAudienceLists: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1alpha/{parent=properties/*}/recurringAudienceLists', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.ListRecurringAudienceListsRequest.pb(request) + transcoded_request = 
path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseAlphaAnalyticsDataRestTransport._BaseListRecurringAudienceLists._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListReportTasks: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1alpha/{parent=properties/*}/reportTasks', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.ListReportTasksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseAlphaAnalyticsDataRestTransport._BaseListReportTasks._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseQueryAudienceList: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1alpha/{name=properties/*/audienceLists/*}:query', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.QueryAudienceListRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseAlphaAnalyticsDataRestTransport._BaseQueryAudienceList._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseQueryReportTask: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1alpha/{name=properties/*/reportTasks/*}:query', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.QueryReportTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def 
_get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseAlphaAnalyticsDataRestTransport._BaseQueryReportTask._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRunFunnelReport: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1alpha/{property=properties/*}:runFunnelReport', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.RunFunnelReportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseSheetExportAudienceList: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() 
if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1alpha/{name=properties/*/audienceLists/*}:exportSheet', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.SheetExportAudienceListRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseAlphaAnalyticsDataRestTransport._BaseSheetExportAudienceList._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + +__all__=( + '_BaseAlphaAnalyticsDataRestTransport', +) diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/types/__init__.py b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/types/__init__.py new file mode 100644 index 000000000000..2a8a510e2fe3 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/types/__init__.py @@ -0,0 +1,232 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .analytics_data_api import ( + AudienceDimension, + AudienceDimensionValue, + AudienceList, + AudienceListMetadata, + AudienceRow, + CreateAudienceListRequest, + CreateRecurringAudienceListRequest, + CreateReportTaskRequest, + GetAudienceListRequest, + GetPropertyQuotasSnapshotRequest, + GetRecurringAudienceListRequest, + GetReportTaskRequest, + ListAudienceListsRequest, + ListAudienceListsResponse, + ListRecurringAudienceListsRequest, + ListRecurringAudienceListsResponse, + ListReportTasksRequest, + ListReportTasksResponse, + PropertyQuotasSnapshot, + QueryAudienceListRequest, + QueryAudienceListResponse, + QueryReportTaskRequest, + QueryReportTaskResponse, + RecurringAudienceList, + ReportTask, + ReportTaskMetadata, + RunFunnelReportRequest, + RunFunnelReportResponse, + SheetExportAudienceListRequest, + SheetExportAudienceListResponse, + WebhookNotification, +) +from .data import ( + BetweenFilter, + Cohort, + CohortReportSettings, + CohortSpec, + CohortsRange, + DateRange, + Dimension, + DimensionExpression, + DimensionHeader, + DimensionValue, + EmptyFilter, + EventSegment, + EventSegmentConditionGroup, + EventSegmentCriteria, + EventSegmentExclusion, + Filter, + FilterExpression, + FilterExpressionList, + Funnel, + FunnelBreakdown, + FunnelEventFilter, + FunnelFieldFilter, + FunnelFilterExpression, + FunnelFilterExpressionList, + FunnelNextAction, + FunnelParameterFilter, + FunnelParameterFilterExpression, + FunnelParameterFilterExpressionList, + FunnelResponseMetadata, + FunnelStep, + FunnelSubReport, + InListFilter, + Metric, + MetricHeader, 
+ MetricValue, + NumericFilter, + NumericValue, + OrderBy, + PropertyQuota, + QuotaStatus, + ResponseMetaData, + Row, + SamplingMetadata, + Segment, + SegmentEventFilter, + SegmentFilter, + SegmentFilterExpression, + SegmentFilterExpressionList, + SegmentFilterScoping, + SegmentParameterFilter, + SegmentParameterFilterExpression, + SegmentParameterFilterExpressionList, + SegmentParameterFilterScoping, + SessionSegment, + SessionSegmentConditionGroup, + SessionSegmentCriteria, + SessionSegmentExclusion, + StringFilter, + UserSegment, + UserSegmentConditionGroup, + UserSegmentCriteria, + UserSegmentExclusion, + UserSegmentSequenceGroup, + UserSequenceStep, + EventCriteriaScoping, + EventExclusionDuration, + MetricAggregation, + MetricType, + RestrictedMetricType, + SamplingLevel, + SessionCriteriaScoping, + SessionExclusionDuration, + UserCriteriaScoping, + UserExclusionDuration, +) + +__all__ = ( + 'AudienceDimension', + 'AudienceDimensionValue', + 'AudienceList', + 'AudienceListMetadata', + 'AudienceRow', + 'CreateAudienceListRequest', + 'CreateRecurringAudienceListRequest', + 'CreateReportTaskRequest', + 'GetAudienceListRequest', + 'GetPropertyQuotasSnapshotRequest', + 'GetRecurringAudienceListRequest', + 'GetReportTaskRequest', + 'ListAudienceListsRequest', + 'ListAudienceListsResponse', + 'ListRecurringAudienceListsRequest', + 'ListRecurringAudienceListsResponse', + 'ListReportTasksRequest', + 'ListReportTasksResponse', + 'PropertyQuotasSnapshot', + 'QueryAudienceListRequest', + 'QueryAudienceListResponse', + 'QueryReportTaskRequest', + 'QueryReportTaskResponse', + 'RecurringAudienceList', + 'ReportTask', + 'ReportTaskMetadata', + 'RunFunnelReportRequest', + 'RunFunnelReportResponse', + 'SheetExportAudienceListRequest', + 'SheetExportAudienceListResponse', + 'WebhookNotification', + 'BetweenFilter', + 'Cohort', + 'CohortReportSettings', + 'CohortSpec', + 'CohortsRange', + 'DateRange', + 'Dimension', + 'DimensionExpression', + 'DimensionHeader', + 
'DimensionValue', + 'EmptyFilter', + 'EventSegment', + 'EventSegmentConditionGroup', + 'EventSegmentCriteria', + 'EventSegmentExclusion', + 'Filter', + 'FilterExpression', + 'FilterExpressionList', + 'Funnel', + 'FunnelBreakdown', + 'FunnelEventFilter', + 'FunnelFieldFilter', + 'FunnelFilterExpression', + 'FunnelFilterExpressionList', + 'FunnelNextAction', + 'FunnelParameterFilter', + 'FunnelParameterFilterExpression', + 'FunnelParameterFilterExpressionList', + 'FunnelResponseMetadata', + 'FunnelStep', + 'FunnelSubReport', + 'InListFilter', + 'Metric', + 'MetricHeader', + 'MetricValue', + 'NumericFilter', + 'NumericValue', + 'OrderBy', + 'PropertyQuota', + 'QuotaStatus', + 'ResponseMetaData', + 'Row', + 'SamplingMetadata', + 'Segment', + 'SegmentEventFilter', + 'SegmentFilter', + 'SegmentFilterExpression', + 'SegmentFilterExpressionList', + 'SegmentFilterScoping', + 'SegmentParameterFilter', + 'SegmentParameterFilterExpression', + 'SegmentParameterFilterExpressionList', + 'SegmentParameterFilterScoping', + 'SessionSegment', + 'SessionSegmentConditionGroup', + 'SessionSegmentCriteria', + 'SessionSegmentExclusion', + 'StringFilter', + 'UserSegment', + 'UserSegmentConditionGroup', + 'UserSegmentCriteria', + 'UserSegmentExclusion', + 'UserSegmentSequenceGroup', + 'UserSequenceStep', + 'EventCriteriaScoping', + 'EventExclusionDuration', + 'MetricAggregation', + 'MetricType', + 'RestrictedMetricType', + 'SamplingLevel', + 'SessionCriteriaScoping', + 'SessionExclusionDuration', + 'UserCriteriaScoping', + 'UserExclusionDuration', +) diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/types/analytics_data_api.py b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/types/analytics_data_api.py new file mode 100644 index 000000000000..c7861bfa2a81 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/types/analytics_data_api.py @@ -0,0 +1,1725 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.analytics.data_v1alpha.types import data +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.analytics.data.v1alpha', + manifest={ + 'CreateRecurringAudienceListRequest', + 'RecurringAudienceList', + 'WebhookNotification', + 'GetRecurringAudienceListRequest', + 'ListRecurringAudienceListsRequest', + 'ListRecurringAudienceListsResponse', + 'GetPropertyQuotasSnapshotRequest', + 'PropertyQuotasSnapshot', + 'GetAudienceListRequest', + 'ListAudienceListsRequest', + 'ListAudienceListsResponse', + 'CreateAudienceListRequest', + 'AudienceList', + 'AudienceListMetadata', + 'QueryAudienceListRequest', + 'QueryAudienceListResponse', + 'SheetExportAudienceListRequest', + 'SheetExportAudienceListResponse', + 'AudienceRow', + 'AudienceDimension', + 'AudienceDimensionValue', + 'RunFunnelReportRequest', + 'RunFunnelReportResponse', + 'ReportTask', + 'CreateReportTaskRequest', + 'ReportTaskMetadata', + 'QueryReportTaskRequest', + 'QueryReportTaskResponse', + 'GetReportTaskRequest', + 'ListReportTasksRequest', + 'ListReportTasksResponse', + }, +) + + +class CreateRecurringAudienceListRequest(proto.Message): + r"""A request to create a new recurring audience list. + + Attributes: + parent (str): + Required. 
The parent resource where this recurring audience + list will be created. Format: ``properties/{property}`` + recurring_audience_list (google.analytics.data_v1alpha.types.RecurringAudienceList): + Required. The recurring audience list to + create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + recurring_audience_list: 'RecurringAudienceList' = proto.Field( + proto.MESSAGE, + number=2, + message='RecurringAudienceList', + ) + + +class RecurringAudienceList(proto.Message): + r"""A recurring audience list produces new audience lists each + day. Audience lists are users in an audience at the time of the + list's creation. A recurring audience list ensures that you have + audience list based on the most recent data available for use + each day. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. The recurring audience list + resource name assigned during creation. This resource name + identifies this ``RecurringAudienceList``. + + Format: + ``properties/{property}/recurringAudienceLists/{recurring_audience_list}`` + audience (str): + Required. The audience resource name. This resource name + identifies the audience being listed and is shared between + the Analytics Data & Admin APIs. + + Format: ``properties/{property}/audiences/{audience}`` + audience_display_name (str): + Output only. The descriptive display name for + this audience. For example, "Purchasers". + dimensions (MutableSequence[google.analytics.data_v1alpha.types.AudienceDimension]): + Required. The dimensions requested and + displayed in the audience list response. + active_days_remaining (int): + Optional. The number of remaining days that a + recurring audience export will produce an + audience list instance. This counter decreases + by one each day, and when it reaches zero, no + new audience lists will be created. 
+ + Recurring audience list request for Analytics + 360 properties default to 180 days and have a + maximum of 365 days. Requests for standard + Analytics properties default to 14 days and have + a maximum of 30 days. + + The minimum value allowed during creation is 1. + Requests above their respective maximum will be + coerced to their maximum. + + This field is a member of `oneof`_ ``_active_days_remaining``. + audience_lists (MutableSequence[str]): + Output only. Audience list resource names for + audience list instances created for this + recurring audience list. One audience list is + created for each day, and the audience list will + be listed here. + + This list is ordered with the most recently + created audience list first. + webhook_notification (google.analytics.data_v1alpha.types.WebhookNotification): + Optional. Configures webhook notifications to + be sent from the Google Analytics Data API to + your webhook server. Use of webhooks is + optional. If unused, you'll need to poll this + API to determine when a recurring audience list + creates new audience lists. Webhooks allow a + notification to be sent to your servers & avoid + the need for polling. + + Two POST requests will be sent each time a + recurring audience list creates an audience + list. This happens once per day until a + recurring audience list reaches 0 active days + remaining. The first request will be sent + showing a newly created audience list in its + CREATING state. The second request will be sent + after the audience list completes creation + (either the ACTIVE or FAILED state). + + This field is a member of `oneof`_ ``_webhook_notification``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + audience: str = proto.Field( + proto.STRING, + number=2, + ) + audience_display_name: str = proto.Field( + proto.STRING, + number=3, + ) + dimensions: MutableSequence['AudienceDimension'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='AudienceDimension', + ) + active_days_remaining: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + audience_lists: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + webhook_notification: 'WebhookNotification' = proto.Field( + proto.MESSAGE, + number=8, + optional=True, + message='WebhookNotification', + ) + + +class WebhookNotification(proto.Message): + r"""Configures a long-running operation resource to send a + webhook notification from the Google Analytics Data API to your + webhook server when the resource updates. + + Notification configurations contain private values & are only + visible to your GCP project. Different GCP projects may attach + different webhook notifications to the same long-running + operation resource. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + Optional. The web address that will receive the webhook + notification. This address will receive POST requests as the + state of the long running operation resource changes. The + POST request will contain both a JSON version of the long + running operation resource in the body and a + ``sentTimestamp`` field. The sent timestamp will specify the + unix microseconds since the epoch that the request was sent; + this lets you identify replayed notifications. + + An example URI is + ``https://us-central1-example-project-id.cloudfunctions.net/example-function-1``. + + The URI must use HTTPS and point to a site with a valid SSL + certificate on the web server. 
The URI must have a maximum + string length of 128 characters & use only the allowlisted + characters from `RFC + 1738 `__. + + When your webhook server receives a notification, it is + expected to reply with an HTTP response status code of 200 + within 5 seconds. + + A URI is required to use webhook notifications. + + Requests to this webhook server will contain an ID token + authenticating the service account + ``google-analytics-audience-export@system.gserviceaccount.com``. + To learn more about ID tokens, see + https://cloud.google.com/docs/authentication/token-types#id. + For Google Cloud Functions, this lets you configure your + function to require authentication. In Cloud IAM, you will + need to grant the service account permissions to the Cloud + Run Invoker (``roles/run.invoker``) & Cloud Functions + Invoker (``roles/cloudfunctions.invoker``) roles for the + webhook post request to pass Google Cloud Functions + authentication. This API can send webhook notifications to + arbitrary URIs; for webhook servers other than Google Cloud + Functions, this ID token in the authorization bearer header + should be ignored if it is not needed. + + This field is a member of `oneof`_ ``_uri``. + channel_token (str): + Optional. The channel token is an arbitrary string value and + must have a maximum string length of 64 characters. Channel + tokens allow you to verify the source of a webhook + notification. This guards against the message being spoofed. + The channel token will be specified in the + ``X-Goog-Channel-Token`` HTTP header of the webhook POST + request. + + A channel token is not required to use webhook + notifications. + + This field is a member of `oneof`_ ``_channel_token``. 
+ """ + + uri: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + channel_token: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + +class GetRecurringAudienceListRequest(proto.Message): + r"""A request to retrieve configuration metadata about a specific + recurring audience list. + + Attributes: + name (str): + Required. The recurring audience list resource name. Format: + ``properties/{property}/recurringAudienceLists/{recurring_audience_list}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListRecurringAudienceListsRequest(proto.Message): + r"""A request to list all recurring audience lists for a + property. + + Attributes: + parent (str): + Required. All recurring audience lists for this property + will be listed in the response. Format: + ``properties/{property}`` + page_size (int): + Optional. The maximum number of recurring + audience lists to return. The service may return + fewer than this value. If unspecified, at most + 200 recurring audience lists will be returned. + The maximum value is 1000 (higher values will be + coerced to the maximum). + page_token (str): + Optional. A page token, received from a previous + ``ListRecurringAudienceLists`` call. Provide this to + retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListRecurringAudienceLists`` must match the call that + provided the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListRecurringAudienceListsResponse(proto.Message): + r"""A list of all recurring audience lists for a property. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + recurring_audience_lists (MutableSequence[google.analytics.data_v1alpha.types.RecurringAudienceList]): + Each recurring audience list for a property. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + + This field is a member of `oneof`_ ``_next_page_token``. + """ + + @property + def raw_page(self): + return self + + recurring_audience_lists: MutableSequence['RecurringAudienceList'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='RecurringAudienceList', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + +class GetPropertyQuotasSnapshotRequest(proto.Message): + r"""A request to return the PropertyQuotasSnapshot for a given + category. + + Attributes: + name (str): + Required. Quotas from this property will be listed in the + response. Format: + ``properties/{property}/propertyQuotasSnapshot`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class PropertyQuotasSnapshot(proto.Message): + r"""Current state of all Property Quotas organized by quota + category. + + Attributes: + name (str): + Identifier. The property quota snapshot + resource name. 
+ core_property_quota (google.analytics.data_v1alpha.types.PropertyQuota): + Property Quota for core property tokens + realtime_property_quota (google.analytics.data_v1alpha.types.PropertyQuota): + Property Quota for realtime property tokens + funnel_property_quota (google.analytics.data_v1alpha.types.PropertyQuota): + Property Quota for funnel property tokens + """ + + name: str = proto.Field( + proto.STRING, + number=4, + ) + core_property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=1, + message=data.PropertyQuota, + ) + realtime_property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=2, + message=data.PropertyQuota, + ) + funnel_property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=3, + message=data.PropertyQuota, + ) + + +class GetAudienceListRequest(proto.Message): + r"""A request to retrieve configuration metadata about a specific + audience list. + + Attributes: + name (str): + Required. The audience list resource name. Format: + ``properties/{property}/audienceLists/{audience_list}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListAudienceListsRequest(proto.Message): + r"""A request to list all audience lists for a property. + + Attributes: + parent (str): + Required. All audience lists for this property will be + listed in the response. Format: ``properties/{property}`` + page_size (int): + Optional. The maximum number of audience + lists to return. The service may return fewer + than this value. If unspecified, at most 200 + audience lists will be returned. The maximum + value is 1000 (higher values will be coerced to + the maximum). + page_token (str): + Optional. A page token, received from a previous + ``ListAudienceLists`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListAudienceLists`` must match the call that provided the + page token. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAudienceListsResponse(proto.Message): + r"""A list of all audience lists for a property. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + audience_lists (MutableSequence[google.analytics.data_v1alpha.types.AudienceList]): + Each audience list for a property. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + + This field is a member of `oneof`_ ``_next_page_token``. + """ + + @property + def raw_page(self): + return self + + audience_lists: MutableSequence['AudienceList'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='AudienceList', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + +class CreateAudienceListRequest(proto.Message): + r"""A request to create a new audience list. + + Attributes: + parent (str): + Required. The parent resource where this audience list will + be created. Format: ``properties/{property}`` + audience_list (google.analytics.data_v1alpha.types.AudienceList): + Required. The audience list to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + audience_list: 'AudienceList' = proto.Field( + proto.MESSAGE, + number=2, + message='AudienceList', + ) + + +class AudienceList(proto.Message): + r"""An audience list is a list of users in an audience at the + time of the list's creation. One audience may have multiple + audience lists created for different days. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. 
The audience list resource name + assigned during creation. This resource name identifies this + ``AudienceList``. + + Format: + ``properties/{property}/audienceLists/{audience_list}`` + audience (str): + Required. The audience resource name. This resource name + identifies the audience being listed and is shared between + the Analytics Data & Admin APIs. + + Format: ``properties/{property}/audiences/{audience}`` + audience_display_name (str): + Output only. The descriptive display name for + this audience. For example, "Purchasers". + dimensions (MutableSequence[google.analytics.data_v1alpha.types.AudienceDimension]): + Required. The dimensions requested and + displayed in the query response. + state (google.analytics.data_v1alpha.types.AudienceList.State): + Output only. The current state for this + AudienceList. + + This field is a member of `oneof`_ ``_state``. + begin_creating_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when CreateAudienceList was called and + the AudienceList began the ``CREATING`` state. + + This field is a member of `oneof`_ ``_begin_creating_time``. + creation_quota_tokens_charged (int): + Output only. The total quota tokens charged during creation + of the AudienceList. Because this token count is based on + activity from the ``CREATING`` state, this tokens charged + will be fixed once an AudienceList enters the ``ACTIVE`` or + ``FAILED`` states. + row_count (int): + Output only. The total number of rows in the + AudienceList result. + + This field is a member of `oneof`_ ``_row_count``. + error_message (str): + Output only. Error message is populated when + an audience list fails during creation. A common + reason for such a failure is quota exhaustion. + + This field is a member of `oneof`_ ``_error_message``. + percentage_completed (float): + Output only. The percentage completed for + this audience export ranging between 0 to 100. + + This field is a member of `oneof`_ ``_percentage_completed``. 
+ recurring_audience_list (str): + Output only. The recurring audience list that + created this audience list. Recurring audience + lists create audience lists daily. + + If audience lists are created directly, they + will have no associated recurring audience list, + and this field will be blank. + + This field is a member of `oneof`_ ``_recurring_audience_list``. + webhook_notification (google.analytics.data_v1alpha.types.WebhookNotification): + Optional. Configures webhook notifications to + be sent from the Google Analytics Data API to + your webhook server. Use of webhooks is + optional. If unused, you'll need to poll this + API to determine when an audience list is ready + to be used. Webhooks allow a notification to be + sent to your servers & avoid the need for + polling. + + Either one or two POST requests will be sent to + the webhook. The first POST request will be sent + immediately showing the newly created audience + list in its CREATING state. The second POST + request will be sent after the audience list + completes creation (either the ACTIVE or FAILED + state). + + If identical audience lists are requested in + quick succession, the second & subsequent + audience lists can be served from cache. In that + case, the audience list create method can return + an audience list is already ACTIVE. In this + scenario, only one POST request will be sent to + the webhook. + + This field is a member of `oneof`_ ``_webhook_notification``. + """ + class State(proto.Enum): + r"""The AudienceList currently exists in this state. + + Values: + STATE_UNSPECIFIED (0): + Unspecified state will never be used. + CREATING (1): + The AudienceList is currently creating and + will be available in the future. Creating occurs + immediately after the CreateAudienceList call. + ACTIVE (2): + The AudienceList is fully created and ready + for querying. 
An AudienceList is updated to + active asynchronously from a request; this + occurs some time (for example 15 minutes) after + the initial create call. + FAILED (3): + The AudienceList failed to be created. It is + possible that re-requesting this audience list + will succeed. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + FAILED = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + audience: str = proto.Field( + proto.STRING, + number=2, + ) + audience_display_name: str = proto.Field( + proto.STRING, + number=3, + ) + dimensions: MutableSequence['AudienceDimension'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='AudienceDimension', + ) + state: State = proto.Field( + proto.ENUM, + number=5, + optional=True, + enum=State, + ) + begin_creating_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message=timestamp_pb2.Timestamp, + ) + creation_quota_tokens_charged: int = proto.Field( + proto.INT32, + number=7, + ) + row_count: int = proto.Field( + proto.INT32, + number=8, + optional=True, + ) + error_message: str = proto.Field( + proto.STRING, + number=9, + optional=True, + ) + percentage_completed: float = proto.Field( + proto.DOUBLE, + number=11, + optional=True, + ) + recurring_audience_list: str = proto.Field( + proto.STRING, + number=12, + optional=True, + ) + webhook_notification: 'WebhookNotification' = proto.Field( + proto.MESSAGE, + number=13, + optional=True, + message='WebhookNotification', + ) + + +class AudienceListMetadata(proto.Message): + r"""This metadata is currently blank. + """ + + +class QueryAudienceListRequest(proto.Message): + r"""A request to list users in an audience list. + + Attributes: + name (str): + Required. The name of the audience list to retrieve users + from. Format: + ``properties/{property}/audienceLists/{audience_list}`` + offset (int): + Optional. The row count of the start row. The first row is + counted as row 0. 
+ + When paging, the first request does not specify offset; or + equivalently, sets offset to 0; the first request returns + the first ``limit`` of rows. The second request sets offset + to the ``limit`` of the first request; the second request + returns the second ``limit`` of rows. + + To learn more about this pagination parameter, see + `Pagination `__. + limit (int): + Optional. The number of rows to return. If unspecified, + 10,000 rows are returned. The API returns a maximum of + 250,000 rows per request, no matter how many you ask for. + ``limit`` must be positive. + + The API can also return fewer rows than the requested + ``limit``, if there aren't as many dimension values as the + ``limit``. + + To learn more about this pagination parameter, see + `Pagination `__. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + offset: int = proto.Field( + proto.INT64, + number=2, + ) + limit: int = proto.Field( + proto.INT64, + number=3, + ) + + +class QueryAudienceListResponse(proto.Message): + r"""A list of users in an audience list. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + audience_list (google.analytics.data_v1alpha.types.AudienceList): + Configuration data about AudienceList being + queried. Returned to help interpret the audience + rows in this response. For example, the + dimensions in this AudienceList correspond to + the columns in the AudienceRows. + + This field is a member of `oneof`_ ``_audience_list``. + audience_rows (MutableSequence[google.analytics.data_v1alpha.types.AudienceRow]): + Rows for each user in an audience list. The + number of rows in this response will be less + than or equal to request's page size. + row_count (int): + The total number of rows in the AudienceList result. + ``rowCount`` is independent of the number of rows returned + in the response, the ``limit`` request parameter, and the + ``offset`` request parameter. 
For example if a query returns + 175 rows and includes ``limit`` of 50 in the API request, + the response will contain ``rowCount`` of 175 but only 50 + rows. + + To learn more about this pagination parameter, see + `Pagination `__. + + This field is a member of `oneof`_ ``_row_count``. + """ + + audience_list: 'AudienceList' = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message='AudienceList', + ) + audience_rows: MutableSequence['AudienceRow'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='AudienceRow', + ) + row_count: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + + +class SheetExportAudienceListRequest(proto.Message): + r"""A request to export users in an audience list to a Google + Sheet. + + Attributes: + name (str): + Required. The name of the audience list to retrieve users + from. Format: + ``properties/{property}/audienceLists/{audience_list}`` + offset (int): + Optional. The row count of the start row. The first row is + counted as row 0. + + When paging, the first request does not specify offset; or + equivalently, sets offset to 0; the first request returns + the first ``limit`` of rows. The second request sets offset + to the ``limit`` of the first request; the second request + returns the second ``limit`` of rows. + + To learn more about this pagination parameter, see + `Pagination `__. + limit (int): + Optional. The number of rows to return. If unspecified, + 10,000 rows are returned. The API returns a maximum of + 250,000 rows per request, no matter how many you ask for. + ``limit`` must be positive. + + The API can also return fewer rows than the requested + ``limit``, if there aren't as many dimension values as the + ``limit``. + + To learn more about this pagination parameter, see + `Pagination `__. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + offset: int = proto.Field( + proto.INT64, + number=2, + ) + limit: int = proto.Field( + proto.INT64, + number=3, + ) + + +class SheetExportAudienceListResponse(proto.Message): + r"""The created Google Sheet with the list of users in an + audience list. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + spreadsheet_uri (str): + A uri for you to visit in your browser to + view the Google Sheet. + + This field is a member of `oneof`_ ``_spreadsheet_uri``. + spreadsheet_id (str): + An ID that identifies the created Google + Sheet resource. + + This field is a member of `oneof`_ ``_spreadsheet_id``. + row_count (int): + The total number of rows in the AudienceList result. + ``rowCount`` is independent of the number of rows returned + in the response, the ``limit`` request parameter, and the + ``offset`` request parameter. For example if a query returns + 175 rows and includes ``limit`` of 50 in the API request, + the response will contain ``rowCount`` of 175 but only 50 + rows. + + To learn more about this pagination parameter, see + `Pagination `__. + + This field is a member of `oneof`_ ``_row_count``. + audience_list (google.analytics.data_v1alpha.types.AudienceList): + Configuration data about AudienceList being exported. + Returned to help interpret the AudienceList in the Google + Sheet of this response. + + For example, the AudienceList may have more rows than are + present in the Google Sheet, and in that case, you may want + to send an additional sheet export request with a different + ``offset`` value to retrieve the next page of rows in an + additional Google Sheet. + + This field is a member of `oneof`_ ``_audience_list``. 
+ """ + + spreadsheet_uri: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + spreadsheet_id: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + row_count: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + audience_list: 'AudienceList' = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message='AudienceList', + ) + + +class AudienceRow(proto.Message): + r"""Dimension value attributes for the audience user row. + + Attributes: + dimension_values (MutableSequence[google.analytics.data_v1alpha.types.AudienceDimensionValue]): + Each dimension value attribute for an + audience user. One dimension value will be added + for each dimension column requested. + """ + + dimension_values: MutableSequence['AudienceDimensionValue'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='AudienceDimensionValue', + ) + + +class AudienceDimension(proto.Message): + r"""An audience dimension is a user attribute. Specific user attributed + are requested and then later returned in the + ``QueryAudienceListResponse``. + + Attributes: + dimension_name (str): + Optional. The API name of the dimension. See the `API + Dimensions `__ + for the list of dimension names. + """ + + dimension_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class AudienceDimensionValue(proto.Message): + r"""The value of a dimension. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (str): + Value as a string if the dimension type is a + string. + + This field is a member of `oneof`_ ``one_value``. + """ + + value: str = proto.Field( + proto.STRING, + number=1, + oneof='one_value', + ) + + +class RunFunnelReportRequest(proto.Message): + r"""The request for a funnel report. + + Attributes: + property (str): + Optional. A Google Analytics property identifier whose + events are tracked. Specified in the URL path and not the + body. 
To learn more, see `where to find your Property + ID `__. + Within a batch request, this property should either be + unspecified or consistent with the batch-level property. + + Example: properties/1234 + date_ranges (MutableSequence[google.analytics.data_v1alpha.types.DateRange]): + Optional. Date ranges of data to read. If + multiple date ranges are requested, each + response row will contain a zero based date + range index. If two date ranges overlap, the + event data for the overlapping days is included + in the response rows for both date ranges. + funnel (google.analytics.data_v1alpha.types.Funnel): + Optional. The configuration of this request's + funnel. This funnel configuration is required. + funnel_breakdown (google.analytics.data_v1alpha.types.FunnelBreakdown): + Optional. If specified, this breakdown adds a dimension to + the funnel table sub report response. This breakdown + dimension expands each funnel step to the unique values of + the breakdown dimension. For example, a breakdown by the + ``deviceCategory`` dimension will create rows for + ``mobile``, ``tablet``, ``desktop``, and the total. + funnel_next_action (google.analytics.data_v1alpha.types.FunnelNextAction): + Optional. If specified, next action adds a dimension to the + funnel visualization sub report response. This next action + dimension expands each funnel step to the unique values of + the next action. For example a next action of the + ``eventName`` dimension will create rows for several events + (for example ``session_start`` & ``click``) and the total. + + Next action only supports ``eventName`` and most Page / + Screen dimensions like ``pageTitle`` and ``pagePath``. + funnel_visualization_type (google.analytics.data_v1alpha.types.RunFunnelReportRequest.FunnelVisualizationType): + Optional. The funnel visualization type controls the + dimensions present in the funnel visualization sub report + response. If not specified, ``STANDARD_FUNNEL`` is used. 
+ segments (MutableSequence[google.analytics.data_v1alpha.types.Segment]): + Optional. The configurations of segments. + Segments are subsets of a property's data. In a + funnel report with segments, the funnel is + evaluated in each segment. + + Each segment specified in this request + produces a separate row in the response; in the + response, each segment identified by its name. + + The segments parameter is optional. Requests are + limited to 4 segments. + limit (int): + Optional. The number of rows to return. If unspecified, + 10,000 rows are returned. The API returns a maximum of + 250,000 rows per request, no matter how many you ask for. + ``limit`` must be positive. + + The API can also return fewer rows than the requested + ``limit``, if there aren't as many dimension values as the + ``limit``. + dimension_filter (google.analytics.data_v1alpha.types.FilterExpression): + Optional. Dimension filters allow you to ask for only + specific dimension values in the report. To learn more, see + `Creating a Report: Dimension + Filters `__ + for examples. Metrics cannot be used in this filter. + return_property_quota (bool): + Optional. Toggles whether to return the current state of + this Analytics Property's quota. Quota is returned in + `PropertyQuota <#PropertyQuota>`__. + """ + class FunnelVisualizationType(proto.Enum): + r"""Controls the dimensions present in the funnel visualization + sub report response. + + Values: + FUNNEL_VISUALIZATION_TYPE_UNSPECIFIED (0): + Unspecified type. + STANDARD_FUNNEL (1): + A standard (stepped) funnel. The funnel + visualization sub report in the response will + not contain date. + TRENDED_FUNNEL (2): + A trended (line chart) funnel. The funnel + visualization sub report in the response will + contain the date dimension. 
+ """ + FUNNEL_VISUALIZATION_TYPE_UNSPECIFIED = 0 + STANDARD_FUNNEL = 1 + TRENDED_FUNNEL = 2 + + property: str = proto.Field( + proto.STRING, + number=1, + ) + date_ranges: MutableSequence[data.DateRange] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.DateRange, + ) + funnel: data.Funnel = proto.Field( + proto.MESSAGE, + number=3, + message=data.Funnel, + ) + funnel_breakdown: data.FunnelBreakdown = proto.Field( + proto.MESSAGE, + number=4, + message=data.FunnelBreakdown, + ) + funnel_next_action: data.FunnelNextAction = proto.Field( + proto.MESSAGE, + number=5, + message=data.FunnelNextAction, + ) + funnel_visualization_type: FunnelVisualizationType = proto.Field( + proto.ENUM, + number=6, + enum=FunnelVisualizationType, + ) + segments: MutableSequence[data.Segment] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=data.Segment, + ) + limit: int = proto.Field( + proto.INT64, + number=9, + ) + dimension_filter: data.FilterExpression = proto.Field( + proto.MESSAGE, + number=10, + message=data.FilterExpression, + ) + return_property_quota: bool = proto.Field( + proto.BOOL, + number=12, + ) + + +class RunFunnelReportResponse(proto.Message): + r"""The funnel report response contains two sub reports. The two + sub reports are different combinations of dimensions and + metrics. + + Attributes: + funnel_table (google.analytics.data_v1alpha.types.FunnelSubReport): + The funnel table is a report with the funnel + step, segment, breakdown dimension, active + users, completion rate, abandonments, and + abandonments rate. + + The segment dimension is only present in this + response if a segment was requested. The + breakdown dimension is only present in this + response if it was requested. + funnel_visualization (google.analytics.data_v1alpha.types.FunnelSubReport): + The funnel visualization is a report with the funnel step, + segment, date, next action dimension, and active users. 
+ + The segment dimension is only present in this response if a + segment was requested. The date dimension is only present in + this response if it was requested through the + ``TRENDED_FUNNEL`` funnel type. The next action dimension is + only present in the response if it was requested. + property_quota (google.analytics.data_v1alpha.types.PropertyQuota): + This Analytics Property's quota state + including this request. + kind (str): + Identifies what kind of resource this message is. This + ``kind`` is always the fixed string + "analyticsData#runFunnelReport". Useful to distinguish + between response types in JSON. + """ + + funnel_table: data.FunnelSubReport = proto.Field( + proto.MESSAGE, + number=1, + message=data.FunnelSubReport, + ) + funnel_visualization: data.FunnelSubReport = proto.Field( + proto.MESSAGE, + number=2, + message=data.FunnelSubReport, + ) + property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=3, + message=data.PropertyQuota, + ) + kind: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ReportTask(proto.Message): + r"""A specific report task configuration. + + Attributes: + name (str): + Output only. Identifier. The report task resource name + assigned during creation. Format: + "properties/{property}/reportTasks/{report_task}". + report_definition (google.analytics.data_v1alpha.types.ReportTask.ReportDefinition): + Optional. A report definition to fetch report + data, which describes the structure of a report. + It typically includes the fields that will be + included in the report and the criteria that + will be used to filter the data. + report_metadata (google.analytics.data_v1alpha.types.ReportTask.ReportMetadata): + Output only. The report metadata for a + specific report task, which provides information + about a report. 
It typically includes the
+            following information: the resource name of the
+            report, the state of the report, the timestamp
+            the report was created, etc.
+    """
+
+    class ReportDefinition(proto.Message):
+        r"""The definition of how a report should be run.
+
+        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+        Attributes:
+            dimensions (MutableSequence[google.analytics.data_v1alpha.types.Dimension]):
+                Optional. The dimensions requested and
+                displayed.
+            metrics (MutableSequence[google.analytics.data_v1alpha.types.Metric]):
+                Optional. The metrics requested and
+                displayed.
+            date_ranges (MutableSequence[google.analytics.data_v1alpha.types.DateRange]):
+                Optional. Date ranges of data to read. If multiple date
+                ranges are requested, each response row will contain a zero
+                based date range index. If two date ranges overlap, the
+                event data for the overlapping days is included in the
+                response rows for both date ranges. In a cohort request,
+                this ``dateRanges`` must be unspecified.
+            dimension_filter (google.analytics.data_v1alpha.types.FilterExpression):
+                Optional. Dimension filters let you ask for only specific
+                dimension values in the report. To learn more, see
+                `Fundamentals of Dimension
+                Filters `__
+                for examples. Metrics cannot be used in this filter.
+            metric_filter (google.analytics.data_v1alpha.types.FilterExpression):
+                Optional. The filter clause of metrics.
+                Applied after aggregating the report's rows,
+                similar to SQL having-clause. Dimensions cannot
+                be used in this filter.
+            offset (int):
+                Optional. The row count of the start row from Google
+                Analytics Storage. The first row is counted as row 0.
+
+                When creating a report task, the ``offset`` and ``limit``
+                parameters define the subset of data rows from Google
+                Analytics storage to be included in the generated report.
+ For example, if there are a total of 300,000 rows in Google + Analytics storage, the initial report task may have the + first 10,000 rows with a limit of 10,000 and an offset of 0. + Subsequently, another report task could cover the next + 10,000 rows with a limit of 10,000 and an offset of 10,000. + limit (int): + Optional. The number of rows to return in the Report. If + unspecified, 10,000 rows are returned. The API returns a + maximum of 250,000 rows per request, no matter how many you + ask for. ``limit`` must be positive. + + The API can also return fewer rows than the requested + ``limit``, if there aren't as many dimension values as the + ``limit``. For instance, there are fewer than 300 possible + values for the dimension ``country``, so when reporting on + only ``country``, you can't get more than 300 rows, even if + you set ``limit`` to a higher value. + metric_aggregations (MutableSequence[google.analytics.data_v1alpha.types.MetricAggregation]): + Optional. Aggregation of metrics. Aggregated metric values + will be shown in rows where the dimension_values are set to + "RESERVED_(MetricAggregation)". + order_bys (MutableSequence[google.analytics.data_v1alpha.types.OrderBy]): + Optional. Specifies how rows are ordered in + the response. + currency_code (str): + Optional. A currency code in ISO4217 format, + such as "AED", "USD", "JPY". If the field is + empty, the report uses the property's default + currency. + cohort_spec (google.analytics.data_v1alpha.types.CohortSpec): + Optional. Cohort group associated with this + request. If there is a cohort group in the + request the 'cohort' dimension must be present. + keep_empty_rows (bool): + Optional. If false or unspecified, each row with all metrics + equal to 0 will not be returned. If true, these rows will be + returned if they are not separately removed by a filter. + + Regardless of this ``keep_empty_rows`` setting, only data + recorded by the Google Analytics property can be displayed + in a report. 
+ + For example if a property never logs a ``purchase`` event, + then a query for the ``eventName`` dimension and + ``eventCount`` metric will not have a row containing + eventName: "purchase" and eventCount: 0. + sampling_level (google.analytics.data_v1alpha.types.SamplingLevel): + Optional. The report's sampling level. + + This field is a member of `oneof`_ ``_sampling_level``. + """ + + dimensions: MutableSequence[data.Dimension] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.Dimension, + ) + metrics: MutableSequence[data.Metric] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Metric, + ) + date_ranges: MutableSequence[data.DateRange] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.DateRange, + ) + dimension_filter: data.FilterExpression = proto.Field( + proto.MESSAGE, + number=5, + message=data.FilterExpression, + ) + metric_filter: data.FilterExpression = proto.Field( + proto.MESSAGE, + number=6, + message=data.FilterExpression, + ) + offset: int = proto.Field( + proto.INT64, + number=7, + ) + limit: int = proto.Field( + proto.INT64, + number=8, + ) + metric_aggregations: MutableSequence[data.MetricAggregation] = proto.RepeatedField( + proto.ENUM, + number=9, + enum=data.MetricAggregation, + ) + order_bys: MutableSequence[data.OrderBy] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message=data.OrderBy, + ) + currency_code: str = proto.Field( + proto.STRING, + number=11, + ) + cohort_spec: data.CohortSpec = proto.Field( + proto.MESSAGE, + number=12, + message=data.CohortSpec, + ) + keep_empty_rows: bool = proto.Field( + proto.BOOL, + number=13, + ) + sampling_level: data.SamplingLevel = proto.Field( + proto.ENUM, + number=14, + optional=True, + enum=data.SamplingLevel, + ) + + class ReportMetadata(proto.Message): + r"""The report metadata for a specific report task. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+        Attributes:
+            state (google.analytics.data_v1alpha.types.ReportTask.ReportMetadata.State):
+                Output only. The current state for this
+                report task.
+
+                This field is a member of `oneof`_ ``_state``.
+            begin_creating_time (google.protobuf.timestamp_pb2.Timestamp):
+                Output only. The time when ``CreateReportTask`` was called
+                and the report began the ``CREATING`` state.
+
+                This field is a member of `oneof`_ ``_begin_creating_time``.
+            creation_quota_tokens_charged (int):
+                Output only. The total quota tokens charged during creation
+                of the report. Because this token count is based on activity
+                from the ``CREATING`` state, this tokens charge will be
+                fixed once a report task enters the ``ACTIVE`` or ``FAILED``
+                state.
+            task_row_count (int):
+                Output only. The total number of rows in the report result.
+                This field will be populated when the state is active. You
+                can utilize ``task_row_count`` for pagination within the
+                confines of your existing report.
+
+                This field is a member of `oneof`_ ``_task_row_count``.
+            error_message (str):
+                Output only. Error message is populated if a
+                report task fails during creation.
+
+                This field is a member of `oneof`_ ``_error_message``.
+            total_row_count (int):
+                Output only. The total number of rows in Google Analytics
+                storage. If you want to query additional data rows beyond
+                the current report, you can initiate a new report task
+                based on the ``total_row_count``.
+
+                The ``task_row_count`` represents the number of rows
+                specifically pertaining to the current report, whereas
+                ``total_row_count`` encompasses the total count of rows
+                across all data retrieved from Google Analytics storage.
+
+                For example, suppose the current report's ``task_row_count``
+                is 20, displaying the data from the first 20 rows.
+                Simultaneously, the ``total_row_count`` is 30, indicating
+                the presence of data for all 30 rows.
The ``task_row_count``
+                can be utilized to paginate through the initial 20 rows.
+                To expand the report and include data from all 30 rows, a
+                new report task can be created using the total_row_count to
+                access the full set of 30 rows' worth of data.
+
+                This field is a member of `oneof`_ ``_total_row_count``.
+        """
+        class State(proto.Enum):
+            r"""The processing state.
+
+            Values:
+                STATE_UNSPECIFIED (0):
+                    Unspecified state will never be used.
+                CREATING (1):
+                    The report is currently creating and will be
+                    available in the future. Creating occurs
+                    immediately after the CreateReport call.
+                ACTIVE (2):
+                    The report is fully created and ready for
+                    querying.
+                FAILED (3):
+                    The report failed to be created.
+            """
+            STATE_UNSPECIFIED = 0
+            CREATING = 1
+            ACTIVE = 2
+            FAILED = 3
+
+        state: 'ReportTask.ReportMetadata.State' = proto.Field(
+            proto.ENUM,
+            number=1,
+            optional=True,
+            enum='ReportTask.ReportMetadata.State',
+        )
+        begin_creating_time: timestamp_pb2.Timestamp = proto.Field(
+            proto.MESSAGE,
+            number=2,
+            optional=True,
+            message=timestamp_pb2.Timestamp,
+        )
+        creation_quota_tokens_charged: int = proto.Field(
+            proto.INT32,
+            number=3,
+        )
+        task_row_count: int = proto.Field(
+            proto.INT32,
+            number=4,
+            optional=True,
+        )
+        error_message: str = proto.Field(
+            proto.STRING,
+            number=5,
+            optional=True,
+        )
+        total_row_count: int = proto.Field(
+            proto.INT32,
+            number=6,
+            optional=True,
+        )
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    report_definition: ReportDefinition = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=ReportDefinition,
+    )
+    report_metadata: ReportMetadata = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=ReportMetadata,
+    )
+
+
+class CreateReportTaskRequest(proto.Message):
+    r"""A request to create a report task.
+
+    Attributes:
+        parent (str):
+            Required. The parent resource where this report task will be
+            created.
Format: ``properties/{propertyId}`` + report_task (google.analytics.data_v1alpha.types.ReportTask): + Required. The report task configuration to + create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + report_task: 'ReportTask' = proto.Field( + proto.MESSAGE, + number=2, + message='ReportTask', + ) + + +class ReportTaskMetadata(proto.Message): + r"""Represents the metadata of a long-running operation. + Currently, this metadata is blank. + + """ + + +class QueryReportTaskRequest(proto.Message): + r"""A request to fetch the report content for a report task. + + Attributes: + name (str): + Required. The report source name. Format: + ``properties/{property}/reportTasks/{report}`` + offset (int): + Optional. The row count of the start row in the report. The + first row is counted as row 0. + + When paging, the first request does not specify offset; or + equivalently, sets offset to 0; the first request returns + the first ``limit`` of rows. The second request sets offset + to the ``limit`` of the first request; the second request + returns the second ``limit`` of rows. + + To learn more about this pagination parameter, see + `Pagination `__. + limit (int): + Optional. The number of rows to return from the report. If + unspecified, 10,000 rows are returned. The API returns a + maximum of 250,000 rows per request, no matter how many you + ask for. ``limit`` must be positive. + + The API can also return fewer rows than the requested + ``limit``, if there aren't as many dimension values as the + ``limit``. The number of rows available to a + QueryReportTaskRequest is further limited by the limit of + the associated ReportTask. A query can retrieve at most + ReportTask.limit rows. For example, if the ReportTask has a + limit of 1,000, then a QueryReportTask request with + offset=900 and limit=500 will return at most 100 rows. + + To learn more about this pagination parameter, see + `Pagination `__. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + offset: int = proto.Field( + proto.INT64, + number=2, + ) + limit: int = proto.Field( + proto.INT64, + number=3, + ) + + +class QueryReportTaskResponse(proto.Message): + r"""The report content corresponding to a report task. + + Attributes: + dimension_headers (MutableSequence[google.analytics.data_v1alpha.types.DimensionHeader]): + Describes dimension columns. The number of + DimensionHeaders and ordering of + DimensionHeaders matches the dimensions present + in rows. + metric_headers (MutableSequence[google.analytics.data_v1alpha.types.MetricHeader]): + Describes metric columns. The number of + MetricHeaders and ordering of MetricHeaders + matches the metrics present in rows. + rows (MutableSequence[google.analytics.data_v1alpha.types.Row]): + Rows of dimension value combinations and + metric values in the report. + totals (MutableSequence[google.analytics.data_v1alpha.types.Row]): + If requested, the totaled values of metrics. + maximums (MutableSequence[google.analytics.data_v1alpha.types.Row]): + If requested, the maximum values of metrics. + minimums (MutableSequence[google.analytics.data_v1alpha.types.Row]): + If requested, the minimum values of metrics. + row_count (int): + The total number of rows in the query result. + metadata (google.analytics.data_v1alpha.types.ResponseMetaData): + Metadata for the report. 
+ """ + + dimension_headers: MutableSequence[data.DimensionHeader] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=data.DimensionHeader, + ) + metric_headers: MutableSequence[data.MetricHeader] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.MetricHeader, + ) + rows: MutableSequence[data.Row] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Row, + ) + totals: MutableSequence[data.Row] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.Row, + ) + maximums: MutableSequence[data.Row] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=data.Row, + ) + minimums: MutableSequence[data.Row] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=data.Row, + ) + row_count: int = proto.Field( + proto.INT32, + number=7, + ) + metadata: data.ResponseMetaData = proto.Field( + proto.MESSAGE, + number=8, + message=data.ResponseMetaData, + ) + + +class GetReportTaskRequest(proto.Message): + r"""A request to retrieve configuration metadata about a specific + report task. + + Attributes: + name (str): + Required. The report task resource name. Format: + ``properties/{property}/reportTasks/{report_task}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListReportTasksRequest(proto.Message): + r"""A request to list all report tasks for a property. + + Attributes: + parent (str): + Required. All report tasks for this property will be listed + in the response. Format: ``properties/{property}`` + page_size (int): + Optional. The maximum number of report tasks + to return. + page_token (str): + Optional. A page token, received from a previous + ``ListReportTasks`` call. Provide this to retrieve the + subsequent page. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListReportTasksResponse(proto.Message): + r"""A list of all report tasks for a property. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + report_tasks (MutableSequence[google.analytics.data_v1alpha.types.ReportTask]): + Each report task for a property. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + + This field is a member of `oneof`_ ``_next_page_token``. + """ + + @property + def raw_page(self): + return self + + report_tasks: MutableSequence['ReportTask'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='ReportTask', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/types/data.py b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/types/data.py new file mode 100644 index 000000000000..da4942950274 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/google/analytics/data_v1alpha/types/data.py @@ -0,0 +1,3210 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.analytics.data.v1alpha', + manifest={ + 'UserCriteriaScoping', + 'UserExclusionDuration', + 'SessionCriteriaScoping', + 'SessionExclusionDuration', + 'EventCriteriaScoping', + 'EventExclusionDuration', + 'MetricAggregation', + 'MetricType', + 'RestrictedMetricType', + 'SamplingLevel', + 'DateRange', + 'Dimension', + 'DimensionExpression', + 'Metric', + 'FilterExpression', + 'FilterExpressionList', + 'Filter', + 'StringFilter', + 'InListFilter', + 'NumericFilter', + 'OrderBy', + 'BetweenFilter', + 'EmptyFilter', + 'NumericValue', + 'CohortSpec', + 'Cohort', + 'CohortsRange', + 'CohortReportSettings', + 'ResponseMetaData', + 'DimensionHeader', + 'MetricHeader', + 'Row', + 'DimensionValue', + 'MetricValue', + 'PropertyQuota', + 'QuotaStatus', + 'FunnelBreakdown', + 'FunnelNextAction', + 'Funnel', + 'FunnelStep', + 'FunnelSubReport', + 'UserSegment', + 'UserSegmentCriteria', + 'UserSegmentConditionGroup', + 'UserSegmentSequenceGroup', + 'UserSequenceStep', + 'UserSegmentExclusion', + 'SessionSegment', + 'SessionSegmentCriteria', + 'SessionSegmentConditionGroup', + 'SessionSegmentExclusion', + 'EventSegment', + 'EventSegmentCriteria', + 'EventSegmentConditionGroup', + 'EventSegmentExclusion', + 'Segment', + 'SegmentFilterExpression', + 'SegmentFilterExpressionList', + 'SegmentFilter', + 'SegmentFilterScoping', + 'SegmentEventFilter', + 'SegmentParameterFilterExpression', + 'SegmentParameterFilterExpressionList', + 'SegmentParameterFilter', + 'SegmentParameterFilterScoping', + 'FunnelFilterExpression', + 'FunnelFilterExpressionList', + 'FunnelFieldFilter', + 'FunnelEventFilter', + 'FunnelParameterFilterExpression', + 
'FunnelParameterFilterExpressionList', + 'FunnelParameterFilter', + 'FunnelResponseMetadata', + 'SamplingMetadata', + }, +) + + +class UserCriteriaScoping(proto.Enum): + r"""Scoping specifies which events are considered when evaluating + if a user meets a criteria. + + Values: + USER_CRITERIA_SCOPING_UNSPECIFIED (0): + Unspecified criteria scoping. Do not specify. + USER_CRITERIA_WITHIN_SAME_EVENT (1): + If the criteria is satisfied within one + event, the user matches the criteria. + USER_CRITERIA_WITHIN_SAME_SESSION (2): + If the criteria is satisfied within one + session, the user matches the criteria. + USER_CRITERIA_ACROSS_ALL_SESSIONS (3): + If the criteria is satisfied by any events + for the user, the user matches the criteria. + """ + USER_CRITERIA_SCOPING_UNSPECIFIED = 0 + USER_CRITERIA_WITHIN_SAME_EVENT = 1 + USER_CRITERIA_WITHIN_SAME_SESSION = 2 + USER_CRITERIA_ACROSS_ALL_SESSIONS = 3 + + +class UserExclusionDuration(proto.Enum): + r"""Enumerates options for how long an exclusion will last if a user + matches the ``userExclusionCriteria``. + + Values: + USER_EXCLUSION_DURATION_UNSPECIFIED (0): + Unspecified exclusion duration. Do not + specify. + USER_EXCLUSION_TEMPORARY (1): + Temporarily exclude users from the segment during periods + when the user meets the ``userExclusionCriteria`` condition. + USER_EXCLUSION_PERMANENT (2): + Permanently exclude users from the segment if the user ever + meets the ``userExclusionCriteria`` condition. + """ + USER_EXCLUSION_DURATION_UNSPECIFIED = 0 + USER_EXCLUSION_TEMPORARY = 1 + USER_EXCLUSION_PERMANENT = 2 + + +class SessionCriteriaScoping(proto.Enum): + r"""Scoping specifies which events are considered when evaluating + if a session meets a criteria. + + Values: + SESSION_CRITERIA_SCOPING_UNSPECIFIED (0): + Unspecified criteria scoping. Do not specify. + SESSION_CRITERIA_WITHIN_SAME_EVENT (1): + If the criteria is satisfied within one + event, the session matches the criteria. 
+ SESSION_CRITERIA_WITHIN_SAME_SESSION (2): + If the criteria is satisfied within one + session, the session matches the criteria. + """ + SESSION_CRITERIA_SCOPING_UNSPECIFIED = 0 + SESSION_CRITERIA_WITHIN_SAME_EVENT = 1 + SESSION_CRITERIA_WITHIN_SAME_SESSION = 2 + + +class SessionExclusionDuration(proto.Enum): + r"""Enumerates options for how long an exclusion will last if a session + matches the ``sessionExclusionCriteria``. + + Values: + SESSION_EXCLUSION_DURATION_UNSPECIFIED (0): + Unspecified exclusion duration. Do not + specify. + SESSION_EXCLUSION_TEMPORARY (1): + Temporarily exclude sessions from the segment during periods + when the session meets the ``sessionExclusionCriteria`` + condition. + SESSION_EXCLUSION_PERMANENT (2): + Permanently exclude sessions from the segment if the session + ever meets the ``sessionExclusionCriteria`` condition. + """ + SESSION_EXCLUSION_DURATION_UNSPECIFIED = 0 + SESSION_EXCLUSION_TEMPORARY = 1 + SESSION_EXCLUSION_PERMANENT = 2 + + +class EventCriteriaScoping(proto.Enum): + r"""Scoping specifies which events are considered when evaluating + if an event meets a criteria. + + Values: + EVENT_CRITERIA_SCOPING_UNSPECIFIED (0): + Unspecified criteria scoping. Do not specify. + EVENT_CRITERIA_WITHIN_SAME_EVENT (1): + If the criteria is satisfied within one + event, the event matches the criteria. + """ + EVENT_CRITERIA_SCOPING_UNSPECIFIED = 0 + EVENT_CRITERIA_WITHIN_SAME_EVENT = 1 + + +class EventExclusionDuration(proto.Enum): + r"""Enumerates options for how long an exclusion will last if an event + matches the ``eventExclusionCriteria``. + + Values: + EVENT_EXCLUSION_DURATION_UNSPECIFIED (0): + Unspecified exclusion duration. Do not + specify. + EVENT_EXCLUSION_PERMANENT (1): + Permanently exclude events from the segment if the event + ever meets the ``eventExclusionCriteria`` condition. 
+ """ + EVENT_EXCLUSION_DURATION_UNSPECIFIED = 0 + EVENT_EXCLUSION_PERMANENT = 1 + + +class MetricAggregation(proto.Enum): + r"""Represents aggregation of metrics. + + Values: + METRIC_AGGREGATION_UNSPECIFIED (0): + Unspecified operator. + TOTAL (1): + SUM operator. + MINIMUM (5): + Minimum operator. + MAXIMUM (6): + Maximum operator. + COUNT (4): + Count operator. + """ + METRIC_AGGREGATION_UNSPECIFIED = 0 + TOTAL = 1 + MINIMUM = 5 + MAXIMUM = 6 + COUNT = 4 + + +class MetricType(proto.Enum): + r"""A metric's value type. + + Values: + METRIC_TYPE_UNSPECIFIED (0): + Unspecified type. + TYPE_INTEGER (1): + Integer type. + TYPE_FLOAT (2): + Floating point type. + TYPE_SECONDS (4): + A duration of seconds; a special floating + point type. + TYPE_MILLISECONDS (5): + A duration in milliseconds; a special + floating point type. + TYPE_MINUTES (6): + A duration in minutes; a special floating + point type. + TYPE_HOURS (7): + A duration in hours; a special floating point + type. + TYPE_STANDARD (8): + A custom metric of standard type; a special + floating point type. + TYPE_CURRENCY (9): + An amount of money; a special floating point + type. + TYPE_FEET (10): + A length in feet; a special floating point + type. + TYPE_MILES (11): + A length in miles; a special floating point + type. + TYPE_METERS (12): + A length in meters; a special floating point + type. + TYPE_KILOMETERS (13): + A length in kilometers; a special floating + point type. + """ + METRIC_TYPE_UNSPECIFIED = 0 + TYPE_INTEGER = 1 + TYPE_FLOAT = 2 + TYPE_SECONDS = 4 + TYPE_MILLISECONDS = 5 + TYPE_MINUTES = 6 + TYPE_HOURS = 7 + TYPE_STANDARD = 8 + TYPE_CURRENCY = 9 + TYPE_FEET = 10 + TYPE_MILES = 11 + TYPE_METERS = 12 + TYPE_KILOMETERS = 13 + + +class RestrictedMetricType(proto.Enum): + r"""Categories of data that you may be restricted from viewing on + certain Google Analytics properties. + + Values: + RESTRICTED_METRIC_TYPE_UNSPECIFIED (0): + Unspecified type. + COST_DATA (1): + Cost metrics such as ``adCost``. 
+ REVENUE_DATA (2): + Revenue metrics such as ``purchaseRevenue``. + """ + RESTRICTED_METRIC_TYPE_UNSPECIFIED = 0 + COST_DATA = 1 + REVENUE_DATA = 2 + + +class SamplingLevel(proto.Enum): + r"""Categories of sampling levels for the requests. + + Values: + SAMPLING_LEVEL_UNSPECIFIED (0): + Unspecified type. + LOW (1): + Applies a sampling level of 10 million to + standard properties and 100 million to Google + Analytics 360 properties. + MEDIUM (2): + Exclusive to Google Analytics 360 properties + with a sampling level of 1 billion. + UNSAMPLED (3): + Exclusive to Google Analytics 360 properties. + Unsampled explorations are more accurate and can + reveal insights that aren't visible in standard + explorations. To learn more, see + https://support.google.com/analytics/answer/10896953. + """ + SAMPLING_LEVEL_UNSPECIFIED = 0 + LOW = 1 + MEDIUM = 2 + UNSAMPLED = 3 + + +class DateRange(proto.Message): + r"""A contiguous set of days: ``startDate``, ``startDate + 1``, ..., + ``endDate``. Requests are allowed up to 4 date ranges. + + Attributes: + start_date (str): + The inclusive start date for the query in the format + ``YYYY-MM-DD``. Cannot be after ``end_date``. The format + ``NdaysAgo``, ``yesterday``, or ``today`` is also accepted, + and in that case, the date is inferred based on the + property's reporting time zone. + end_date (str): + The inclusive end date for the query in the format + ``YYYY-MM-DD``. Cannot be before ``start_date``. The format + ``NdaysAgo``, ``yesterday``, or ``today`` is also accepted, + and in that case, the date is inferred based on the + property's reporting time zone. + name (str): + Assigns a name to this date range. The dimension + ``dateRange`` is valued to this name in a report response. + If set, cannot begin with ``date_range_`` or ``RESERVED_``. + If not set, date ranges are named by their zero based index + in the request: ``date_range_0``, ``date_range_1``, etc. 
+ """ + + start_date: str = proto.Field( + proto.STRING, + number=1, + ) + end_date: str = proto.Field( + proto.STRING, + number=2, + ) + name: str = proto.Field( + proto.STRING, + number=3, + ) + + +class Dimension(proto.Message): + r"""Dimensions are attributes of your data. For example, the + dimension city indicates the city from which an event + originates. Dimension values in report responses are strings; + for example, the city could be "Paris" or "New York". + + Attributes: + name (str): + The name of the dimension. See the `API + Dimensions `__ + for the list of dimension names supported by core reporting + methods such as ``runReport`` and ``batchRunReports``. See + `Realtime + Dimensions `__ + for the list of dimension names supported by the + ``runRealtimeReport`` method. See `Funnel + Dimensions `__ + for the list of dimension names supported by the + ``runFunnelReport`` method. + + If ``dimensionExpression`` is specified, ``name`` can be any + string that you would like within the allowed character set. + For example if a ``dimensionExpression`` concatenates + ``country`` and ``city``, you could call that dimension + ``countryAndCity``. Dimension names that you choose must + match the regular expression ``^[a-zA-Z0-9_]$``. + + Dimensions are referenced by ``name`` in + ``dimensionFilter``, ``orderBys``, ``dimensionExpression``, + and ``pivots``. + dimension_expression (google.analytics.data_v1alpha.types.DimensionExpression): + One dimension can be the result of an + expression of multiple dimensions. For example, + dimension "country, city": concatenate(country, + ", ", city). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + dimension_expression: 'DimensionExpression' = proto.Field( + proto.MESSAGE, + number=2, + message='DimensionExpression', + ) + + +class DimensionExpression(proto.Message): + r"""Used to express a dimension which is the result of a formula of + multiple dimensions. 
Example usages: + + 1) lower_case(dimension) + 2) concatenate(dimension1, symbol, dimension2). + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + lower_case (google.analytics.data_v1alpha.types.DimensionExpression.CaseExpression): + Used to convert a dimension value to lower + case. + + This field is a member of `oneof`_ ``one_expression``. + upper_case (google.analytics.data_v1alpha.types.DimensionExpression.CaseExpression): + Used to convert a dimension value to upper + case. + + This field is a member of `oneof`_ ``one_expression``. + concatenate (google.analytics.data_v1alpha.types.DimensionExpression.ConcatenateExpression): + Used to combine dimension values to a single + dimension. For example, dimension "country, + city": concatenate(country, ", ", city). + + This field is a member of `oneof`_ ``one_expression``. + """ + + class CaseExpression(proto.Message): + r"""Used to convert a dimension value to a single case. + + Attributes: + dimension_name (str): + Name of a dimension. The name must refer back + to a name in dimensions field of the request. + """ + + dimension_name: str = proto.Field( + proto.STRING, + number=1, + ) + + class ConcatenateExpression(proto.Message): + r"""Used to combine dimension values to a single dimension. + + Attributes: + dimension_names (MutableSequence[str]): + Names of dimensions. The names must refer + back to names in the dimensions field of the + request. + delimiter (str): + The delimiter placed between dimension names. + + Delimiters are often single characters such as "|" or "," + but can be longer strings. If a dimension value contains the + delimiter, both will be present in response with no + distinction. 
For example if dimension 1 value = "US,FR", + dimension 2 value = "JP", and delimiter = ",", then the + response will contain "US,FR,JP". + """ + + dimension_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + delimiter: str = proto.Field( + proto.STRING, + number=2, + ) + + lower_case: CaseExpression = proto.Field( + proto.MESSAGE, + number=4, + oneof='one_expression', + message=CaseExpression, + ) + upper_case: CaseExpression = proto.Field( + proto.MESSAGE, + number=5, + oneof='one_expression', + message=CaseExpression, + ) + concatenate: ConcatenateExpression = proto.Field( + proto.MESSAGE, + number=6, + oneof='one_expression', + message=ConcatenateExpression, + ) + + +class Metric(proto.Message): + r"""The quantitative measurements of a report. For example, the metric + ``eventCount`` is the total number of events. Requests are allowed + up to 10 metrics. + + Attributes: + name (str): + The name of the metric. See the `API + Metrics `__ + for the list of metric names supported by core reporting + methods such as ``runReport`` and ``batchRunReports``. See + `Realtime + Metrics `__ + for the list of metric names supported by the + ``runRealtimeReport`` method. See `Funnel + Metrics `__ + for the list of metric names supported by the + ``runFunnelReport`` method. + + If ``expression`` is specified, ``name`` can be any string + that you would like within the allowed character set. For + example if ``expression`` is ``screenPageViews/sessions``, + you could call that metric's name = ``viewsPerSession``. + Metric names that you choose must match the regular + expression ``^[a-zA-Z0-9_]$``. + + Metrics are referenced by ``name`` in ``metricFilter``, + ``orderBys``, and metric ``expression``. + expression (str): + A mathematical expression for derived metrics. For example, + the metric Event count per user is + ``eventCount/totalUsers``. + invisible (bool): + Indicates if a metric is invisible in the report response. 
+ If a metric is invisible, the metric will not produce a + column in the response, but can be used in ``metricFilter``, + ``orderBys``, or a metric ``expression``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + expression: str = proto.Field( + proto.STRING, + number=2, + ) + invisible: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class FilterExpression(proto.Message): + r"""To express dimension or metric filters. The fields in the + same FilterExpression need to be either all dimensions or all + metrics. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + and_group (google.analytics.data_v1alpha.types.FilterExpressionList): + The FilterExpressions in and_group have an AND relationship. + + This field is a member of `oneof`_ ``expr``. + or_group (google.analytics.data_v1alpha.types.FilterExpressionList): + The FilterExpressions in or_group have an OR relationship. + + This field is a member of `oneof`_ ``expr``. + not_expression (google.analytics.data_v1alpha.types.FilterExpression): + The FilterExpression is NOT of not_expression. + + This field is a member of `oneof`_ ``expr``. + filter (google.analytics.data_v1alpha.types.Filter): + A primitive filter. In the same + FilterExpression, all of the filter's field + names need to be either all dimensions or all + metrics. + + This field is a member of `oneof`_ ``expr``. 
+ """ + + and_group: 'FilterExpressionList' = proto.Field( + proto.MESSAGE, + number=1, + oneof='expr', + message='FilterExpressionList', + ) + or_group: 'FilterExpressionList' = proto.Field( + proto.MESSAGE, + number=2, + oneof='expr', + message='FilterExpressionList', + ) + not_expression: 'FilterExpression' = proto.Field( + proto.MESSAGE, + number=3, + oneof='expr', + message='FilterExpression', + ) + filter: 'Filter' = proto.Field( + proto.MESSAGE, + number=4, + oneof='expr', + message='Filter', + ) + + +class FilterExpressionList(proto.Message): + r"""A list of filter expressions. + + Attributes: + expressions (MutableSequence[google.analytics.data_v1alpha.types.FilterExpression]): + A list of filter expressions. + """ + + expressions: MutableSequence['FilterExpression'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='FilterExpression', + ) + + +class Filter(proto.Message): + r"""An expression to filter dimension or metric values. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field_name (str): + The dimension name or metric name. Must be a + name defined in dimensions or metrics. + string_filter (google.analytics.data_v1alpha.types.StringFilter): + Strings related filter. + + This field is a member of `oneof`_ ``one_filter``. + in_list_filter (google.analytics.data_v1alpha.types.InListFilter): + A filter for in list values. + + This field is a member of `oneof`_ ``one_filter``. + numeric_filter (google.analytics.data_v1alpha.types.NumericFilter): + A filter for numeric or date values. + + This field is a member of `oneof`_ ``one_filter``. + between_filter (google.analytics.data_v1alpha.types.BetweenFilter): + A filter for between two values. 
+ + This field is a member of `oneof`_ ``one_filter``. + empty_filter (google.analytics.data_v1alpha.types.EmptyFilter): + A filter for empty values such as "(not set)" + and "" values. + + This field is a member of `oneof`_ ``one_filter``. + """ + + field_name: str = proto.Field( + proto.STRING, + number=1, + ) + string_filter: 'StringFilter' = proto.Field( + proto.MESSAGE, + number=2, + oneof='one_filter', + message='StringFilter', + ) + in_list_filter: 'InListFilter' = proto.Field( + proto.MESSAGE, + number=3, + oneof='one_filter', + message='InListFilter', + ) + numeric_filter: 'NumericFilter' = proto.Field( + proto.MESSAGE, + number=4, + oneof='one_filter', + message='NumericFilter', + ) + between_filter: 'BetweenFilter' = proto.Field( + proto.MESSAGE, + number=5, + oneof='one_filter', + message='BetweenFilter', + ) + empty_filter: 'EmptyFilter' = proto.Field( + proto.MESSAGE, + number=6, + oneof='one_filter', + message='EmptyFilter', + ) + + +class StringFilter(proto.Message): + r"""The filter for string + + Attributes: + match_type (google.analytics.data_v1alpha.types.StringFilter.MatchType): + The match type for this filter. + value (str): + The string value used for the matching. + case_sensitive (bool): + If true, the string value is case sensitive. + """ + class MatchType(proto.Enum): + r"""The match type of a string filter + + Values: + MATCH_TYPE_UNSPECIFIED (0): + Unspecified + EXACT (1): + Exact match of the string value. + BEGINS_WITH (2): + Begins with the string value. + ENDS_WITH (3): + Ends with the string value. + CONTAINS (4): + Contains the string value. + FULL_REGEXP (5): + Full match for the regular expression with + the string value. + PARTIAL_REGEXP (6): + Partial match for the regular expression with + the string value. 
+ """ + MATCH_TYPE_UNSPECIFIED = 0 + EXACT = 1 + BEGINS_WITH = 2 + ENDS_WITH = 3 + CONTAINS = 4 + FULL_REGEXP = 5 + PARTIAL_REGEXP = 6 + + match_type: MatchType = proto.Field( + proto.ENUM, + number=1, + enum=MatchType, + ) + value: str = proto.Field( + proto.STRING, + number=2, + ) + case_sensitive: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class InListFilter(proto.Message): + r"""The result needs to be in a list of string values. + + Attributes: + values (MutableSequence[str]): + The list of string values. + Must be non-empty. + case_sensitive (bool): + If true, the string value is case sensitive. + """ + + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + case_sensitive: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class NumericFilter(proto.Message): + r"""Filters for numeric or date values. + + Attributes: + operation (google.analytics.data_v1alpha.types.NumericFilter.Operation): + The operation type for this filter. + value (google.analytics.data_v1alpha.types.NumericValue): + A numeric value or a date value. + """ + class Operation(proto.Enum): + r"""The operation applied to a numeric filter + + Values: + OPERATION_UNSPECIFIED (0): + Unspecified. + EQUAL (1): + Equal + LESS_THAN (2): + Less than + LESS_THAN_OR_EQUAL (3): + Less than or equal + GREATER_THAN (4): + Greater than + GREATER_THAN_OR_EQUAL (5): + Greater than or equal + """ + OPERATION_UNSPECIFIED = 0 + EQUAL = 1 + LESS_THAN = 2 + LESS_THAN_OR_EQUAL = 3 + GREATER_THAN = 4 + GREATER_THAN_OR_EQUAL = 5 + + operation: Operation = proto.Field( + proto.ENUM, + number=1, + enum=Operation, + ) + value: 'NumericValue' = proto.Field( + proto.MESSAGE, + number=2, + message='NumericValue', + ) + + +class OrderBy(proto.Message): + r"""Order bys define how rows will be sorted in the response. For + example, ordering rows by descending event count is one + ordering, and ordering rows by the event name string is a + different ordering. 
+ + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + metric (google.analytics.data_v1alpha.types.OrderBy.MetricOrderBy): + Sorts results by a metric's values. + + This field is a member of `oneof`_ ``one_order_by``. + dimension (google.analytics.data_v1alpha.types.OrderBy.DimensionOrderBy): + Sorts results by a dimension's values. + + This field is a member of `oneof`_ ``one_order_by``. + desc (bool): + If true, sorts by descending order. + """ + + class MetricOrderBy(proto.Message): + r"""Sorts by metric values. + + Attributes: + metric_name (str): + A metric name in the request to order by. + """ + + metric_name: str = proto.Field( + proto.STRING, + number=1, + ) + + class DimensionOrderBy(proto.Message): + r"""Sorts by dimension values. + + Attributes: + dimension_name (str): + A dimension name in the request to order by. + order_type (google.analytics.data_v1alpha.types.OrderBy.DimensionOrderBy.OrderType): + Controls the rule for dimension value + ordering. + """ + class OrderType(proto.Enum): + r"""Rule to order the string dimension values by. + + Values: + ORDER_TYPE_UNSPECIFIED (0): + Unspecified. + ALPHANUMERIC (1): + Alphanumeric sort by Unicode code point. For + example, "2" < "A" < "X" < "b" < "z". + CASE_INSENSITIVE_ALPHANUMERIC (2): + Case insensitive alphanumeric sort by lower + case Unicode code point. For example, "2" < "A" + < "b" < "X" < "z". + NUMERIC (3): + Dimension values are converted to numbers before sorting. + For example in NUMERIC sort, "25" < "100", and in + ``ALPHANUMERIC`` sort, "100" < "25". Non-numeric dimension + values all have equal ordering value below all numeric + values. 
+ """ + ORDER_TYPE_UNSPECIFIED = 0 + ALPHANUMERIC = 1 + CASE_INSENSITIVE_ALPHANUMERIC = 2 + NUMERIC = 3 + + dimension_name: str = proto.Field( + proto.STRING, + number=1, + ) + order_type: 'OrderBy.DimensionOrderBy.OrderType' = proto.Field( + proto.ENUM, + number=2, + enum='OrderBy.DimensionOrderBy.OrderType', + ) + + metric: MetricOrderBy = proto.Field( + proto.MESSAGE, + number=1, + oneof='one_order_by', + message=MetricOrderBy, + ) + dimension: DimensionOrderBy = proto.Field( + proto.MESSAGE, + number=2, + oneof='one_order_by', + message=DimensionOrderBy, + ) + desc: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class BetweenFilter(proto.Message): + r"""To express that the result needs to be between two numbers + (inclusive). + + Attributes: + from_value (google.analytics.data_v1alpha.types.NumericValue): + Begins with this number. + to_value (google.analytics.data_v1alpha.types.NumericValue): + Ends with this number. + """ + + from_value: 'NumericValue' = proto.Field( + proto.MESSAGE, + number=1, + message='NumericValue', + ) + to_value: 'NumericValue' = proto.Field( + proto.MESSAGE, + number=2, + message='NumericValue', + ) + + +class EmptyFilter(proto.Message): + r"""Filter for empty values. + """ + + +class NumericValue(proto.Message): + r"""To represent a number. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + int64_value (int): + Integer value + + This field is a member of `oneof`_ ``one_value``. + double_value (float): + Double value + + This field is a member of `oneof`_ ``one_value``. 
+ """ + + int64_value: int = proto.Field( + proto.INT64, + number=1, + oneof='one_value', + ) + double_value: float = proto.Field( + proto.DOUBLE, + number=2, + oneof='one_value', + ) + + +class CohortSpec(proto.Message): + r"""The specification of cohorts for a cohort report. + + Cohort reports create a time series of user retention for the + cohort. For example, you could select the cohort of users that were + acquired in the first week of September and follow that cohort for + the next six weeks. Selecting the users acquired in the first week + of September cohort is specified in the ``cohort`` object. Following + that cohort for the next six weeks is specified in the + ``cohortsRange`` object. + + For examples, see `Cohort Report + Examples `__. + + The report response could show a weekly time series where say your + app has retained 60% of this cohort after three weeks and 25% of + this cohort after six weeks. These two percentages can be calculated + by the metric ``cohortActiveUsers/cohortTotalUsers`` and will be + separate rows in the report. + + Attributes: + cohorts (MutableSequence[google.analytics.data_v1alpha.types.Cohort]): + Defines the selection criteria to group users + into cohorts. + Most cohort reports define only a single cohort. + If multiple cohorts are specified, each cohort + can be recognized in the report by their name. + cohorts_range (google.analytics.data_v1alpha.types.CohortsRange): + Cohort reports follow cohorts over an + extended reporting date range. This range + specifies an offset duration to follow the + cohorts over. + cohort_report_settings (google.analytics.data_v1alpha.types.CohortReportSettings): + Optional settings for a cohort report. 
+ """ + + cohorts: MutableSequence['Cohort'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Cohort', + ) + cohorts_range: 'CohortsRange' = proto.Field( + proto.MESSAGE, + number=2, + message='CohortsRange', + ) + cohort_report_settings: 'CohortReportSettings' = proto.Field( + proto.MESSAGE, + number=3, + message='CohortReportSettings', + ) + + +class Cohort(proto.Message): + r"""Defines a cohort selection criteria. A cohort is a group of users + who share a common characteristic. For example, users with the same + ``firstSessionDate`` belong to the same cohort. + + Attributes: + name (str): + Assigns a name to this cohort. The dimension ``cohort`` is + valued to this name in a report response. If set, cannot + begin with ``cohort_`` or ``RESERVED_``. If not set, cohorts + are named by their zero based index ``cohort_0``, + ``cohort_1``, etc. + dimension (str): + Dimension used by the cohort. Required and only supports + ``firstSessionDate``. + date_range (google.analytics.data_v1alpha.types.DateRange): + The cohort selects users whose first touch date is between + start date and end date defined in the ``dateRange``. This + ``dateRange`` does not specify the full date range of event + data that is present in a cohort report. In a cohort report, + this ``dateRange`` is extended by the granularity and offset + present in the ``cohortsRange``; event data for the extended + reporting date range is present in a cohort report. + + In a cohort request, this ``dateRange`` is required and the + ``dateRanges`` in the ``RunReportRequest`` or + ``RunPivotReportRequest`` must be unspecified. + + This ``dateRange`` should generally be aligned with the + cohort's granularity. If ``CohortsRange`` uses daily + granularity, this ``dateRange`` can be a single day. If + ``CohortsRange`` uses weekly granularity, this ``dateRange`` + can be aligned to a week boundary, starting at Sunday and + ending Saturday. 
If ``CohortsRange`` uses monthly + granularity, this ``dateRange`` can be aligned to a month, + starting at the first and ending on the last day of the + month. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + dimension: str = proto.Field( + proto.STRING, + number=2, + ) + date_range: 'DateRange' = proto.Field( + proto.MESSAGE, + number=3, + message='DateRange', + ) + + +class CohortsRange(proto.Message): + r"""Configures the extended reporting date range for a cohort + report. Specifies an offset duration to follow the cohorts over. + + Attributes: + granularity (google.analytics.data_v1alpha.types.CohortsRange.Granularity): + Required. The granularity used to interpret the + ``startOffset`` and ``endOffset`` for the extended reporting + date range for a cohort report. + start_offset (int): + ``startOffset`` specifies the start date of the extended + reporting date range for a cohort report. ``startOffset`` is + commonly set to 0 so that reports contain data from the + acquisition of the cohort forward. + + If ``granularity`` is ``DAILY``, the ``startDate`` of the + extended reporting date range is ``startDate`` of the cohort + plus ``startOffset`` days. + + If ``granularity`` is ``WEEKLY``, the ``startDate`` of the + extended reporting date range is ``startDate`` of the cohort + plus ``startOffset * 7`` days. + + If ``granularity`` is ``MONTHLY``, the ``startDate`` of the + extended reporting date range is ``startDate`` of the cohort + plus ``startOffset * 30`` days. + end_offset (int): + Required. ``endOffset`` specifies the end date of the + extended reporting date range for a cohort report. + ``endOffset`` can be any positive integer but is commonly + set to 5 to 10 so that reports contain data on the cohort + for the next several granularity time periods. + + If ``granularity`` is ``DAILY``, the ``endDate`` of the + extended reporting date range is ``endDate`` of the cohort + plus ``endOffset`` days. 
+ + If ``granularity`` is ``WEEKLY``, the ``endDate`` of the + extended reporting date range is ``endDate`` of the cohort + plus ``endOffset * 7`` days. + + If ``granularity`` is ``MONTHLY``, the ``endDate`` of the + extended reporting date range is ``endDate`` of the cohort + plus ``endOffset * 30`` days. + """ + class Granularity(proto.Enum): + r"""The granularity used to interpret the ``startOffset`` and + ``endOffset`` for the extended reporting date range for a cohort + report. + + Values: + GRANULARITY_UNSPECIFIED (0): + Should never be specified. + DAILY (1): + Daily granularity. Commonly used if the cohort's + ``dateRange`` is a single day and the request contains + ``cohortNthDay``. + WEEKLY (2): + Weekly granularity. Commonly used if the cohort's + ``dateRange`` is a week in duration (starting on Sunday and + ending on Saturday) and the request contains + ``cohortNthWeek``. + MONTHLY (3): + Monthly granularity. Commonly used if the cohort's + ``dateRange`` is a month in duration and the request + contains ``cohortNthMonth``. + """ + GRANULARITY_UNSPECIFIED = 0 + DAILY = 1 + WEEKLY = 2 + MONTHLY = 3 + + granularity: Granularity = proto.Field( + proto.ENUM, + number=1, + enum=Granularity, + ) + start_offset: int = proto.Field( + proto.INT32, + number=2, + ) + end_offset: int = proto.Field( + proto.INT32, + number=3, + ) + + +class CohortReportSettings(proto.Message): + r"""Optional settings of a cohort report. + + Attributes: + accumulate (bool): + If true, accumulates the result from first touch day to the + end day. Not supported in ``RunReportRequest``. + """ + + accumulate: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class ResponseMetaData(proto.Message): + r"""Response's metadata carrying additional information about the + report content. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + data_loss_from_other_row (bool): + If true, indicates some buckets of dimension combinations + are rolled into "(other)" row. This can happen for high + cardinality reports. + + The metadata parameter dataLossFromOtherRow is populated + based on the aggregated data table used in the report. The + parameter will be accurately populated regardless of the + filters and limits in the report. + + For example, the (other) row could be dropped from the + report because the request contains a filter on + sessionSource = google. This parameter will still be + populated if data loss from other row was present in the + input aggregate data used to generate this report. + + To learn more, see `About the (other) row and data + sampling `__. + schema_restriction_response (google.analytics.data_v1alpha.types.ResponseMetaData.SchemaRestrictionResponse): + Describes the schema restrictions actively enforced in + creating this report. To learn more, see `Access and + data-restriction + management `__. + + This field is a member of `oneof`_ ``_schema_restriction_response``. + currency_code (str): + The currency code used in this report. Intended to be used + in formatting currency metrics like ``purchaseRevenue`` for + visualization. If currency_code was specified in the + request, this response parameter will echo the request + parameter; otherwise, this response parameter is the + property's current currency_code. + + Currency codes are string encodings of currency types from + the ISO 4217 standard + (https://en.wikipedia.org/wiki/ISO_4217); for example "USD", + "EUR", "JPY". To learn more, see + https://support.google.com/analytics/answer/9796179. + + This field is a member of `oneof`_ ``_currency_code``. + time_zone (str): + The property's current timezone. Intended to be used to + interpret time-based dimensions like ``hour`` and + ``minute``. 
Formatted as strings from the IANA Time Zone + database (https://www.iana.org/time-zones); for example + "America/New_York" or "Asia/Tokyo". + + This field is a member of `oneof`_ ``_time_zone``. + empty_reason (str): + If empty reason is specified, the report is + empty for this reason. + + This field is a member of `oneof`_ ``_empty_reason``. + subject_to_thresholding (bool): + If ``subjectToThresholding`` is true, this report is subject + to thresholding and only returns data that meets the minimum + aggregation thresholds. It is possible for a request to be + subject to thresholding thresholding and no data is absent + from the report, and this happens when all data is above the + thresholds. To learn more, see `Data + thresholds `__ + and `About Demographics and + Interests `__. + + This field is a member of `oneof`_ ``_subject_to_thresholding``. + sampling_metadatas (MutableSequence[google.analytics.data_v1alpha.types.SamplingMetadata]): + If this report's results are + `sampled `__, + this describes the percentage of events used in this report. + One ``samplingMetadatas`` is populated for each date range. + Each ``samplingMetadatas`` corresponds to a date range in + the order that date ranges were specified in the request. + + However if the results are not sampled, this field will not + be defined. + """ + + class SchemaRestrictionResponse(proto.Message): + r"""The schema restrictions actively enforced in creating this report. + To learn more, see `Access and data-restriction + management `__. + + Attributes: + active_metric_restrictions (MutableSequence[google.analytics.data_v1alpha.types.ResponseMetaData.SchemaRestrictionResponse.ActiveMetricRestriction]): + All restrictions actively enforced in creating the report. + For example, ``purchaseRevenue`` always has the restriction + type ``REVENUE_DATA``. However, this active response + restriction is only populated if the user's custom role + disallows access to ``REVENUE_DATA``. 
+ """ + + class ActiveMetricRestriction(proto.Message): + r"""A metric actively restricted in creating the report. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + metric_name (str): + The name of the restricted metric. + + This field is a member of `oneof`_ ``_metric_name``. + restricted_metric_types (MutableSequence[google.analytics.data_v1alpha.types.RestrictedMetricType]): + The reason for this metric's restriction. + """ + + metric_name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + restricted_metric_types: MutableSequence['RestrictedMetricType'] = proto.RepeatedField( + proto.ENUM, + number=2, + enum='RestrictedMetricType', + ) + + active_metric_restrictions: MutableSequence['ResponseMetaData.SchemaRestrictionResponse.ActiveMetricRestriction'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='ResponseMetaData.SchemaRestrictionResponse.ActiveMetricRestriction', + ) + + data_loss_from_other_row: bool = proto.Field( + proto.BOOL, + number=3, + ) + schema_restriction_response: SchemaRestrictionResponse = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message=SchemaRestrictionResponse, + ) + currency_code: str = proto.Field( + proto.STRING, + number=5, + optional=True, + ) + time_zone: str = proto.Field( + proto.STRING, + number=6, + optional=True, + ) + empty_reason: str = proto.Field( + proto.STRING, + number=7, + optional=True, + ) + subject_to_thresholding: bool = proto.Field( + proto.BOOL, + number=8, + optional=True, + ) + sampling_metadatas: MutableSequence['SamplingMetadata'] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message='SamplingMetadata', + ) + + +class DimensionHeader(proto.Message): + r"""Describes a dimension column in the report. Dimensions + requested in a report produce column entries within rows and + DimensionHeaders. 
However, dimensions used exclusively within + filters or expressions do not produce columns in a report; + correspondingly, those dimensions do not produce headers. + + Attributes: + name (str): + The dimension's name. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class MetricHeader(proto.Message): + r"""Describes a metric column in the report. Visible metrics + requested in a report produce column entries within rows and + MetricHeaders. However, metrics used exclusively within filters + or expressions do not produce columns in a report; + correspondingly, those metrics do not produce headers. + + Attributes: + name (str): + The metric's name. + type_ (google.analytics.data_v1alpha.types.MetricType): + The metric's data type. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'MetricType' = proto.Field( + proto.ENUM, + number=2, + enum='MetricType', + ) + + +class Row(proto.Message): + r"""Report data for each row. For example if RunReportRequest contains: + + .. code:: none + + "dimensions": [ + { + "name": "eventName" + }, + { + "name": "countryId" + } + ], + "metrics": [ + { + "name": "eventCount" + } + ] + + One row with 'in_app_purchase' as the eventName, 'JP' as the + countryId, and 15 as the eventCount, would be: + + .. code:: none + + "dimensionValues": [ + { + "value": "in_app_purchase" + }, + { + "value": "JP" + } + ], + "metricValues": [ + { + "value": "15" + } + ] + + Attributes: + dimension_values (MutableSequence[google.analytics.data_v1alpha.types.DimensionValue]): + List of requested dimension values. In a PivotReport, + dimension_values are only listed for dimensions included in + a pivot. + metric_values (MutableSequence[google.analytics.data_v1alpha.types.MetricValue]): + List of requested visible metric values. 
+ """ + + dimension_values: MutableSequence['DimensionValue'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DimensionValue', + ) + metric_values: MutableSequence['MetricValue'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='MetricValue', + ) + + +class DimensionValue(proto.Message): + r"""The value of a dimension. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (str): + Value as a string if the dimension type is a + string. + + This field is a member of `oneof`_ ``one_value``. + """ + + value: str = proto.Field( + proto.STRING, + number=1, + oneof='one_value', + ) + + +class MetricValue(proto.Message): + r"""The value of a metric. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (str): + Measurement value. See MetricHeader for type. + + This field is a member of `oneof`_ ``one_value``. + """ + + value: str = proto.Field( + proto.STRING, + number=4, + oneof='one_value', + ) + + +class PropertyQuota(proto.Message): + r"""Current state of all quotas for this Analytics Property. If + any quota for a property is exhausted, all requests to that + property will return Resource Exhausted errors. + + Attributes: + tokens_per_day (google.analytics.data_v1alpha.types.QuotaStatus): + Standard Analytics Properties can use up to + 200,000 tokens per day; Analytics 360 Properties + can use 2,000,000 tokens per day. Most requests + consume fewer than 10 tokens. + tokens_per_hour (google.analytics.data_v1alpha.types.QuotaStatus): + Standard Analytics Properties can use up to + 40,000 tokens per hour; Analytics 360 Properties + can use 400,000 tokens per hour. An API request + consumes a single number of tokens, and that + number is deducted from all of the hourly, + daily, and per project hourly quotas. 
+ concurrent_requests (google.analytics.data_v1alpha.types.QuotaStatus): + Standard Analytics Properties can send up to + 10 concurrent requests; Analytics 360 Properties + can use up to 50 concurrent requests. + server_errors_per_project_per_hour (google.analytics.data_v1alpha.types.QuotaStatus): + Standard Analytics Properties and cloud + project pairs can have up to 10 server errors + per hour; Analytics 360 Properties and cloud + project pairs can have up to 50 server errors + per hour. + potentially_thresholded_requests_per_hour (google.analytics.data_v1alpha.types.QuotaStatus): + Analytics Properties can send up to 120 + requests with potentially thresholded dimensions + per hour. In a batch request, each report + request is individually counted for this quota + if the request contains potentially thresholded + dimensions. + tokens_per_project_per_hour (google.analytics.data_v1alpha.types.QuotaStatus): + Analytics Properties can use up to 35% of + their tokens per project per hour. This amounts + to standard Analytics Properties can use up to + 14,000 tokens per project per hour, and + Analytics 360 Properties can use 140,000 tokens + per project per hour. An API request consumes a + single number of tokens, and that number is + deducted from all of the hourly, daily, and per + project hourly quotas. 
+ """ + + tokens_per_day: 'QuotaStatus' = proto.Field( + proto.MESSAGE, + number=1, + message='QuotaStatus', + ) + tokens_per_hour: 'QuotaStatus' = proto.Field( + proto.MESSAGE, + number=2, + message='QuotaStatus', + ) + concurrent_requests: 'QuotaStatus' = proto.Field( + proto.MESSAGE, + number=3, + message='QuotaStatus', + ) + server_errors_per_project_per_hour: 'QuotaStatus' = proto.Field( + proto.MESSAGE, + number=4, + message='QuotaStatus', + ) + potentially_thresholded_requests_per_hour: 'QuotaStatus' = proto.Field( + proto.MESSAGE, + number=5, + message='QuotaStatus', + ) + tokens_per_project_per_hour: 'QuotaStatus' = proto.Field( + proto.MESSAGE, + number=6, + message='QuotaStatus', + ) + + +class QuotaStatus(proto.Message): + r"""Current state for a particular quota group. + + Attributes: + consumed (int): + Quota consumed by this request. + remaining (int): + Quota remaining after this request. + """ + + consumed: int = proto.Field( + proto.INT32, + number=1, + ) + remaining: int = proto.Field( + proto.INT32, + number=2, + ) + + +class FunnelBreakdown(proto.Message): + r"""Breakdowns add a dimension to the funnel table sub report + response. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + breakdown_dimension (google.analytics.data_v1alpha.types.Dimension): + The dimension column added to the funnel table sub report + response. The breakdown dimension breaks down each funnel + step. A valid ``breakdownDimension`` is required if + ``funnelBreakdown`` is specified. + limit (int): + The maximum number of distinct values of the breakdown + dimension to return in the response. A ``limit`` of ``5`` is + used if limit is not specified. Limit must exceed zero and + cannot exceed 15. + + This field is a member of `oneof`_ ``_limit``. 
+ """ + + breakdown_dimension: 'Dimension' = proto.Field( + proto.MESSAGE, + number=1, + message='Dimension', + ) + limit: int = proto.Field( + proto.INT64, + number=2, + optional=True, + ) + + +class FunnelNextAction(proto.Message): + r"""Next actions state the value for a dimension after the user has + achieved a step but before the same user has achieved the next step. + For example if the ``nextActionDimension`` is ``eventName``, then + ``nextActionDimension`` in the ``i``\ th funnel step row will return + first event after the event that qualified the user into the + ``i``\ th funnel step but before the user achieved the ``i+1``\ th + funnel step. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + next_action_dimension (google.analytics.data_v1alpha.types.Dimension): + The dimension column added to the funnel visualization sub + report response. The next action dimension returns the next + dimension value of this dimension after the user has + attained the ``i``\ th funnel step. + + ``nextActionDimension`` currently only supports + ``eventName`` and most Page / Screen dimensions like + ``pageTitle`` and ``pagePath``. ``nextActionDimension`` + cannot be a dimension expression. + limit (int): + The maximum number of distinct values of the breakdown + dimension to return in the response. A ``limit`` of ``5`` is + used if limit is not specified. Limit must exceed zero and + cannot exceed 5. + + This field is a member of `oneof`_ ``_limit``. + """ + + next_action_dimension: 'Dimension' = proto.Field( + proto.MESSAGE, + number=1, + message='Dimension', + ) + limit: int = proto.Field( + proto.INT64, + number=2, + optional=True, + ) + + +class Funnel(proto.Message): + r"""Configures the funnel in a funnel report request. A funnel + reports on users as they pass through a sequence of steps. 
+ + Funnel exploration lets you visualize the steps your users take + to complete a task and quickly see how well they are succeeding + or failing at each step. For example, how do prospects become + shoppers and then become buyers? How do one time buyers become + repeat buyers? With this information, you can improve + inefficient or abandoned customer journeys. + + Attributes: + is_open_funnel (bool): + In an open funnel, users can enter the funnel + in any step, and in a closed funnel, users must + enter the funnel in the first step. Optional. If + unspecified, a closed funnel is used. + steps (MutableSequence[google.analytics.data_v1alpha.types.FunnelStep]): + The sequential steps of this funnel. + """ + + is_open_funnel: bool = proto.Field( + proto.BOOL, + number=1, + ) + steps: MutableSequence['FunnelStep'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='FunnelStep', + ) + + +class FunnelStep(proto.Message): + r"""Steps define the user journey you want to measure. Steps + contain one or more conditions that your users must meet to be + included in that step of the funnel journey. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The distinctive name for this step. If unspecified, steps + will be named by a 1 based indexed name (for example "0. ", + "1. ", etc.). This name defines string value returned by the + ``funnelStepName`` dimension. For example, specifying + ``name = Purchase`` in the request's third funnel step will + produce ``3. Purchase`` in the funnel report response. + is_directly_followed_by (bool): + If true, this step must directly follow the previous step. + If false, there can be events between the previous step and + this step. If unspecified, ``isDirectlyFollowedBy`` is + treated as false. 
+ within_duration_from_prior_step (google.protobuf.duration_pb2.Duration): + If specified, this step must complete within this duration + of the completion of the prior step. + ``withinDurationFromPriorStep`` is inclusive of the endpoint + at the microsecond granularity. For example a duration of 5 + seconds can be completed at 4.9 or 5.0 seconds, but not 5 + seconds and 1 microsecond. + + ``withinDurationFromPriorStep`` is optional, and if + unspecified, steps may be separated by any time duration. + + This field is a member of `oneof`_ ``_within_duration_from_prior_step``. + filter_expression (google.analytics.data_v1alpha.types.FunnelFilterExpression): + The condition that your users must meet to be + included in this step of the funnel journey. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + is_directly_followed_by: bool = proto.Field( + proto.BOOL, + number=2, + ) + within_duration_from_prior_step: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=duration_pb2.Duration, + ) + filter_expression: 'FunnelFilterExpression' = proto.Field( + proto.MESSAGE, + number=4, + message='FunnelFilterExpression', + ) + + +class FunnelSubReport(proto.Message): + r"""Funnel sub reports contain the dimension and metric data + values. For example, 12 users reached the second step of the + funnel. + + Attributes: + dimension_headers (MutableSequence[google.analytics.data_v1alpha.types.DimensionHeader]): + Describes dimension columns. Funnel reports + always include the funnel step dimension in sub + report responses. Additional dimensions like + breakdowns, dates, and next actions may be + present in the response if requested. + metric_headers (MutableSequence[google.analytics.data_v1alpha.types.MetricHeader]): + Describes metric columns. Funnel reports + always include active users in sub report + responses. The funnel table includes additional + metrics like completion rate, abandonments, and + abandonments rate. 
+ rows (MutableSequence[google.analytics.data_v1alpha.types.Row]): + Rows of dimension value combinations and + metric values in the report. + metadata (google.analytics.data_v1alpha.types.FunnelResponseMetadata): + Metadata for the funnel report. + """ + + dimension_headers: MutableSequence['DimensionHeader'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DimensionHeader', + ) + metric_headers: MutableSequence['MetricHeader'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='MetricHeader', + ) + rows: MutableSequence['Row'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='Row', + ) + metadata: 'FunnelResponseMetadata' = proto.Field( + proto.MESSAGE, + number=4, + message='FunnelResponseMetadata', + ) + + +class UserSegment(proto.Message): + r"""User segments are subsets of users who engaged with your site + or app. For example, users who have previously purchased; users + who added items to their shopping carts, but didn’t complete a + purchase. + + Attributes: + user_inclusion_criteria (google.analytics.data_v1alpha.types.UserSegmentCriteria): + Defines which users are included in this + segment. Optional. + exclusion (google.analytics.data_v1alpha.types.UserSegmentExclusion): + Defines which users are excluded in this + segment. Optional. + """ + + user_inclusion_criteria: 'UserSegmentCriteria' = proto.Field( + proto.MESSAGE, + number=1, + message='UserSegmentCriteria', + ) + exclusion: 'UserSegmentExclusion' = proto.Field( + proto.MESSAGE, + number=2, + message='UserSegmentExclusion', + ) + + +class UserSegmentCriteria(proto.Message): + r"""A user matches a criteria if the user's events meet the + conditions in the criteria. + + Attributes: + and_condition_groups (MutableSequence[google.analytics.data_v1alpha.types.UserSegmentConditionGroup]): + A user matches this criteria if the user matches each of + these ``andConditionGroups`` and each of the + ``andSequenceGroups``. 
``andConditionGroups`` may be empty + if ``andSequenceGroups`` are specified. + and_sequence_groups (MutableSequence[google.analytics.data_v1alpha.types.UserSegmentSequenceGroup]): + A user matches this criteria if the user matches each of + these ``andSequenceGroups`` and each of the + ``andConditionGroups``. ``andSequenceGroups`` may be empty + if ``andConditionGroups`` are specified. + """ + + and_condition_groups: MutableSequence['UserSegmentConditionGroup'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='UserSegmentConditionGroup', + ) + and_sequence_groups: MutableSequence['UserSegmentSequenceGroup'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='UserSegmentSequenceGroup', + ) + + +class UserSegmentConditionGroup(proto.Message): + r"""Conditions tell Analytics what data to include in or exclude + from the segment. + + Attributes: + condition_scoping (google.analytics.data_v1alpha.types.UserCriteriaScoping): + Data is included or excluded from the segment based on if it + matches the condition group. This scoping defines how many + events the ``segmentFilterExpression`` is evaluated on + before the condition group is determined to be matched or + not. For example if + ``conditionScoping = USER_CRITERIA_WITHIN_SAME_SESSION``, + the expression is evaluated on all events in a session, and + then, the condition group is determined to be matched or not + for this user. For example if + ``conditionScoping = USER_CRITERIA_WITHIN_SAME_EVENT``, the + expression is evaluated on a single event, and then, the + condition group is determined to be matched or not for this + user. + + Optional. If unspecified, + ``conditionScoping = ACROSS_ALL_SESSIONS`` is used. + segment_filter_expression (google.analytics.data_v1alpha.types.SegmentFilterExpression): + Data is included or excluded from the segment + based on if it matches this expression. + Expressions express criteria on dimension, + metrics, and/or parameters. 
+ """ + + condition_scoping: 'UserCriteriaScoping' = proto.Field( + proto.ENUM, + number=1, + enum='UserCriteriaScoping', + ) + segment_filter_expression: 'SegmentFilterExpression' = proto.Field( + proto.MESSAGE, + number=2, + message='SegmentFilterExpression', + ) + + +class UserSegmentSequenceGroup(proto.Message): + r"""Define conditions that must occur in a specific order for the + user to be a member of the segment. + + Attributes: + sequence_scoping (google.analytics.data_v1alpha.types.UserCriteriaScoping): + All sequence steps must be satisfied in the scoping for the + user to match the sequence. For example if + ``sequenceScoping = USER_CRITERIA_WITHIN_SAME_SESSION``, all + sequence steps must complete within one session for the user + to match the sequence. + ``sequenceScoping = USER_CRITERIA_WITHIN_SAME_EVENT`` is not + supported. + + Optional. If unspecified, + ``conditionScoping = ACROSS_ALL_SESSIONS`` is used. + sequence_maximum_duration (google.protobuf.duration_pb2.Duration): + Defines the time period in which the whole sequence must + occur; for example, 30 Minutes. ``sequenceMaximumDuration`` + is inclusive of the endpoint at the microsecond granularity. + For example a sequence with a maximum duration of 5 seconds + can be completed at 4.9 or 5.0 seconds, but not 5 seconds + and 1 microsecond. + + ``sequenceMaximumDuration`` is optional, and if unspecified, + sequences can be completed in any time duration. + user_sequence_steps (MutableSequence[google.analytics.data_v1alpha.types.UserSequenceStep]): + An ordered sequence of condition steps. A user's events must + complete each step in order for the user to match the + ``UserSegmentSequenceGroup``. 
+ """ + + sequence_scoping: 'UserCriteriaScoping' = proto.Field( + proto.ENUM, + number=1, + enum='UserCriteriaScoping', + ) + sequence_maximum_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + user_sequence_steps: MutableSequence['UserSequenceStep'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='UserSequenceStep', + ) + + +class UserSequenceStep(proto.Message): + r"""A condition that must occur in the specified step order for + this user to match the sequence. + + Attributes: + is_directly_followed_by (bool): + If true, the event satisfying this step must be the very + next event after the event satisfying the last step. If + false, this step indirectly follows the prior step; for + example, there may be events between the prior step and this + step. ``isDirectlyFollowedBy`` must be false for the first + step. + step_scoping (google.analytics.data_v1alpha.types.UserCriteriaScoping): + This sequence step must be satisfied in the scoping for the + user to match the sequence. For example if + ``sequenceScoping = WITHIN_SAME_SESSION``, this sequence + step must complete within one session for the user to match + the sequence. ``stepScoping = ACROSS_ALL_SESSIONS`` is only + allowed if the ``sequenceScoping = ACROSS_ALL_SESSIONS``. + + Optional. If unspecified, ``stepScoping`` uses the same + ``UserCriteriaScoping`` as the ``sequenceScoping``. + segment_filter_expression (google.analytics.data_v1alpha.types.SegmentFilterExpression): + A user matches this sequence step if their + events match this expression. Expressions + express criteria on dimension, metrics, and/or + parameters. 
+ """ + + is_directly_followed_by: bool = proto.Field( + proto.BOOL, + number=1, + ) + step_scoping: 'UserCriteriaScoping' = proto.Field( + proto.ENUM, + number=2, + enum='UserCriteriaScoping', + ) + segment_filter_expression: 'SegmentFilterExpression' = proto.Field( + proto.MESSAGE, + number=3, + message='SegmentFilterExpression', + ) + + +class UserSegmentExclusion(proto.Message): + r"""Specifies which users are excluded in this segment. + + Attributes: + user_exclusion_duration (google.analytics.data_v1alpha.types.UserExclusionDuration): + Specifies how long an exclusion will last if a user matches + the ``userExclusionCriteria``. + + Optional. If unspecified, ``userExclusionDuration`` of + ``USER_EXCLUSION_TEMPORARY`` is used. + user_exclusion_criteria (google.analytics.data_v1alpha.types.UserSegmentCriteria): + If a user meets this condition, the user is excluded from + membership in the segment for the ``userExclusionDuration``. + """ + + user_exclusion_duration: 'UserExclusionDuration' = proto.Field( + proto.ENUM, + number=1, + enum='UserExclusionDuration', + ) + user_exclusion_criteria: 'UserSegmentCriteria' = proto.Field( + proto.MESSAGE, + number=2, + message='UserSegmentCriteria', + ) + + +class SessionSegment(proto.Message): + r"""Session segments are subsets of the sessions that occurred on + your site or app: for example, all the sessions that originated + from a particular advertising campaign. + + Attributes: + session_inclusion_criteria (google.analytics.data_v1alpha.types.SessionSegmentCriteria): + Defines which sessions are included in this + segment. Optional. + exclusion (google.analytics.data_v1alpha.types.SessionSegmentExclusion): + Defines which sessions are excluded in this + segment. Optional. 
+ """ + + session_inclusion_criteria: 'SessionSegmentCriteria' = proto.Field( + proto.MESSAGE, + number=1, + message='SessionSegmentCriteria', + ) + exclusion: 'SessionSegmentExclusion' = proto.Field( + proto.MESSAGE, + number=2, + message='SessionSegmentExclusion', + ) + + +class SessionSegmentCriteria(proto.Message): + r"""A session matches a criteria if the session's events meet the + conditions in the criteria. + + Attributes: + and_condition_groups (MutableSequence[google.analytics.data_v1alpha.types.SessionSegmentConditionGroup]): + A session matches this criteria if the session matches each + of these ``andConditionGroups``. + """ + + and_condition_groups: MutableSequence['SessionSegmentConditionGroup'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='SessionSegmentConditionGroup', + ) + + +class SessionSegmentConditionGroup(proto.Message): + r"""Conditions tell Analytics what data to include in or exclude + from the segment. + + Attributes: + condition_scoping (google.analytics.data_v1alpha.types.SessionCriteriaScoping): + Data is included or excluded from the segment based on if it + matches the condition group. This scoping defines how many + events the ``segmentFilterExpression`` is evaluated on + before the condition group is determined to be matched or + not. For example if + ``conditionScoping = SESSION_CRITERIA_WITHIN_SAME_SESSION``, + the expression is evaluated on all events in a session, and + then, the condition group is determined to be matched or not + for this session. For example if + ``conditionScoping = SESSION_CRITERIA_WITHIN_SAME_EVENT``, + the expression is evaluated on a single event, and then, the + condition group is determined to be matched or not for this + session. + + Optional. If unspecified, a ``conditionScoping`` of + ``WITHIN_SAME_SESSION`` is used. 
+ segment_filter_expression (google.analytics.data_v1alpha.types.SegmentFilterExpression): + Data is included or excluded from the segment + based on if it matches this expression. + Expressions express criteria on dimension, + metrics, and/or parameters. + """ + + condition_scoping: 'SessionCriteriaScoping' = proto.Field( + proto.ENUM, + number=1, + enum='SessionCriteriaScoping', + ) + segment_filter_expression: 'SegmentFilterExpression' = proto.Field( + proto.MESSAGE, + number=2, + message='SegmentFilterExpression', + ) + + +class SessionSegmentExclusion(proto.Message): + r"""Specifies which sessions are excluded in this segment. + + Attributes: + session_exclusion_duration (google.analytics.data_v1alpha.types.SessionExclusionDuration): + Specifies how long an exclusion will last if a session + matches the ``sessionExclusionCriteria``. + + Optional. If unspecified, a ``sessionExclusionDuration`` of + ``SESSION_EXCLUSION_TEMPORARY`` is used. + session_exclusion_criteria (google.analytics.data_v1alpha.types.SessionSegmentCriteria): + If a session meets this condition, the session is excluded + from membership in the segment for the + ``sessionExclusionDuration``. + """ + + session_exclusion_duration: 'SessionExclusionDuration' = proto.Field( + proto.ENUM, + number=1, + enum='SessionExclusionDuration', + ) + session_exclusion_criteria: 'SessionSegmentCriteria' = proto.Field( + proto.MESSAGE, + number=2, + message='SessionSegmentCriteria', + ) + + +class EventSegment(proto.Message): + r"""Event segments are subsets of events that were triggered on your + site or app. for example, all purchase events made in a particular + location; app_exception events that occurred on a specific operating + system. + + Attributes: + event_inclusion_criteria (google.analytics.data_v1alpha.types.EventSegmentCriteria): + Defines which events are included in this + segment. Optional. 
+ exclusion (google.analytics.data_v1alpha.types.EventSegmentExclusion): + Defines which events are excluded in this + segment. Optional. + """ + + event_inclusion_criteria: 'EventSegmentCriteria' = proto.Field( + proto.MESSAGE, + number=1, + message='EventSegmentCriteria', + ) + exclusion: 'EventSegmentExclusion' = proto.Field( + proto.MESSAGE, + number=2, + message='EventSegmentExclusion', + ) + + +class EventSegmentCriteria(proto.Message): + r"""An event matches a criteria if the event meets the conditions + in the criteria. + + Attributes: + and_condition_groups (MutableSequence[google.analytics.data_v1alpha.types.EventSegmentConditionGroup]): + An event matches this criteria if the event matches each of + these ``andConditionGroups``. + """ + + and_condition_groups: MutableSequence['EventSegmentConditionGroup'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='EventSegmentConditionGroup', + ) + + +class EventSegmentConditionGroup(proto.Message): + r"""Conditions tell Analytics what data to include in or exclude + from the segment. + + Attributes: + condition_scoping (google.analytics.data_v1alpha.types.EventCriteriaScoping): + ``conditionScoping`` should always be + ``EVENT_CRITERIA_WITHIN_SAME_EVENT``. + + Optional. If unspecified, a ``conditionScoping`` of + ``EVENT_CRITERIA_WITHIN_SAME_EVENT`` is used. + segment_filter_expression (google.analytics.data_v1alpha.types.SegmentFilterExpression): + Data is included or excluded from the segment + based on if it matches this expression. + Expressions express criteria on dimension, + metrics, and/or parameters. + """ + + condition_scoping: 'EventCriteriaScoping' = proto.Field( + proto.ENUM, + number=1, + enum='EventCriteriaScoping', + ) + segment_filter_expression: 'SegmentFilterExpression' = proto.Field( + proto.MESSAGE, + number=2, + message='SegmentFilterExpression', + ) + + +class EventSegmentExclusion(proto.Message): + r"""Specifies which events are excluded in this segment. 
+ + Attributes: + event_exclusion_duration (google.analytics.data_v1alpha.types.EventExclusionDuration): + ``eventExclusionDuration`` should always be + ``PERMANENTLY_EXCLUDE``. + + Optional. If unspecified, an ``eventExclusionDuration`` of + ``EVENT_EXCLUSION_PERMANENT`` is used. + event_exclusion_criteria (google.analytics.data_v1alpha.types.EventSegmentCriteria): + If an event meets this condition, the event is excluded from + membership in the segment for the + ``eventExclusionDuration``. + """ + + event_exclusion_duration: 'EventExclusionDuration' = proto.Field( + proto.ENUM, + number=1, + enum='EventExclusionDuration', + ) + event_exclusion_criteria: 'EventSegmentCriteria' = proto.Field( + proto.MESSAGE, + number=2, + message='EventSegmentCriteria', + ) + + +class Segment(proto.Message): + r"""A segment is a subset of your Analytics data. For example, of your + entire set of users, one segment might be users from a particular + country or city. Another segment might be users who purchase a + particular line of products or who visit a specific part of your + site or trigger certain events in your app. + + To learn more, see `Segment + Builder `__. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The name for this segment. If unspecified, segments are + named "Segment". This name defines string value returned by + the ``segment`` dimension. The ``segment`` dimension + prefixes segment names by the 1-based index number of the + segment in the request (for example "1. Segment", "2. + Segment", etc.). + user_segment (google.analytics.data_v1alpha.types.UserSegment): + User segments are subsets of users who + engaged with your site or app. 
+ + This field is a member of `oneof`_ ``one_segment_scope``. + session_segment (google.analytics.data_v1alpha.types.SessionSegment): + Session segments are subsets of the sessions + that occurred on your site or app. + + This field is a member of `oneof`_ ``one_segment_scope``. + event_segment (google.analytics.data_v1alpha.types.EventSegment): + Event segments are subsets of events that + were triggered on your site or app. + + This field is a member of `oneof`_ ``one_segment_scope``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + user_segment: 'UserSegment' = proto.Field( + proto.MESSAGE, + number=2, + oneof='one_segment_scope', + message='UserSegment', + ) + session_segment: 'SessionSegment' = proto.Field( + proto.MESSAGE, + number=3, + oneof='one_segment_scope', + message='SessionSegment', + ) + event_segment: 'EventSegment' = proto.Field( + proto.MESSAGE, + number=4, + oneof='one_segment_scope', + message='EventSegment', + ) + + +class SegmentFilterExpression(proto.Message): + r"""Expresses combinations of segment filters. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + and_group (google.analytics.data_v1alpha.types.SegmentFilterExpressionList): + The SegmentFilterExpression in ``andGroup`` have an AND + relationship. + + This field is a member of `oneof`_ ``expr``. + or_group (google.analytics.data_v1alpha.types.SegmentFilterExpressionList): + The SegmentFilterExpression in ``orGroup`` have an OR + relationship. + + This field is a member of `oneof`_ ``expr``. + not_expression (google.analytics.data_v1alpha.types.SegmentFilterExpression): + The SegmentFilterExpression is NOT of ``notExpression``. + + This field is a member of `oneof`_ ``expr``. 
+ segment_filter (google.analytics.data_v1alpha.types.SegmentFilter): + A primitive segment filter. + + This field is a member of `oneof`_ ``expr``. + segment_event_filter (google.analytics.data_v1alpha.types.SegmentEventFilter): + Creates a filter that matches events of a + single event name. If a parameter filter + expression is specified, only the subset of + events that match both the single event name and + the parameter filter expressions match this + event filter. + + This field is a member of `oneof`_ ``expr``. + """ + + and_group: 'SegmentFilterExpressionList' = proto.Field( + proto.MESSAGE, + number=1, + oneof='expr', + message='SegmentFilterExpressionList', + ) + or_group: 'SegmentFilterExpressionList' = proto.Field( + proto.MESSAGE, + number=2, + oneof='expr', + message='SegmentFilterExpressionList', + ) + not_expression: 'SegmentFilterExpression' = proto.Field( + proto.MESSAGE, + number=3, + oneof='expr', + message='SegmentFilterExpression', + ) + segment_filter: 'SegmentFilter' = proto.Field( + proto.MESSAGE, + number=4, + oneof='expr', + message='SegmentFilter', + ) + segment_event_filter: 'SegmentEventFilter' = proto.Field( + proto.MESSAGE, + number=5, + oneof='expr', + message='SegmentEventFilter', + ) + + +class SegmentFilterExpressionList(proto.Message): + r"""A list of segment filter expressions. + + Attributes: + expressions (MutableSequence[google.analytics.data_v1alpha.types.SegmentFilterExpression]): + The list of segment filter expressions + """ + + expressions: MutableSequence['SegmentFilterExpression'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='SegmentFilterExpression', + ) + + +class SegmentFilter(proto.Message): + r"""An expression to filter dimension or metric values. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field_name (str): + The dimension name or metric name. + string_filter (google.analytics.data_v1alpha.types.StringFilter): + Strings related filter. + + This field is a member of `oneof`_ ``one_filter``. + in_list_filter (google.analytics.data_v1alpha.types.InListFilter): + A filter for in list values. + + This field is a member of `oneof`_ ``one_filter``. + numeric_filter (google.analytics.data_v1alpha.types.NumericFilter): + A filter for numeric or date values. + + This field is a member of `oneof`_ ``one_filter``. + between_filter (google.analytics.data_v1alpha.types.BetweenFilter): + A filter for between two values. + + This field is a member of `oneof`_ ``one_filter``. + filter_scoping (google.analytics.data_v1alpha.types.SegmentFilterScoping): + Specifies the scope for the filter. + """ + + field_name: str = proto.Field( + proto.STRING, + number=1, + ) + string_filter: 'StringFilter' = proto.Field( + proto.MESSAGE, + number=4, + oneof='one_filter', + message='StringFilter', + ) + in_list_filter: 'InListFilter' = proto.Field( + proto.MESSAGE, + number=5, + oneof='one_filter', + message='InListFilter', + ) + numeric_filter: 'NumericFilter' = proto.Field( + proto.MESSAGE, + number=6, + oneof='one_filter', + message='NumericFilter', + ) + between_filter: 'BetweenFilter' = proto.Field( + proto.MESSAGE, + number=7, + oneof='one_filter', + message='BetweenFilter', + ) + filter_scoping: 'SegmentFilterScoping' = proto.Field( + proto.MESSAGE, + number=8, + message='SegmentFilterScoping', + ) + + +class SegmentFilterScoping(proto.Message): + r"""Scopings specify how the dimensions & metrics of multiple + events should be considered when evaluating a segment filter. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + at_any_point_in_time (bool): + If ``atAnyPointInTime`` is true, this filter evaluates to + true for all events if it evaluates to true for any event in + the date range of the request. + + This ``atAnyPointInTime`` parameter does not extend the date + range of events in the report. If ``atAnyPointInTime`` is + true, only events within the report's date range are + considered when evaluating this filter. + + This ``atAnyPointInTime`` is only able to be specified if + the criteria scoping is ``ACROSS_ALL_SESSIONS`` and is not + able to be specified in sequences. + + If the criteria scoping is ``ACROSS_ALL_SESSIONS``, + ``atAnyPointInTime`` = false is used if unspecified. + + This field is a member of `oneof`_ ``_at_any_point_in_time``. + """ + + at_any_point_in_time: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + + +class SegmentEventFilter(proto.Message): + r"""Creates a filter that matches events of a single event name. + If a parameter filter expression is specified, only the subset + of events that match both the single event name and the + parameter filter expressions match this event filter. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + event_name (str): + This filter matches events of this single + event name. Event name is required. + + This field is a member of `oneof`_ ``_event_name``. + segment_parameter_filter_expression (google.analytics.data_v1alpha.types.SegmentParameterFilterExpression): + If specified, this filter matches events that + match both the single event name and the + parameter filter expressions. + + Inside the parameter filter expression, only + parameter filters are available. + + This field is a member of `oneof`_ ``_segment_parameter_filter_expression``. 
+ """ + + event_name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + segment_parameter_filter_expression: 'SegmentParameterFilterExpression' = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message='SegmentParameterFilterExpression', + ) + + +class SegmentParameterFilterExpression(proto.Message): + r"""Expresses combinations of segment filter on parameters. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + and_group (google.analytics.data_v1alpha.types.SegmentParameterFilterExpressionList): + The SegmentParameterFilterExpression in ``andGroup`` have an + AND relationship. + + This field is a member of `oneof`_ ``expr``. + or_group (google.analytics.data_v1alpha.types.SegmentParameterFilterExpressionList): + The SegmentParameterFilterExpression in ``orGroup`` have an + OR relationship. + + This field is a member of `oneof`_ ``expr``. + not_expression (google.analytics.data_v1alpha.types.SegmentParameterFilterExpression): + The SegmentParameterFilterExpression is NOT of + ``notExpression``. + + This field is a member of `oneof`_ ``expr``. + segment_parameter_filter (google.analytics.data_v1alpha.types.SegmentParameterFilter): + A primitive segment parameter filter. + + This field is a member of `oneof`_ ``expr``. 
+ """ + + and_group: 'SegmentParameterFilterExpressionList' = proto.Field( + proto.MESSAGE, + number=1, + oneof='expr', + message='SegmentParameterFilterExpressionList', + ) + or_group: 'SegmentParameterFilterExpressionList' = proto.Field( + proto.MESSAGE, + number=2, + oneof='expr', + message='SegmentParameterFilterExpressionList', + ) + not_expression: 'SegmentParameterFilterExpression' = proto.Field( + proto.MESSAGE, + number=3, + oneof='expr', + message='SegmentParameterFilterExpression', + ) + segment_parameter_filter: 'SegmentParameterFilter' = proto.Field( + proto.MESSAGE, + number=4, + oneof='expr', + message='SegmentParameterFilter', + ) + + +class SegmentParameterFilterExpressionList(proto.Message): + r"""A list of segment parameter filter expressions. + + Attributes: + expressions (MutableSequence[google.analytics.data_v1alpha.types.SegmentParameterFilterExpression]): + The list of segment parameter filter + expressions. + """ + + expressions: MutableSequence['SegmentParameterFilterExpression'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='SegmentParameterFilterExpression', + ) + + +class SegmentParameterFilter(proto.Message): + r"""An expression to filter parameter values in a segment. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + event_parameter_name (str): + This filter will be evaluated on the specified event + parameter. Event parameters are logged as parameters of the + event. Event parameters include fields like + "firebase_screen" & "currency". + + Event parameters can only be used in segments & funnels and + can only be used in a descendent filter from an EventFilter. 
+ In a descendent filter from an EventFilter either event or + item parameters should be used. + + This field is a member of `oneof`_ ``one_parameter``. + item_parameter_name (str): + This filter will be evaluated on the specified item + parameter. Item parameters are logged as parameters in the + item array. Item parameters include fields like "item_name" + & "item_category". + + Item parameters can only be used in segments & funnels and + can only be used in a descendent filter from an EventFilter. + In a descendent filter from an EventFilter either event or + item parameters should be used. + + Item parameters are only available in ecommerce events. To + learn more about ecommerce events, see the [Measure + ecommerce] + (https://developers.google.com/analytics/devguides/collection/ga4/ecommerce) + guide. + + This field is a member of `oneof`_ ``one_parameter``. + string_filter (google.analytics.data_v1alpha.types.StringFilter): + Strings related filter. + + This field is a member of `oneof`_ ``one_filter``. + in_list_filter (google.analytics.data_v1alpha.types.InListFilter): + A filter for in list values. + + This field is a member of `oneof`_ ``one_filter``. + numeric_filter (google.analytics.data_v1alpha.types.NumericFilter): + A filter for numeric or date values. + + This field is a member of `oneof`_ ``one_filter``. + between_filter (google.analytics.data_v1alpha.types.BetweenFilter): + A filter for between two values. + + This field is a member of `oneof`_ ``one_filter``. + filter_scoping (google.analytics.data_v1alpha.types.SegmentParameterFilterScoping): + Specifies the scope for the filter. 
+ """ + + event_parameter_name: str = proto.Field( + proto.STRING, + number=1, + oneof='one_parameter', + ) + item_parameter_name: str = proto.Field( + proto.STRING, + number=2, + oneof='one_parameter', + ) + string_filter: 'StringFilter' = proto.Field( + proto.MESSAGE, + number=4, + oneof='one_filter', + message='StringFilter', + ) + in_list_filter: 'InListFilter' = proto.Field( + proto.MESSAGE, + number=5, + oneof='one_filter', + message='InListFilter', + ) + numeric_filter: 'NumericFilter' = proto.Field( + proto.MESSAGE, + number=6, + oneof='one_filter', + message='NumericFilter', + ) + between_filter: 'BetweenFilter' = proto.Field( + proto.MESSAGE, + number=7, + oneof='one_filter', + message='BetweenFilter', + ) + filter_scoping: 'SegmentParameterFilterScoping' = proto.Field( + proto.MESSAGE, + number=8, + message='SegmentParameterFilterScoping', + ) + + +class SegmentParameterFilterScoping(proto.Message): + r"""Scopings specify how multiple events should be considered + when evaluating a segment parameter filter. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + in_any_n_day_period (int): + Accumulates the parameter over the specified period of days + before applying the filter. Only supported if criteria + scoping is ``ACROSS_ALL_SESSIONS`` or + ``WITHIN_SAME_SESSION``. Only supported if the parameter is + ``event_count``. + + For example if ``inAnyNDayPeriod`` is 3, the event_name is + "purchase", the event parameter is "event_count", and the + Filter's criteria is greater than 5, this filter will + accumulate the event count of purchase events over every 3 + consecutive day period in the report's date range; a user + will pass this Filter's criteria to be included in this + segment if their count of purchase events exceeds 5 in any 3 + consecutive day period. For example, the periods 2021-11-01 + to 2021-11-03, 2021-11-02 to 2021-11-04, 2021-11-03 to + 2021-11-05, and etc. 
will be considered. + + The date range is not extended for the purpose of having a + full N day window near the start of the date range. For + example if a report is for 2021-11-01 to 2021-11-10 and + ``inAnyNDayPeriod`` = 3, the first two day period will be + effectively shortened because no event data outside the + report's date range will be read. For example, the first + four periods will effectively be: 2021-11-01 to 2021-11-01, + 2021-11-01 to 2021-11-02, 2021-11-01 to 2021-11-03, and + 2021-11-02 to 2021-11-04. + + ``inAnyNDayPeriod`` is optional. If not specified, the + ``segmentParameterFilter`` is applied to each event + individually. + + This field is a member of `oneof`_ ``_in_any_n_day_period``. + """ + + in_any_n_day_period: int = proto.Field( + proto.INT64, + number=1, + optional=True, + ) + + +class FunnelFilterExpression(proto.Message): + r"""Expresses combinations of funnel filters. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + and_group (google.analytics.data_v1alpha.types.FunnelFilterExpressionList): + The FunnelFilterExpression in ``andGroup`` have an AND + relationship. + + This field is a member of `oneof`_ ``expr``. + or_group (google.analytics.data_v1alpha.types.FunnelFilterExpressionList): + The FunnelFilterExpression in ``orGroup`` have an OR + relationship. + + This field is a member of `oneof`_ ``expr``. + not_expression (google.analytics.data_v1alpha.types.FunnelFilterExpression): + The FunnelFilterExpression is NOT of ``notExpression``. + + This field is a member of `oneof`_ ``expr``. + funnel_field_filter (google.analytics.data_v1alpha.types.FunnelFieldFilter): + A funnel filter for a dimension or metric. 
+ + This field is a member of `oneof`_ ``expr``. + funnel_event_filter (google.analytics.data_v1alpha.types.FunnelEventFilter): + Creates a filter that matches events of a + single event name. If a parameter filter + expression is specified, only the subset of + events that match both the single event name and + the parameter filter expressions match this + event filter. + + This field is a member of `oneof`_ ``expr``. + """ + + and_group: 'FunnelFilterExpressionList' = proto.Field( + proto.MESSAGE, + number=1, + oneof='expr', + message='FunnelFilterExpressionList', + ) + or_group: 'FunnelFilterExpressionList' = proto.Field( + proto.MESSAGE, + number=2, + oneof='expr', + message='FunnelFilterExpressionList', + ) + not_expression: 'FunnelFilterExpression' = proto.Field( + proto.MESSAGE, + number=3, + oneof='expr', + message='FunnelFilterExpression', + ) + funnel_field_filter: 'FunnelFieldFilter' = proto.Field( + proto.MESSAGE, + number=4, + oneof='expr', + message='FunnelFieldFilter', + ) + funnel_event_filter: 'FunnelEventFilter' = proto.Field( + proto.MESSAGE, + number=5, + oneof='expr', + message='FunnelEventFilter', + ) + + +class FunnelFilterExpressionList(proto.Message): + r"""A list of funnel filter expressions. + + Attributes: + expressions (MutableSequence[google.analytics.data_v1alpha.types.FunnelFilterExpression]): + The list of funnel filter expressions. + """ + + expressions: MutableSequence['FunnelFilterExpression'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='FunnelFilterExpression', + ) + + +class FunnelFieldFilter(proto.Message): + r"""An expression to filter dimension or metric values. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field_name (str): + The dimension name or metric name. + string_filter (google.analytics.data_v1alpha.types.StringFilter): + Strings related filter. + + This field is a member of `oneof`_ ``one_filter``. + in_list_filter (google.analytics.data_v1alpha.types.InListFilter): + A filter for in list values. + + This field is a member of `oneof`_ ``one_filter``. + numeric_filter (google.analytics.data_v1alpha.types.NumericFilter): + A filter for numeric or date values. + + This field is a member of `oneof`_ ``one_filter``. + between_filter (google.analytics.data_v1alpha.types.BetweenFilter): + A filter for between two values. + + This field is a member of `oneof`_ ``one_filter``. + """ + + field_name: str = proto.Field( + proto.STRING, + number=1, + ) + string_filter: 'StringFilter' = proto.Field( + proto.MESSAGE, + number=4, + oneof='one_filter', + message='StringFilter', + ) + in_list_filter: 'InListFilter' = proto.Field( + proto.MESSAGE, + number=5, + oneof='one_filter', + message='InListFilter', + ) + numeric_filter: 'NumericFilter' = proto.Field( + proto.MESSAGE, + number=6, + oneof='one_filter', + message='NumericFilter', + ) + between_filter: 'BetweenFilter' = proto.Field( + proto.MESSAGE, + number=7, + oneof='one_filter', + message='BetweenFilter', + ) + + +class FunnelEventFilter(proto.Message): + r"""Creates a filter that matches events of a single event name. + If a parameter filter expression is specified, only the subset + of events that match both the single event name and the + parameter filter expressions match this event filter. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + event_name (str): + This filter matches events of this single + event name. Event name is required. + + This field is a member of `oneof`_ ``_event_name``. 
+ funnel_parameter_filter_expression (google.analytics.data_v1alpha.types.FunnelParameterFilterExpression): + If specified, this filter matches events that + match both the single event name and the + parameter filter expressions. + + Inside the parameter filter expression, only + parameter filters are available. + + This field is a member of `oneof`_ ``_funnel_parameter_filter_expression``. + """ + + event_name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + funnel_parameter_filter_expression: 'FunnelParameterFilterExpression' = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message='FunnelParameterFilterExpression', + ) + + +class FunnelParameterFilterExpression(proto.Message): + r"""Expresses combinations of funnel filters on parameters. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + and_group (google.analytics.data_v1alpha.types.FunnelParameterFilterExpressionList): + The FunnelParameterFilterExpression in ``andGroup`` have an + AND relationship. + + This field is a member of `oneof`_ ``expr``. + or_group (google.analytics.data_v1alpha.types.FunnelParameterFilterExpressionList): + The FunnelParameterFilterExpression in ``orGroup`` have an + OR relationship. + + This field is a member of `oneof`_ ``expr``. + not_expression (google.analytics.data_v1alpha.types.FunnelParameterFilterExpression): + The FunnelParameterFilterExpression is NOT of + ``notExpression``. + + This field is a member of `oneof`_ ``expr``. + funnel_parameter_filter (google.analytics.data_v1alpha.types.FunnelParameterFilter): + A primitive funnel parameter filter. + + This field is a member of `oneof`_ ``expr``. 
+ """ + + and_group: 'FunnelParameterFilterExpressionList' = proto.Field( + proto.MESSAGE, + number=1, + oneof='expr', + message='FunnelParameterFilterExpressionList', + ) + or_group: 'FunnelParameterFilterExpressionList' = proto.Field( + proto.MESSAGE, + number=2, + oneof='expr', + message='FunnelParameterFilterExpressionList', + ) + not_expression: 'FunnelParameterFilterExpression' = proto.Field( + proto.MESSAGE, + number=3, + oneof='expr', + message='FunnelParameterFilterExpression', + ) + funnel_parameter_filter: 'FunnelParameterFilter' = proto.Field( + proto.MESSAGE, + number=4, + oneof='expr', + message='FunnelParameterFilter', + ) + + +class FunnelParameterFilterExpressionList(proto.Message): + r"""A list of funnel parameter filter expressions. + + Attributes: + expressions (MutableSequence[google.analytics.data_v1alpha.types.FunnelParameterFilterExpression]): + The list of funnel parameter filter + expressions. + """ + + expressions: MutableSequence['FunnelParameterFilterExpression'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='FunnelParameterFilterExpression', + ) + + +class FunnelParameterFilter(proto.Message): + r"""An expression to filter parameter values in a funnel. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + event_parameter_name (str): + This filter will be evaluated on the specified event + parameter. Event parameters are logged as parameters of the + event. Event parameters include fields like + "firebase_screen" & "currency". + + Event parameters can only be used in segments & funnels and + can only be used in a descendent filter from an EventFilter. 
+ In a descendent filter from an EventFilter either event or + item parameters should be used. + + This field is a member of `oneof`_ ``one_parameter``. + item_parameter_name (str): + This filter will be evaluated on the specified item + parameter. Item parameters are logged as parameters in the + item array. Item parameters include fields like "item_name" + & "item_category". + + Item parameters can only be used in segments & funnels and + can only be used in a descendent filter from an EventFilter. + In a descendent filter from an EventFilter either event or + item parameters should be used. + + Item parameters are only available in ecommerce events. To + learn more about ecommerce events, see the [Measure + ecommerce] + (https://developers.google.com/analytics/devguides/collection/ga4/ecommerce) + guide. + + This field is a member of `oneof`_ ``one_parameter``. + string_filter (google.analytics.data_v1alpha.types.StringFilter): + Strings related filter. + + This field is a member of `oneof`_ ``one_filter``. + in_list_filter (google.analytics.data_v1alpha.types.InListFilter): + A filter for in list values. + + This field is a member of `oneof`_ ``one_filter``. + numeric_filter (google.analytics.data_v1alpha.types.NumericFilter): + A filter for numeric or date values. + + This field is a member of `oneof`_ ``one_filter``. + between_filter (google.analytics.data_v1alpha.types.BetweenFilter): + A filter for between two values. + + This field is a member of `oneof`_ ``one_filter``. 
+ """ + + event_parameter_name: str = proto.Field( + proto.STRING, + number=1, + oneof='one_parameter', + ) + item_parameter_name: str = proto.Field( + proto.STRING, + number=2, + oneof='one_parameter', + ) + string_filter: 'StringFilter' = proto.Field( + proto.MESSAGE, + number=4, + oneof='one_filter', + message='StringFilter', + ) + in_list_filter: 'InListFilter' = proto.Field( + proto.MESSAGE, + number=5, + oneof='one_filter', + message='InListFilter', + ) + numeric_filter: 'NumericFilter' = proto.Field( + proto.MESSAGE, + number=6, + oneof='one_filter', + message='NumericFilter', + ) + between_filter: 'BetweenFilter' = proto.Field( + proto.MESSAGE, + number=7, + oneof='one_filter', + message='BetweenFilter', + ) + + +class FunnelResponseMetadata(proto.Message): + r"""The funnel report's response metadata carries additional + information about the funnel report. + + Attributes: + sampling_metadatas (MutableSequence[google.analytics.data_v1alpha.types.SamplingMetadata]): + If funnel report results are + `sampled `__, + this describes what percentage of events were used in this + funnel report. One ``samplingMetadatas`` is populated for + each date range. Each ``samplingMetadatas`` corresponds to a + date range in the order that date ranges were specified in + the request. + + However if the results are not sampled, this field will not + be defined. + """ + + sampling_metadatas: MutableSequence['SamplingMetadata'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='SamplingMetadata', + ) + + +class SamplingMetadata(proto.Message): + r"""If funnel report results are + `sampled `__, + this metadata describes what percentage of events were used in this + funnel report for a date range. Sampling is the practice of + analyzing a subset of all data in order to uncover the meaningful + information in the larger data set. + + Attributes: + samples_read_count (int): + The total number of events read in this + sampled report for a date range. 
This is the + size of the subset this property's data that was + analyzed in this funnel report. + sampling_space_size (int): + The total number of events present in this property's data + that could have been analyzed in this funnel report for a + date range. Sampling uncovers the meaningful information + about the larger data set, and this is the size of the + larger data set. + + To calculate the percentage of available data that was used + in this funnel report, compute + ``samplesReadCount/samplingSpaceSize``. + """ + + samples_read_count: int = proto.Field( + proto.INT64, + number=1, + ) + sampling_space_size: int = proto.Field( + proto.INT64, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-analytics-data/v1alpha/mypy.ini b/owl-bot-staging/google-analytics-data/v1alpha/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/google-analytics-data/v1alpha/noxfile.py b/owl-bot-staging/google-analytics-data/v1alpha/noxfile.py new file mode 100644 index 000000000000..733469866995 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/noxfile.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import re +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = 'google-analytics-data' + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.13" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "prerelease_deps", +] + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def unit(session, protobuf_implementation): + """Run the unit test suite.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. 
+ if protobuf_implementation == "cpp": + session.install("protobuf<4") + + session.run( + 'py.test', + '--quiet', + '--cov=google/analytics/data_v1alpha/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + +@nox.session(python=ALL_PYTHON[-1]) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def prerelease_deps(session, protobuf_implementation): + """Run the unit test suite against pre-release versions of dependencies.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install test environment dependencies + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/analytics/data_v1alpha/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '-p', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_audience_list_async.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_audience_list_async.py new file mode 100644 index 000000000000..dac05686dbaa --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_audience_list_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAudienceList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_CreateAudienceList_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_create_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + audience_list = data_v1alpha.AudienceList() + audience_list.audience = "audience_value" + + request = data_v1alpha.CreateAudienceListRequest( + parent="parent_value", + audience_list=audience_list, + ) + + # Make the request + operation = client.create_audience_list(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_CreateAudienceList_async] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_audience_list_sync.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_audience_list_sync.py new file mode 100644 index 000000000000..4b8c434b21e3 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_audience_list_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAudienceList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_CreateAudienceList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_create_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + audience_list = data_v1alpha.AudienceList() + audience_list.audience = "audience_value" + + request = data_v1alpha.CreateAudienceListRequest( + parent="parent_value", + audience_list=audience_list, + ) + + # Make the request + operation = client.create_audience_list(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_CreateAudienceList_sync] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_recurring_audience_list_async.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_recurring_audience_list_async.py new file mode 100644 index 000000000000..b01e9568892b --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_recurring_audience_list_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateRecurringAudienceList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_CreateRecurringAudienceList_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_create_recurring_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + recurring_audience_list = data_v1alpha.RecurringAudienceList() + recurring_audience_list.audience = "audience_value" + + request = data_v1alpha.CreateRecurringAudienceListRequest( + parent="parent_value", + recurring_audience_list=recurring_audience_list, + ) + + # Make the request + response = await client.create_recurring_audience_list(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_CreateRecurringAudienceList_async] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_recurring_audience_list_sync.py 
b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_recurring_audience_list_sync.py new file mode 100644 index 000000000000..5fb91f87e74a --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_recurring_audience_list_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateRecurringAudienceList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_CreateRecurringAudienceList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_create_recurring_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + recurring_audience_list = data_v1alpha.RecurringAudienceList() + recurring_audience_list.audience = "audience_value" + + request = data_v1alpha.CreateRecurringAudienceListRequest( + parent="parent_value", + recurring_audience_list=recurring_audience_list, + ) + + # Make the request + response = client.create_recurring_audience_list(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_CreateRecurringAudienceList_sync] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_report_task_async.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_report_task_async.py new file mode 100644 index 000000000000..b2d9ee83426e --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_report_task_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateReportTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_CreateReportTask_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_create_report_task(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.CreateReportTaskRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_report_task(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_CreateReportTask_async] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_report_task_sync.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_report_task_sync.py new file mode 100644 index 000000000000..cf96f9fc7285 --- /dev/null +++ 
b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_create_report_task_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateReportTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_CreateReportTask_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_create_report_task(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.CreateReportTaskRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_report_task(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_CreateReportTask_sync] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_audience_list_async.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_audience_list_async.py new file mode 100644 index 000000000000..22ad44e20ff8 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_audience_list_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetAudienceList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetAudienceList_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_get_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.GetAudienceListRequest( + name="name_value", + ) + + # Make the request + response = await client.get_audience_list(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetAudienceList_async] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_audience_list_sync.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_audience_list_sync.py new file mode 100644 index 000000000000..d2cbdebb24b6 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_audience_list_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAudienceList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetAudienceList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_get_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.GetAudienceListRequest( + name="name_value", + ) + + # Make the request + response = client.get_audience_list(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetAudienceList_sync] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py new file mode 100644 index 000000000000..cfa47528bf6b --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetPropertyQuotasSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = await client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_async] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py new file mode 100644 index 000000000000..964edacdbb5d --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed 
under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPropertyQuotasSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_sync] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_recurring_audience_list_async.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_recurring_audience_list_async.py new file mode 100644 index 000000000000..b88fc6fbc262 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_recurring_audience_list_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetRecurringAudienceList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetRecurringAudienceList_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_get_recurring_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.GetRecurringAudienceListRequest( + name="name_value", + ) + + # Make the request + response = await client.get_recurring_audience_list(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetRecurringAudienceList_async] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_recurring_audience_list_sync.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_recurring_audience_list_sync.py new file mode 100644 index 000000000000..c1f3d56e8e3d --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_recurring_audience_list_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the 
Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRecurringAudienceList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetRecurringAudienceList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_get_recurring_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.GetRecurringAudienceListRequest( + name="name_value", + ) + + # Make the request + response = client.get_recurring_audience_list(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetRecurringAudienceList_sync] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_report_task_async.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_report_task_async.py new file mode 100644 index 000000000000..70cbb41955f2 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_report_task_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetReportTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetReportTask_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_get_report_task(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.GetReportTaskRequest( + name="name_value", + ) + + # Make the request + response = await client.get_report_task(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetReportTask_async] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_report_task_sync.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_report_task_sync.py new file mode 100644 index 000000000000..bf83d4ba6834 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_report_task_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetReportTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetReportTask_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_get_report_task(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.GetReportTaskRequest( + name="name_value", + ) + + # Make the request + response = client.get_report_task(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetReportTask_sync] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_audience_lists_async.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_audience_lists_async.py new file mode 100644 index 000000000000..4f46700c59df --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_audience_lists_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAudienceLists +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_ListAudienceLists_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_list_audience_lists(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.ListAudienceListsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_audience_lists(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_ListAudienceLists_async] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_audience_lists_sync.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_audience_lists_sync.py new file mode 100644 index 000000000000..24d5bbd47fa1 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_audience_lists_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAudienceLists +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_ListAudienceLists_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_list_audience_lists(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.ListAudienceListsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_audience_lists(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_ListAudienceLists_sync] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_recurring_audience_lists_async.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_recurring_audience_lists_async.py new file mode 100644 index 000000000000..d254eb25a1f5 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_recurring_audience_lists_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListRecurringAudienceLists +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_ListRecurringAudienceLists_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_list_recurring_audience_lists(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.ListRecurringAudienceListsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_recurring_audience_lists(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_ListRecurringAudienceLists_async] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_recurring_audience_lists_sync.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_recurring_audience_lists_sync.py new file mode 100644 index 000000000000..cfb668741c25 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_recurring_audience_lists_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- 
+# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListRecurringAudienceLists +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_ListRecurringAudienceLists_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_list_recurring_audience_lists(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.ListRecurringAudienceListsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_recurring_audience_lists(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_ListRecurringAudienceLists_sync] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_report_tasks_async.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_report_tasks_async.py new file mode 100644 index 000000000000..052ad55d7c21 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_report_tasks_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListReportTasks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_ListReportTasks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_list_report_tasks(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.ListReportTasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_report_tasks(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_ListReportTasks_async] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_report_tasks_sync.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_report_tasks_sync.py new file mode 100644 index 000000000000..39b03f6132ce --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_list_report_tasks_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you 
may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListReportTasks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_ListReportTasks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_list_report_tasks(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.ListReportTasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_report_tasks(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_ListReportTasks_sync] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_query_audience_list_async.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_query_audience_list_async.py new file mode 100644 index 000000000000..4bac4f04a176 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_query_audience_list_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for QueryAudienceList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_QueryAudienceList_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_query_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.QueryAudienceListRequest( + name="name_value", + ) + + # Make the request + response = await client.query_audience_list(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_QueryAudienceList_async] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_query_audience_list_sync.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_query_audience_list_sync.py new file mode 100644 index 000000000000..2bce28c2dcfc --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_query_audience_list_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryAudienceList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_QueryAudienceList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_query_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.QueryAudienceListRequest( + name="name_value", + ) + + # Make the request + response = client.query_audience_list(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_QueryAudienceList_sync] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_query_report_task_async.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_query_report_task_async.py new file mode 100644 index 000000000000..eadba9513513 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_query_report_task_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryReportTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_QueryReportTask_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_query_report_task(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.QueryReportTaskRequest( + name="name_value", + ) + + # Make the request + response = await client.query_report_task(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_QueryReportTask_async] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_query_report_task_sync.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_query_report_task_sync.py new file mode 100644 index 000000000000..4615fd787614 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_query_report_task_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryReportTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_QueryReportTask_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_query_report_task(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.QueryReportTaskRequest( + name="name_value", + ) + + # Make the request + response = client.query_report_task(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_QueryReportTask_sync] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_run_funnel_report_async.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_run_funnel_report_async.py new file mode 100644 index 000000000000..e1444685b594 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_run_funnel_report_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunFunnelReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_RunFunnelReport_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_run_funnel_report(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.RunFunnelReportRequest( + ) + + # Make the request + response = await client.run_funnel_report(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_RunFunnelReport_async] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_run_funnel_report_sync.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_run_funnel_report_sync.py new file mode 100644 index 000000000000..a6ab34f7eb3b --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_run_funnel_report_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunFunnelReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_RunFunnelReport_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_run_funnel_report(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.RunFunnelReportRequest( + ) + + # Make the request + response = client.run_funnel_report(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_RunFunnelReport_sync] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_async.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_async.py new file mode 100644 index 000000000000..f7f08496fcff --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SheetExportAudienceList +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_SheetExportAudienceList_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_sheet_export_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.SheetExportAudienceListRequest( + name="name_value", + ) + + # Make the request + response = await client.sheet_export_audience_list(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_SheetExportAudienceList_async] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_sync.py b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_sync.py new file mode 100644 index 000000000000..f3132807c83f --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SheetExportAudienceList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_SheetExportAudienceList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_sheet_export_audience_list(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.SheetExportAudienceListRequest( + name="name_value", + ) + + # Make the request + response = client.sheet_export_audience_list(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_SheetExportAudienceList_sync] diff --git a/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json new file mode 100644 index 000000000000..15f8d0d6e6d8 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json @@ -0,0 +1,2285 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.analytics.data.v1alpha", + "version": "v1alpha" + } + ], + "language": "PYTHON", + "name": "google-analytics-data", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.create_audience_list", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.CreateAudienceList", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "CreateAudienceList" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.analytics.data_v1alpha.types.CreateAudienceListRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "audience_list", + "type": "google.analytics.data_v1alpha.types.AudienceList" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_audience_list" + }, + "description": "Sample for CreateAudienceList", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_create_audience_list_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_CreateAudienceList_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_create_audience_list_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": "AlphaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.create_audience_list", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.CreateAudienceList", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "CreateAudienceList" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.CreateAudienceListRequest" + }, + { + "name": "parent", + "type": 
"str" + }, + { + "name": "audience_list", + "type": "google.analytics.data_v1alpha.types.AudienceList" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_audience_list" + }, + "description": "Sample for CreateAudienceList", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_create_audience_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_CreateAudienceList_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_create_audience_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.create_recurring_audience_list", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.CreateRecurringAudienceList", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "CreateRecurringAudienceList" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.CreateRecurringAudienceListRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": 
"recurring_audience_list", + "type": "google.analytics.data_v1alpha.types.RecurringAudienceList" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.RecurringAudienceList", + "shortName": "create_recurring_audience_list" + }, + "description": "Sample for CreateRecurringAudienceList", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_create_recurring_audience_list_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_CreateRecurringAudienceList_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_create_recurring_audience_list_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": "AlphaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.create_recurring_audience_list", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.CreateRecurringAudienceList", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "CreateRecurringAudienceList" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.CreateRecurringAudienceListRequest" + }, + { + "name": "parent", + "type": "str" + }, + { 
+ "name": "recurring_audience_list", + "type": "google.analytics.data_v1alpha.types.RecurringAudienceList" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.RecurringAudienceList", + "shortName": "create_recurring_audience_list" + }, + "description": "Sample for CreateRecurringAudienceList", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_create_recurring_audience_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_CreateRecurringAudienceList_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_create_recurring_audience_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.create_report_task", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.CreateReportTask", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "CreateReportTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.CreateReportTaskRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + 
"name": "report_task", + "type": "google.analytics.data_v1alpha.types.ReportTask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_report_task" + }, + "description": "Sample for CreateReportTask", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_create_report_task_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_CreateReportTask_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_create_report_task_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": "AlphaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.create_report_task", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.CreateReportTask", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "CreateReportTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.CreateReportTaskRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "report_task", + "type": "google.analytics.data_v1alpha.types.ReportTask" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_report_task" + }, + "description": "Sample for CreateReportTask", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_create_report_task_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_CreateReportTask_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_create_report_task_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.get_audience_list", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.GetAudienceList", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "GetAudienceList" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.GetAudienceListRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.analytics.data_v1alpha.types.AudienceList", + "shortName": "get_audience_list" + }, + "description": "Sample for GetAudienceList", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_audience_list_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetAudienceList_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_audience_list_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": "AlphaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.get_audience_list", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.GetAudienceList", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "GetAudienceList" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.GetAudienceListRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.AudienceList", + "shortName": "get_audience_list" + }, + "description": "Sample for GetAudienceList", + "file": 
"analyticsdata_v1alpha_generated_alpha_analytics_data_get_audience_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetAudienceList_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_audience_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.get_property_quotas_snapshot", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.GetPropertyQuotasSnapshot", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "GetPropertyQuotasSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.PropertyQuotasSnapshot", + "shortName": "get_property_quotas_snapshot" + }, + "description": "Sample for GetPropertyQuotasSnapshot", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": "AlphaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.get_property_quotas_snapshot", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.GetPropertyQuotasSnapshot", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "GetPropertyQuotasSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.PropertyQuotasSnapshot", + "shortName": "get_property_quotas_snapshot" + }, + "description": "Sample for GetPropertyQuotasSnapshot", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.get_recurring_audience_list", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.GetRecurringAudienceList", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "GetRecurringAudienceList" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.GetRecurringAudienceListRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.RecurringAudienceList", + "shortName": "get_recurring_audience_list" + }, + "description": "Sample for GetRecurringAudienceList", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_recurring_audience_list_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetRecurringAudienceList_async", + "segments": [ + 
{ + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_recurring_audience_list_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": "AlphaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.get_recurring_audience_list", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.GetRecurringAudienceList", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "GetRecurringAudienceList" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.GetRecurringAudienceListRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.RecurringAudienceList", + "shortName": "get_recurring_audience_list" + }, + "description": "Sample for GetRecurringAudienceList", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_recurring_audience_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetRecurringAudienceList_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 
38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_recurring_audience_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.get_report_task", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.GetReportTask", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "GetReportTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.GetReportTaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.ReportTask", + "shortName": "get_report_task" + }, + "description": "Sample for GetReportTask", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_report_task_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetReportTask_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + 
"start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_report_task_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": "AlphaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.get_report_task", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.GetReportTask", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "GetReportTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.GetReportTaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.ReportTask", + "shortName": "get_report_task" + }, + "description": "Sample for GetReportTask", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_report_task_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetReportTask_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_report_task_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.list_audience_lists", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.ListAudienceLists", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "ListAudienceLists" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.ListAudienceListsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.services.alpha_analytics_data.pagers.ListAudienceListsAsyncPager", + "shortName": "list_audience_lists" + }, + "description": "Sample for ListAudienceLists", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_list_audience_lists_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_ListAudienceLists_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_list_audience_lists_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": "AlphaAnalyticsDataClient" + }, + "fullName": 
"google.analytics.data_v1alpha.AlphaAnalyticsDataClient.list_audience_lists", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.ListAudienceLists", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "ListAudienceLists" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.ListAudienceListsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.services.alpha_analytics_data.pagers.ListAudienceListsPager", + "shortName": "list_audience_lists" + }, + "description": "Sample for ListAudienceLists", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_list_audience_lists_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_ListAudienceLists_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_list_audience_lists_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.list_recurring_audience_lists", + "method": { + "fullName": 
"google.analytics.data.v1alpha.AlphaAnalyticsData.ListRecurringAudienceLists", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "ListRecurringAudienceLists" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.ListRecurringAudienceListsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.services.alpha_analytics_data.pagers.ListRecurringAudienceListsAsyncPager", + "shortName": "list_recurring_audience_lists" + }, + "description": "Sample for ListRecurringAudienceLists", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_list_recurring_audience_lists_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_ListRecurringAudienceLists_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_list_recurring_audience_lists_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": "AlphaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.list_recurring_audience_lists", + "method": { + "fullName": 
"google.analytics.data.v1alpha.AlphaAnalyticsData.ListRecurringAudienceLists", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "ListRecurringAudienceLists" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.ListRecurringAudienceListsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.services.alpha_analytics_data.pagers.ListRecurringAudienceListsPager", + "shortName": "list_recurring_audience_lists" + }, + "description": "Sample for ListRecurringAudienceLists", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_list_recurring_audience_lists_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_ListRecurringAudienceLists_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_list_recurring_audience_lists_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.list_report_tasks", + "method": { + "fullName": 
"google.analytics.data.v1alpha.AlphaAnalyticsData.ListReportTasks", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "ListReportTasks" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.ListReportTasksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.services.alpha_analytics_data.pagers.ListReportTasksAsyncPager", + "shortName": "list_report_tasks" + }, + "description": "Sample for ListReportTasks", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_list_report_tasks_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_ListReportTasks_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_list_report_tasks_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": "AlphaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.list_report_tasks", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.ListReportTasks", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": 
"AlphaAnalyticsData" + }, + "shortName": "ListReportTasks" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.ListReportTasksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.services.alpha_analytics_data.pagers.ListReportTasksPager", + "shortName": "list_report_tasks" + }, + "description": "Sample for ListReportTasks", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_list_report_tasks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_ListReportTasks_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_list_report_tasks_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.query_audience_list", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.QueryAudienceList", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "QueryAudienceList" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.analytics.data_v1alpha.types.QueryAudienceListRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.QueryAudienceListResponse", + "shortName": "query_audience_list" + }, + "description": "Sample for QueryAudienceList", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_query_audience_list_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_QueryAudienceList_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_query_audience_list_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": "AlphaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.query_audience_list", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.QueryAudienceList", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "QueryAudienceList" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.QueryAudienceListRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.QueryAudienceListResponse", + "shortName": "query_audience_list" + }, + "description": "Sample for QueryAudienceList", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_query_audience_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_QueryAudienceList_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_query_audience_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.query_report_task", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.QueryReportTask", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "QueryReportTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.QueryReportTaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.analytics.data_v1alpha.types.QueryReportTaskResponse", + "shortName": "query_report_task" + }, + "description": "Sample for QueryReportTask", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_query_report_task_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_QueryReportTask_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_query_report_task_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": "AlphaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.query_report_task", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.QueryReportTask", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "QueryReportTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.QueryReportTaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.QueryReportTaskResponse", + "shortName": "query_report_task" + }, + "description": "Sample for QueryReportTask", + "file": 
"analyticsdata_v1alpha_generated_alpha_analytics_data_query_report_task_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_QueryReportTask_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_query_report_task_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.run_funnel_report", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.RunFunnelReport", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "RunFunnelReport" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.RunFunnelReportRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.RunFunnelReportResponse", + "shortName": "run_funnel_report" + }, + "description": "Sample for RunFunnelReport", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_run_funnel_report_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_RunFunnelReport_async", + 
"segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_run_funnel_report_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": "AlphaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.run_funnel_report", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.RunFunnelReport", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "RunFunnelReport" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.RunFunnelReportRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.RunFunnelReportResponse", + "shortName": "run_funnel_report" + }, + "description": "Sample for RunFunnelReport", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_run_funnel_report_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_RunFunnelReport_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + 
}, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_run_funnel_report_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.sheet_export_audience_list", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.SheetExportAudienceList", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "SheetExportAudienceList" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.SheetExportAudienceListRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.SheetExportAudienceListResponse", + "shortName": "sheet_export_audience_list" + }, + "description": "Sample for SheetExportAudienceList", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_SheetExportAudienceList_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": "AlphaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.sheet_export_audience_list", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.SheetExportAudienceList", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "SheetExportAudienceList" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.SheetExportAudienceListRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.SheetExportAudienceListResponse", + "shortName": "sheet_export_audience_list" + }, + "description": "Sample for SheetExportAudienceList", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_SheetExportAudienceList_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_sync.py" + } + 
] +} diff --git a/owl-bot-staging/google-analytics-data/v1alpha/scripts/fixup_data_v1alpha_keywords.py b/owl-bot-staging/google-analytics-data/v1alpha/scripts/fixup_data_v1alpha_keywords.py new file mode 100644 index 000000000000..463b61dcee73 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/scripts/fixup_data_v1alpha_keywords.py @@ -0,0 +1,189 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class dataCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_audience_list': ('parent', 'audience_list', ), + 'create_recurring_audience_list': ('parent', 'recurring_audience_list', ), + 'create_report_task': ('parent', 'report_task', ), + 'get_audience_list': ('name', ), + 'get_property_quotas_snapshot': ('name', ), + 'get_recurring_audience_list': ('name', ), + 'get_report_task': ('name', ), + 'list_audience_lists': ('parent', 
'page_size', 'page_token', ), + 'list_recurring_audience_lists': ('parent', 'page_size', 'page_token', ), + 'list_report_tasks': ('parent', 'page_size', 'page_token', ), + 'query_audience_list': ('name', 'offset', 'limit', ), + 'query_report_task': ('name', 'offset', 'limit', ), + 'run_funnel_report': ('property', 'date_ranges', 'funnel', 'funnel_breakdown', 'funnel_next_action', 'funnel_visualization_type', 'segments', 'limit', 'dimension_filter', 'return_property_quota', ), + 'sheet_export_audience_list': ('name', 'offset', 'limit', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=dataCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the data client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-analytics-data/v1alpha/setup.py b/owl-bot-staging/google-analytics-data/v1alpha/setup.py new file mode 100644 index 000000000000..bf63952e379c --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/setup.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import io
+import os
+import re
+
+import setuptools # type: ignore
+
+package_root = os.path.abspath(os.path.dirname(__file__))
+
+name = 'google-analytics-data'
+
+
+description = "Google Analytics Data API client library"
+
+version = None
+
+with open(os.path.join(package_root, 'google/analytics/data/gapic_version.py')) as fp:
+    version_candidates = re.findall(r"(?<=\")\d+\.\d+\.\d+(?=\")", fp.read())  # escaped dots: match only literal X.Y.Z versions
+    assert (len(version_candidates) == 1)
+    version = version_candidates[0]
+
+if version[0] == "0":
+    release_status = "Development Status :: 4 - Beta"
+else:
+    release_status = "Development Status :: 5 - Production/Stable"
+
+dependencies = [
+    "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
+    # Exclude incompatible versions of `google-auth`
+    # See https://github.com/googleapis/google-cloud-python/issues/12364
+    "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0",
+    "proto-plus >= 1.22.3, <2.0.0dev",
+    "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'",
+    "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
+]
+extras = {
+}
+url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data"
+
+package_root = os.path.abspath(os.path.dirname(__file__))
+
+readme_filename = os.path.join(package_root, "README.rst")
+with io.open(readme_filename, encoding="utf-8") as readme_file:
+    readme = readme_file.read()
+
+packages = [
+    package
+    for package in setuptools.find_namespace_packages()
+    if package.startswith("google")
+]
+
+setuptools.setup(
+    name=name,
+    version=version,
+    description=description,
+    long_description=readme,
+    author="Google LLC",
+    author_email="googleapis-packages@google.com",
+    license="Apache 2.0",
+    url=url,
+    classifiers=[
+        release_status,
+        "Intended Audience :: Developers",
+        "License :: OSI Approved :: Apache Software License",
+        "Programming Language :: Python",
+        "Programming 
Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.10.txt b/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.11.txt b/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.12.txt b/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.13.txt b/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.13.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.13.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.7.txt b/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.7.txt new file mode 100644 index 000000000000..fc812592b0ee --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.8.txt b/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.9.txt b/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-analytics-data/v1alpha/tests/__init__.py b/owl-bot-staging/google-analytics-data/v1alpha/tests/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-analytics-data/v1alpha/tests/unit/__init__.py b/owl-bot-staging/google-analytics-data/v1alpha/tests/unit/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-analytics-data/v1alpha/tests/unit/gapic/__init__.py b/owl-bot-staging/google-analytics-data/v1alpha/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-analytics-data/v1alpha/tests/unit/gapic/data_v1alpha/__init__.py b/owl-bot-staging/google-analytics-data/v1alpha/tests/unit/gapic/data_v1alpha/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/tests/unit/gapic/data_v1alpha/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-analytics-data/v1alpha/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py b/owl-bot-staging/google-analytics-data/v1alpha/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py new file mode 100644 index 000000000000..b7ccfbb93f68 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1alpha/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py @@ -0,0 +1,11588 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.analytics.data_v1alpha.services.alpha_analytics_data import AlphaAnalyticsDataAsyncClient +from google.analytics.data_v1alpha.services.alpha_analytics_data import AlphaAnalyticsDataClient +from google.analytics.data_v1alpha.services.alpha_analytics_data import pagers +from google.analytics.data_v1alpha.services.alpha_analytics_data import transports +from google.analytics.data_v1alpha.types import analytics_data_api +from google.analytics.data_v1alpha.types import data +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from 
google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AlphaAnalyticsDataClient._get_default_mtls_endpoint(None) is None + assert AlphaAnalyticsDataClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert AlphaAnalyticsDataClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert AlphaAnalyticsDataClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert AlphaAnalyticsDataClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert AlphaAnalyticsDataClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert AlphaAnalyticsDataClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert AlphaAnalyticsDataClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert AlphaAnalyticsDataClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + AlphaAnalyticsDataClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert AlphaAnalyticsDataClient._read_environment_variables() == (False, 
"never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert AlphaAnalyticsDataClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert AlphaAnalyticsDataClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + AlphaAnalyticsDataClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert AlphaAnalyticsDataClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert AlphaAnalyticsDataClient._get_client_cert_source(None, False) is None + assert AlphaAnalyticsDataClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert AlphaAnalyticsDataClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert AlphaAnalyticsDataClient._get_client_cert_source(None, True) is mock_default_cert_source + assert AlphaAnalyticsDataClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(AlphaAnalyticsDataClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AlphaAnalyticsDataClient)) +@mock.patch.object(AlphaAnalyticsDataAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AlphaAnalyticsDataAsyncClient)) 
+def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = AlphaAnalyticsDataClient._DEFAULT_UNIVERSE + default_endpoint = AlphaAnalyticsDataClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = AlphaAnalyticsDataClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert AlphaAnalyticsDataClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert AlphaAnalyticsDataClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == AlphaAnalyticsDataClient.DEFAULT_MTLS_ENDPOINT + assert AlphaAnalyticsDataClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert AlphaAnalyticsDataClient._get_api_endpoint(None, None, default_universe, "always") == AlphaAnalyticsDataClient.DEFAULT_MTLS_ENDPOINT + assert AlphaAnalyticsDataClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == AlphaAnalyticsDataClient.DEFAULT_MTLS_ENDPOINT + assert AlphaAnalyticsDataClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert AlphaAnalyticsDataClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + AlphaAnalyticsDataClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
+ + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert AlphaAnalyticsDataClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert AlphaAnalyticsDataClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert AlphaAnalyticsDataClient._get_universe_domain(None, None) == AlphaAnalyticsDataClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + AlphaAnalyticsDataClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize("client_class,transport_name", [ + (AlphaAnalyticsDataClient, "grpc"), + (AlphaAnalyticsDataAsyncClient, "grpc_asyncio"), + (AlphaAnalyticsDataClient, "rest"), +]) +def test_alpha_analytics_data_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'analyticsdata.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://analyticsdata.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.AlphaAnalyticsDataGrpcTransport, "grpc"), + (transports.AlphaAnalyticsDataGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AlphaAnalyticsDataRestTransport, "rest"), +]) +def test_alpha_analytics_data_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + 
transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (AlphaAnalyticsDataClient, "grpc"), + (AlphaAnalyticsDataAsyncClient, "grpc_asyncio"), + (AlphaAnalyticsDataClient, "rest"), +]) +def test_alpha_analytics_data_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'analyticsdata.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://analyticsdata.googleapis.com' + ) + + +def test_alpha_analytics_data_client_get_transport_class(): + transport = AlphaAnalyticsDataClient.get_transport_class() + available_transports = [ + transports.AlphaAnalyticsDataGrpcTransport, + transports.AlphaAnalyticsDataRestTransport, + ] + assert transport in available_transports + + transport = AlphaAnalyticsDataClient.get_transport_class("grpc") + assert transport == transports.AlphaAnalyticsDataGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (AlphaAnalyticsDataClient, transports.AlphaAnalyticsDataGrpcTransport, "grpc"), + 
(AlphaAnalyticsDataAsyncClient, transports.AlphaAnalyticsDataGrpcAsyncIOTransport, "grpc_asyncio"), + (AlphaAnalyticsDataClient, transports.AlphaAnalyticsDataRestTransport, "rest"), +]) +@mock.patch.object(AlphaAnalyticsDataClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AlphaAnalyticsDataClient)) +@mock.patch.object(AlphaAnalyticsDataAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AlphaAnalyticsDataAsyncClient)) +def test_alpha_analytics_data_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AlphaAnalyticsDataClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AlphaAnalyticsDataClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (AlphaAnalyticsDataClient, transports.AlphaAnalyticsDataGrpcTransport, "grpc", "true"), + (AlphaAnalyticsDataAsyncClient, transports.AlphaAnalyticsDataGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (AlphaAnalyticsDataClient, 
transports.AlphaAnalyticsDataGrpcTransport, "grpc", "false"), + (AlphaAnalyticsDataAsyncClient, transports.AlphaAnalyticsDataGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (AlphaAnalyticsDataClient, transports.AlphaAnalyticsDataRestTransport, "rest", "true"), + (AlphaAnalyticsDataClient, transports.AlphaAnalyticsDataRestTransport, "rest", "false"), +]) +@mock.patch.object(AlphaAnalyticsDataClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AlphaAnalyticsDataClient)) +@mock.patch.object(AlphaAnalyticsDataAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AlphaAnalyticsDataAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_alpha_analytics_data_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + AlphaAnalyticsDataClient, AlphaAnalyticsDataAsyncClient +]) +@mock.patch.object(AlphaAnalyticsDataClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlphaAnalyticsDataClient)) +@mock.patch.object(AlphaAnalyticsDataAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlphaAnalyticsDataAsyncClient)) +def test_alpha_analytics_data_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + AlphaAnalyticsDataClient, AlphaAnalyticsDataAsyncClient +]) +@mock.patch.object(AlphaAnalyticsDataClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AlphaAnalyticsDataClient)) +@mock.patch.object(AlphaAnalyticsDataAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AlphaAnalyticsDataAsyncClient)) +def test_alpha_analytics_data_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = AlphaAnalyticsDataClient._DEFAULT_UNIVERSE + default_endpoint = 
AlphaAnalyticsDataClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = AlphaAnalyticsDataClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (AlphaAnalyticsDataClient, transports.AlphaAnalyticsDataGrpcTransport, "grpc"), + (AlphaAnalyticsDataAsyncClient, transports.AlphaAnalyticsDataGrpcAsyncIOTransport, "grpc_asyncio"), + (AlphaAnalyticsDataClient, transports.AlphaAnalyticsDataRestTransport, "rest"), +]) +def test_alpha_analytics_data_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (AlphaAnalyticsDataClient, transports.AlphaAnalyticsDataGrpcTransport, "grpc", grpc_helpers), + (AlphaAnalyticsDataAsyncClient, transports.AlphaAnalyticsDataGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (AlphaAnalyticsDataClient, transports.AlphaAnalyticsDataRestTransport, "rest", None), +]) +def test_alpha_analytics_data_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_alpha_analytics_data_client_client_options_from_dict(): + with mock.patch('google.analytics.data_v1alpha.services.alpha_analytics_data.transports.AlphaAnalyticsDataGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = AlphaAnalyticsDataClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (AlphaAnalyticsDataClient, transports.AlphaAnalyticsDataGrpcTransport, "grpc", grpc_helpers), + (AlphaAnalyticsDataAsyncClient, transports.AlphaAnalyticsDataGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_alpha_analytics_data_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "analyticsdata.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/analytics', + 'https://www.googleapis.com/auth/analytics.readonly', + 'https://www.googleapis.com/auth/drive', + 'https://www.googleapis.com/auth/drive.file', + 'https://www.googleapis.com/auth/spreadsheets', +), + scopes=None, + default_host="analyticsdata.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.RunFunnelReportRequest, + dict, +]) +def test_run_funnel_report(request_type, transport: str = 'grpc'): + 
client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_funnel_report), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.RunFunnelReportResponse( + kind='kind_value', + ) + response = client.run_funnel_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.RunFunnelReportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RunFunnelReportResponse) + assert response.kind == 'kind_value' + + +def test_run_funnel_report_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.RunFunnelReportRequest( + property='property_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_funnel_report), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.run_funnel_report(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.RunFunnelReportRequest( + property='property_value', + ) + +def test_run_funnel_report_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_funnel_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.run_funnel_report] = mock_rpc + request = {} + client.run_funnel_report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.run_funnel_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_run_funnel_report_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.run_funnel_report in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.run_funnel_report] = mock_rpc + + request = {} + await client.run_funnel_report(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.run_funnel_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_run_funnel_report_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.RunFunnelReportRequest): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.run_funnel_report), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RunFunnelReportResponse( + kind='kind_value', + )) + response = await client.run_funnel_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.RunFunnelReportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RunFunnelReportResponse) + assert response.kind == 'kind_value' + + +@pytest.mark.asyncio +async def test_run_funnel_report_async_from_dict(): + await test_run_funnel_report_async(request_type=dict) + +def test_run_funnel_report_field_headers(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.RunFunnelReportRequest() + + request.property = 'property_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_funnel_report), + '__call__') as call: + call.return_value = analytics_data_api.RunFunnelReportResponse() + client.run_funnel_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'property=property_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_run_funnel_report_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.RunFunnelReportRequest() + + request.property = 'property_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_funnel_report), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RunFunnelReportResponse()) + await client.run_funnel_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'property=property_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.CreateAudienceListRequest, + dict, +]) +def test_create_audience_list(request_type, transport: str = 'grpc'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.CreateAudienceListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_audience_list_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.CreateAudienceListRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_list), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_audience_list(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.CreateAudienceListRequest( + parent='parent_value', + ) + +def test_create_audience_list_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_audience_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_audience_list] = mock_rpc + request = {} + client.create_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_audience_list_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_audience_list in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_audience_list] = mock_rpc + + request = {} + await client.create_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_audience_list_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.CreateAudienceListRequest): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.CreateAudienceListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_audience_list_async_from_dict(): + await test_create_audience_list_async(request_type=dict) + +def test_create_audience_list_field_headers(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.CreateAudienceListRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_audience_list), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_audience_list_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.CreateAudienceListRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_list), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_audience_list_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_audience_list( + parent='parent_value', + audience_list=analytics_data_api.AudienceList(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].audience_list + mock_val = analytics_data_api.AudienceList(name='name_value') + assert arg == mock_val + + +def test_create_audience_list_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_audience_list( + analytics_data_api.CreateAudienceListRequest(), + parent='parent_value', + audience_list=analytics_data_api.AudienceList(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_audience_list_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_audience_list( + parent='parent_value', + audience_list=analytics_data_api.AudienceList(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].audience_list + mock_val = analytics_data_api.AudienceList(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_audience_list_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_audience_list( + analytics_data_api.CreateAudienceListRequest(), + parent='parent_value', + audience_list=analytics_data_api.AudienceList(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.QueryAudienceListRequest, + dict, +]) +def test_query_audience_list(request_type, transport: str = 'grpc'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.QueryAudienceListResponse( + row_count=992, + ) + response = client.query_audience_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.QueryAudienceListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.QueryAudienceListResponse) + assert response.row_count == 992 + + +def test_query_audience_list_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.QueryAudienceListRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_list), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.query_audience_list(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.QueryAudienceListRequest( + name='name_value', + ) + +def test_query_audience_list_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.query_audience_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.query_audience_list] = mock_rpc + request = {} + client.query_audience_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.query_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_query_audience_list_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.query_audience_list in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.query_audience_list] = mock_rpc + + request = {} + await client.query_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.query_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_query_audience_list_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.QueryAudienceListRequest): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.query_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.QueryAudienceListResponse( + row_count=992, + )) + response = await client.query_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.QueryAudienceListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.QueryAudienceListResponse) + assert response.row_count == 992 + + +@pytest.mark.asyncio +async def test_query_audience_list_async_from_dict(): + await test_query_audience_list_async(request_type=dict) + +def test_query_audience_list_field_headers(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.QueryAudienceListRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_list), + '__call__') as call: + call.return_value = analytics_data_api.QueryAudienceListResponse() + client.query_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_query_audience_list_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.QueryAudienceListRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_list), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.QueryAudienceListResponse()) + await client.query_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_query_audience_list_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.QueryAudienceListResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.query_audience_list( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_query_audience_list_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.query_audience_list( + analytics_data_api.QueryAudienceListRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_query_audience_list_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.QueryAudienceListResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.QueryAudienceListResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.query_audience_list( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_query_audience_list_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.query_audience_list( + analytics_data_api.QueryAudienceListRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.SheetExportAudienceListRequest, + dict, +]) +def test_sheet_export_audience_list(request_type, transport: str = 'grpc'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sheet_export_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.SheetExportAudienceListResponse( + spreadsheet_uri='spreadsheet_uri_value', + spreadsheet_id='spreadsheet_id_value', + row_count=992, + ) + response = client.sheet_export_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.SheetExportAudienceListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.SheetExportAudienceListResponse) + assert response.spreadsheet_uri == 'spreadsheet_uri_value' + assert response.spreadsheet_id == 'spreadsheet_id_value' + assert response.row_count == 992 + + +def test_sheet_export_audience_list_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.SheetExportAudienceListRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sheet_export_audience_list), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.sheet_export_audience_list(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.SheetExportAudienceListRequest( + name='name_value', + ) + +def test_sheet_export_audience_list_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.sheet_export_audience_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.sheet_export_audience_list] = mock_rpc + request = {} + client.sheet_export_audience_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.sheet_export_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_sheet_export_audience_list_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.sheet_export_audience_list in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.sheet_export_audience_list] = mock_rpc + + request = {} + await client.sheet_export_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.sheet_export_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_sheet_export_audience_list_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.SheetExportAudienceListRequest): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sheet_export_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.SheetExportAudienceListResponse( + spreadsheet_uri='spreadsheet_uri_value', + spreadsheet_id='spreadsheet_id_value', + row_count=992, + )) + response = await client.sheet_export_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.SheetExportAudienceListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.SheetExportAudienceListResponse) + assert response.spreadsheet_uri == 'spreadsheet_uri_value' + assert response.spreadsheet_id == 'spreadsheet_id_value' + assert response.row_count == 992 + + +@pytest.mark.asyncio +async def test_sheet_export_audience_list_async_from_dict(): + await test_sheet_export_audience_list_async(request_type=dict) + +def test_sheet_export_audience_list_field_headers(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.SheetExportAudienceListRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sheet_export_audience_list), + '__call__') as call: + call.return_value = analytics_data_api.SheetExportAudienceListResponse() + client.sheet_export_audience_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_sheet_export_audience_list_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.SheetExportAudienceListRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sheet_export_audience_list), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.SheetExportAudienceListResponse()) + await client.sheet_export_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_sheet_export_audience_list_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sheet_export_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.SheetExportAudienceListResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.sheet_export_audience_list( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_sheet_export_audience_list_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.sheet_export_audience_list( + analytics_data_api.SheetExportAudienceListRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_sheet_export_audience_list_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sheet_export_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.SheetExportAudienceListResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.SheetExportAudienceListResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.sheet_export_audience_list( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_sheet_export_audience_list_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.sheet_export_audience_list( + analytics_data_api.SheetExportAudienceListRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.GetAudienceListRequest, + dict, +]) +def test_get_audience_list(request_type, transport: str = 'grpc'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.AudienceList( + name='name_value', + audience='audience_value', + audience_display_name='audience_display_name_value', + state=analytics_data_api.AudienceList.State.CREATING, + creation_quota_tokens_charged=3070, + row_count=992, + error_message='error_message_value', + percentage_completed=0.2106, + recurring_audience_list='recurring_audience_list_value', + ) + response = client.get_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.GetAudienceListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, analytics_data_api.AudienceList) + assert response.name == 'name_value' + assert response.audience == 'audience_value' + assert response.audience_display_name == 'audience_display_name_value' + assert response.state == analytics_data_api.AudienceList.State.CREATING + assert response.creation_quota_tokens_charged == 3070 + assert response.row_count == 992 + assert response.error_message == 'error_message_value' + assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) + assert response.recurring_audience_list == 'recurring_audience_list_value' + + +def test_get_audience_list_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.GetAudienceListRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_list), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_audience_list(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.GetAudienceListRequest( + name='name_value', + ) + +def test_get_audience_list_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_audience_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_audience_list] = mock_rpc + request = {} + client.get_audience_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_audience_list_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_audience_list in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_audience_list] = mock_rpc + + request = {} + await client.get_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_audience_list_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.GetAudienceListRequest): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.AudienceList( + name='name_value', + audience='audience_value', + audience_display_name='audience_display_name_value', + state=analytics_data_api.AudienceList.State.CREATING, + creation_quota_tokens_charged=3070, + row_count=992, + error_message='error_message_value', + percentage_completed=0.2106, + recurring_audience_list='recurring_audience_list_value', + )) + response = await client.get_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.GetAudienceListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.AudienceList) + assert response.name == 'name_value' + assert response.audience == 'audience_value' + assert response.audience_display_name == 'audience_display_name_value' + assert response.state == analytics_data_api.AudienceList.State.CREATING + assert response.creation_quota_tokens_charged == 3070 + assert response.row_count == 992 + assert response.error_message == 'error_message_value' + assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) + assert response.recurring_audience_list == 'recurring_audience_list_value' + + +@pytest.mark.asyncio +async def test_get_audience_list_async_from_dict(): + await test_get_audience_list_async(request_type=dict) + +def test_get_audience_list_field_headers(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = analytics_data_api.GetAudienceListRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_list), + '__call__') as call: + call.return_value = analytics_data_api.AudienceList() + client.get_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_audience_list_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.GetAudienceListRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_list), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.AudienceList()) + await client.get_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_audience_list_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.AudienceList() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_audience_list( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_audience_list_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_audience_list( + analytics_data_api.GetAudienceListRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_audience_list_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.AudienceList() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.AudienceList()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_audience_list( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_audience_list_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_audience_list( + analytics_data_api.GetAudienceListRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.ListAudienceListsRequest, + dict, +]) +def test_list_audience_lists(request_type, transport: str = 'grpc'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_lists), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListAudienceListsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_audience_lists(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.ListAudienceListsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAudienceListsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_audience_lists_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.ListAudienceListsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_lists), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_audience_lists(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListAudienceListsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_audience_lists_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_audience_lists in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_audience_lists] = mock_rpc + request = {} + client.list_audience_lists(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_audience_lists(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_audience_lists_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_audience_lists in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_audience_lists] = mock_rpc + + request = {} + await client.list_audience_lists(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_audience_lists(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_audience_lists_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.ListAudienceListsRequest): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_audience_lists), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ListAudienceListsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_audience_lists(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.ListAudienceListsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAudienceListsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_audience_lists_async_from_dict(): + await test_list_audience_lists_async(request_type=dict) + +def test_list_audience_lists_field_headers(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.ListAudienceListsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_lists), + '__call__') as call: + call.return_value = analytics_data_api.ListAudienceListsResponse() + client.list_audience_lists(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_audience_lists_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.ListAudienceListsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_lists), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ListAudienceListsResponse()) + await client.list_audience_lists(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_audience_lists_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_lists), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListAudienceListsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_audience_lists( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_audience_lists_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_audience_lists( + analytics_data_api.ListAudienceListsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_audience_lists_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_lists), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListAudienceListsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ListAudienceListsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_audience_lists( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_audience_lists_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_audience_lists( + analytics_data_api.ListAudienceListsRequest(), + parent='parent_value', + ) + + +def test_list_audience_lists_pager(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_lists), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + next_page_token='abc', + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[], + next_page_token='def', + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_audience_lists(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, analytics_data_api.AudienceList) + for i in results) +def test_list_audience_lists_pages(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_audience_lists), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + next_page_token='abc', + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[], + next_page_token='def', + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + ), + RuntimeError, + ) + pages = list(client.list_audience_lists(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_audience_lists_async_pager(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_lists), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + next_page_token='abc', + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[], + next_page_token='def', + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_audience_lists(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, analytics_data_api.AudienceList) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_audience_lists_async_pages(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_lists), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + next_page_token='abc', + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[], + next_page_token='def', + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_audience_lists(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.CreateRecurringAudienceListRequest, + dict, +]) +def test_create_recurring_audience_list(request_type, transport: str = 'grpc'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_recurring_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = analytics_data_api.RecurringAudienceList( + name='name_value', + audience='audience_value', + audience_display_name='audience_display_name_value', + active_days_remaining=2213, + audience_lists=['audience_lists_value'], + ) + response = client.create_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.CreateRecurringAudienceListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RecurringAudienceList) + assert response.name == 'name_value' + assert response.audience == 'audience_value' + assert response.audience_display_name == 'audience_display_name_value' + assert response.active_days_remaining == 2213 + assert response.audience_lists == ['audience_lists_value'] + + +def test_create_recurring_audience_list_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.CreateRecurringAudienceListRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_recurring_audience_list), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_recurring_audience_list(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.CreateRecurringAudienceListRequest( + parent='parent_value', + ) + +def test_create_recurring_audience_list_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_recurring_audience_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_recurring_audience_list] = mock_rpc + request = {} + client.create_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_recurring_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_recurring_audience_list_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_recurring_audience_list in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_recurring_audience_list] = mock_rpc + + request = {} + await client.create_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_recurring_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_recurring_audience_list_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.CreateRecurringAudienceListRequest): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_recurring_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RecurringAudienceList( + name='name_value', + audience='audience_value', + audience_display_name='audience_display_name_value', + active_days_remaining=2213, + audience_lists=['audience_lists_value'], + )) + response = await client.create_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.CreateRecurringAudienceListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RecurringAudienceList) + assert response.name == 'name_value' + assert response.audience == 'audience_value' + assert response.audience_display_name == 'audience_display_name_value' + assert response.active_days_remaining == 2213 + assert response.audience_lists == ['audience_lists_value'] + + +@pytest.mark.asyncio +async def test_create_recurring_audience_list_async_from_dict(): + await test_create_recurring_audience_list_async(request_type=dict) + +def test_create_recurring_audience_list_field_headers(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.CreateRecurringAudienceListRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_recurring_audience_list), + '__call__') as call: + call.return_value = analytics_data_api.RecurringAudienceList() + client.create_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_recurring_audience_list_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.CreateRecurringAudienceListRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_recurring_audience_list), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RecurringAudienceList()) + await client.create_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_recurring_audience_list_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_recurring_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = analytics_data_api.RecurringAudienceList() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_recurring_audience_list( + parent='parent_value', + recurring_audience_list=analytics_data_api.RecurringAudienceList(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].recurring_audience_list + mock_val = analytics_data_api.RecurringAudienceList(name='name_value') + assert arg == mock_val + + +def test_create_recurring_audience_list_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_recurring_audience_list( + analytics_data_api.CreateRecurringAudienceListRequest(), + parent='parent_value', + recurring_audience_list=analytics_data_api.RecurringAudienceList(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_recurring_audience_list_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_recurring_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.RecurringAudienceList() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RecurringAudienceList()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_recurring_audience_list( + parent='parent_value', + recurring_audience_list=analytics_data_api.RecurringAudienceList(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].recurring_audience_list + mock_val = analytics_data_api.RecurringAudienceList(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_recurring_audience_list_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_recurring_audience_list( + analytics_data_api.CreateRecurringAudienceListRequest(), + parent='parent_value', + recurring_audience_list=analytics_data_api.RecurringAudienceList(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.GetRecurringAudienceListRequest, + dict, +]) +def test_get_recurring_audience_list(request_type, transport: str = 'grpc'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_recurring_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = analytics_data_api.RecurringAudienceList( + name='name_value', + audience='audience_value', + audience_display_name='audience_display_name_value', + active_days_remaining=2213, + audience_lists=['audience_lists_value'], + ) + response = client.get_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.GetRecurringAudienceListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RecurringAudienceList) + assert response.name == 'name_value' + assert response.audience == 'audience_value' + assert response.audience_display_name == 'audience_display_name_value' + assert response.active_days_remaining == 2213 + assert response.audience_lists == ['audience_lists_value'] + + +def test_get_recurring_audience_list_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.GetRecurringAudienceListRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_recurring_audience_list), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_recurring_audience_list(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.GetRecurringAudienceListRequest( + name='name_value', + ) + +def test_get_recurring_audience_list_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_recurring_audience_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_recurring_audience_list] = mock_rpc + request = {} + client.get_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_recurring_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_recurring_audience_list_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_recurring_audience_list in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_recurring_audience_list] = mock_rpc + + request = {} + await client.get_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_recurring_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_recurring_audience_list_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.GetRecurringAudienceListRequest): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_recurring_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RecurringAudienceList( + name='name_value', + audience='audience_value', + audience_display_name='audience_display_name_value', + active_days_remaining=2213, + audience_lists=['audience_lists_value'], + )) + response = await client.get_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.GetRecurringAudienceListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RecurringAudienceList) + assert response.name == 'name_value' + assert response.audience == 'audience_value' + assert response.audience_display_name == 'audience_display_name_value' + assert response.active_days_remaining == 2213 + assert response.audience_lists == ['audience_lists_value'] + + +@pytest.mark.asyncio +async def test_get_recurring_audience_list_async_from_dict(): + await test_get_recurring_audience_list_async(request_type=dict) + +def test_get_recurring_audience_list_field_headers(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.GetRecurringAudienceListRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_recurring_audience_list), + '__call__') as call: + call.return_value = analytics_data_api.RecurringAudienceList() + client.get_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_recurring_audience_list_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.GetRecurringAudienceListRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_recurring_audience_list), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RecurringAudienceList()) + await client.get_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_recurring_audience_list_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_recurring_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = analytics_data_api.RecurringAudienceList() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_recurring_audience_list( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_recurring_audience_list_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_recurring_audience_list( + analytics_data_api.GetRecurringAudienceListRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_recurring_audience_list_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_recurring_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.RecurringAudienceList() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RecurringAudienceList()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_recurring_audience_list( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_recurring_audience_list_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_recurring_audience_list( + analytics_data_api.GetRecurringAudienceListRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.ListRecurringAudienceListsRequest, + dict, +]) +def test_list_recurring_audience_lists(request_type, transport: str = 'grpc'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_recurring_audience_lists), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListRecurringAudienceListsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_recurring_audience_lists(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.ListRecurringAudienceListsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListRecurringAudienceListsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_recurring_audience_lists_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.ListRecurringAudienceListsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_recurring_audience_lists), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_recurring_audience_lists(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListRecurringAudienceListsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_recurring_audience_lists_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_recurring_audience_lists in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_recurring_audience_lists] = mock_rpc + request = {} + client.list_recurring_audience_lists(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_recurring_audience_lists(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_recurring_audience_lists_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_recurring_audience_lists in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_recurring_audience_lists] = mock_rpc + + request = {} + await client.list_recurring_audience_lists(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_recurring_audience_lists(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_recurring_audience_lists_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.ListRecurringAudienceListsRequest): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_recurring_audience_lists), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ListRecurringAudienceListsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_recurring_audience_lists(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.ListRecurringAudienceListsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRecurringAudienceListsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_recurring_audience_lists_async_from_dict(): + await test_list_recurring_audience_lists_async(request_type=dict) + +def test_list_recurring_audience_lists_field_headers(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.ListRecurringAudienceListsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_recurring_audience_lists), + '__call__') as call: + call.return_value = analytics_data_api.ListRecurringAudienceListsResponse() + client.list_recurring_audience_lists(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_recurring_audience_lists_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.ListRecurringAudienceListsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_recurring_audience_lists), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ListRecurringAudienceListsResponse()) + await client.list_recurring_audience_lists(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_recurring_audience_lists_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_recurring_audience_lists), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListRecurringAudienceListsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_recurring_audience_lists( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_recurring_audience_lists_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_recurring_audience_lists( + analytics_data_api.ListRecurringAudienceListsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_recurring_audience_lists_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_recurring_audience_lists), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListRecurringAudienceListsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ListRecurringAudienceListsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_recurring_audience_lists( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_recurring_audience_lists_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_recurring_audience_lists( + analytics_data_api.ListRecurringAudienceListsRequest(), + parent='parent_value', + ) + + +def test_list_recurring_audience_lists_pager(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_recurring_audience_lists), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + next_page_token='abc', + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[], + next_page_token='def', + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_recurring_audience_lists(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, analytics_data_api.RecurringAudienceList) + for i in results) +def test_list_recurring_audience_lists_pages(transport_name: str = "grpc"): + client = 
AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_recurring_audience_lists), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + next_page_token='abc', + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[], + next_page_token='def', + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + ), + RuntimeError, + ) + pages = list(client.list_recurring_audience_lists(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_recurring_audience_lists_async_pager(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_recurring_audience_lists), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + next_page_token='abc', + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[], + next_page_token='def', + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_recurring_audience_lists(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, analytics_data_api.RecurringAudienceList) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_recurring_audience_lists_async_pages(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_recurring_audience_lists), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + next_page_token='abc', + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[], + next_page_token='def', + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_recurring_audience_lists(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.GetPropertyQuotasSnapshotRequest, + dict, +]) +def test_get_property_quotas_snapshot(request_type, transport: str = 'grpc'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_property_quotas_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = analytics_data_api.PropertyQuotasSnapshot( + name='name_value', + ) + response = client.get_property_quotas_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) + assert response.name == 'name_value' + + +def test_get_property_quotas_snapshot_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.GetPropertyQuotasSnapshotRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_property_quotas_snapshot), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_property_quotas_snapshot(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.GetPropertyQuotasSnapshotRequest( + name='name_value', + ) + +def test_get_property_quotas_snapshot_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_property_quotas_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_property_quotas_snapshot] = mock_rpc + request = {} + client.get_property_quotas_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_property_quotas_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_property_quotas_snapshot_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_property_quotas_snapshot in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_property_quotas_snapshot] = mock_rpc + + request = {} + await client.get_property_quotas_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_property_quotas_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_property_quotas_snapshot_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_property_quotas_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.PropertyQuotasSnapshot( + name='name_value', + )) + response = await client.get_property_quotas_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_property_quotas_snapshot_async_from_dict(): + await test_get_property_quotas_snapshot_async(request_type=dict) + +def test_get_property_quotas_snapshot_field_headers(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_property_quotas_snapshot), + '__call__') as call: + call.return_value = analytics_data_api.PropertyQuotasSnapshot() + client.get_property_quotas_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_property_quotas_snapshot_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_property_quotas_snapshot), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.PropertyQuotasSnapshot()) + await client.get_property_quotas_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_property_quotas_snapshot_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_property_quotas_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.PropertyQuotasSnapshot() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_property_quotas_snapshot( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_property_quotas_snapshot_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_property_quotas_snapshot( + analytics_data_api.GetPropertyQuotasSnapshotRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_property_quotas_snapshot_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_property_quotas_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.PropertyQuotasSnapshot() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.PropertyQuotasSnapshot()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_property_quotas_snapshot( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_property_quotas_snapshot_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_property_quotas_snapshot( + analytics_data_api.GetPropertyQuotasSnapshotRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.CreateReportTaskRequest, + dict, +]) +def test_create_report_task(request_type, transport: str = 'grpc'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_report_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_report_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.CreateReportTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_report_task_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.CreateReportTaskRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_report_task), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_report_task(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.CreateReportTaskRequest( + parent='parent_value', + ) + +def test_create_report_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_report_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_report_task] = mock_rpc + request = {} + client.create_report_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_report_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_report_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_report_task in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_report_task] = mock_rpc + + request = {} + await client.create_report_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_report_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_report_task_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.CreateReportTaskRequest): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_report_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_report_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.CreateReportTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_report_task_async_from_dict(): + await test_create_report_task_async(request_type=dict) + +def test_create_report_task_field_headers(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.CreateReportTaskRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_report_task), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_report_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_report_task_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.CreateReportTaskRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_report_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_report_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_report_task_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_report_task), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_report_task( + parent='parent_value', + report_task=analytics_data_api.ReportTask(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].report_task + mock_val = analytics_data_api.ReportTask(name='name_value') + assert arg == mock_val + + +def test_create_report_task_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_report_task( + analytics_data_api.CreateReportTaskRequest(), + parent='parent_value', + report_task=analytics_data_api.ReportTask(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_report_task_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_report_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_report_task( + parent='parent_value', + report_task=analytics_data_api.ReportTask(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].report_task + mock_val = analytics_data_api.ReportTask(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_report_task_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_report_task( + analytics_data_api.CreateReportTaskRequest(), + parent='parent_value', + report_task=analytics_data_api.ReportTask(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.QueryReportTaskRequest, + dict, +]) +def test_query_report_task(request_type, transport: str = 'grpc'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_report_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.QueryReportTaskResponse( + row_count=992, + ) + response = client.query_report_task(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.QueryReportTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.QueryReportTaskResponse) + assert response.row_count == 992 + + +def test_query_report_task_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.QueryReportTaskRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_report_task), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.query_report_task(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.QueryReportTaskRequest( + name='name_value', + ) + +def test_query_report_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.query_report_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.query_report_task] = mock_rpc + request = {} + client.query_report_task(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.query_report_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_query_report_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.query_report_task in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.query_report_task] = mock_rpc + + request = {} + await client.query_report_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.query_report_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_query_report_task_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.QueryReportTaskRequest): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.query_report_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.QueryReportTaskResponse( + row_count=992, + )) + response = await client.query_report_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.QueryReportTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.QueryReportTaskResponse) + assert response.row_count == 992 + + +@pytest.mark.asyncio +async def test_query_report_task_async_from_dict(): + await test_query_report_task_async(request_type=dict) + +def test_query_report_task_field_headers(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.QueryReportTaskRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_report_task), + '__call__') as call: + call.return_value = analytics_data_api.QueryReportTaskResponse() + client.query_report_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_query_report_task_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.QueryReportTaskRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_report_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.QueryReportTaskResponse()) + await client.query_report_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_query_report_task_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_report_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.QueryReportTaskResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.query_report_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_query_report_task_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.query_report_task( + analytics_data_api.QueryReportTaskRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_query_report_task_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_report_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.QueryReportTaskResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.QueryReportTaskResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.query_report_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_query_report_task_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.query_report_task( + analytics_data_api.QueryReportTaskRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.GetReportTaskRequest, + dict, +]) +def test_get_report_task(request_type, transport: str = 'grpc'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_report_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ReportTask( + name='name_value', + ) + response = client.get_report_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.GetReportTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.ReportTask) + assert response.name == 'name_value' + + +def test_get_report_task_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.GetReportTaskRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_report_task), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_report_task(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.GetReportTaskRequest( + name='name_value', + ) + +def test_get_report_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_report_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_report_task] = mock_rpc + request = {} + client.get_report_task(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_report_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_report_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_report_task in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_report_task] = mock_rpc + + request = {} + await client.get_report_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_report_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_report_task_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.GetReportTaskRequest): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_report_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ReportTask( + name='name_value', + )) + response = await client.get_report_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.GetReportTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.ReportTask) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_report_task_async_from_dict(): + await test_get_report_task_async(request_type=dict) + +def test_get_report_task_field_headers(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.GetReportTaskRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_report_task), + '__call__') as call: + call.return_value = analytics_data_api.ReportTask() + client.get_report_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_report_task_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = analytics_data_api.GetReportTaskRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_report_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ReportTask()) + await client.get_report_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_report_task_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_report_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ReportTask() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_report_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_report_task_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_report_task( + analytics_data_api.GetReportTaskRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_report_task_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_report_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ReportTask() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ReportTask()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_report_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_report_task_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_report_task( + analytics_data_api.GetReportTaskRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.ListReportTasksRequest, + dict, +]) +def test_list_report_tasks(request_type, transport: str = 'grpc'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListReportTasksResponse( + next_page_token='next_page_token_value', + ) + response = client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.ListReportTasksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReportTasksPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_report_tasks_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.ListReportTasksRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_report_tasks(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListReportTasksRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_report_tasks_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_report_tasks in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_report_tasks] = mock_rpc + request = {} + client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_report_tasks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_report_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_report_tasks in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_report_tasks] = mock_rpc + + request = {} + await client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_report_tasks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_report_tasks_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.ListReportTasksRequest): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.list_report_tasks),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ListReportTasksResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_report_tasks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = analytics_data_api.ListReportTasksRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListReportTasksAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_report_tasks_async_from_dict():
+    await test_list_report_tasks_async(request_type=dict)
+
+def test_list_report_tasks_field_headers():
+    client = AlphaAnalyticsDataClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = analytics_data_api.ListReportTasksRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_report_tasks),
+            '__call__') as call:
+        call.return_value = analytics_data_api.ListReportTasksResponse()
+        client.list_report_tasks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_report_tasks_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.ListReportTasksRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ListReportTasksResponse()) + await client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_report_tasks_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListReportTasksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_report_tasks( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_report_tasks_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_report_tasks( + analytics_data_api.ListReportTasksRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_report_tasks_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListReportTasksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ListReportTasksResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_report_tasks( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_report_tasks_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_report_tasks( + analytics_data_api.ListReportTasksRequest(), + parent='parent_value', + ) + + +def test_list_report_tasks_pager(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + next_page_token='abc', + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[], + next_page_token='def', + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_report_tasks(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, analytics_data_api.ReportTask) + for i in results) +def test_list_report_tasks_pages(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_report_tasks), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + next_page_token='abc', + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[], + next_page_token='def', + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + ), + RuntimeError, + ) + pages = list(client.list_report_tasks(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_report_tasks_async_pager(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + next_page_token='abc', + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[], + next_page_token='def', + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_report_tasks(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, analytics_data_api.ReportTask) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_report_tasks_async_pages(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + next_page_token='abc', + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[], + next_page_token='def', + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_report_tasks(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_run_funnel_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_funnel_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.run_funnel_report] = mock_rpc + + request = {} + client.run_funnel_report(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.run_funnel_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_audience_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_audience_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_audience_list] = mock_rpc + + request = {} + client.create_audience_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_audience_list_rest_required_fields(request_type=analytics_data_api.CreateAudienceListRequest): + transport_class = transports.AlphaAnalyticsDataRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_audience_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_audience_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.create_audience_list(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_create_audience_list_rest_unset_required_fields():
+    transport = transports.AlphaAnalyticsDataRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.create_audience_list._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("parent", "audienceList", )))
+
+
+def test_create_audience_list_rest_flattened():
+    client = AlphaAnalyticsDataClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'properties/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + audience_list=analytics_data_api.AudienceList(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_audience_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1alpha/{parent=properties/*}/audienceLists" % client.transport._host, args[1]) + + +def test_create_audience_list_rest_flattened_error(transport: str = 'rest'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_audience_list( + analytics_data_api.CreateAudienceListRequest(), + parent='parent_value', + audience_list=analytics_data_api.AudienceList(name='name_value'), + ) + + +def test_query_audience_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.query_audience_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.query_audience_list] = mock_rpc + + request = {} + client.query_audience_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.query_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_query_audience_list_rest_required_fields(request_type=analytics_data_api.QueryAudienceListRequest): + transport_class = transports.AlphaAnalyticsDataRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_audience_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_audience_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.QueryAudienceListResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.query_audience_list(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_query_audience_list_rest_unset_required_fields(): + transport = transports.AlphaAnalyticsDataRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.query_audience_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_query_audience_list_rest_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.QueryAudienceListResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'properties/sample1/audienceLists/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.query_audience_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1alpha/{name=properties/*/audienceLists/*}:query" % client.transport._host, args[1]) + + +def test_query_audience_list_rest_flattened_error(transport: str = 'rest'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.query_audience_list( + analytics_data_api.QueryAudienceListRequest(), + name='name_value', + ) + + +def test_sheet_export_audience_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.sheet_export_audience_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.sheet_export_audience_list] = mock_rpc + + request = {} + client.sheet_export_audience_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.sheet_export_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_sheet_export_audience_list_rest_required_fields(request_type=analytics_data_api.SheetExportAudienceListRequest): + transport_class = transports.AlphaAnalyticsDataRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sheet_export_audience_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sheet_export_audience_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.SheetExportAudienceListResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.SheetExportAudienceListResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.sheet_export_audience_list(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_sheet_export_audience_list_rest_unset_required_fields(): + transport = transports.AlphaAnalyticsDataRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.sheet_export_audience_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_sheet_export_audience_list_rest_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.SheetExportAudienceListResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'properties/sample1/audienceLists/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.SheetExportAudienceListResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.sheet_export_audience_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1alpha/{name=properties/*/audienceLists/*}:exportSheet" % client.transport._host, args[1]) + + +def test_sheet_export_audience_list_rest_flattened_error(transport: str = 'rest'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.sheet_export_audience_list( + analytics_data_api.SheetExportAudienceListRequest(), + name='name_value', + ) + + +def test_get_audience_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_audience_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_audience_list] = mock_rpc + + request = {} + client.get_audience_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_audience_list_rest_required_fields(request_type=analytics_data_api.GetAudienceListRequest): + transport_class = transports.AlphaAnalyticsDataRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_audience_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_audience_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.AudienceList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.AudienceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_audience_list(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_audience_list_rest_unset_required_fields(): + transport = transports.AlphaAnalyticsDataRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_audience_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_audience_list_rest_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.AudienceList() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'properties/sample1/audienceLists/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.AudienceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_audience_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1alpha/{name=properties/*/audienceLists/*}" % client.transport._host, args[1]) + + +def test_get_audience_list_rest_flattened_error(transport: str = 'rest'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_audience_list( + analytics_data_api.GetAudienceListRequest(), + name='name_value', + ) + + +def test_list_audience_lists_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_audience_lists in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_audience_lists] = mock_rpc + + request = {} + client.list_audience_lists(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_audience_lists(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_audience_lists_rest_required_fields(request_type=analytics_data_api.ListAudienceListsRequest): + transport_class = transports.AlphaAnalyticsDataRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_audience_lists._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_audience_lists._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.ListAudienceListsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_audience_lists(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_audience_lists_rest_unset_required_fields(): + transport = transports.AlphaAnalyticsDataRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_audience_lists._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_audience_lists_rest_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.ListAudienceListsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'properties/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_audience_lists(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1alpha/{parent=properties/*}/audienceLists" % client.transport._host, args[1]) + + +def test_list_audience_lists_rest_flattened_error(transport: str = 'rest'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_audience_lists( + analytics_data_api.ListAudienceListsRequest(), + parent='parent_value', + ) + + +def test_list_audience_lists_rest_pager(transport: str = 'rest'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + next_page_token='abc', + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[], + next_page_token='def', + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(analytics_data_api.ListAudienceListsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'properties/sample1'} + + pager = client.list_audience_lists(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, analytics_data_api.AudienceList) + for i in results) + + pages = list(client.list_audience_lists(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_create_recurring_audience_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) 
+ + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_recurring_audience_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_recurring_audience_list] = mock_rpc + + request = {} + client.create_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_recurring_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_recurring_audience_list_rest_required_fields(request_type=analytics_data_api.CreateRecurringAudienceListRequest): + transport_class = transports.AlphaAnalyticsDataRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_recurring_audience_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_recurring_audience_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert 
jsonified_request["parent"] == 'parent_value' + + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.RecurringAudienceList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.RecurringAudienceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_recurring_audience_list(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_recurring_audience_list_rest_unset_required_fields(): + transport = transports.AlphaAnalyticsDataRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_recurring_audience_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "recurringAudienceList", ))) + + +def 
test_create_recurring_audience_list_rest_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.RecurringAudienceList() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'properties/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + recurring_audience_list=analytics_data_api.RecurringAudienceList(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.RecurringAudienceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_recurring_audience_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1alpha/{parent=properties/*}/recurringAudienceLists" % client.transport._host, args[1]) + + +def test_create_recurring_audience_list_rest_flattened_error(transport: str = 'rest'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
    with pytest.raises(ValueError):
        client.create_recurring_audience_list(
            analytics_data_api.CreateRecurringAudienceListRequest(),
            parent='parent_value',
            recurring_audience_list=analytics_data_api.RecurringAudienceList(name='name_value'),
        )


def test_get_recurring_audience_list_rest_use_cached_wrapped_rpc():
    """GetRecurringAudienceList: client must reuse the wrapped RPC cached in _wrapped_methods rather than re-wrapping on every call."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = AlphaAnalyticsDataClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.get_recurring_audience_list in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.get_recurring_audience_list] = mock_rpc

        request = {}
        client.get_recurring_audience_list(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.get_recurring_audience_list(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


def test_get_recurring_audience_list_rest_required_fields(request_type=analytics_data_api.GetRecurringAudienceListRequest):
    """GetRecurringAudienceList: required-field handling over REST, with path_template.transcode mocked so placeholder values pass."""
    transport_class = transports.AlphaAnalyticsDataRestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_recurring_audience_list._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = 'name_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_recurring_audience_list._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == 'name_value'

    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = analytics_data_api.RecurringAudienceList()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = analytics_data_api.RecurringAudienceList.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.get_recurring_audience_list(request)

            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_get_recurring_audience_list_rest_unset_required_fields():
    """GetRecurringAudienceList: unset required fields for an empty request (intersection with the empty default set is empty)."""
    # NOTE(review): AnonymousCredentials is passed as a class, not an instance —
    # consistent with other generated tests; confirm intentional on regeneration.
    transport = transports.AlphaAnalyticsDataRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.get_recurring_audience_list._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("name", )))


def test_get_recurring_audience_list_rest_flattened():
    """GetRecurringAudienceList: a flattened-argument call must resolve to the expected v1alpha REST URL."""
    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = analytics_data_api.RecurringAudienceList()

        # get arguments that satisfy an http rule for this method
        sample_request = {'name': 'properties/sample1/recurringAudienceLists/sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            name='name_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = analytics_data_api.RecurringAudienceList.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.get_recurring_audience_list(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1alpha/{name=properties/*/recurringAudienceLists/*}" % client.transport._host, args[1])


def test_get_recurring_audience_list_rest_flattened_error(transport: str = 'rest'):
    """GetRecurringAudienceList: supplying both a request object and flattened fields must raise ValueError."""
    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_recurring_audience_list(
            analytics_data_api.GetRecurringAudienceListRequest(),
            name='name_value',
        )


def test_list_recurring_audience_lists_rest_use_cached_wrapped_rpc():
    """ListRecurringAudienceLists: client must reuse the wrapped RPC cached in _wrapped_methods rather than re-wrapping on every call."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = AlphaAnalyticsDataClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.list_recurring_audience_lists in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.list_recurring_audience_lists] = mock_rpc

        request = {}
        client.list_recurring_audience_lists(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.list_recurring_audience_lists(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


def test_list_recurring_audience_lists_rest_required_fields(request_type=analytics_data_api.ListRecurringAudienceListsRequest):
    """ListRecurringAudienceLists: required-field handling over REST; also checks paging params are not mixed into path/body params."""
    transport_class = transports.AlphaAnalyticsDataRestTransport

    request_init = {}
    request_init["parent"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_recurring_audience_lists._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["parent"] = 'parent_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_recurring_audience_lists._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("page_size", "page_token", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == 'parent_value'

    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = analytics_data_api.ListRecurringAudienceListsResponse()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list_recurring_audience_lists(request)

            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_list_recurring_audience_lists_rest_unset_required_fields():
    """ListRecurringAudienceLists: unset required fields for an empty request (paging defaults intersected with required set)."""
    # NOTE(review): AnonymousCredentials is passed as a class, not an instance —
    # consistent with other generated tests; confirm intentional on regeneration.
    transport = transports.AlphaAnalyticsDataRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.list_recurring_audience_lists._get_unset_required_fields({})
    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))


def test_list_recurring_audience_lists_rest_flattened():
    """ListRecurringAudienceLists: a flattened-argument call must resolve to the expected v1alpha REST URL."""
    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = analytics_data_api.ListRecurringAudienceListsResponse()

        # get arguments that satisfy an http rule for this method
        sample_request = {'parent': 'properties/sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            parent='parent_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.list_recurring_audience_lists(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1alpha/{parent=properties/*}/recurringAudienceLists" % client.transport._host, args[1])


def test_list_recurring_audience_lists_rest_flattened_error(transport: str = 'rest'):
    """ListRecurringAudienceLists: supplying both a request object and flattened fields must raise ValueError."""
    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_recurring_audience_lists(
            analytics_data_api.ListRecurringAudienceListsRequest(),
            parent='parent_value',
        )


def test_list_recurring_audience_lists_rest_pager(transport: str = 'rest'):
    """ListRecurringAudienceLists: pager flattens all result pages and exposes per-page next_page_token values."""
    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        #with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            analytics_data_api.ListRecurringAudienceListsResponse(
                recurring_audience_lists=[
                    analytics_data_api.RecurringAudienceList(),
                    analytics_data_api.RecurringAudienceList(),
                    analytics_data_api.RecurringAudienceList(),
                ],
                next_page_token='abc',
            ),
            analytics_data_api.ListRecurringAudienceListsResponse(
                recurring_audience_lists=[],
                next_page_token='def',
            ),
            analytics_data_api.ListRecurringAudienceListsResponse(
                recurring_audience_lists=[
                    analytics_data_api.RecurringAudienceList(),
                ],
                next_page_token='ghi',
            ),
            analytics_data_api.ListRecurringAudienceListsResponse(
                recurring_audience_lists=[
                    analytics_data_api.RecurringAudienceList(),
                    analytics_data_api.RecurringAudienceList(),
                ],
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(analytics_data_api.ListRecurringAudienceListsResponse.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode('UTF-8')
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {'parent': 'properties/sample1'}

        pager = client.list_recurring_audience_lists(request=sample_request)

        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, analytics_data_api.RecurringAudienceList)
                   for i in results)

        pages = list(client.list_recurring_audience_lists(request=sample_request).pages)
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token


def test_get_property_quotas_snapshot_rest_use_cached_wrapped_rpc():
    """GetPropertyQuotasSnapshot: client must reuse the wrapped RPC cached in _wrapped_methods rather than re-wrapping on every call."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = AlphaAnalyticsDataClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.get_property_quotas_snapshot in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.get_property_quotas_snapshot] = mock_rpc

        request = {}
        client.get_property_quotas_snapshot(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.get_property_quotas_snapshot(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


def test_get_property_quotas_snapshot_rest_required_fields(request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest):
    """GetPropertyQuotasSnapshot: required-field handling over REST, with path_template.transcode mocked so placeholder values pass."""
    transport_class = transports.AlphaAnalyticsDataRestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_property_quotas_snapshot._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = 'name_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_property_quotas_snapshot._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == 'name_value'

    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = analytics_data_api.PropertyQuotasSnapshot()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = analytics_data_api.PropertyQuotasSnapshot.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.get_property_quotas_snapshot(request)

            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_get_property_quotas_snapshot_rest_unset_required_fields():
    """GetPropertyQuotasSnapshot: unset required fields for an empty request (intersection with the empty default set is empty)."""
    # NOTE(review): AnonymousCredentials is passed as a class, not an instance —
    # consistent with other generated tests; confirm intentional on regeneration.
    transport = transports.AlphaAnalyticsDataRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.get_property_quotas_snapshot._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("name", )))


def test_get_property_quotas_snapshot_rest_flattened():
    """GetPropertyQuotasSnapshot: a flattened-argument call must resolve to the expected v1alpha REST URL."""
    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = analytics_data_api.PropertyQuotasSnapshot()

        # get arguments that satisfy an http rule for this method
        sample_request = {'name': 'properties/sample1/propertyQuotasSnapshot'}

        # get truthy value for each flattened field
        mock_args = dict(
            name='name_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = analytics_data_api.PropertyQuotasSnapshot.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.get_property_quotas_snapshot(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1alpha/{name=properties/*/propertyQuotasSnapshot}" % client.transport._host, args[1])


def test_get_property_quotas_snapshot_rest_flattened_error(transport: str = 'rest'):
    """GetPropertyQuotasSnapshot: supplying both a request object and flattened fields must raise ValueError."""
    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_property_quotas_snapshot(
            analytics_data_api.GetPropertyQuotasSnapshotRequest(),
            name='name_value',
        )


def test_create_report_task_rest_use_cached_wrapped_rpc():
    """CreateReportTask (LRO): client must reuse the wrapped RPC cached in _wrapped_methods rather than re-wrapping on every call."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = AlphaAnalyticsDataClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.create_report_task in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.create_report_task] = mock_rpc

        request = {}
        client.create_report_task(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        # Operation methods build a cached wrapper on first rpc call
        # subsequent calls should use the cached wrapper
        wrapper_fn.reset_mock()

        client.create_report_task(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


def test_create_report_task_rest_required_fields(request_type=analytics_data_api.CreateReportTaskRequest):
    """CreateReportTask: required-field handling over REST; the mocked response is a long-running operations_pb2.Operation."""
    transport_class = transports.AlphaAnalyticsDataRestTransport

    request_init = {}
    request_init["parent"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_report_task._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["parent"] = 'parent_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_report_task._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == 'parent_value'

    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = operations_pb2.Operation(name='operations/spam')
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.create_report_task(request)

            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_create_report_task_rest_unset_required_fields():
    """CreateReportTask: unset required fields for an empty request (intersection with the empty default set is empty)."""
    # NOTE(review): AnonymousCredentials is passed as a class, not an instance —
    # consistent with other generated tests; confirm intentional on regeneration.
    transport = transports.AlphaAnalyticsDataRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.create_report_task._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("parent", "reportTask", )))


def test_create_report_task_rest_flattened():
    """CreateReportTask: a flattened-argument call must resolve to the expected v1alpha REST URL."""
    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name='operations/spam')

        # get arguments that satisfy an http rule for this method
        sample_request = {'parent': 'properties/sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            parent='parent_value',
            report_task=analytics_data_api.ReportTask(name='name_value'),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.create_report_task(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1alpha/{parent=properties/*}/reportTasks" % client.transport._host, args[1])


def test_create_report_task_rest_flattened_error(transport: str = 'rest'):
    """CreateReportTask: supplying both a request object and flattened fields must raise ValueError."""
    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.create_report_task(
            analytics_data_api.CreateReportTaskRequest(),
            parent='parent_value',
            report_task=analytics_data_api.ReportTask(name='name_value'),
        )


def test_query_report_task_rest_use_cached_wrapped_rpc():
    """QueryReportTask: client must reuse the wrapped RPC cached in _wrapped_methods rather than re-wrapping on every call."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = AlphaAnalyticsDataClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.query_report_task in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.query_report_task] = mock_rpc

        request = {}
        client.query_report_task(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.query_report_task(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


def test_query_report_task_rest_required_fields(request_type=analytics_data_api.QueryReportTaskRequest):
    """QueryReportTask: required-field handling over a POST REST call (request body included), with transcode mocked out."""
    transport_class = transports.AlphaAnalyticsDataRestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_report_task._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = 'name_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_report_task._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == 'name_value'

    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = analytics_data_api.QueryReportTaskResponse()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = analytics_data_api.QueryReportTaskResponse.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.query_report_task(request)

            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_query_report_task_rest_unset_required_fields():
    """QueryReportTask: unset required fields for an empty request (intersection with the empty default set is empty)."""
    # NOTE(review): AnonymousCredentials is passed as a class, not an instance —
    # consistent with other generated tests; confirm intentional on regeneration.
    transport = transports.AlphaAnalyticsDataRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.query_report_task._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("name", )))


def test_query_report_task_rest_flattened():
    """QueryReportTask: a flattened-argument call must resolve to the expected v1alpha ':query' REST URL."""
    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = analytics_data_api.QueryReportTaskResponse()

        # get arguments that satisfy an http rule for this method
        sample_request = {'name': 'properties/sample1/reportTasks/sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            name='name_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = analytics_data_api.QueryReportTaskResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.query_report_task(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1alpha/{name=properties/*/reportTasks/*}:query" % client.transport._host, args[1])


def test_query_report_task_rest_flattened_error(transport: str = 'rest'):
    """QueryReportTask: supplying both a request object and flattened fields must raise ValueError."""
    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.query_report_task(
            analytics_data_api.QueryReportTaskRequest(),
            name='name_value',
        )


def test_get_report_task_rest_use_cached_wrapped_rpc():
    """GetReportTask: client must reuse the wrapped RPC cached in _wrapped_methods rather than re-wrapping on every call."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = AlphaAnalyticsDataClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.get_report_task in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.get_report_task] = mock_rpc

        request = {}
        client.get_report_task(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.get_report_task(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


def test_get_report_task_rest_required_fields(request_type=analytics_data_api.GetReportTaskRequest):
    """GetReportTask: required-field handling over REST, with path_template.transcode mocked so placeholder values pass."""
    transport_class = transports.AlphaAnalyticsDataRestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_report_task._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = 'name_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_report_task._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == 'name_value'

    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = analytics_data_api.ReportTask()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = analytics_data_api.ReportTask.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.get_report_task(request)

            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_get_report_task_rest_unset_required_fields():
    """GetReportTask: unset required fields for an empty request (intersection with the empty default set is empty)."""
    # NOTE(review): AnonymousCredentials is passed as a class, not an instance —
    # consistent with other generated tests; confirm intentional on regeneration.
    transport = transports.AlphaAnalyticsDataRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.get_report_task._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("name", )))


def test_get_report_task_rest_flattened():
    """GetReportTask: a flattened-argument call must resolve to the expected v1alpha REST URL."""
    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = analytics_data_api.ReportTask()

        # get arguments that satisfy an http rule for this method
        sample_request = {'name': 'properties/sample1/reportTasks/sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            name='name_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = analytics_data_api.ReportTask.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.get_report_task(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1alpha/{name=properties/*/reportTasks/*}" % client.transport._host, args[1])


def test_get_report_task_rest_flattened_error(transport: str = 'rest'):
    """GetReportTask: supplying both a request object and flattened fields must raise ValueError."""
    client = AlphaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
+ with pytest.raises(ValueError): + client.get_report_task( + analytics_data_api.GetReportTaskRequest(), + name='name_value', + ) + + +def test_list_report_tasks_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_report_tasks in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_report_tasks] = mock_rpc + + request = {} + client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_report_tasks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_report_tasks_rest_required_fields(request_type=analytics_data_api.ListReportTasksRequest): + transport_class = transports.AlphaAnalyticsDataRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_report_tasks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_report_tasks._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.ListReportTasksResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.ListReportTasksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_report_tasks(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_report_tasks_rest_unset_required_fields(): + transport = transports.AlphaAnalyticsDataRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_report_tasks._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_report_tasks_rest_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.ListReportTasksResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'properties/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.ListReportTasksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_report_tasks(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1alpha/{parent=properties/*}/reportTasks" % client.transport._host, args[1]) + + +def test_list_report_tasks_rest_flattened_error(transport: str = 'rest'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_report_tasks( + analytics_data_api.ListReportTasksRequest(), + parent='parent_value', + ) + + +def test_list_report_tasks_rest_pager(transport: str = 'rest'): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + next_page_token='abc', + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[], + next_page_token='def', + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(analytics_data_api.ListReportTasksResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'properties/sample1'} + + pager = client.list_report_tasks(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, analytics_data_api.ReportTask) + for i in results) + + pages = list(client.list_report_tasks(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.AlphaAnalyticsDataGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AlphaAnalyticsDataGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AlphaAnalyticsDataClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.AlphaAnalyticsDataGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AlphaAnalyticsDataClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AlphaAnalyticsDataClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AlphaAnalyticsDataGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AlphaAnalyticsDataClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.AlphaAnalyticsDataGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.AlphaAnalyticsDataGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AlphaAnalyticsDataGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.AlphaAnalyticsDataGrpcTransport, + transports.AlphaAnalyticsDataGrpcAsyncIOTransport, + transports.AlphaAnalyticsDataRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = AlphaAnalyticsDataClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_funnel_report_empty_call_grpc(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.run_funnel_report), + '__call__') as call: + call.return_value = analytics_data_api.RunFunnelReportResponse() + client.run_funnel_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.RunFunnelReportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_audience_list_empty_call_grpc(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_list), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_audience_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.CreateAudienceListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_query_audience_list_empty_call_grpc(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_list), + '__call__') as call: + call.return_value = analytics_data_api.QueryAudienceListResponse() + client.query_audience_list(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.QueryAudienceListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_sheet_export_audience_list_empty_call_grpc(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.sheet_export_audience_list), + '__call__') as call: + call.return_value = analytics_data_api.SheetExportAudienceListResponse() + client.sheet_export_audience_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.SheetExportAudienceListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_audience_list_empty_call_grpc(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_list), + '__call__') as call: + call.return_value = analytics_data_api.AudienceList() + client.get_audience_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetAudienceListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_audience_lists_empty_call_grpc(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_lists), + '__call__') as call: + call.return_value = analytics_data_api.ListAudienceListsResponse() + client.list_audience_lists(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.ListAudienceListsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_recurring_audience_list_empty_call_grpc(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_recurring_audience_list), + '__call__') as call: + call.return_value = analytics_data_api.RecurringAudienceList() + client.create_recurring_audience_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.CreateRecurringAudienceListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_recurring_audience_list_empty_call_grpc(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_recurring_audience_list), + '__call__') as call: + call.return_value = analytics_data_api.RecurringAudienceList() + client.get_recurring_audience_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetRecurringAudienceListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_recurring_audience_lists_empty_call_grpc(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_recurring_audience_lists), + '__call__') as call: + call.return_value = analytics_data_api.ListRecurringAudienceListsResponse() + client.list_recurring_audience_lists(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.ListRecurringAudienceListsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_property_quotas_snapshot_empty_call_grpc(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_property_quotas_snapshot), + '__call__') as call: + call.return_value = analytics_data_api.PropertyQuotasSnapshot() + client.get_property_quotas_snapshot(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetPropertyQuotasSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_report_task_empty_call_grpc(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_report_task), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_report_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.CreateReportTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_query_report_task_empty_call_grpc(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.query_report_task), + '__call__') as call: + call.return_value = analytics_data_api.QueryReportTaskResponse() + client.query_report_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.QueryReportTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_report_task_empty_call_grpc(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_report_task), + '__call__') as call: + call.return_value = analytics_data_api.ReportTask() + client.get_report_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetReportTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_report_tasks_empty_call_grpc(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), + '__call__') as call: + call.return_value = analytics_data_api.ListReportTasksResponse() + client.list_report_tasks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.ListReportTasksRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = AlphaAnalyticsDataAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_run_funnel_report_empty_call_grpc_asyncio(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_funnel_report), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RunFunnelReportResponse( + kind='kind_value', + )) + await client.run_funnel_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.RunFunnelReportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_audience_list_empty_call_grpc_asyncio(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_audience_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.CreateAudienceListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_query_audience_list_empty_call_grpc_asyncio(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.QueryAudienceListResponse( + row_count=992, + )) + await client.query_audience_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.QueryAudienceListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_sheet_export_audience_list_empty_call_grpc_asyncio(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.sheet_export_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.SheetExportAudienceListResponse( + spreadsheet_uri='spreadsheet_uri_value', + spreadsheet_id='spreadsheet_id_value', + row_count=992, + )) + await client.sheet_export_audience_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.SheetExportAudienceListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_audience_list_empty_call_grpc_asyncio(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.AudienceList( + name='name_value', + audience='audience_value', + audience_display_name='audience_display_name_value', + state=analytics_data_api.AudienceList.State.CREATING, + creation_quota_tokens_charged=3070, + row_count=992, + error_message='error_message_value', + percentage_completed=0.2106, + recurring_audience_list='recurring_audience_list_value', + )) + await client.get_audience_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetAudienceListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_audience_lists_empty_call_grpc_asyncio(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_lists), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ListAudienceListsResponse( + next_page_token='next_page_token_value', + )) + await client.list_audience_lists(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.ListAudienceListsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_recurring_audience_list_empty_call_grpc_asyncio(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_recurring_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RecurringAudienceList( + name='name_value', + audience='audience_value', + audience_display_name='audience_display_name_value', + active_days_remaining=2213, + audience_lists=['audience_lists_value'], + )) + await client.create_recurring_audience_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.CreateRecurringAudienceListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_recurring_audience_list_empty_call_grpc_asyncio(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_recurring_audience_list), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RecurringAudienceList( + name='name_value', + audience='audience_value', + audience_display_name='audience_display_name_value', + active_days_remaining=2213, + audience_lists=['audience_lists_value'], + )) + await client.get_recurring_audience_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetRecurringAudienceListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_recurring_audience_lists_empty_call_grpc_asyncio(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_recurring_audience_lists), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ListRecurringAudienceListsResponse( + next_page_token='next_page_token_value', + )) + await client.list_recurring_audience_lists(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.ListRecurringAudienceListsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_property_quotas_snapshot_empty_call_grpc_asyncio(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_property_quotas_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.PropertyQuotasSnapshot( + name='name_value', + )) + await client.get_property_quotas_snapshot(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetPropertyQuotasSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_report_task_empty_call_grpc_asyncio(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_report_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_report_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.CreateReportTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_query_report_task_empty_call_grpc_asyncio(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.query_report_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.QueryReportTaskResponse( + row_count=992, + )) + await client.query_report_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.QueryReportTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_report_task_empty_call_grpc_asyncio(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_report_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ReportTask( + name='name_value', + )) + await client.get_report_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetReportTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_report_tasks_empty_call_grpc_asyncio(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ListReportTasksResponse( + next_page_token='next_page_token_value', + )) + await client.list_report_tasks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.ListReportTasksRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = AlphaAnalyticsDataClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_run_funnel_report_rest_bad_request(request_type=analytics_data_api.RunFunnelReportRequest): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'property': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.run_funnel_report(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.RunFunnelReportRequest, + dict, +]) +def test_run_funnel_report_rest_call_success(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'property': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.RunFunnelReportResponse( + kind='kind_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.RunFunnelReportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.run_funnel_report(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RunFunnelReportResponse) + assert response.kind == 'kind_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_funnel_report_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "post_run_funnel_report") as post, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "pre_run_funnel_report") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.RunFunnelReportRequest.pb(analytics_data_api.RunFunnelReportRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = 
analytics_data_api.RunFunnelReportResponse.to_json(analytics_data_api.RunFunnelReportResponse()) + req.return_value.content = return_value + + request = analytics_data_api.RunFunnelReportRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.RunFunnelReportResponse() + + client.run_funnel_report(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_audience_list_rest_bad_request(request_type=analytics_data_api.CreateAudienceListRequest): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_audience_list(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.CreateAudienceListRequest, + dict, +]) +def test_create_audience_list_rest_call_success(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'properties/sample1'} + request_init["audience_list"] = {'name': 'name_value', 'audience': 'audience_value', 'audience_display_name': 'audience_display_name_value', 'dimensions': [{'dimension_name': 'dimension_name_value'}], 'state': 1, 'begin_creating_time': {'seconds': 751, 
'nanos': 543}, 'creation_quota_tokens_charged': 3070, 'row_count': 992, 'error_message': 'error_message_value', 'percentage_completed': 0.2106, 'recurring_audience_list': 'recurring_audience_list_value', 'webhook_notification': {'uri': 'uri_value', 'channel_token': 'channel_token_value'}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_data_api.CreateAudienceListRequest.meta.fields["audience_list"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["audience_list"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is 
another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["audience_list"][field])): + del request_init["audience_list"][field][i][subfield] + else: + del request_init["audience_list"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_audience_list(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_audience_list_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "post_create_audience_list") as post, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "pre_create_audience_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.CreateAudienceListRequest.pb(analytics_data_api.CreateAudienceListRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = analytics_data_api.CreateAudienceListRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_audience_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_query_audience_list_rest_bad_request(request_type=analytics_data_api.QueryAudienceListRequest): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + 
request_init = {'name': 'properties/sample1/audienceLists/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.query_audience_list(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.QueryAudienceListRequest, + dict, +]) +def test_query_audience_list_rest_call_success(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'properties/sample1/audienceLists/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.QueryAudienceListResponse( + row_count=992, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.query_audience_list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, analytics_data_api.QueryAudienceListResponse) + assert response.row_count == 992 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_query_audience_list_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "post_query_audience_list") as post, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "pre_query_audience_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.QueryAudienceListRequest.pb(analytics_data_api.QueryAudienceListRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.QueryAudienceListResponse.to_json(analytics_data_api.QueryAudienceListResponse()) + req.return_value.content = return_value + + request = analytics_data_api.QueryAudienceListRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.QueryAudienceListResponse() + + client.query_audience_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_sheet_export_audience_list_rest_bad_request(request_type=analytics_data_api.SheetExportAudienceListRequest): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # 
send a request that will satisfy transcoding + request_init = {'name': 'properties/sample1/audienceLists/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.sheet_export_audience_list(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.SheetExportAudienceListRequest, + dict, +]) +def test_sheet_export_audience_list_rest_call_success(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'properties/sample1/audienceLists/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.SheetExportAudienceListResponse( + spreadsheet_uri='spreadsheet_uri_value', + spreadsheet_id='spreadsheet_id_value', + row_count=992, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.SheetExportAudienceListResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.sheet_export_audience_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.SheetExportAudienceListResponse) + assert response.spreadsheet_uri == 'spreadsheet_uri_value' + assert response.spreadsheet_id == 'spreadsheet_id_value' + assert response.row_count == 992 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_sheet_export_audience_list_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "post_sheet_export_audience_list") as post, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "pre_sheet_export_audience_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.SheetExportAudienceListRequest.pb(analytics_data_api.SheetExportAudienceListRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + 
+ req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.SheetExportAudienceListResponse.to_json(analytics_data_api.SheetExportAudienceListResponse()) + req.return_value.content = return_value + + request = analytics_data_api.SheetExportAudienceListRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.SheetExportAudienceListResponse() + + client.sheet_export_audience_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_audience_list_rest_bad_request(request_type=analytics_data_api.GetAudienceListRequest): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'properties/sample1/audienceLists/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_audience_list(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.GetAudienceListRequest, + dict, +]) +def test_get_audience_list_rest_call_success(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'properties/sample1/audienceLists/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.AudienceList( + name='name_value', + audience='audience_value', + audience_display_name='audience_display_name_value', + state=analytics_data_api.AudienceList.State.CREATING, + creation_quota_tokens_charged=3070, + row_count=992, + error_message='error_message_value', + percentage_completed=0.2106, + recurring_audience_list='recurring_audience_list_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.AudienceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_audience_list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, analytics_data_api.AudienceList) + assert response.name == 'name_value' + assert response.audience == 'audience_value' + assert response.audience_display_name == 'audience_display_name_value' + assert response.state == analytics_data_api.AudienceList.State.CREATING + assert response.creation_quota_tokens_charged == 3070 + assert response.row_count == 992 + assert response.error_message == 'error_message_value' + assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) + assert response.recurring_audience_list == 'recurring_audience_list_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_audience_list_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "post_get_audience_list") as post, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "pre_get_audience_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.GetAudienceListRequest.pb(analytics_data_api.GetAudienceListRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.AudienceList.to_json(analytics_data_api.AudienceList()) + req.return_value.content = return_value + + request = analytics_data_api.GetAudienceListRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + 
post.return_value = analytics_data_api.AudienceList() + + client.get_audience_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_audience_lists_rest_bad_request(request_type=analytics_data_api.ListAudienceListsRequest): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_audience_lists(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.ListAudienceListsRequest, + dict, +]) +def test_list_audience_lists_rest_call_success(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.ListAudienceListsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_audience_lists(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAudienceListsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_audience_lists_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "post_list_audience_lists") as post, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "pre_list_audience_lists") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.ListAudienceListsRequest.pb(analytics_data_api.ListAudienceListsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.ListAudienceListsResponse.to_json(analytics_data_api.ListAudienceListsResponse()) + req.return_value.content = 
return_value + + request = analytics_data_api.ListAudienceListsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.ListAudienceListsResponse() + + client.list_audience_lists(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_recurring_audience_list_rest_bad_request(request_type=analytics_data_api.CreateRecurringAudienceListRequest): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_recurring_audience_list(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.CreateRecurringAudienceListRequest, + dict, +]) +def test_create_recurring_audience_list_rest_call_success(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'properties/sample1'} + request_init["recurring_audience_list"] = {'name': 'name_value', 'audience': 'audience_value', 'audience_display_name': 'audience_display_name_value', 'dimensions': [{'dimension_name': 'dimension_name_value'}], 'active_days_remaining': 2213, 'audience_lists': ['audience_lists_value1', 'audience_lists_value2'], 
'webhook_notification': {'uri': 'uri_value', 'channel_token': 'channel_token_value'}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_data_api.CreateRecurringAudienceListRequest.meta.fields["recurring_audience_list"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["recurring_audience_list"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in 
runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["recurring_audience_list"][field])): + del request_init["recurring_audience_list"][field][i][subfield] + else: + del request_init["recurring_audience_list"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.RecurringAudienceList( + name='name_value', + audience='audience_value', + audience_display_name='audience_display_name_value', + active_days_remaining=2213, + audience_lists=['audience_lists_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.RecurringAudienceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_recurring_audience_list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, analytics_data_api.RecurringAudienceList) + assert response.name == 'name_value' + assert response.audience == 'audience_value' + assert response.audience_display_name == 'audience_display_name_value' + assert response.active_days_remaining == 2213 + assert response.audience_lists == ['audience_lists_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_recurring_audience_list_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "post_create_recurring_audience_list") as post, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "pre_create_recurring_audience_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.CreateRecurringAudienceListRequest.pb(analytics_data_api.CreateRecurringAudienceListRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.RecurringAudienceList.to_json(analytics_data_api.RecurringAudienceList()) + req.return_value.content = return_value + + request = analytics_data_api.CreateRecurringAudienceListRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.RecurringAudienceList() + + client.create_recurring_audience_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + 
pre.assert_called_once() + post.assert_called_once() + + +def test_get_recurring_audience_list_rest_bad_request(request_type=analytics_data_api.GetRecurringAudienceListRequest): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'properties/sample1/recurringAudienceLists/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_recurring_audience_list(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.GetRecurringAudienceListRequest, + dict, +]) +def test_get_recurring_audience_list_rest_call_success(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'properties/sample1/recurringAudienceLists/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.RecurringAudienceList( + name='name_value', + audience='audience_value', + audience_display_name='audience_display_name_value', + active_days_remaining=2213, + audience_lists=['audience_lists_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.RecurringAudienceList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_recurring_audience_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RecurringAudienceList) + assert response.name == 'name_value' + assert response.audience == 'audience_value' + assert response.audience_display_name == 'audience_display_name_value' + assert response.active_days_remaining == 2213 + assert response.audience_lists == ['audience_lists_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_recurring_audience_list_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "post_get_recurring_audience_list") as post, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "pre_get_recurring_audience_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
analytics_data_api.GetRecurringAudienceListRequest.pb(analytics_data_api.GetRecurringAudienceListRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.RecurringAudienceList.to_json(analytics_data_api.RecurringAudienceList()) + req.return_value.content = return_value + + request = analytics_data_api.GetRecurringAudienceListRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.RecurringAudienceList() + + client.get_recurring_audience_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_recurring_audience_lists_rest_bad_request(request_type=analytics_data_api.ListRecurringAudienceListsRequest): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_recurring_audience_lists(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.ListRecurringAudienceListsRequest, + dict, +]) +def test_list_recurring_audience_lists_rest_call_success(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.ListRecurringAudienceListsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_recurring_audience_lists(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListRecurringAudienceListsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "post_list_recurring_audience_lists") as post, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "pre_list_recurring_audience_lists") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.ListRecurringAudienceListsRequest.pb(analytics_data_api.ListRecurringAudienceListsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.ListRecurringAudienceListsResponse.to_json(analytics_data_api.ListRecurringAudienceListsResponse()) + req.return_value.content = return_value + + request = analytics_data_api.ListRecurringAudienceListsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.ListRecurringAudienceListsResponse() + + client.list_recurring_audience_lists(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_property_quotas_snapshot_rest_bad_request(request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest): + 
client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'properties/sample1/propertyQuotasSnapshot'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_property_quotas_snapshot(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.GetPropertyQuotasSnapshotRequest, + dict, +]) +def test_get_property_quotas_snapshot_rest_call_success(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'properties/sample1/propertyQuotasSnapshot'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.PropertyQuotasSnapshot( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.PropertyQuotasSnapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_property_quotas_snapshot(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) + assert response.name == 'name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_property_quotas_snapshot_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "post_get_property_quotas_snapshot") as post, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "pre_get_property_quotas_snapshot") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.GetPropertyQuotasSnapshotRequest.pb(analytics_data_api.GetPropertyQuotasSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.PropertyQuotasSnapshot.to_json(analytics_data_api.PropertyQuotasSnapshot()) + req.return_value.content = 
return_value + + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.PropertyQuotasSnapshot() + + client.get_property_quotas_snapshot(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_report_task_rest_bad_request(request_type=analytics_data_api.CreateReportTaskRequest): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_report_task(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.CreateReportTaskRequest, + dict, +]) +def test_create_report_task_rest_call_success(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'properties/sample1'} + request_init["report_task"] = {'name': 'name_value', 'report_definition': {'dimensions': [{'name': 'name_value', 'dimension_expression': {'lower_case': {'dimension_name': 'dimension_name_value'}, 'upper_case': {}, 'concatenate': {'dimension_names': ['dimension_names_value1', 'dimension_names_value2'], 'delimiter': 'delimiter_value'}}}], 'metrics': [{'name': 
'name_value', 'expression': 'expression_value', 'invisible': True}], 'date_ranges': [{'start_date': 'start_date_value', 'end_date': 'end_date_value', 'name': 'name_value'}], 'dimension_filter': {'and_group': {'expressions': {}}, 'or_group': {}, 'not_expression': {}, 'filter': {'field_name': 'field_name_value', 'string_filter': {'match_type': 1, 'value': 'value_value', 'case_sensitive': True}, 'in_list_filter': {'values': ['values_value1', 'values_value2'], 'case_sensitive': True}, 'numeric_filter': {'operation': 1, 'value': {'int64_value': 1073, 'double_value': 0.12710000000000002}}, 'between_filter': {'from_value': {}, 'to_value': {}}, 'empty_filter': {}}}, 'metric_filter': {}, 'offset': 647, 'limit': 543, 'metric_aggregations': [1], 'order_bys': [{'metric': {'metric_name': 'metric_name_value'}, 'dimension': {'dimension_name': 'dimension_name_value', 'order_type': 1}, 'desc': True}], 'currency_code': 'currency_code_value', 'cohort_spec': {'cohorts': [{'name': 'name_value', 'dimension': 'dimension_value', 'date_range': {}}], 'cohorts_range': {'granularity': 1, 'start_offset': 1300, 'end_offset': 1053}, 'cohort_report_settings': {'accumulate': True}}, 'keep_empty_rows': True, 'sampling_level': 1}, 'report_metadata': {'state': 1, 'begin_creating_time': {'seconds': 751, 'nanos': 543}, 'creation_quota_tokens_charged': 3070, 'task_row_count': 1522, 'error_message': 'error_message_value', 'total_row_count': 1635}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_data_api.CreateReportTaskRequest.meta.fields["report_task"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["report_task"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["report_task"][field])): + del 
request_init["report_task"][field][i][subfield] + else: + del request_init["report_task"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_report_task(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_report_task_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "post_create_report_task") as post, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "pre_create_report_task") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.CreateReportTaskRequest.pb(analytics_data_api.CreateReportTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = analytics_data_api.CreateReportTaskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_report_task(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_query_report_task_rest_bad_request(request_type=analytics_data_api.QueryReportTaskRequest): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'properties/sample1/reportTasks/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.query_report_task(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.QueryReportTaskRequest, + dict, +]) +def test_query_report_task_rest_call_success(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'properties/sample1/reportTasks/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.QueryReportTaskResponse( + row_count=992, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.QueryReportTaskResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.query_report_task(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.QueryReportTaskResponse) + assert response.row_count == 992 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_query_report_task_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "post_query_report_task") as post, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "pre_query_report_task") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.QueryReportTaskRequest.pb(analytics_data_api.QueryReportTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = 
analytics_data_api.QueryReportTaskResponse.to_json(analytics_data_api.QueryReportTaskResponse()) + req.return_value.content = return_value + + request = analytics_data_api.QueryReportTaskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.QueryReportTaskResponse() + + client.query_report_task(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_report_task_rest_bad_request(request_type=analytics_data_api.GetReportTaskRequest): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'properties/sample1/reportTasks/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_report_task(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.GetReportTaskRequest, + dict, +]) +def test_get_report_task_rest_call_success(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'properties/sample1/reportTasks/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.ReportTask( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.ReportTask.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_report_task(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.ReportTask) + assert response.name == 'name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_report_task_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "post_get_report_task") as post, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "pre_get_report_task") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.GetReportTaskRequest.pb(analytics_data_api.GetReportTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.ReportTask.to_json(analytics_data_api.ReportTask()) + req.return_value.content 
= return_value + + request = analytics_data_api.GetReportTaskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.ReportTask() + + client.get_report_task(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_report_tasks_rest_bad_request(request_type=analytics_data_api.ListReportTasksRequest): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_report_tasks(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.ListReportTasksRequest, + dict, +]) +def test_list_report_tasks_rest_call_success(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.ListReportTasksResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.ListReportTasksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_report_tasks(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReportTasksPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_report_tasks_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "post_list_report_tasks") as post, \ + mock.patch.object(transports.AlphaAnalyticsDataRestInterceptor, "pre_list_report_tasks") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.ListReportTasksRequest.pb(analytics_data_api.ListReportTasksRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.ListReportTasksResponse.to_json(analytics_data_api.ListReportTasksResponse()) + req.return_value.content = return_value + + request = 
analytics_data_api.ListReportTasksRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.ListReportTasksResponse() + + client.list_report_tasks(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + +def test_initialize_client_w_rest(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_funnel_report_empty_call_rest(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_funnel_report), + '__call__') as call: + client.run_funnel_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.RunFunnelReportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_audience_list_empty_call_rest(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_list), + '__call__') as call: + client.create_audience_list(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.CreateAudienceListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_query_audience_list_empty_call_rest(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_list), + '__call__') as call: + client.query_audience_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.QueryAudienceListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_sheet_export_audience_list_empty_call_rest(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.sheet_export_audience_list), + '__call__') as call: + client.sheet_export_audience_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.SheetExportAudienceListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_audience_list_empty_call_rest(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_audience_list), + '__call__') as call: + client.get_audience_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetAudienceListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_audience_lists_empty_call_rest(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_lists), + '__call__') as call: + client.list_audience_lists(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.ListAudienceListsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_recurring_audience_list_empty_call_rest(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_recurring_audience_list), + '__call__') as call: + client.create_recurring_audience_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.CreateRecurringAudienceListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_recurring_audience_list_empty_call_rest(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_recurring_audience_list), + '__call__') as call: + client.get_recurring_audience_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetRecurringAudienceListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_recurring_audience_lists_empty_call_rest(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_recurring_audience_lists), + '__call__') as call: + client.list_recurring_audience_lists(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.ListRecurringAudienceListsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_property_quotas_snapshot_empty_call_rest(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_property_quotas_snapshot), + '__call__') as call: + client.get_property_quotas_snapshot(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetPropertyQuotasSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_report_task_empty_call_rest(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_report_task), + '__call__') as call: + client.create_report_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.CreateReportTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_query_report_task_empty_call_rest(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.query_report_task), + '__call__') as call: + client.query_report_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.QueryReportTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_report_task_empty_call_rest(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_report_task), + '__call__') as call: + client.get_report_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetReportTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_report_tasks_empty_call_rest(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), + '__call__') as call: + client.list_report_tasks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.ListReportTasksRequest() + + assert args[0] == request_msg + + +def test_alpha_analytics_data_rest_lro_client(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, +operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AlphaAnalyticsDataGrpcTransport, + ) + +def test_alpha_analytics_data_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AlphaAnalyticsDataTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_alpha_analytics_data_base_transport(): + # Instantiate the base transport. + with mock.patch('google.analytics.data_v1alpha.services.alpha_analytics_data.transports.AlphaAnalyticsDataTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.AlphaAnalyticsDataTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'run_funnel_report', + 'create_audience_list', + 'query_audience_list', + 'sheet_export_audience_list', + 'get_audience_list', + 'list_audience_lists', + 'create_recurring_audience_list', + 'get_recurring_audience_list', + 'list_recurring_audience_lists', + 'get_property_quotas_snapshot', + 'create_report_task', + 'query_report_task', + 'get_report_task', + 'list_report_tasks', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_alpha_analytics_data_base_transport_with_credentials_file(): + # 
Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.analytics.data_v1alpha.services.alpha_analytics_data.transports.AlphaAnalyticsDataTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AlphaAnalyticsDataTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/analytics', + 'https://www.googleapis.com/auth/analytics.readonly', + 'https://www.googleapis.com/auth/drive', + 'https://www.googleapis.com/auth/drive.file', + 'https://www.googleapis.com/auth/spreadsheets', +), + quota_project_id="octopus", + ) + + +def test_alpha_analytics_data_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.analytics.data_v1alpha.services.alpha_analytics_data.transports.AlphaAnalyticsDataTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AlphaAnalyticsDataTransport() + adc.assert_called_once() + + +def test_alpha_analytics_data_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AlphaAnalyticsDataClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/analytics', + 'https://www.googleapis.com/auth/analytics.readonly', + 'https://www.googleapis.com/auth/drive', + 'https://www.googleapis.com/auth/drive.file', + 'https://www.googleapis.com/auth/spreadsheets', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AlphaAnalyticsDataGrpcTransport, + transports.AlphaAnalyticsDataGrpcAsyncIOTransport, + ], +) +def test_alpha_analytics_data_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/analytics', 'https://www.googleapis.com/auth/analytics.readonly', 'https://www.googleapis.com/auth/drive', 'https://www.googleapis.com/auth/drive.file', 'https://www.googleapis.com/auth/spreadsheets',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AlphaAnalyticsDataGrpcTransport, + transports.AlphaAnalyticsDataGrpcAsyncIOTransport, + transports.AlphaAnalyticsDataRestTransport, + ], +) +def test_alpha_analytics_data_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + 
type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AlphaAnalyticsDataGrpcTransport, grpc_helpers), + (transports.AlphaAnalyticsDataGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_alpha_analytics_data_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "analyticsdata.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/analytics', + 'https://www.googleapis.com/auth/analytics.readonly', + 'https://www.googleapis.com/auth/drive', + 'https://www.googleapis.com/auth/drive.file', + 'https://www.googleapis.com/auth/spreadsheets', +), + scopes=["1", "2"], + default_host="analyticsdata.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.AlphaAnalyticsDataGrpcTransport, transports.AlphaAnalyticsDataGrpcAsyncIOTransport]) +def test_alpha_analytics_data_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_alpha_analytics_data_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.AlphaAnalyticsDataRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_alpha_analytics_data_host_no_port(transport_name): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='analyticsdata.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'analyticsdata.googleapis.com:443' + if transport_name in 
['grpc', 'grpc_asyncio'] + else 'https://analyticsdata.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_alpha_analytics_data_host_with_port(transport_name): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='analyticsdata.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'analyticsdata.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://analyticsdata.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_alpha_analytics_data_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = AlphaAnalyticsDataClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AlphaAnalyticsDataClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.run_funnel_report._session + session2 = client2.transport.run_funnel_report._session + assert session1 != session2 + session1 = client1.transport.create_audience_list._session + session2 = client2.transport.create_audience_list._session + assert session1 != session2 + session1 = client1.transport.query_audience_list._session + session2 = client2.transport.query_audience_list._session + assert session1 != session2 + session1 = client1.transport.sheet_export_audience_list._session + session2 = client2.transport.sheet_export_audience_list._session + assert session1 != session2 + session1 = client1.transport.get_audience_list._session + session2 = client2.transport.get_audience_list._session + assert session1 != session2 + session1 = client1.transport.list_audience_lists._session + session2 = client2.transport.list_audience_lists._session + assert session1 != session2 + session1 = 
client1.transport.create_recurring_audience_list._session + session2 = client2.transport.create_recurring_audience_list._session + assert session1 != session2 + session1 = client1.transport.get_recurring_audience_list._session + session2 = client2.transport.get_recurring_audience_list._session + assert session1 != session2 + session1 = client1.transport.list_recurring_audience_lists._session + session2 = client2.transport.list_recurring_audience_lists._session + assert session1 != session2 + session1 = client1.transport.get_property_quotas_snapshot._session + session2 = client2.transport.get_property_quotas_snapshot._session + assert session1 != session2 + session1 = client1.transport.create_report_task._session + session2 = client2.transport.create_report_task._session + assert session1 != session2 + session1 = client1.transport.query_report_task._session + session2 = client2.transport.query_report_task._session + assert session1 != session2 + session1 = client1.transport.get_report_task._session + session2 = client2.transport.get_report_task._session + assert session1 != session2 + session1 = client1.transport.list_report_tasks._session + session2 = client2.transport.list_report_tasks._session + assert session1 != session2 +def test_alpha_analytics_data_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AlphaAnalyticsDataGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_alpha_analytics_data_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.AlphaAnalyticsDataGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.AlphaAnalyticsDataGrpcTransport, transports.AlphaAnalyticsDataGrpcAsyncIOTransport]) +def test_alpha_analytics_data_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# 
removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.AlphaAnalyticsDataGrpcTransport, transports.AlphaAnalyticsDataGrpcAsyncIOTransport]) +def test_alpha_analytics_data_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_alpha_analytics_data_grpc_lro_client(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_alpha_analytics_data_grpc_lro_async_client(): + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_audience_list_path(): + property = "squid" + audience_list = "clam" + expected = "properties/{property}/audienceLists/{audience_list}".format(property=property, audience_list=audience_list, ) + actual = AlphaAnalyticsDataClient.audience_list_path(property, audience_list) + assert expected == actual + + +def test_parse_audience_list_path(): + expected = { + "property": "whelk", + "audience_list": "octopus", + } + path = AlphaAnalyticsDataClient.audience_list_path(**expected) + + # Check that the path construction is reversible. + actual = AlphaAnalyticsDataClient.parse_audience_list_path(path) + assert expected == actual + +def test_property_quotas_snapshot_path(): + property = "oyster" + expected = "properties/{property}/propertyQuotasSnapshot".format(property=property, ) + actual = AlphaAnalyticsDataClient.property_quotas_snapshot_path(property) + assert expected == actual + + +def test_parse_property_quotas_snapshot_path(): + expected = { + "property": "nudibranch", + } + path = AlphaAnalyticsDataClient.property_quotas_snapshot_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlphaAnalyticsDataClient.parse_property_quotas_snapshot_path(path) + assert expected == actual + +def test_recurring_audience_list_path(): + property = "cuttlefish" + recurring_audience_list = "mussel" + expected = "properties/{property}/recurringAudienceLists/{recurring_audience_list}".format(property=property, recurring_audience_list=recurring_audience_list, ) + actual = AlphaAnalyticsDataClient.recurring_audience_list_path(property, recurring_audience_list) + assert expected == actual + + +def test_parse_recurring_audience_list_path(): + expected = { + "property": "winkle", + "recurring_audience_list": "nautilus", + } + path = AlphaAnalyticsDataClient.recurring_audience_list_path(**expected) + + # Check that the path construction is reversible. + actual = AlphaAnalyticsDataClient.parse_recurring_audience_list_path(path) + assert expected == actual + +def test_report_task_path(): + property = "scallop" + report_task = "abalone" + expected = "properties/{property}/reportTasks/{report_task}".format(property=property, report_task=report_task, ) + actual = AlphaAnalyticsDataClient.report_task_path(property, report_task) + assert expected == actual + + +def test_parse_report_task_path(): + expected = { + "property": "squid", + "report_task": "clam", + } + path = AlphaAnalyticsDataClient.report_task_path(**expected) + + # Check that the path construction is reversible. + actual = AlphaAnalyticsDataClient.parse_report_task_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = AlphaAnalyticsDataClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = AlphaAnalyticsDataClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlphaAnalyticsDataClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format(folder=folder, ) + actual = AlphaAnalyticsDataClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = AlphaAnalyticsDataClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = AlphaAnalyticsDataClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format(organization=organization, ) + actual = AlphaAnalyticsDataClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = AlphaAnalyticsDataClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = AlphaAnalyticsDataClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format(project=project, ) + actual = AlphaAnalyticsDataClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = AlphaAnalyticsDataClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlphaAnalyticsDataClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = AlphaAnalyticsDataClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = AlphaAnalyticsDataClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = AlphaAnalyticsDataClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.AlphaAnalyticsDataTransport, '_prep_wrapped_messages') as prep: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.AlphaAnalyticsDataTransport, '_prep_wrapped_messages') as prep: + transport_class = AlphaAnalyticsDataClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close_grpc(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as 
close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (AlphaAnalyticsDataClient, transports.AlphaAnalyticsDataGrpcTransport), + (AlphaAnalyticsDataAsyncClient, transports.AlphaAnalyticsDataGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-analytics-data/v1beta/.coveragerc b/owl-bot-staging/google-analytics-data/v1beta/.coveragerc new file 
mode 100644 index 000000000000..d50b66c60b09 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/analytics/data/__init__.py + google/analytics/data/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/google-analytics-data/v1beta/.flake8 b/owl-bot-staging/google-analytics-data/v1beta/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/google-analytics-data/v1beta/MANIFEST.in b/owl-bot-staging/google-analytics-data/v1beta/MANIFEST.in new file mode 100644 index 000000000000..910ea96170d1 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/analytics/data *.py +recursive-include google/analytics/data_v1beta *.py diff --git a/owl-bot-staging/google-analytics-data/v1beta/README.rst b/owl-bot-staging/google-analytics-data/v1beta/README.rst new file mode 100644 index 000000000000..d62497b892c1 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Analytics Data API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Analytics Data API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. 
code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-analytics-data/v1beta/docs/_static/custom.css b/owl-bot-staging/google-analytics-data/v1beta/docs/_static/custom.css new file mode 100644 index 000000000000..06423be0b592 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/owl-bot-staging/google-analytics-data/v1beta/docs/conf.py b/owl-bot-staging/google-analytics-data/v1beta/docs/conf.py new file mode 100644 index 000000000000..d7245576d5f6 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-analytics-data documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. 
+ +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGELOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-analytics-data" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. 
+release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. 
For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Analytics Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. 
+# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. 
+htmlhelp_basename = "google-analytics-data-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-analytics-data.tex", + u"google-analytics-data Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + ( + root_doc, + "google-analytics-data", + u"Google Analytics Data Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-analytics-data", + u"google-analytics-data Documentation", + author, + "google-analytics-data", + "GAPIC library for Google Analytics Data API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/google-analytics-data/v1beta/docs/data_v1beta/beta_analytics_data.rst b/owl-bot-staging/google-analytics-data/v1beta/docs/data_v1beta/beta_analytics_data.rst new file mode 100644 index 000000000000..b5c0de656c3f --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/docs/data_v1beta/beta_analytics_data.rst @@ -0,0 +1,10 @@ +BetaAnalyticsData +----------------------------------- + +.. automodule:: google.analytics.data_v1beta.services.beta_analytics_data + :members: + :inherited-members: + +.. 
automodule:: google.analytics.data_v1beta.services.beta_analytics_data.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-analytics-data/v1beta/docs/data_v1beta/services_.rst b/owl-bot-staging/google-analytics-data/v1beta/docs/data_v1beta/services_.rst new file mode 100644 index 000000000000..769b3f87b75a --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/docs/data_v1beta/services_.rst @@ -0,0 +1,6 @@ +Services for Google Analytics Data v1beta API +============================================= +.. toctree:: + :maxdepth: 2 + + beta_analytics_data diff --git a/owl-bot-staging/google-analytics-data/v1beta/docs/data_v1beta/types_.rst b/owl-bot-staging/google-analytics-data/v1beta/docs/data_v1beta/types_.rst new file mode 100644 index 000000000000..2d6bf68e64d2 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/docs/data_v1beta/types_.rst @@ -0,0 +1,6 @@ +Types for Google Analytics Data v1beta API +========================================== + +.. automodule:: google.analytics.data_v1beta.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/google-analytics-data/v1beta/docs/index.rst b/owl-bot-staging/google-analytics-data/v1beta/docs/index.rst new file mode 100644 index 000000000000..70e623ebd902 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + data_v1beta/services_ + data_v1beta/types_ diff --git a/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data/__init__.py b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data/__init__.py new file mode 100644 index 000000000000..83e62f69d2d1 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data/__init__.py @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.analytics.data import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.analytics.data_v1beta.services.beta_analytics_data.client import BetaAnalyticsDataClient +from google.analytics.data_v1beta.services.beta_analytics_data.async_client import BetaAnalyticsDataAsyncClient + +from google.analytics.data_v1beta.types.analytics_data_api import AudienceDimension +from google.analytics.data_v1beta.types.analytics_data_api import AudienceDimensionValue +from google.analytics.data_v1beta.types.analytics_data_api import AudienceExport +from google.analytics.data_v1beta.types.analytics_data_api import AudienceExportMetadata +from google.analytics.data_v1beta.types.analytics_data_api import AudienceRow +from google.analytics.data_v1beta.types.analytics_data_api import BatchRunPivotReportsRequest +from google.analytics.data_v1beta.types.analytics_data_api import BatchRunPivotReportsResponse +from google.analytics.data_v1beta.types.analytics_data_api import BatchRunReportsRequest +from google.analytics.data_v1beta.types.analytics_data_api import BatchRunReportsResponse +from google.analytics.data_v1beta.types.analytics_data_api import CheckCompatibilityRequest +from google.analytics.data_v1beta.types.analytics_data_api import CheckCompatibilityResponse +from google.analytics.data_v1beta.types.analytics_data_api import CreateAudienceExportRequest +from google.analytics.data_v1beta.types.analytics_data_api import GetAudienceExportRequest +from google.analytics.data_v1beta.types.analytics_data_api import GetMetadataRequest +from google.analytics.data_v1beta.types.analytics_data_api import ListAudienceExportsRequest +from google.analytics.data_v1beta.types.analytics_data_api import ListAudienceExportsResponse +from google.analytics.data_v1beta.types.analytics_data_api import Metadata +from google.analytics.data_v1beta.types.analytics_data_api import QueryAudienceExportRequest +from 
google.analytics.data_v1beta.types.analytics_data_api import QueryAudienceExportResponse +from google.analytics.data_v1beta.types.analytics_data_api import RunPivotReportRequest +from google.analytics.data_v1beta.types.analytics_data_api import RunPivotReportResponse +from google.analytics.data_v1beta.types.analytics_data_api import RunRealtimeReportRequest +from google.analytics.data_v1beta.types.analytics_data_api import RunRealtimeReportResponse +from google.analytics.data_v1beta.types.analytics_data_api import RunReportRequest +from google.analytics.data_v1beta.types.analytics_data_api import RunReportResponse +from google.analytics.data_v1beta.types.data import Cohort +from google.analytics.data_v1beta.types.data import CohortReportSettings +from google.analytics.data_v1beta.types.data import CohortSpec +from google.analytics.data_v1beta.types.data import CohortsRange +from google.analytics.data_v1beta.types.data import Comparison +from google.analytics.data_v1beta.types.data import ComparisonMetadata +from google.analytics.data_v1beta.types.data import DateRange +from google.analytics.data_v1beta.types.data import Dimension +from google.analytics.data_v1beta.types.data import DimensionCompatibility +from google.analytics.data_v1beta.types.data import DimensionExpression +from google.analytics.data_v1beta.types.data import DimensionHeader +from google.analytics.data_v1beta.types.data import DimensionMetadata +from google.analytics.data_v1beta.types.data import DimensionValue +from google.analytics.data_v1beta.types.data import Filter +from google.analytics.data_v1beta.types.data import FilterExpression +from google.analytics.data_v1beta.types.data import FilterExpressionList +from google.analytics.data_v1beta.types.data import Metric +from google.analytics.data_v1beta.types.data import MetricCompatibility +from google.analytics.data_v1beta.types.data import MetricHeader +from google.analytics.data_v1beta.types.data import MetricMetadata +from 
google.analytics.data_v1beta.types.data import MetricValue +from google.analytics.data_v1beta.types.data import MinuteRange +from google.analytics.data_v1beta.types.data import NumericValue +from google.analytics.data_v1beta.types.data import OrderBy +from google.analytics.data_v1beta.types.data import Pivot +from google.analytics.data_v1beta.types.data import PivotDimensionHeader +from google.analytics.data_v1beta.types.data import PivotHeader +from google.analytics.data_v1beta.types.data import PropertyQuota +from google.analytics.data_v1beta.types.data import QuotaStatus +from google.analytics.data_v1beta.types.data import ResponseMetaData +from google.analytics.data_v1beta.types.data import Row +from google.analytics.data_v1beta.types.data import SamplingMetadata +from google.analytics.data_v1beta.types.data import Compatibility +from google.analytics.data_v1beta.types.data import MetricAggregation +from google.analytics.data_v1beta.types.data import MetricType +from google.analytics.data_v1beta.types.data import RestrictedMetricType + +__all__ = ('BetaAnalyticsDataClient', + 'BetaAnalyticsDataAsyncClient', + 'AudienceDimension', + 'AudienceDimensionValue', + 'AudienceExport', + 'AudienceExportMetadata', + 'AudienceRow', + 'BatchRunPivotReportsRequest', + 'BatchRunPivotReportsResponse', + 'BatchRunReportsRequest', + 'BatchRunReportsResponse', + 'CheckCompatibilityRequest', + 'CheckCompatibilityResponse', + 'CreateAudienceExportRequest', + 'GetAudienceExportRequest', + 'GetMetadataRequest', + 'ListAudienceExportsRequest', + 'ListAudienceExportsResponse', + 'Metadata', + 'QueryAudienceExportRequest', + 'QueryAudienceExportResponse', + 'RunPivotReportRequest', + 'RunPivotReportResponse', + 'RunRealtimeReportRequest', + 'RunRealtimeReportResponse', + 'RunReportRequest', + 'RunReportResponse', + 'Cohort', + 'CohortReportSettings', + 'CohortSpec', + 'CohortsRange', + 'Comparison', + 'ComparisonMetadata', + 'DateRange', + 'Dimension', + 'DimensionCompatibility', + 
'DimensionExpression', + 'DimensionHeader', + 'DimensionMetadata', + 'DimensionValue', + 'Filter', + 'FilterExpression', + 'FilterExpressionList', + 'Metric', + 'MetricCompatibility', + 'MetricHeader', + 'MetricMetadata', + 'MetricValue', + 'MinuteRange', + 'NumericValue', + 'OrderBy', + 'Pivot', + 'PivotDimensionHeader', + 'PivotHeader', + 'PropertyQuota', + 'QuotaStatus', + 'ResponseMetaData', + 'Row', + 'SamplingMetadata', + 'Compatibility', + 'MetricAggregation', + 'MetricType', + 'RestrictedMetricType', +) diff --git a/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data/gapic_version.py b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data/py.typed b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data/py.typed new file mode 100644 index 000000000000..1d549e500f60 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-analytics-data package uses inline types. 
diff --git a/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/__init__.py b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/__init__.py new file mode 100644 index 000000000000..ea794b72048a --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/__init__.py @@ -0,0 +1,150 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.analytics.data_v1beta import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.beta_analytics_data import BetaAnalyticsDataClient +from .services.beta_analytics_data import BetaAnalyticsDataAsyncClient + +from .types.analytics_data_api import AudienceDimension +from .types.analytics_data_api import AudienceDimensionValue +from .types.analytics_data_api import AudienceExport +from .types.analytics_data_api import AudienceExportMetadata +from .types.analytics_data_api import AudienceRow +from .types.analytics_data_api import BatchRunPivotReportsRequest +from .types.analytics_data_api import BatchRunPivotReportsResponse +from .types.analytics_data_api import BatchRunReportsRequest +from .types.analytics_data_api import BatchRunReportsResponse +from .types.analytics_data_api import CheckCompatibilityRequest +from .types.analytics_data_api import CheckCompatibilityResponse +from .types.analytics_data_api import CreateAudienceExportRequest +from 
.types.analytics_data_api import GetAudienceExportRequest +from .types.analytics_data_api import GetMetadataRequest +from .types.analytics_data_api import ListAudienceExportsRequest +from .types.analytics_data_api import ListAudienceExportsResponse +from .types.analytics_data_api import Metadata +from .types.analytics_data_api import QueryAudienceExportRequest +from .types.analytics_data_api import QueryAudienceExportResponse +from .types.analytics_data_api import RunPivotReportRequest +from .types.analytics_data_api import RunPivotReportResponse +from .types.analytics_data_api import RunRealtimeReportRequest +from .types.analytics_data_api import RunRealtimeReportResponse +from .types.analytics_data_api import RunReportRequest +from .types.analytics_data_api import RunReportResponse +from .types.data import Cohort +from .types.data import CohortReportSettings +from .types.data import CohortSpec +from .types.data import CohortsRange +from .types.data import Comparison +from .types.data import ComparisonMetadata +from .types.data import DateRange +from .types.data import Dimension +from .types.data import DimensionCompatibility +from .types.data import DimensionExpression +from .types.data import DimensionHeader +from .types.data import DimensionMetadata +from .types.data import DimensionValue +from .types.data import Filter +from .types.data import FilterExpression +from .types.data import FilterExpressionList +from .types.data import Metric +from .types.data import MetricCompatibility +from .types.data import MetricHeader +from .types.data import MetricMetadata +from .types.data import MetricValue +from .types.data import MinuteRange +from .types.data import NumericValue +from .types.data import OrderBy +from .types.data import Pivot +from .types.data import PivotDimensionHeader +from .types.data import PivotHeader +from .types.data import PropertyQuota +from .types.data import QuotaStatus +from .types.data import ResponseMetaData +from .types.data import Row 
+from .types.data import SamplingMetadata +from .types.data import Compatibility +from .types.data import MetricAggregation +from .types.data import MetricType +from .types.data import RestrictedMetricType + +__all__ = ( + 'BetaAnalyticsDataAsyncClient', +'AudienceDimension', +'AudienceDimensionValue', +'AudienceExport', +'AudienceExportMetadata', +'AudienceRow', +'BatchRunPivotReportsRequest', +'BatchRunPivotReportsResponse', +'BatchRunReportsRequest', +'BatchRunReportsResponse', +'BetaAnalyticsDataClient', +'CheckCompatibilityRequest', +'CheckCompatibilityResponse', +'Cohort', +'CohortReportSettings', +'CohortSpec', +'CohortsRange', +'Comparison', +'ComparisonMetadata', +'Compatibility', +'CreateAudienceExportRequest', +'DateRange', +'Dimension', +'DimensionCompatibility', +'DimensionExpression', +'DimensionHeader', +'DimensionMetadata', +'DimensionValue', +'Filter', +'FilterExpression', +'FilterExpressionList', +'GetAudienceExportRequest', +'GetMetadataRequest', +'ListAudienceExportsRequest', +'ListAudienceExportsResponse', +'Metadata', +'Metric', +'MetricAggregation', +'MetricCompatibility', +'MetricHeader', +'MetricMetadata', +'MetricType', +'MetricValue', +'MinuteRange', +'NumericValue', +'OrderBy', +'Pivot', +'PivotDimensionHeader', +'PivotHeader', +'PropertyQuota', +'QueryAudienceExportRequest', +'QueryAudienceExportResponse', +'QuotaStatus', +'ResponseMetaData', +'RestrictedMetricType', +'Row', +'RunPivotReportRequest', +'RunPivotReportResponse', +'RunRealtimeReportRequest', +'RunRealtimeReportResponse', +'RunReportRequest', +'RunReportResponse', +'SamplingMetadata', +) diff --git a/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/gapic_metadata.json b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/gapic_metadata.json new file mode 100644 index 000000000000..a00d9bb919b6 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/gapic_metadata.json @@ -0,0 +1,193 @@ + { 
+ "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.analytics.data_v1beta", + "protoPackage": "google.analytics.data.v1beta", + "schema": "1.0", + "services": { + "BetaAnalyticsData": { + "clients": { + "grpc": { + "libraryClient": "BetaAnalyticsDataClient", + "rpcs": { + "BatchRunPivotReports": { + "methods": [ + "batch_run_pivot_reports" + ] + }, + "BatchRunReports": { + "methods": [ + "batch_run_reports" + ] + }, + "CheckCompatibility": { + "methods": [ + "check_compatibility" + ] + }, + "CreateAudienceExport": { + "methods": [ + "create_audience_export" + ] + }, + "GetAudienceExport": { + "methods": [ + "get_audience_export" + ] + }, + "GetMetadata": { + "methods": [ + "get_metadata" + ] + }, + "ListAudienceExports": { + "methods": [ + "list_audience_exports" + ] + }, + "QueryAudienceExport": { + "methods": [ + "query_audience_export" + ] + }, + "RunPivotReport": { + "methods": [ + "run_pivot_report" + ] + }, + "RunRealtimeReport": { + "methods": [ + "run_realtime_report" + ] + }, + "RunReport": { + "methods": [ + "run_report" + ] + } + } + }, + "grpc-async": { + "libraryClient": "BetaAnalyticsDataAsyncClient", + "rpcs": { + "BatchRunPivotReports": { + "methods": [ + "batch_run_pivot_reports" + ] + }, + "BatchRunReports": { + "methods": [ + "batch_run_reports" + ] + }, + "CheckCompatibility": { + "methods": [ + "check_compatibility" + ] + }, + "CreateAudienceExport": { + "methods": [ + "create_audience_export" + ] + }, + "GetAudienceExport": { + "methods": [ + "get_audience_export" + ] + }, + "GetMetadata": { + "methods": [ + "get_metadata" + ] + }, + "ListAudienceExports": { + "methods": [ + "list_audience_exports" + ] + }, + "QueryAudienceExport": { + "methods": [ + "query_audience_export" + ] + }, + "RunPivotReport": { + "methods": [ + "run_pivot_report" + ] + }, + "RunRealtimeReport": { + "methods": [ + "run_realtime_report" + ] + }, + "RunReport": { + "methods": 
[ + "run_report" + ] + } + } + }, + "rest": { + "libraryClient": "BetaAnalyticsDataClient", + "rpcs": { + "BatchRunPivotReports": { + "methods": [ + "batch_run_pivot_reports" + ] + }, + "BatchRunReports": { + "methods": [ + "batch_run_reports" + ] + }, + "CheckCompatibility": { + "methods": [ + "check_compatibility" + ] + }, + "CreateAudienceExport": { + "methods": [ + "create_audience_export" + ] + }, + "GetAudienceExport": { + "methods": [ + "get_audience_export" + ] + }, + "GetMetadata": { + "methods": [ + "get_metadata" + ] + }, + "ListAudienceExports": { + "methods": [ + "list_audience_exports" + ] + }, + "QueryAudienceExport": { + "methods": [ + "query_audience_export" + ] + }, + "RunPivotReport": { + "methods": [ + "run_pivot_report" + ] + }, + "RunRealtimeReport": { + "methods": [ + "run_realtime_report" + ] + }, + "RunReport": { + "methods": [ + "run_report" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/gapic_version.py b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/py.typed b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/py.typed new file mode 100644 index 000000000000..1d549e500f60 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-analytics-data package uses inline types. diff --git a/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/__init__.py b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/__init__.py b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/__init__.py new file mode 100644 index 000000000000..62ab2b6cd6ac --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import BetaAnalyticsDataClient +from .async_client import BetaAnalyticsDataAsyncClient + +__all__ = ( + 'BetaAnalyticsDataClient', + 'BetaAnalyticsDataAsyncClient', +) diff --git a/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py new file mode 100644 index 000000000000..ac9b4aa53e41 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py @@ -0,0 +1,1439 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.analytics.data_v1beta import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.analytics.data_v1beta.services.beta_analytics_data import pagers +from google.analytics.data_v1beta.types import analytics_data_api +from google.analytics.data_v1beta.types import data +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import BetaAnalyticsDataTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import BetaAnalyticsDataGrpcAsyncIOTransport +from .client import BetaAnalyticsDataClient + + +class BetaAnalyticsDataAsyncClient: + """Google Analytics reporting data service.""" + + _client: BetaAnalyticsDataClient + + # Copy defaults from the 
synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = BetaAnalyticsDataClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = BetaAnalyticsDataClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = BetaAnalyticsDataClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = BetaAnalyticsDataClient._DEFAULT_UNIVERSE + + audience_export_path = staticmethod(BetaAnalyticsDataClient.audience_export_path) + parse_audience_export_path = staticmethod(BetaAnalyticsDataClient.parse_audience_export_path) + metadata_path = staticmethod(BetaAnalyticsDataClient.metadata_path) + parse_metadata_path = staticmethod(BetaAnalyticsDataClient.parse_metadata_path) + common_billing_account_path = staticmethod(BetaAnalyticsDataClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(BetaAnalyticsDataClient.parse_common_billing_account_path) + common_folder_path = staticmethod(BetaAnalyticsDataClient.common_folder_path) + parse_common_folder_path = staticmethod(BetaAnalyticsDataClient.parse_common_folder_path) + common_organization_path = staticmethod(BetaAnalyticsDataClient.common_organization_path) + parse_common_organization_path = staticmethod(BetaAnalyticsDataClient.parse_common_organization_path) + common_project_path = staticmethod(BetaAnalyticsDataClient.common_project_path) + parse_common_project_path = staticmethod(BetaAnalyticsDataClient.parse_common_project_path) + common_location_path = staticmethod(BetaAnalyticsDataClient.common_location_path) + parse_common_location_path = staticmethod(BetaAnalyticsDataClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + BetaAnalyticsDataAsyncClient: The constructed client. + """ + return BetaAnalyticsDataClient.from_service_account_info.__func__(BetaAnalyticsDataAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BetaAnalyticsDataAsyncClient: The constructed client. + """ + return BetaAnalyticsDataClient.from_service_account_file.__func__(BetaAnalyticsDataAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. 
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return BetaAnalyticsDataClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> BetaAnalyticsDataTransport: + """Returns the transport used by the client instance. + + Returns: + BetaAnalyticsDataTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = BetaAnalyticsDataClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, BetaAnalyticsDataTransport, Callable[..., BetaAnalyticsDataTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the beta analytics data async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,BetaAnalyticsDataTransport,Callable[..., BetaAnalyticsDataTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BetaAnalyticsDataTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = BetaAnalyticsDataClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def run_report(self, + request: Optional[Union[analytics_data_api.RunReportRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.RunReportResponse: + r"""Returns a customized report of your Google Analytics event data. + Reports contain statistics derived from data collected by the + Google Analytics tracking code. The data returned from the API + is as a table with columns for the requested dimensions and + metrics. Metrics are individual measurements of user activity on + your property, such as active users or event count. Dimensions + break down metrics across some common criteria, such as country + or event name. + + For a guide to constructing requests & understanding responses, + see `Creating a + Report `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + async def sample_run_report(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.RunReportRequest( + ) + + # Make the request + response = await client.run_report(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1beta.types.RunReportRequest, dict]]): + The request object. The request to generate a report. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.RunReportResponse: + The response report table + corresponding to a request. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.RunReportRequest): + request = analytics_data_api.RunReportRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.run_report] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("property", request.property), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def run_pivot_report(self, + request: Optional[Union[analytics_data_api.RunPivotReportRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.RunPivotReportResponse: + r"""Returns a customized pivot report of your Google + Analytics event data. Pivot reports are more advanced + and expressive formats than regular reports. In a pivot + report, dimensions are only visible if they are included + in a pivot. Multiple pivots can be specified to further + dissect your data. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + async def sample_run_pivot_report(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.RunPivotReportRequest( + ) + + # Make the request + response = await client.run_pivot_report(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1beta.types.RunPivotReportRequest, dict]]): + The request object. The request to generate a pivot + report. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.RunPivotReportResponse: + The response pivot report table + corresponding to a pivot request. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.RunPivotReportRequest): + request = analytics_data_api.RunPivotReportRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.run_pivot_report] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("property", request.property), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_run_reports(self, + request: Optional[Union[analytics_data_api.BatchRunReportsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.BatchRunReportsResponse: + r"""Returns multiple reports in a batch. All reports must + be for the same Google Analytics property. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + async def sample_batch_run_reports(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.BatchRunReportsRequest( + ) + + # Make the request + response = await client.batch_run_reports(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1beta.types.BatchRunReportsRequest, dict]]): + The request object. The batch request containing multiple + report requests. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.BatchRunReportsResponse: + The batch response containing + multiple reports. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.BatchRunReportsRequest): + request = analytics_data_api.BatchRunReportsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.batch_run_reports] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("property", request.property), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_run_pivot_reports(self, + request: Optional[Union[analytics_data_api.BatchRunPivotReportsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.BatchRunPivotReportsResponse: + r"""Returns multiple pivot reports in a batch. All + reports must be for the same Google Analytics property. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + async def sample_batch_run_pivot_reports(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.BatchRunPivotReportsRequest( + ) + + # Make the request + response = await client.batch_run_pivot_reports(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1beta.types.BatchRunPivotReportsRequest, dict]]): + The request object. The batch request containing multiple + pivot report requests. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.analytics.data_v1beta.types.BatchRunPivotReportsResponse: + The batch response containing + multiple pivot reports. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.BatchRunPivotReportsRequest): + request = analytics_data_api.BatchRunPivotReportsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.batch_run_pivot_reports] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("property", request.property), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_metadata(self, + request: Optional[Union[analytics_data_api.GetMetadataRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.Metadata: + r"""Returns metadata for dimensions and metrics available in + reporting methods. Used to explore the dimensions and metrics. + In this method, a Google Analytics property identifier is + specified in the request, and the metadata response includes + Custom dimensions and metrics as well as Universal metadata. + + For example if a custom metric with parameter name + ``levels_unlocked`` is registered to a property, the Metadata + response will contain ``customEvent:levels_unlocked``. 
Universal + metadata are dimensions and metrics applicable to any property + such as ``country`` and ``totalUsers``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + async def sample_get_metadata(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.GetMetadataRequest( + name="name_value", + ) + + # Make the request + response = await client.get_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1beta.types.GetMetadataRequest, dict]]): + The request object. Request for a property's dimension + and metric metadata. + name (:class:`str`): + Required. The resource name of the metadata to retrieve. + This name field is specified in the URL path and not URL + parameters. Property is a numeric Google Analytics + property identifier. To learn more, see `where to find + your Property + ID `__. + + Example: properties/1234/metadata + + Set the Property ID to 0 for dimensions and metrics + common to all properties. In this special mode, this + method will not return custom dimensions and metrics. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.analytics.data_v1beta.types.Metadata: + The dimensions, metrics and + comparisons currently accepted in + reporting methods. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetMetadataRequest): + request = analytics_data_api.GetMetadataRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_metadata] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def run_realtime_report(self, + request: Optional[Union[analytics_data_api.RunRealtimeReportRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.RunRealtimeReportResponse: + r"""Returns a customized report of realtime event data for your + property. 
Events appear in realtime reports seconds after they + have been sent to the Google Analytics. Realtime reports show + events and usage data for the periods of time ranging from the + present moment to 30 minutes ago (up to 60 minutes for Google + Analytics 360 properties). + + For a guide to constructing realtime requests & understanding + responses, see `Creating a Realtime + Report `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + async def sample_run_realtime_report(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.RunRealtimeReportRequest( + ) + + # Make the request + response = await client.run_realtime_report(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1beta.types.RunRealtimeReportRequest, dict]]): + The request object. The request to generate a realtime + report. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.RunRealtimeReportResponse: + The response realtime report table + corresponding to a request. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, analytics_data_api.RunRealtimeReportRequest): + request = analytics_data_api.RunRealtimeReportRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.run_realtime_report] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("property", request.property), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def check_compatibility(self, + request: Optional[Union[analytics_data_api.CheckCompatibilityRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.CheckCompatibilityResponse: + r"""This compatibility method lists dimensions and + metrics that can be added to a report request and + maintain compatibility. This method fails if the + request's dimensions and metrics are incompatible. + + In Google Analytics, reports fail if they request + incompatible dimensions and/or metrics; in that case, + you will need to remove dimensions and/or metrics from + the incompatible report until the report is compatible. + + The Realtime and Core reports have different + compatibility rules. This method checks compatibility + for Core reports. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + async def sample_check_compatibility(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.CheckCompatibilityRequest( + ) + + # Make the request + response = await client.check_compatibility(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1beta.types.CheckCompatibilityRequest, dict]]): + The request object. The request for compatibility information for a report's + dimensions and metrics. Check compatibility provides a + preview of the compatibility of a report; fields shared + with the ``runReport`` request should be the same values + as in your ``runReport`` request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.CheckCompatibilityResponse: + The compatibility response with the + compatibility of each dimension & + metric. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.CheckCompatibilityRequest): + request = analytics_data_api.CheckCompatibilityRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.check_compatibility] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("property", request.property), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_audience_export(self, + request: Optional[Union[analytics_data_api.CreateAudienceExportRequest, dict]] = None, + *, + parent: Optional[str] = None, + audience_export: Optional[analytics_data_api.AudienceExport] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an audience export for later retrieval. This method + quickly returns the audience export's resource name and + initiates a long running asynchronous request to form an + audience export. To export the users in an audience export, + first create the audience export through this method and then + send the audience resource name to the ``QueryAudienceExport`` + method. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + An audience export is a snapshot of the users currently in the + audience at the time of audience export creation. Creating + audience exports for one audience on different days will return + different results as users enter and exit the audience. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + Audience exports contain the users in each audience. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. 
To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + async def sample_create_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + audience_export = data_v1beta.AudienceExport() + audience_export.audience = "audience_value" + + request = data_v1beta.CreateAudienceExportRequest( + parent="parent_value", + audience_export=audience_export, + ) + + # Make the request + operation = client.create_audience_export(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1beta.types.CreateAudienceExportRequest, dict]]): + The request object. A request to create a new audience + export. + parent (:class:`str`): + Required. The parent resource where this audience export + will be created. Format: ``properties/{property}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + audience_export (:class:`google.analytics.data_v1beta.types.AudienceExport`): + Required. The audience export to + create. + + This corresponds to the ``audience_export`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.analytics.data_v1beta.types.AudienceExport` An audience export is a list of users in an audience at the time of the + list's creation. One audience may have multiple + audience exports created for different days. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, audience_export]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.CreateAudienceExportRequest): + request = analytics_data_api.CreateAudienceExportRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if audience_export is not None: + request.audience_export = audience_export + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_audience_export] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + analytics_data_api.AudienceExport, + metadata_type=analytics_data_api.AudienceExportMetadata, + ) + + # Done; return the response. + return response + + async def query_audience_export(self, + request: Optional[Union[analytics_data_api.QueryAudienceExportRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.QueryAudienceExportResponse: + r"""Retrieves an audience export of users. After creating an + audience, the users are not immediately available for exporting. + First, a request to ``CreateAudienceExport`` is necessary to + create an audience export of users, and then second, this method + is used to retrieve the users in the audience export. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + async def sample_query_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.QueryAudienceExportRequest( + name="name_value", + ) + + # Make the request + response = await client.query_audience_export(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1beta.types.QueryAudienceExportRequest, dict]]): + The request object. A request to list users in an + audience export. + name (:class:`str`): + Required. The name of the audience export to retrieve + users from. Format: + ``properties/{property}/audienceExports/{audience_export}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.QueryAudienceExportResponse: + A list of users in an audience + export. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, analytics_data_api.QueryAudienceExportRequest): + request = analytics_data_api.QueryAudienceExportRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.query_audience_export] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_audience_export(self, + request: Optional[Union[analytics_data_api.GetAudienceExportRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.AudienceExport: + r"""Gets configuration metadata about a specific audience export. + This method can be used to understand an audience export after + it has been created. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + async def sample_get_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.GetAudienceExportRequest( + name="name_value", + ) + + # Make the request + response = await client.get_audience_export(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1beta.types.GetAudienceExportRequest, dict]]): + The request object. A request to retrieve configuration + metadata about a specific audience + export. + name (:class:`str`): + Required. The audience export resource name. Format: + ``properties/{property}/audienceExports/{audience_export}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.AudienceExport: + An audience export is a list of users + in an audience at the time of the list's + creation. One audience may have multiple + audience exports created for different + days. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetAudienceExportRequest): + request = analytics_data_api.GetAudienceExportRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_audience_export] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_audience_exports(self, + request: Optional[Union[analytics_data_api.ListAudienceExportsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAudienceExportsAsyncPager: + r"""Lists all audience exports for a property. This method can be + used for you to find and reuse existing audience exports rather + than creating unnecessary new audience exports. The same + audience can have multiple audience exports that represent the + export of users that were in an audience on different days. 
+ + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + async def sample_list_audience_exports(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.ListAudienceExportsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_audience_exports(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1beta.types.ListAudienceExportsRequest, dict]]): + The request object. A request to list all audience + exports for a property. + parent (:class:`str`): + Required. All audience exports for this property will be + listed in the response. Format: + ``properties/{property}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.analytics.data_v1beta.services.beta_analytics_data.pagers.ListAudienceExportsAsyncPager: + A list of all audience exports for a + property. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.ListAudienceExportsRequest): + request = analytics_data_api.ListAudienceExportsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_audience_exports] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAudienceExportsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "BetaAnalyticsDataAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "BetaAnalyticsDataAsyncClient", +) diff --git a/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/client.py b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/client.py new file mode 100644 index 000000000000..e10e8dd7b159 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/client.py @@ -0,0 +1,1777 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.analytics.data_v1beta import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.analytics.data_v1beta.services.beta_analytics_data import pagers +from google.analytics.data_v1beta.types import analytics_data_api +from google.analytics.data_v1beta.types import data +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import BetaAnalyticsDataTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import BetaAnalyticsDataGrpcTransport +from .transports.grpc_asyncio import BetaAnalyticsDataGrpcAsyncIOTransport +from .transports.rest import BetaAnalyticsDataRestTransport + + +class BetaAnalyticsDataClientMeta(type): + """Metaclass for the BetaAnalyticsData client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[BetaAnalyticsDataTransport]] + _transport_registry["grpc"] = BetaAnalyticsDataGrpcTransport + _transport_registry["grpc_asyncio"] = BetaAnalyticsDataGrpcAsyncIOTransport + _transport_registry["rest"] = BetaAnalyticsDataRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[BetaAnalyticsDataTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class BetaAnalyticsDataClient(metaclass=BetaAnalyticsDataClientMeta): + """Google Analytics reporting data service.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "analyticsdata.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "analyticsdata.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BetaAnalyticsDataClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BetaAnalyticsDataClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> BetaAnalyticsDataTransport: + """Returns the transport used by the client instance. + + Returns: + BetaAnalyticsDataTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def audience_export_path(property: str,audience_export: str,) -> str: + """Returns a fully-qualified audience_export string.""" + return "properties/{property}/audienceExports/{audience_export}".format(property=property, audience_export=audience_export, ) + + @staticmethod + def parse_audience_export_path(path: str) -> Dict[str,str]: + """Parses a audience_export path into its component segments.""" + m = re.match(r"^properties/(?P.+?)/audienceExports/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def metadata_path(property: str,) -> str: + """Returns a fully-qualified metadata string.""" + return "properties/{property}/metadata".format(property=property, ) + + @staticmethod + def parse_metadata_path(path: str) -> Dict[str,str]: + """Parses a metadata path into its component segments.""" + m = re.match(r"^properties/(?P.+?)/metadata$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return 
"organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = BetaAnalyticsDataClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = BetaAnalyticsDataClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = BetaAnalyticsDataClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. 
+ + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = BetaAnalyticsDataClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, BetaAnalyticsDataTransport, Callable[..., BetaAnalyticsDataTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the beta analytics data client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,BetaAnalyticsDataTransport,Callable[..., BetaAnalyticsDataTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BetaAnalyticsDataTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = BetaAnalyticsDataClient._read_environment_variables() + self._client_cert_source = BetaAnalyticsDataClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = BetaAnalyticsDataClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, BetaAnalyticsDataTransport) + if transport_provided: + # transport is a BetaAnalyticsDataTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(BetaAnalyticsDataTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + BetaAnalyticsDataClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[BetaAnalyticsDataTransport], Callable[..., BetaAnalyticsDataTransport]] = ( + BetaAnalyticsDataClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., BetaAnalyticsDataTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def run_report(self, + request: Optional[Union[analytics_data_api.RunReportRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.RunReportResponse: + r"""Returns a customized report of your Google Analytics event data. + Reports contain statistics derived from data collected by the + Google Analytics tracking code. The data returned from the API + is as a table with columns for the requested dimensions and + metrics. 
Metrics are individual measurements of user activity on + your property, such as active users or event count. Dimensions + break down metrics across some common criteria, such as country + or event name. + + For a guide to constructing requests & understanding responses, + see `Creating a + Report `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + def sample_run_report(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.RunReportRequest( + ) + + # Make the request + response = client.run_report(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1beta.types.RunReportRequest, dict]): + The request object. The request to generate a report. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.RunReportResponse: + The response report table + corresponding to a request. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, analytics_data_api.RunReportRequest): + request = analytics_data_api.RunReportRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_report] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("property", request.property), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def run_pivot_report(self, + request: Optional[Union[analytics_data_api.RunPivotReportRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.RunPivotReportResponse: + r"""Returns a customized pivot report of your Google + Analytics event data. Pivot reports are more advanced + and expressive formats than regular reports. In a pivot + report, dimensions are only visible if they are included + in a pivot. Multiple pivots can be specified to further + dissect your data. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + def sample_run_pivot_report(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.RunPivotReportRequest( + ) + + # Make the request + response = client.run_pivot_report(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1beta.types.RunPivotReportRequest, dict]): + The request object. The request to generate a pivot + report. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.RunPivotReportResponse: + The response pivot report table + corresponding to a pivot request. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.RunPivotReportRequest): + request = analytics_data_api.RunPivotReportRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_pivot_report] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("property", request.property), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_run_reports(self, + request: Optional[Union[analytics_data_api.BatchRunReportsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.BatchRunReportsResponse: + r"""Returns multiple reports in a batch. All reports must + be for the same Google Analytics property. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + def sample_batch_run_reports(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.BatchRunReportsRequest( + ) + + # Make the request + response = client.batch_run_reports(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1beta.types.BatchRunReportsRequest, dict]): + The request object. The batch request containing multiple + report requests. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.BatchRunReportsResponse: + The batch response containing + multiple reports. + + """ + # Create or coerce a protobuf request object. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.BatchRunReportsRequest): + request = analytics_data_api.BatchRunReportsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_run_reports] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("property", request.property), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_run_pivot_reports(self, + request: Optional[Union[analytics_data_api.BatchRunPivotReportsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.BatchRunPivotReportsResponse: + r"""Returns multiple pivot reports in a batch. All + reports must be for the same Google Analytics property. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + def sample_batch_run_pivot_reports(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.BatchRunPivotReportsRequest( + ) + + # Make the request + response = client.batch_run_pivot_reports(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1beta.types.BatchRunPivotReportsRequest, dict]): + The request object. The batch request containing multiple + pivot report requests. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.BatchRunPivotReportsResponse: + The batch response containing + multiple pivot reports. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.BatchRunPivotReportsRequest): + request = analytics_data_api.BatchRunPivotReportsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_run_pivot_reports] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("property", request.property), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_metadata(self, + request: Optional[Union[analytics_data_api.GetMetadataRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.Metadata: + r"""Returns metadata for dimensions and metrics available in + reporting methods. Used to explore the dimensions and metrics. + In this method, a Google Analytics property identifier is + specified in the request, and the metadata response includes + Custom dimensions and metrics as well as Universal metadata. + + For example if a custom metric with parameter name + ``levels_unlocked`` is registered to a property, the Metadata + response will contain ``customEvent:levels_unlocked``. Universal + metadata are dimensions and metrics applicable to any property + such as ``country`` and ``totalUsers``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + def sample_get_metadata(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.GetMetadataRequest( + name="name_value", + ) + + # Make the request + response = client.get_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1beta.types.GetMetadataRequest, dict]): + The request object. 
Request for a property's dimension + and metric metadata. + name (str): + Required. The resource name of the metadata to retrieve. + This name field is specified in the URL path and not URL + parameters. Property is a numeric Google Analytics + property identifier. To learn more, see `where to find + your Property + ID `__. + + Example: properties/1234/metadata + + Set the Property ID to 0 for dimensions and metrics + common to all properties. In this special mode, this + method will not return custom dimensions and metrics. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.Metadata: + The dimensions, metrics and + comparisons currently accepted in + reporting methods. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetMetadataRequest): + request = analytics_data_api.GetMetadataRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_metadata] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def run_realtime_report(self, + request: Optional[Union[analytics_data_api.RunRealtimeReportRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.RunRealtimeReportResponse: + r"""Returns a customized report of realtime event data for your + property. Events appear in realtime reports seconds after they + have been sent to the Google Analytics. Realtime reports show + events and usage data for the periods of time ranging from the + present moment to 30 minutes ago (up to 60 minutes for Google + Analytics 360 properties). + + For a guide to constructing realtime requests & understanding + responses, see `Creating a Realtime + Report `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + def sample_run_realtime_report(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.RunRealtimeReportRequest( + ) + + # Make the request + response = client.run_realtime_report(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1beta.types.RunRealtimeReportRequest, dict]): + The request object. The request to generate a realtime + report. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.RunRealtimeReportResponse: + The response realtime report table + corresponding to a request. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.RunRealtimeReportRequest): + request = analytics_data_api.RunRealtimeReportRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_realtime_report] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("property", request.property), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def check_compatibility(self, + request: Optional[Union[analytics_data_api.CheckCompatibilityRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.CheckCompatibilityResponse: + r"""This compatibility method lists dimensions and + metrics that can be added to a report request and + maintain compatibility. This method fails if the + request's dimensions and metrics are incompatible. + + In Google Analytics, reports fail if they request + incompatible dimensions and/or metrics; in that case, + you will need to remove dimensions and/or metrics from + the incompatible report until the report is compatible. + + The Realtime and Core reports have different + compatibility rules. This method checks compatibility + for Core reports. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + def sample_check_compatibility(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.CheckCompatibilityRequest( + ) + + # Make the request + response = client.check_compatibility(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1beta.types.CheckCompatibilityRequest, dict]): + The request object. 
The request for compatibility information for a report's + dimensions and metrics. Check compatibility provides a + preview of the compatibility of a report; fields shared + with the ``runReport`` request should be the same values + as in your ``runReport`` request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.CheckCompatibilityResponse: + The compatibility response with the + compatibility of each dimension & + metric. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.CheckCompatibilityRequest): + request = analytics_data_api.CheckCompatibilityRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.check_compatibility] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("property", request.property), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_audience_export(self, + request: Optional[Union[analytics_data_api.CreateAudienceExportRequest, dict]] = None, + *, + parent: Optional[str] = None, + audience_export: Optional[analytics_data_api.AudienceExport] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates an audience export for later retrieval. This method + quickly returns the audience export's resource name and + initiates a long running asynchronous request to form an + audience export. To export the users in an audience export, + first create the audience export through this method and then + send the audience resource name to the ``QueryAudienceExport`` + method. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + An audience export is a snapshot of the users currently in the + audience at the time of audience export creation. Creating + audience exports for one audience on different days will return + different results as users enter and exit the audience. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + Audience exports contain the users in each audience. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + def sample_create_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + audience_export = data_v1beta.AudienceExport() + audience_export.audience = "audience_value" + + request = data_v1beta.CreateAudienceExportRequest( + parent="parent_value", + audience_export=audience_export, + ) + + # Make the request + operation = client.create_audience_export(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1beta.types.CreateAudienceExportRequest, dict]): + The request object. A request to create a new audience + export. + parent (str): + Required. The parent resource where this audience export + will be created. Format: ``properties/{property}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + audience_export (google.analytics.data_v1beta.types.AudienceExport): + Required. The audience export to + create. + + This corresponds to the ``audience_export`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.analytics.data_v1beta.types.AudienceExport` An audience export is a list of users in an audience at the time of the + list's creation. One audience may have multiple + audience exports created for different days. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, audience_export]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.CreateAudienceExportRequest): + request = analytics_data_api.CreateAudienceExportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if audience_export is not None: + request.audience_export = audience_export + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_audience_export] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + analytics_data_api.AudienceExport, + metadata_type=analytics_data_api.AudienceExportMetadata, + ) + + # Done; return the response. + return response + + def query_audience_export(self, + request: Optional[Union[analytics_data_api.QueryAudienceExportRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.QueryAudienceExportResponse: + r"""Retrieves an audience export of users. After creating an + audience, the users are not immediately available for exporting. + First, a request to ``CreateAudienceExport`` is necessary to + create an audience export of users, and then second, this method + is used to retrieve the users in the audience export. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + def sample_query_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.QueryAudienceExportRequest( + name="name_value", + ) + + # Make the request + response = client.query_audience_export(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1beta.types.QueryAudienceExportRequest, dict]): + The request object. A request to list users in an + audience export. + name (str): + Required. The name of the audience export to retrieve + users from. Format: + ``properties/{property}/audienceExports/{audience_export}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.QueryAudienceExportResponse: + A list of users in an audience + export. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, analytics_data_api.QueryAudienceExportRequest): + request = analytics_data_api.QueryAudienceExportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.query_audience_export] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_audience_export(self, + request: Optional[Union[analytics_data_api.GetAudienceExportRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.AudienceExport: + r"""Gets configuration metadata about a specific audience export. + This method can be used to understand an audience export after + it has been created. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + def sample_get_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.GetAudienceExportRequest( + name="name_value", + ) + + # Make the request + response = client.get_audience_export(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1beta.types.GetAudienceExportRequest, dict]): + The request object. A request to retrieve configuration + metadata about a specific audience + export. + name (str): + Required. The audience export resource name. Format: + ``properties/{property}/audienceExports/{audience_export}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.AudienceExport: + An audience export is a list of users + in an audience at the time of the list's + creation. One audience may have multiple + audience exports created for different + days. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetAudienceExportRequest): + request = analytics_data_api.GetAudienceExportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_audience_export] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_audience_exports(self, + request: Optional[Union[analytics_data_api.ListAudienceExportsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAudienceExportsPager: + r"""Lists all audience exports for a property. This method can be + used for you to find and reuse existing audience exports rather + than creating unnecessary new audience exports. The same + audience can have multiple audience exports that represent the + export of users that were in an audience on different days. 
+ + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + def sample_list_audience_exports(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.ListAudienceExportsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_audience_exports(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.analytics.data_v1beta.types.ListAudienceExportsRequest, dict]): + The request object. A request to list all audience + exports for a property. + parent (str): + Required. All audience exports for this property will be + listed in the response. Format: + ``properties/{property}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.analytics.data_v1beta.services.beta_analytics_data.pagers.ListAudienceExportsPager: + A list of all audience exports for a + property. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.ListAudienceExportsRequest): + request = analytics_data_api.ListAudienceExportsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_audience_exports] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAudienceExportsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "BetaAnalyticsDataClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "BetaAnalyticsDataClient", +) diff --git a/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/pagers.py b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/pagers.py new file mode 100644 index 000000000000..93ee397fce37 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/pagers.py @@ -0,0 +1,162 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import retry_async as retries_async
from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union
try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
    OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
    OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None]  # type: ignore

from google.analytics.data_v1beta.types import analytics_data_api


class ListAudienceExportsPager:
    """A pager for iterating through ``list_audience_exports`` requests.

    This class thinly wraps an initial
    :class:`google.analytics.data_v1beta.types.ListAudienceExportsResponse` object, and
    provides an ``__iter__`` method to iterate through its
    ``audience_exports`` field.

    If there are more pages, the ``__iter__`` method will make additional
    ``ListAudienceExports`` requests and continue to iterate
    through the ``audience_exports`` field on the
    corresponding responses.

    All the usual :class:`google.analytics.data_v1beta.types.ListAudienceExportsResponse`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., analytics_data_api.ListAudienceExportsResponse],
            request: analytics_data_api.ListAudienceExportsRequest,
            response: analytics_data_api.ListAudienceExportsResponse,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.analytics.data_v1beta.types.ListAudienceExportsRequest):
                The initial request object.
            response (google.analytics.data_v1beta.types.ListAudienceExportsResponse):
                The initial response object.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Normalize to a proto request the pager owns; `pages` mutates its
        # `page_token` while walking through the result set.
        self._request = analytics_data_api.ListAudienceExportsRequest(request)
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Unknown attributes are delegated to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[analytics_data_api.ListAudienceExportsResponse]:
        """Yield each response page, fetching subsequent pages lazily."""
        while True:
            yield self._response
            token = self._response.next_page_token
            if not token:
                return
            self._request.page_token = token
            self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)

    def __iter__(self) -> Iterator[analytics_data_api.AudienceExport]:
        # Flatten the pages into a stream of individual audience exports.
        for page in self.pages:
            for export in page.audience_exports:
                yield export

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"


class ListAudienceExportsAsyncPager:
    """A pager for iterating through ``list_audience_exports`` requests.

    This class thinly wraps an initial
    :class:`google.analytics.data_v1beta.types.ListAudienceExportsResponse` object, and
    provides an ``__aiter__`` method to iterate through its
    ``audience_exports`` field.

    If there are more pages, the ``__aiter__`` method will make additional
    ``ListAudienceExports`` requests and continue to iterate
    through the ``audience_exports`` field on the
    corresponding responses.

    All the usual :class:`google.analytics.data_v1beta.types.ListAudienceExportsResponse`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., Awaitable[analytics_data_api.ListAudienceExportsResponse]],
            request: analytics_data_api.ListAudienceExportsRequest,
            response: analytics_data_api.ListAudienceExportsResponse,
            *,
            retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiates the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.analytics.data_v1beta.types.ListAudienceExportsRequest):
                The initial request object.
            response (google.analytics.data_v1beta.types.ListAudienceExportsResponse):
                The initial response object.
            retry (google.api_core.retry.AsyncRetry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Normalize to a proto request the pager owns; `pages` mutates its
        # `page_token` while walking through the result set.
        self._request = analytics_data_api.ListAudienceExportsRequest(request)
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Unknown attributes are delegated to the most recent response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[analytics_data_api.ListAudienceExportsResponse]:
        """Yield each response page, awaiting subsequent fetches lazily."""
        while True:
            yield self._response
            token = self._response.next_page_token
            if not token:
                return
            self._request.page_token = token
            self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)

    def __aiter__(self) -> AsyncIterator[analytics_data_api.AudienceExport]:
        async def _flatten():
            # Flatten the async page stream into individual audience exports.
            async for page in self.pages:
                for export in page.audience_exports:
                    yield export

        return _flatten()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import BetaAnalyticsDataTransport
from .grpc import BetaAnalyticsDataGrpcTransport
from .grpc_asyncio import BetaAnalyticsDataGrpcAsyncIOTransport
from .rest import BetaAnalyticsDataRestTransport
from .rest import BetaAnalyticsDataRestInterceptor


# Registry mapping transport-name strings to their implementing classes.
# Insertion order is preserved (hence OrderedDict).
_transport_registry: Dict[str, Type[BetaAnalyticsDataTransport]] = OrderedDict(
    (
        ('grpc', BetaAnalyticsDataGrpcTransport),
        ('grpc_asyncio', BetaAnalyticsDataGrpcAsyncIOTransport),
        ('rest', BetaAnalyticsDataRestTransport),
    )
)

__all__ = (
    'BetaAnalyticsDataTransport',
    'BetaAnalyticsDataGrpcTransport',
    'BetaAnalyticsDataGrpcAsyncIOTransport',
    'BetaAnalyticsDataRestTransport',
    'BetaAnalyticsDataRestInterceptor',
)
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union

from google.analytics.data_v1beta import gapic_version as package_version

import google.auth # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore

from google.analytics.data_v1beta.types import analytics_data_api
from google.longrunning import operations_pb2 # type: ignore

# Default client info reports this package's version in the user-agent.
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


class BetaAnalyticsDataTransport(abc.ABC):
    """Abstract transport class for BetaAnalyticsData.

    Owns credential resolution and the retry/timeout method wrapping
    shared by all concrete transports; the per-RPC properties below are
    declared here and implemented by subclasses.
    """

    # OAuth scopes requested when credentials are resolved from the
    # environment (see __init__).
    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/analytics',
        'https://www.googleapis.com/auth/analytics.readonly',
    )

    DEFAULT_HOST: str = 'analyticsdata.googleapis.com'
    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to (default: 'analyticsdata.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): The intended audience for GDC-H
                credentials; falls back to ``host`` when unset.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are passed.
        """

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes
        # Subclasses may pre-set _ignore_credentials before calling this
        # __init__ (e.g. the gRPC transport does so when a ready-made
        # channel is supplied); only default it when unset.
        if not hasattr(self, "_ignore_credentials"):
            self._ignore_credentials: bool = False

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                                credentials_file,
                                **scopes_kwargs,
                                quota_project_id=quota_project_id
                            )
        elif credentials is None and not self._ignore_credentials:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    @property
    def host(self):
        # The resolved "host:port" endpoint this transport connects to.
        return self._host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods. Each wrapper applies the per-RPC
        # default timeout below plus retry and error-translation behavior;
        # a default_timeout of None leaves the method without one.
        self._wrapped_methods = {
            self.run_report: gapic_v1.method.wrap_method(
                self.run_report,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.run_pivot_report: gapic_v1.method.wrap_method(
                self.run_pivot_report,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.batch_run_reports: gapic_v1.method.wrap_method(
                self.batch_run_reports,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.batch_run_pivot_reports: gapic_v1.method.wrap_method(
                self.batch_run_pivot_reports,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.get_metadata: gapic_v1.method.wrap_method(
                self.get_metadata,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.run_realtime_report: gapic_v1.method.wrap_method(
                self.run_realtime_report,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.check_compatibility: gapic_v1.method.wrap_method(
                self.check_compatibility,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.create_audience_export: gapic_v1.method.wrap_method(
                self.create_audience_export,
                default_timeout=None,
                client_info=client_info,
            ),
            self.query_audience_export: gapic_v1.method.wrap_method(
                self.query_audience_export,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get_audience_export: gapic_v1.method.wrap_method(
                self.get_audience_export,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list_audience_exports: gapic_v1.method.wrap_method(
                self.list_audience_exports,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def operations_client(self):
        """Return the client designed to process long-running operations."""
        raise NotImplementedError()

    # Per-RPC callable properties. Each concrete transport returns a
    # callable for the RPC; the base class only declares the interface,
    # hence every accessor below raises NotImplementedError.

    @property
    def run_report(self) -> Callable[
            [analytics_data_api.RunReportRequest],
            Union[
                analytics_data_api.RunReportResponse,
                Awaitable[analytics_data_api.RunReportResponse]
            ]]:
        raise NotImplementedError()

    @property
    def run_pivot_report(self) -> Callable[
            [analytics_data_api.RunPivotReportRequest],
            Union[
                analytics_data_api.RunPivotReportResponse,
                Awaitable[analytics_data_api.RunPivotReportResponse]
            ]]:
        raise NotImplementedError()

    @property
    def batch_run_reports(self) -> Callable[
            [analytics_data_api.BatchRunReportsRequest],
            Union[
                analytics_data_api.BatchRunReportsResponse,
                Awaitable[analytics_data_api.BatchRunReportsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def batch_run_pivot_reports(self) -> Callable[
            [analytics_data_api.BatchRunPivotReportsRequest],
            Union[
                analytics_data_api.BatchRunPivotReportsResponse,
                Awaitable[analytics_data_api.BatchRunPivotReportsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def get_metadata(self) -> Callable[
            [analytics_data_api.GetMetadataRequest],
            Union[
                analytics_data_api.Metadata,
                Awaitable[analytics_data_api.Metadata]
            ]]:
        raise NotImplementedError()

    @property
    def run_realtime_report(self) -> Callable[
            [analytics_data_api.RunRealtimeReportRequest],
            Union[
                analytics_data_api.RunRealtimeReportResponse,
                Awaitable[analytics_data_api.RunRealtimeReportResponse]
            ]]:
        raise NotImplementedError()

    @property
    def check_compatibility(self) -> Callable[
            [analytics_data_api.CheckCompatibilityRequest],
            Union[
                analytics_data_api.CheckCompatibilityResponse,
                Awaitable[analytics_data_api.CheckCompatibilityResponse]
            ]]:
        raise NotImplementedError()

    @property
    def create_audience_export(self) -> Callable[
            [analytics_data_api.CreateAudienceExportRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def query_audience_export(self) -> Callable[
            [analytics_data_api.QueryAudienceExportRequest],
            Union[
                analytics_data_api.QueryAudienceExportResponse,
                Awaitable[analytics_data_api.QueryAudienceExportResponse]
            ]]:
        raise NotImplementedError()

    @property
    def get_audience_export(self) -> Callable[
            [analytics_data_api.GetAudienceExportRequest],
            Union[
                analytics_data_api.AudienceExport,
                Awaitable[analytics_data_api.AudienceExport]
            ]]:
        raise NotImplementedError()

    @property
    def list_audience_exports(self) -> Callable[
            [analytics_data_api.ListAudienceExportsRequest],
            Union[
                analytics_data_api.ListAudienceExportsResponse,
                Awaitable[analytics_data_api.ListAudienceExportsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        # Short transport identifier (e.g. "grpc", "rest"); supplied by
        # each concrete subclass.
        raise NotImplementedError()


__all__ = (
    'BetaAnalyticsDataTransport',
)
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.analytics.data_v1beta.types import analytics_data_api +from google.longrunning import operations_pb2 # type: ignore +from .base import BetaAnalyticsDataTransport, DEFAULT_CLIENT_INFO + + +class BetaAnalyticsDataGrpcTransport(BetaAnalyticsDataTransport): + """gRPC backend transport for BetaAnalyticsData. + + Google Analytics reporting data service. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'analyticsdata.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'analyticsdata.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be 
used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'analyticsdata.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def run_report(self) -> Callable[ + [analytics_data_api.RunReportRequest], + analytics_data_api.RunReportResponse]: + r"""Return a callable for the run report method over gRPC. + + Returns a customized report of your Google Analytics event data. + Reports contain statistics derived from data collected by the + Google Analytics tracking code. The data returned from the API + is as a table with columns for the requested dimensions and + metrics. Metrics are individual measurements of user activity on + your property, such as active users or event count. Dimensions + break down metrics across some common criteria, such as country + or event name. + + For a guide to constructing requests & understanding responses, + see `Creating a + Report `__. + + Returns: + Callable[[~.RunReportRequest], + ~.RunReportResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'run_report' not in self._stubs: + self._stubs['run_report'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/RunReport', + request_serializer=analytics_data_api.RunReportRequest.serialize, + response_deserializer=analytics_data_api.RunReportResponse.deserialize, + ) + return self._stubs['run_report'] + + @property + def run_pivot_report(self) -> Callable[ + [analytics_data_api.RunPivotReportRequest], + analytics_data_api.RunPivotReportResponse]: + r"""Return a callable for the run pivot report method over gRPC. + + Returns a customized pivot report of your Google + Analytics event data. Pivot reports are more advanced + and expressive formats than regular reports. In a pivot + report, dimensions are only visible if they are included + in a pivot. Multiple pivots can be specified to further + dissect your data. + + Returns: + Callable[[~.RunPivotReportRequest], + ~.RunPivotReportResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'run_pivot_report' not in self._stubs: + self._stubs['run_pivot_report'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/RunPivotReport', + request_serializer=analytics_data_api.RunPivotReportRequest.serialize, + response_deserializer=analytics_data_api.RunPivotReportResponse.deserialize, + ) + return self._stubs['run_pivot_report'] + + @property + def batch_run_reports(self) -> Callable[ + [analytics_data_api.BatchRunReportsRequest], + analytics_data_api.BatchRunReportsResponse]: + r"""Return a callable for the batch run reports method over gRPC. + + Returns multiple reports in a batch. 
All reports must + be for the same Google Analytics property. + + Returns: + Callable[[~.BatchRunReportsRequest], + ~.BatchRunReportsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_run_reports' not in self._stubs: + self._stubs['batch_run_reports'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/BatchRunReports', + request_serializer=analytics_data_api.BatchRunReportsRequest.serialize, + response_deserializer=analytics_data_api.BatchRunReportsResponse.deserialize, + ) + return self._stubs['batch_run_reports'] + + @property + def batch_run_pivot_reports(self) -> Callable[ + [analytics_data_api.BatchRunPivotReportsRequest], + analytics_data_api.BatchRunPivotReportsResponse]: + r"""Return a callable for the batch run pivot reports method over gRPC. + + Returns multiple pivot reports in a batch. All + reports must be for the same Google Analytics property. + + Returns: + Callable[[~.BatchRunPivotReportsRequest], + ~.BatchRunPivotReportsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'batch_run_pivot_reports' not in self._stubs: + self._stubs['batch_run_pivot_reports'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/BatchRunPivotReports', + request_serializer=analytics_data_api.BatchRunPivotReportsRequest.serialize, + response_deserializer=analytics_data_api.BatchRunPivotReportsResponse.deserialize, + ) + return self._stubs['batch_run_pivot_reports'] + + @property + def get_metadata(self) -> Callable[ + [analytics_data_api.GetMetadataRequest], + analytics_data_api.Metadata]: + r"""Return a callable for the get metadata method over gRPC. + + Returns metadata for dimensions and metrics available in + reporting methods. Used to explore the dimensions and metrics. + In this method, a Google Analytics property identifier is + specified in the request, and the metadata response includes + Custom dimensions and metrics as well as Universal metadata. + + For example if a custom metric with parameter name + ``levels_unlocked`` is registered to a property, the Metadata + response will contain ``customEvent:levels_unlocked``. Universal + metadata are dimensions and metrics applicable to any property + such as ``country`` and ``totalUsers``. + + Returns: + Callable[[~.GetMetadataRequest], + ~.Metadata]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_metadata' not in self._stubs: + self._stubs['get_metadata'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/GetMetadata', + request_serializer=analytics_data_api.GetMetadataRequest.serialize, + response_deserializer=analytics_data_api.Metadata.deserialize, + ) + return self._stubs['get_metadata'] + + @property + def run_realtime_report(self) -> Callable[ + [analytics_data_api.RunRealtimeReportRequest], + analytics_data_api.RunRealtimeReportResponse]: + r"""Return a callable for the run realtime report method over gRPC. + + Returns a customized report of realtime event data for your + property. Events appear in realtime reports seconds after they + have been sent to the Google Analytics. Realtime reports show + events and usage data for the periods of time ranging from the + present moment to 30 minutes ago (up to 60 minutes for Google + Analytics 360 properties). + + For a guide to constructing realtime requests & understanding + responses, see `Creating a Realtime + Report `__. + + Returns: + Callable[[~.RunRealtimeReportRequest], + ~.RunRealtimeReportResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'run_realtime_report' not in self._stubs: + self._stubs['run_realtime_report'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/RunRealtimeReport', + request_serializer=analytics_data_api.RunRealtimeReportRequest.serialize, + response_deserializer=analytics_data_api.RunRealtimeReportResponse.deserialize, + ) + return self._stubs['run_realtime_report'] + + @property + def check_compatibility(self) -> Callable[ + [analytics_data_api.CheckCompatibilityRequest], + analytics_data_api.CheckCompatibilityResponse]: + r"""Return a callable for the check compatibility method over gRPC. + + This compatibility method lists dimensions and + metrics that can be added to a report request and + maintain compatibility. This method fails if the + request's dimensions and metrics are incompatible. + + In Google Analytics, reports fail if they request + incompatible dimensions and/or metrics; in that case, + you will need to remove dimensions and/or metrics from + the incompatible report until the report is compatible. + + The Realtime and Core reports have different + compatibility rules. This method checks compatibility + for Core reports. + + Returns: + Callable[[~.CheckCompatibilityRequest], + ~.CheckCompatibilityResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'check_compatibility' not in self._stubs: + self._stubs['check_compatibility'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/CheckCompatibility', + request_serializer=analytics_data_api.CheckCompatibilityRequest.serialize, + response_deserializer=analytics_data_api.CheckCompatibilityResponse.deserialize, + ) + return self._stubs['check_compatibility'] + + @property + def create_audience_export(self) -> Callable[ + [analytics_data_api.CreateAudienceExportRequest], + operations_pb2.Operation]: + r"""Return a callable for the create audience export method over gRPC. + + Creates an audience export for later retrieval. This method + quickly returns the audience export's resource name and + initiates a long running asynchronous request to form an + audience export. To export the users in an audience export, + first create the audience export through this method and then + send the audience resource name to the ``QueryAudienceExport`` + method. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + An audience export is a snapshot of the users currently in the + audience at the time of audience export creation. Creating + audience exports for one audience on different days will return + different results as users enter and exit the audience. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + Audience exports contain the users in each audience. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. 
+ + Returns: + Callable[[~.CreateAudienceExportRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_audience_export' not in self._stubs: + self._stubs['create_audience_export'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/CreateAudienceExport', + request_serializer=analytics_data_api.CreateAudienceExportRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_audience_export'] + + @property + def query_audience_export(self) -> Callable[ + [analytics_data_api.QueryAudienceExportRequest], + analytics_data_api.QueryAudienceExportResponse]: + r"""Return a callable for the query audience export method over gRPC. + + Retrieves an audience export of users. After creating an + audience, the users are not immediately available for exporting. + First, a request to ``CreateAudienceExport`` is necessary to + create an audience export of users, and then second, this method + is used to retrieve the users in the audience export. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. 
+ + Returns: + Callable[[~.QueryAudienceExportRequest], + ~.QueryAudienceExportResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'query_audience_export' not in self._stubs: + self._stubs['query_audience_export'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/QueryAudienceExport', + request_serializer=analytics_data_api.QueryAudienceExportRequest.serialize, + response_deserializer=analytics_data_api.QueryAudienceExportResponse.deserialize, + ) + return self._stubs['query_audience_export'] + + @property + def get_audience_export(self) -> Callable[ + [analytics_data_api.GetAudienceExportRequest], + analytics_data_api.AudienceExport]: + r"""Return a callable for the get audience export method over gRPC. + + Gets configuration metadata about a specific audience export. + This method can be used to understand an audience export after + it has been created. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + Returns: + Callable[[~.GetAudienceExportRequest], + ~.AudienceExport]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_audience_export' not in self._stubs: + self._stubs['get_audience_export'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/GetAudienceExport', + request_serializer=analytics_data_api.GetAudienceExportRequest.serialize, + response_deserializer=analytics_data_api.AudienceExport.deserialize, + ) + return self._stubs['get_audience_export'] + + @property + def list_audience_exports(self) -> Callable[ + [analytics_data_api.ListAudienceExportsRequest], + analytics_data_api.ListAudienceExportsResponse]: + r"""Return a callable for the list audience exports method over gRPC. + + Lists all audience exports for a property. This method can be + used for you to find and reuse existing audience exports rather + than creating unnecessary new audience exports. The same + audience can have multiple audience exports that represent the + export of users that were in an audience on different days. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + Returns: + Callable[[~.ListAudienceExportsRequest], + ~.ListAudienceExportsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_audience_exports' not in self._stubs: + self._stubs['list_audience_exports'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/ListAudienceExports', + request_serializer=analytics_data_api.ListAudienceExportsRequest.serialize, + response_deserializer=analytics_data_api.ListAudienceExportsResponse.deserialize, + ) + return self._stubs['list_audience_exports'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'BetaAnalyticsDataGrpcTransport', +) diff --git a/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc_asyncio.py b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc_asyncio.py new file mode 100644 index 000000000000..b133e3591418 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc_asyncio.py @@ -0,0 +1,743 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.analytics.data_v1beta.types import analytics_data_api +from google.longrunning import operations_pb2 # type: ignore +from .base import BetaAnalyticsDataTransport, DEFAULT_CLIENT_INFO +from .grpc import BetaAnalyticsDataGrpcTransport + + +class BetaAnalyticsDataGrpcAsyncIOTransport(BetaAnalyticsDataTransport): + """gRPC AsyncIO backend transport for BetaAnalyticsData. + + Google Analytics reporting data service. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'analyticsdata.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'analyticsdata.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'analyticsdata.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+                quota_project_id (Optional[str]): An optional project to use for billing +                and quota. +            client_info (google.api_core.gapic_v1.client_info.ClientInfo): +                The client info used to send a user-agent string along with +                API requests. If ``None``, then default info will be used. +                Generally, you only need to set this if you're developing +                your own client library. +            always_use_jwt_access (Optional[bool]): Whether self signed JWT should +                be used for service account credentials. + +        Raises: +            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport +                creation failed for any reason. +            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` +                and ``credentials_file`` are passed. +        """ +        self._grpc_channel = None +        self._ssl_channel_credentials = ssl_channel_credentials +        self._stubs: Dict[str, Callable] = {} +        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + +        if api_mtls_endpoint: +            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) +        if client_cert_source: +            warnings.warn("client_cert_source is deprecated", DeprecationWarning) + +        if isinstance(channel, aio.Channel): +            # Ignore credentials if a channel was passed. +            credentials = None +            self._ignore_credentials = True +            # If a channel was explicitly provided, set it. +            self._grpc_channel = channel +            self._ssl_channel_credentials = None +        else: +            if api_mtls_endpoint: +                host = api_mtls_endpoint + +                # Create SSL credentials with client_cert_source or application +                # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def run_report(self) -> Callable[ + [analytics_data_api.RunReportRequest], + Awaitable[analytics_data_api.RunReportResponse]]: + r"""Return a callable for the run report method over gRPC. + + Returns a customized report of your Google Analytics event data. + Reports contain statistics derived from data collected by the + Google Analytics tracking code. The data returned from the API + is as a table with columns for the requested dimensions and + metrics. Metrics are individual measurements of user activity on + your property, such as active users or event count. Dimensions + break down metrics across some common criteria, such as country + or event name. + + For a guide to constructing requests & understanding responses, + see `Creating a + Report `__. + + Returns: + Callable[[~.RunReportRequest], + Awaitable[~.RunReportResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'run_report' not in self._stubs: + self._stubs['run_report'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/RunReport', + request_serializer=analytics_data_api.RunReportRequest.serialize, + response_deserializer=analytics_data_api.RunReportResponse.deserialize, + ) + return self._stubs['run_report'] + + @property + def run_pivot_report(self) -> Callable[ + [analytics_data_api.RunPivotReportRequest], + Awaitable[analytics_data_api.RunPivotReportResponse]]: + r"""Return a callable for the run pivot report method over gRPC. + + Returns a customized pivot report of your Google + Analytics event data. Pivot reports are more advanced + and expressive formats than regular reports. In a pivot + report, dimensions are only visible if they are included + in a pivot. Multiple pivots can be specified to further + dissect your data. + + Returns: + Callable[[~.RunPivotReportRequest], + Awaitable[~.RunPivotReportResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'run_pivot_report' not in self._stubs: + self._stubs['run_pivot_report'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/RunPivotReport', + request_serializer=analytics_data_api.RunPivotReportRequest.serialize, + response_deserializer=analytics_data_api.RunPivotReportResponse.deserialize, + ) + return self._stubs['run_pivot_report'] + + @property + def batch_run_reports(self) -> Callable[ + [analytics_data_api.BatchRunReportsRequest], + Awaitable[analytics_data_api.BatchRunReportsResponse]]: + r"""Return a callable for the batch run reports method over gRPC. + + Returns multiple reports in a batch. All reports must + be for the same Google Analytics property. 
+ + Returns: + Callable[[~.BatchRunReportsRequest], + Awaitable[~.BatchRunReportsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_run_reports' not in self._stubs: + self._stubs['batch_run_reports'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/BatchRunReports', + request_serializer=analytics_data_api.BatchRunReportsRequest.serialize, + response_deserializer=analytics_data_api.BatchRunReportsResponse.deserialize, + ) + return self._stubs['batch_run_reports'] + + @property + def batch_run_pivot_reports(self) -> Callable[ + [analytics_data_api.BatchRunPivotReportsRequest], + Awaitable[analytics_data_api.BatchRunPivotReportsResponse]]: + r"""Return a callable for the batch run pivot reports method over gRPC. + + Returns multiple pivot reports in a batch. All + reports must be for the same Google Analytics property. + + Returns: + Callable[[~.BatchRunPivotReportsRequest], + Awaitable[~.BatchRunPivotReportsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'batch_run_pivot_reports' not in self._stubs: + self._stubs['batch_run_pivot_reports'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/BatchRunPivotReports', + request_serializer=analytics_data_api.BatchRunPivotReportsRequest.serialize, + response_deserializer=analytics_data_api.BatchRunPivotReportsResponse.deserialize, + ) + return self._stubs['batch_run_pivot_reports'] + + @property + def get_metadata(self) -> Callable[ + [analytics_data_api.GetMetadataRequest], + Awaitable[analytics_data_api.Metadata]]: + r"""Return a callable for the get metadata method over gRPC. + + Returns metadata for dimensions and metrics available in + reporting methods. Used to explore the dimensions and metrics. + In this method, a Google Analytics property identifier is + specified in the request, and the metadata response includes + Custom dimensions and metrics as well as Universal metadata. + + For example if a custom metric with parameter name + ``levels_unlocked`` is registered to a property, the Metadata + response will contain ``customEvent:levels_unlocked``. Universal + metadata are dimensions and metrics applicable to any property + such as ``country`` and ``totalUsers``. + + Returns: + Callable[[~.GetMetadataRequest], + Awaitable[~.Metadata]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_metadata' not in self._stubs: + self._stubs['get_metadata'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/GetMetadata', + request_serializer=analytics_data_api.GetMetadataRequest.serialize, + response_deserializer=analytics_data_api.Metadata.deserialize, + ) + return self._stubs['get_metadata'] + + @property + def run_realtime_report(self) -> Callable[ + [analytics_data_api.RunRealtimeReportRequest], + Awaitable[analytics_data_api.RunRealtimeReportResponse]]: + r"""Return a callable for the run realtime report method over gRPC. + + Returns a customized report of realtime event data for your + property. Events appear in realtime reports seconds after they + have been sent to the Google Analytics. Realtime reports show + events and usage data for the periods of time ranging from the + present moment to 30 minutes ago (up to 60 minutes for Google + Analytics 360 properties). + + For a guide to constructing realtime requests & understanding + responses, see `Creating a Realtime + Report `__. + + Returns: + Callable[[~.RunRealtimeReportRequest], + Awaitable[~.RunRealtimeReportResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'run_realtime_report' not in self._stubs: + self._stubs['run_realtime_report'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/RunRealtimeReport', + request_serializer=analytics_data_api.RunRealtimeReportRequest.serialize, + response_deserializer=analytics_data_api.RunRealtimeReportResponse.deserialize, + ) + return self._stubs['run_realtime_report'] + + @property + def check_compatibility(self) -> Callable[ + [analytics_data_api.CheckCompatibilityRequest], + Awaitable[analytics_data_api.CheckCompatibilityResponse]]: + r"""Return a callable for the check compatibility method over gRPC. + + This compatibility method lists dimensions and + metrics that can be added to a report request and + maintain compatibility. This method fails if the + request's dimensions and metrics are incompatible. + + In Google Analytics, reports fail if they request + incompatible dimensions and/or metrics; in that case, + you will need to remove dimensions and/or metrics from + the incompatible report until the report is compatible. + + The Realtime and Core reports have different + compatibility rules. This method checks compatibility + for Core reports. + + Returns: + Callable[[~.CheckCompatibilityRequest], + Awaitable[~.CheckCompatibilityResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'check_compatibility' not in self._stubs: + self._stubs['check_compatibility'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/CheckCompatibility', + request_serializer=analytics_data_api.CheckCompatibilityRequest.serialize, + response_deserializer=analytics_data_api.CheckCompatibilityResponse.deserialize, + ) + return self._stubs['check_compatibility'] + + @property + def create_audience_export(self) -> Callable[ + [analytics_data_api.CreateAudienceExportRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create audience export method over gRPC. + + Creates an audience export for later retrieval. This method + quickly returns the audience export's resource name and + initiates a long running asynchronous request to form an + audience export. To export the users in an audience export, + first create the audience export through this method and then + send the audience resource name to the ``QueryAudienceExport`` + method. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + An audience export is a snapshot of the users currently in the + audience at the time of audience export creation. Creating + audience exports for one audience on different days will return + different results as users enter and exit the audience. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + Audience exports contain the users in each audience. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. 
+ + Returns: + Callable[[~.CreateAudienceExportRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_audience_export' not in self._stubs: + self._stubs['create_audience_export'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/CreateAudienceExport', + request_serializer=analytics_data_api.CreateAudienceExportRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_audience_export'] + + @property + def query_audience_export(self) -> Callable[ + [analytics_data_api.QueryAudienceExportRequest], + Awaitable[analytics_data_api.QueryAudienceExportResponse]]: + r"""Return a callable for the query audience export method over gRPC. + + Retrieves an audience export of users. After creating an + audience, the users are not immediately available for exporting. + First, a request to ``CreateAudienceExport`` is necessary to + create an audience export of users, and then second, this method + is used to retrieve the users in the audience export. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. 
+ + Returns: + Callable[[~.QueryAudienceExportRequest], + Awaitable[~.QueryAudienceExportResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'query_audience_export' not in self._stubs: + self._stubs['query_audience_export'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/QueryAudienceExport', + request_serializer=analytics_data_api.QueryAudienceExportRequest.serialize, + response_deserializer=analytics_data_api.QueryAudienceExportResponse.deserialize, + ) + return self._stubs['query_audience_export'] + + @property + def get_audience_export(self) -> Callable[ + [analytics_data_api.GetAudienceExportRequest], + Awaitable[analytics_data_api.AudienceExport]]: + r"""Return a callable for the get audience export method over gRPC. + + Gets configuration metadata about a specific audience export. + This method can be used to understand an audience export after + it has been created. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + Returns: + Callable[[~.GetAudienceExportRequest], + Awaitable[~.AudienceExport]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_audience_export' not in self._stubs: + self._stubs['get_audience_export'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/GetAudienceExport', + request_serializer=analytics_data_api.GetAudienceExportRequest.serialize, + response_deserializer=analytics_data_api.AudienceExport.deserialize, + ) + return self._stubs['get_audience_export'] + + @property + def list_audience_exports(self) -> Callable[ + [analytics_data_api.ListAudienceExportsRequest], + Awaitable[analytics_data_api.ListAudienceExportsResponse]]: + r"""Return a callable for the list audience exports method over gRPC. + + Lists all audience exports for a property. This method can be + used for you to find and reuse existing audience exports rather + than creating unnecessary new audience exports. The same + audience can have multiple audience exports that represent the + export of users that were in an audience on different days. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + Returns: + Callable[[~.ListAudienceExportsRequest], + Awaitable[~.ListAudienceExportsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_audience_exports' not in self._stubs: + self._stubs['list_audience_exports'] = self.grpc_channel.unary_unary( + '/google.analytics.data.v1beta.BetaAnalyticsData/ListAudienceExports', + request_serializer=analytics_data_api.ListAudienceExportsRequest.serialize, + response_deserializer=analytics_data_api.ListAudienceExportsResponse.deserialize, + ) + return self._stubs['list_audience_exports'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.run_report: self._wrap_method( + self.run_report, + default_timeout=60.0, + client_info=client_info, + ), + self.run_pivot_report: self._wrap_method( + self.run_pivot_report, + default_timeout=60.0, + client_info=client_info, + ), + self.batch_run_reports: self._wrap_method( + self.batch_run_reports, + default_timeout=60.0, + client_info=client_info, + ), + self.batch_run_pivot_reports: self._wrap_method( + self.batch_run_pivot_reports, + default_timeout=60.0, + client_info=client_info, + ), + self.get_metadata: self._wrap_method( + self.get_metadata, + default_timeout=60.0, + client_info=client_info, + ), + self.run_realtime_report: self._wrap_method( + self.run_realtime_report, + default_timeout=60.0, + client_info=client_info, + ), + self.check_compatibility: self._wrap_method( + self.check_compatibility, + default_timeout=60.0, + client_info=client_info, + ), + self.create_audience_export: self._wrap_method( + self.create_audience_export, + default_timeout=None, + client_info=client_info, + ), + self.query_audience_export: self._wrap_method( + self.query_audience_export, + default_timeout=None, + client_info=client_info, + ), + self.get_audience_export: self._wrap_method( + self.get_audience_export, + default_timeout=None, + client_info=client_info, + ), + self.list_audience_exports: self._wrap_method( + self.list_audience_exports, + default_timeout=None, + client_info=client_info, + 
), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + +__all__ = ( + 'BetaAnalyticsDataGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/transports/rest.py b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/transports/rest.py new file mode 100644 index 000000000000..9922f685c648 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/transports/rest.py @@ -0,0 +1,1404 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.api_core import operations_v1 + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.analytics.data_v1beta.types import analytics_data_api +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseBetaAnalyticsDataRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class BetaAnalyticsDataRestInterceptor: + """Interceptor for BetaAnalyticsData. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the BetaAnalyticsDataRestTransport. + + .. 
code-block:: python + class MyCustomBetaAnalyticsDataInterceptor(BetaAnalyticsDataRestInterceptor): + def pre_batch_run_pivot_reports(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_run_pivot_reports(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_batch_run_reports(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_run_reports(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_check_compatibility(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_check_compatibility(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_audience_export(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_audience_export(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_audience_export(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_audience_export(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_metadata(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_metadata(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_audience_exports(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_audience_exports(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_query_audience_export(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_query_audience_export(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_run_pivot_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_pivot_report(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_run_realtime_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_realtime_report(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_run_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_report(self, response): + logging.log(f"Received response: {response}") + return response + + transport = BetaAnalyticsDataRestTransport(interceptor=MyCustomBetaAnalyticsDataInterceptor()) + client = BetaAnalyticsDataClient(transport=transport) + + + """ + def pre_batch_run_pivot_reports(self, request: analytics_data_api.BatchRunPivotReportsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.BatchRunPivotReportsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_run_pivot_reports + + Override in a subclass to manipulate the request or metadata + before they are sent to the BetaAnalyticsData server. + """ + return request, metadata + + def post_batch_run_pivot_reports(self, response: analytics_data_api.BatchRunPivotReportsResponse) -> analytics_data_api.BatchRunPivotReportsResponse: + """Post-rpc interceptor for batch_run_pivot_reports + + Override in a subclass to manipulate the response + after it is returned by the BetaAnalyticsData server but before + it is returned to user code. 
+ """ + return response + + def pre_batch_run_reports(self, request: analytics_data_api.BatchRunReportsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.BatchRunReportsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_run_reports + + Override in a subclass to manipulate the request or metadata + before they are sent to the BetaAnalyticsData server. + """ + return request, metadata + + def post_batch_run_reports(self, response: analytics_data_api.BatchRunReportsResponse) -> analytics_data_api.BatchRunReportsResponse: + """Post-rpc interceptor for batch_run_reports + + Override in a subclass to manipulate the response + after it is returned by the BetaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_check_compatibility(self, request: analytics_data_api.CheckCompatibilityRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.CheckCompatibilityRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for check_compatibility + + Override in a subclass to manipulate the request or metadata + before they are sent to the BetaAnalyticsData server. + """ + return request, metadata + + def post_check_compatibility(self, response: analytics_data_api.CheckCompatibilityResponse) -> analytics_data_api.CheckCompatibilityResponse: + """Post-rpc interceptor for check_compatibility + + Override in a subclass to manipulate the response + after it is returned by the BetaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_create_audience_export(self, request: analytics_data_api.CreateAudienceExportRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.CreateAudienceExportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_audience_export + + Override in a subclass to manipulate the request or metadata + before they are sent to the BetaAnalyticsData server. 
+ """ + return request, metadata + + def post_create_audience_export(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_audience_export + + Override in a subclass to manipulate the response + after it is returned by the BetaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_get_audience_export(self, request: analytics_data_api.GetAudienceExportRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.GetAudienceExportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_audience_export + + Override in a subclass to manipulate the request or metadata + before they are sent to the BetaAnalyticsData server. + """ + return request, metadata + + def post_get_audience_export(self, response: analytics_data_api.AudienceExport) -> analytics_data_api.AudienceExport: + """Post-rpc interceptor for get_audience_export + + Override in a subclass to manipulate the response + after it is returned by the BetaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_get_metadata(self, request: analytics_data_api.GetMetadataRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.GetMetadataRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_metadata + + Override in a subclass to manipulate the request or metadata + before they are sent to the BetaAnalyticsData server. + """ + return request, metadata + + def post_get_metadata(self, response: analytics_data_api.Metadata) -> analytics_data_api.Metadata: + """Post-rpc interceptor for get_metadata + + Override in a subclass to manipulate the response + after it is returned by the BetaAnalyticsData server but before + it is returned to user code. 
+ """ + return response + + def pre_list_audience_exports(self, request: analytics_data_api.ListAudienceExportsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.ListAudienceExportsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_audience_exports + + Override in a subclass to manipulate the request or metadata + before they are sent to the BetaAnalyticsData server. + """ + return request, metadata + + def post_list_audience_exports(self, response: analytics_data_api.ListAudienceExportsResponse) -> analytics_data_api.ListAudienceExportsResponse: + """Post-rpc interceptor for list_audience_exports + + Override in a subclass to manipulate the response + after it is returned by the BetaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_query_audience_export(self, request: analytics_data_api.QueryAudienceExportRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.QueryAudienceExportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for query_audience_export + + Override in a subclass to manipulate the request or metadata + before they are sent to the BetaAnalyticsData server. + """ + return request, metadata + + def post_query_audience_export(self, response: analytics_data_api.QueryAudienceExportResponse) -> analytics_data_api.QueryAudienceExportResponse: + """Post-rpc interceptor for query_audience_export + + Override in a subclass to manipulate the response + after it is returned by the BetaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_run_pivot_report(self, request: analytics_data_api.RunPivotReportRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.RunPivotReportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_pivot_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the BetaAnalyticsData server. 
+ """ + return request, metadata + + def post_run_pivot_report(self, response: analytics_data_api.RunPivotReportResponse) -> analytics_data_api.RunPivotReportResponse: + """Post-rpc interceptor for run_pivot_report + + Override in a subclass to manipulate the response + after it is returned by the BetaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_run_realtime_report(self, request: analytics_data_api.RunRealtimeReportRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.RunRealtimeReportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_realtime_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the BetaAnalyticsData server. + """ + return request, metadata + + def post_run_realtime_report(self, response: analytics_data_api.RunRealtimeReportResponse) -> analytics_data_api.RunRealtimeReportResponse: + """Post-rpc interceptor for run_realtime_report + + Override in a subclass to manipulate the response + after it is returned by the BetaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_run_report(self, request: analytics_data_api.RunReportRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[analytics_data_api.RunReportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the BetaAnalyticsData server. + """ + return request, metadata + + def post_run_report(self, response: analytics_data_api.RunReportResponse) -> analytics_data_api.RunReportResponse: + """Post-rpc interceptor for run_report + + Override in a subclass to manipulate the response + after it is returned by the BetaAnalyticsData server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class BetaAnalyticsDataRestStub: + _session: AuthorizedSession + _host: str + _interceptor: BetaAnalyticsDataRestInterceptor + + +class BetaAnalyticsDataRestTransport(_BaseBetaAnalyticsDataRestTransport): + """REST backend synchronous transport for BetaAnalyticsData. + + Google Analytics reporting data service. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'analyticsdata.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[BetaAnalyticsDataRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'analyticsdata.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or BetaAnalyticsDataRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1beta") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _BatchRunPivotReports(_BaseBetaAnalyticsDataRestTransport._BaseBatchRunPivotReports, BetaAnalyticsDataRestStub): + def __hash__(self): + return hash("BetaAnalyticsDataRestTransport.BatchRunPivotReports") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: analytics_data_api.BatchRunPivotReportsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.BatchRunPivotReportsResponse: + r"""Call the batch run pivot reports method over HTTP. + + Args: + request (~.analytics_data_api.BatchRunPivotReportsRequest): + The request object. The batch request containing multiple + pivot report requests. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.analytics_data_api.BatchRunPivotReportsResponse: + The batch response containing + multiple pivot reports. + + """ + + http_options = _BaseBetaAnalyticsDataRestTransport._BaseBatchRunPivotReports._get_http_options() + request, metadata = self._interceptor.pre_batch_run_pivot_reports(request, metadata) + transcoded_request = _BaseBetaAnalyticsDataRestTransport._BaseBatchRunPivotReports._get_transcoded_request(http_options, request) + + body = _BaseBetaAnalyticsDataRestTransport._BaseBatchRunPivotReports._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseBetaAnalyticsDataRestTransport._BaseBatchRunPivotReports._get_query_params_json(transcoded_request) + + # Send the request + response = BetaAnalyticsDataRestTransport._BatchRunPivotReports._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.BatchRunPivotReportsResponse() + pb_resp = analytics_data_api.BatchRunPivotReportsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_run_pivot_reports(resp) + return resp + + class _BatchRunReports(_BaseBetaAnalyticsDataRestTransport._BaseBatchRunReports, BetaAnalyticsDataRestStub): + def __hash__(self): + return hash("BetaAnalyticsDataRestTransport.BatchRunReports") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: analytics_data_api.BatchRunReportsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.BatchRunReportsResponse: + r"""Call the batch run reports method over HTTP. + + Args: + request (~.analytics_data_api.BatchRunReportsRequest): + The request object. The batch request containing multiple + report requests. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.BatchRunReportsResponse: + The batch response containing + multiple reports. 
+ + """ + + http_options = _BaseBetaAnalyticsDataRestTransport._BaseBatchRunReports._get_http_options() + request, metadata = self._interceptor.pre_batch_run_reports(request, metadata) + transcoded_request = _BaseBetaAnalyticsDataRestTransport._BaseBatchRunReports._get_transcoded_request(http_options, request) + + body = _BaseBetaAnalyticsDataRestTransport._BaseBatchRunReports._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseBetaAnalyticsDataRestTransport._BaseBatchRunReports._get_query_params_json(transcoded_request) + + # Send the request + response = BetaAnalyticsDataRestTransport._BatchRunReports._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.BatchRunReportsResponse() + pb_resp = analytics_data_api.BatchRunReportsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_run_reports(resp) + return resp + + class _CheckCompatibility(_BaseBetaAnalyticsDataRestTransport._BaseCheckCompatibility, BetaAnalyticsDataRestStub): + def __hash__(self): + return hash("BetaAnalyticsDataRestTransport.CheckCompatibility") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: 
analytics_data_api.CheckCompatibilityRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.CheckCompatibilityResponse: + r"""Call the check compatibility method over HTTP. + + Args: + request (~.analytics_data_api.CheckCompatibilityRequest): + The request object. The request for compatibility information for a report's + dimensions and metrics. Check compatibility provides a + preview of the compatibility of a report; fields shared + with the ``runReport`` request should be the same values + as in your ``runReport`` request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.CheckCompatibilityResponse: + The compatibility response with the + compatibility of each dimension & + metric. + + """ + + http_options = _BaseBetaAnalyticsDataRestTransport._BaseCheckCompatibility._get_http_options() + request, metadata = self._interceptor.pre_check_compatibility(request, metadata) + transcoded_request = _BaseBetaAnalyticsDataRestTransport._BaseCheckCompatibility._get_transcoded_request(http_options, request) + + body = _BaseBetaAnalyticsDataRestTransport._BaseCheckCompatibility._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseBetaAnalyticsDataRestTransport._BaseCheckCompatibility._get_query_params_json(transcoded_request) + + # Send the request + response = BetaAnalyticsDataRestTransport._CheckCompatibility._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.CheckCompatibilityResponse() + pb_resp = analytics_data_api.CheckCompatibilityResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_check_compatibility(resp) + return resp + + class _CreateAudienceExport(_BaseBetaAnalyticsDataRestTransport._BaseCreateAudienceExport, BetaAnalyticsDataRestStub): + def __hash__(self): + return hash("BetaAnalyticsDataRestTransport.CreateAudienceExport") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: analytics_data_api.CreateAudienceExportRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create audience export method over HTTP. + + Args: + request (~.analytics_data_api.CreateAudienceExportRequest): + The request object. A request to create a new audience + export. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseBetaAnalyticsDataRestTransport._BaseCreateAudienceExport._get_http_options() + request, metadata = self._interceptor.pre_create_audience_export(request, metadata) + transcoded_request = _BaseBetaAnalyticsDataRestTransport._BaseCreateAudienceExport._get_transcoded_request(http_options, request) + + body = _BaseBetaAnalyticsDataRestTransport._BaseCreateAudienceExport._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseBetaAnalyticsDataRestTransport._BaseCreateAudienceExport._get_query_params_json(transcoded_request) + + # Send the request + response = BetaAnalyticsDataRestTransport._CreateAudienceExport._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_audience_export(resp) + return resp + + class _GetAudienceExport(_BaseBetaAnalyticsDataRestTransport._BaseGetAudienceExport, BetaAnalyticsDataRestStub): + def __hash__(self): + return hash("BetaAnalyticsDataRestTransport.GetAudienceExport") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: analytics_data_api.GetAudienceExportRequest, *, + retry: 
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.AudienceExport: + r"""Call the get audience export method over HTTP. + + Args: + request (~.analytics_data_api.GetAudienceExportRequest): + The request object. A request to retrieve configuration + metadata about a specific audience + export. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.AudienceExport: + An audience export is a list of users + in an audience at the time of the list's + creation. One audience may have multiple + audience exports created for different + days. + + """ + + http_options = _BaseBetaAnalyticsDataRestTransport._BaseGetAudienceExport._get_http_options() + request, metadata = self._interceptor.pre_get_audience_export(request, metadata) + transcoded_request = _BaseBetaAnalyticsDataRestTransport._BaseGetAudienceExport._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseBetaAnalyticsDataRestTransport._BaseGetAudienceExport._get_query_params_json(transcoded_request) + + # Send the request + response = BetaAnalyticsDataRestTransport._GetAudienceExport._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.AudienceExport() + pb_resp = analytics_data_api.AudienceExport.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_audience_export(resp) + return resp + + class _GetMetadata(_BaseBetaAnalyticsDataRestTransport._BaseGetMetadata, BetaAnalyticsDataRestStub): + def __hash__(self): + return hash("BetaAnalyticsDataRestTransport.GetMetadata") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: analytics_data_api.GetMetadataRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.Metadata: + r"""Call the get metadata method over HTTP. + + Args: + request (~.analytics_data_api.GetMetadataRequest): + The request object. Request for a property's dimension + and metric metadata. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.Metadata: + The dimensions, metrics and + comparisons currently accepted in + reporting methods. 
+ + """ + + http_options = _BaseBetaAnalyticsDataRestTransport._BaseGetMetadata._get_http_options() + request, metadata = self._interceptor.pre_get_metadata(request, metadata) + transcoded_request = _BaseBetaAnalyticsDataRestTransport._BaseGetMetadata._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseBetaAnalyticsDataRestTransport._BaseGetMetadata._get_query_params_json(transcoded_request) + + # Send the request + response = BetaAnalyticsDataRestTransport._GetMetadata._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.Metadata() + pb_resp = analytics_data_api.Metadata.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_metadata(resp) + return resp + + class _ListAudienceExports(_BaseBetaAnalyticsDataRestTransport._BaseListAudienceExports, BetaAnalyticsDataRestStub): + def __hash__(self): + return hash("BetaAnalyticsDataRestTransport.ListAudienceExports") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: analytics_data_api.ListAudienceExportsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> 
analytics_data_api.ListAudienceExportsResponse: + r"""Call the list audience exports method over HTTP. + + Args: + request (~.analytics_data_api.ListAudienceExportsRequest): + The request object. A request to list all audience + exports for a property. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.ListAudienceExportsResponse: + A list of all audience exports for a + property. + + """ + + http_options = _BaseBetaAnalyticsDataRestTransport._BaseListAudienceExports._get_http_options() + request, metadata = self._interceptor.pre_list_audience_exports(request, metadata) + transcoded_request = _BaseBetaAnalyticsDataRestTransport._BaseListAudienceExports._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseBetaAnalyticsDataRestTransport._BaseListAudienceExports._get_query_params_json(transcoded_request) + + # Send the request + response = BetaAnalyticsDataRestTransport._ListAudienceExports._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.ListAudienceExportsResponse() + pb_resp = analytics_data_api.ListAudienceExportsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_audience_exports(resp) + return resp + + class _QueryAudienceExport(_BaseBetaAnalyticsDataRestTransport._BaseQueryAudienceExport, BetaAnalyticsDataRestStub): + def __hash__(self): + return hash("BetaAnalyticsDataRestTransport.QueryAudienceExport") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: analytics_data_api.QueryAudienceExportRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.QueryAudienceExportResponse: + r"""Call the query audience export method over HTTP. + + Args: + request (~.analytics_data_api.QueryAudienceExportRequest): + The request object. A request to list users in an + audience export. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.QueryAudienceExportResponse: + A list of users in an audience + export. 
+ + """ + + http_options = _BaseBetaAnalyticsDataRestTransport._BaseQueryAudienceExport._get_http_options() + request, metadata = self._interceptor.pre_query_audience_export(request, metadata) + transcoded_request = _BaseBetaAnalyticsDataRestTransport._BaseQueryAudienceExport._get_transcoded_request(http_options, request) + + body = _BaseBetaAnalyticsDataRestTransport._BaseQueryAudienceExport._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseBetaAnalyticsDataRestTransport._BaseQueryAudienceExport._get_query_params_json(transcoded_request) + + # Send the request + response = BetaAnalyticsDataRestTransport._QueryAudienceExport._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.QueryAudienceExportResponse() + pb_resp = analytics_data_api.QueryAudienceExportResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_query_audience_export(resp) + return resp + + class _RunPivotReport(_BaseBetaAnalyticsDataRestTransport._BaseRunPivotReport, BetaAnalyticsDataRestStub): + def __hash__(self): + return hash("BetaAnalyticsDataRestTransport.RunPivotReport") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def 
__call__(self, + request: analytics_data_api.RunPivotReportRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.RunPivotReportResponse: + r"""Call the run pivot report method over HTTP. + + Args: + request (~.analytics_data_api.RunPivotReportRequest): + The request object. The request to generate a pivot + report. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.RunPivotReportResponse: + The response pivot report table + corresponding to a pivot request. + + """ + + http_options = _BaseBetaAnalyticsDataRestTransport._BaseRunPivotReport._get_http_options() + request, metadata = self._interceptor.pre_run_pivot_report(request, metadata) + transcoded_request = _BaseBetaAnalyticsDataRestTransport._BaseRunPivotReport._get_transcoded_request(http_options, request) + + body = _BaseBetaAnalyticsDataRestTransport._BaseRunPivotReport._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseBetaAnalyticsDataRestTransport._BaseRunPivotReport._get_query_params_json(transcoded_request) + + # Send the request + response = BetaAnalyticsDataRestTransport._RunPivotReport._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.RunPivotReportResponse() + pb_resp = analytics_data_api.RunPivotReportResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_pivot_report(resp) + return resp + + class _RunRealtimeReport(_BaseBetaAnalyticsDataRestTransport._BaseRunRealtimeReport, BetaAnalyticsDataRestStub): + def __hash__(self): + return hash("BetaAnalyticsDataRestTransport.RunRealtimeReport") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: analytics_data_api.RunRealtimeReportRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.RunRealtimeReportResponse: + r"""Call the run realtime report method over HTTP. + + Args: + request (~.analytics_data_api.RunRealtimeReportRequest): + The request object. The request to generate a realtime + report. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.RunRealtimeReportResponse: + The response realtime report table + corresponding to a request. 
+ + """ + + http_options = _BaseBetaAnalyticsDataRestTransport._BaseRunRealtimeReport._get_http_options() + request, metadata = self._interceptor.pre_run_realtime_report(request, metadata) + transcoded_request = _BaseBetaAnalyticsDataRestTransport._BaseRunRealtimeReport._get_transcoded_request(http_options, request) + + body = _BaseBetaAnalyticsDataRestTransport._BaseRunRealtimeReport._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseBetaAnalyticsDataRestTransport._BaseRunRealtimeReport._get_query_params_json(transcoded_request) + + # Send the request + response = BetaAnalyticsDataRestTransport._RunRealtimeReport._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.RunRealtimeReportResponse() + pb_resp = analytics_data_api.RunRealtimeReportResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_realtime_report(resp) + return resp + + class _RunReport(_BaseBetaAnalyticsDataRestTransport._BaseRunReport, BetaAnalyticsDataRestStub): + def __hash__(self): + return hash("BetaAnalyticsDataRestTransport.RunReport") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: 
analytics_data_api.RunReportRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> analytics_data_api.RunReportResponse: + r"""Call the run report method over HTTP. + + Args: + request (~.analytics_data_api.RunReportRequest): + The request object. The request to generate a report. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.RunReportResponse: + The response report table + corresponding to a request. + + """ + + http_options = _BaseBetaAnalyticsDataRestTransport._BaseRunReport._get_http_options() + request, metadata = self._interceptor.pre_run_report(request, metadata) + transcoded_request = _BaseBetaAnalyticsDataRestTransport._BaseRunReport._get_transcoded_request(http_options, request) + + body = _BaseBetaAnalyticsDataRestTransport._BaseRunReport._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseBetaAnalyticsDataRestTransport._BaseRunReport._get_query_params_json(transcoded_request) + + # Send the request + response = BetaAnalyticsDataRestTransport._RunReport._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.RunReportResponse() + pb_resp = analytics_data_api.RunReportResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_report(resp) + return resp + + @property + def batch_run_pivot_reports(self) -> Callable[ + [analytics_data_api.BatchRunPivotReportsRequest], + analytics_data_api.BatchRunPivotReportsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchRunPivotReports(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_run_reports(self) -> Callable[ + [analytics_data_api.BatchRunReportsRequest], + analytics_data_api.BatchRunReportsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchRunReports(self._session, self._host, self._interceptor) # type: ignore + + @property + def check_compatibility(self) -> Callable[ + [analytics_data_api.CheckCompatibilityRequest], + analytics_data_api.CheckCompatibilityResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CheckCompatibility(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_audience_export(self) -> Callable[ + [analytics_data_api.CreateAudienceExportRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateAudienceExport(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_audience_export(self) -> Callable[ + [analytics_data_api.GetAudienceExportRequest], + analytics_data_api.AudienceExport]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAudienceExport(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_metadata(self) -> Callable[ + [analytics_data_api.GetMetadataRequest], + analytics_data_api.Metadata]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetMetadata(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_audience_exports(self) -> Callable[ + [analytics_data_api.ListAudienceExportsRequest], + analytics_data_api.ListAudienceExportsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAudienceExports(self._session, self._host, self._interceptor) # type: ignore + + @property + def query_audience_export(self) -> Callable[ + [analytics_data_api.QueryAudienceExportRequest], + analytics_data_api.QueryAudienceExportResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._QueryAudienceExport(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_pivot_report(self) -> Callable[ + [analytics_data_api.RunPivotReportRequest], + analytics_data_api.RunPivotReportResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RunPivotReport(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_realtime_report(self) -> Callable[ + [analytics_data_api.RunRealtimeReportRequest], + analytics_data_api.RunRealtimeReportResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RunRealtimeReport(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_report(self) -> Callable[ + [analytics_data_api.RunReportRequest], + analytics_data_api.RunReportResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RunReport(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'BetaAnalyticsDataRestTransport', +) diff --git a/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/transports/rest_base.py b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/transports/rest_base.py new file mode 100644 index 000000000000..d0440982765a --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/services/beta_analytics_data/transports/rest_base.py @@ -0,0 +1,531 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from .base import BetaAnalyticsDataTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.analytics.data_v1beta.types import analytics_data_api +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseBetaAnalyticsDataRestTransport(BetaAnalyticsDataTransport): + """Base REST backend transport for BetaAnalyticsData. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'analyticsdata.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'analyticsdata.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseBatchRunPivotReports: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta/{property=properties/*}:batchRunPivotReports', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.BatchRunPivotReportsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True 
+ ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseBatchRunReports: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta/{property=properties/*}:batchRunReports', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.BatchRunReportsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCheckCompatibility: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta/{property=properties/*}:checkCompatibility', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.CheckCompatibilityRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def 
_get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateAudienceExport: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta/{parent=properties/*}/audienceExports', + 'body': 'audience_export', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.CreateAudienceExportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBetaAnalyticsDataRestTransport._BaseCreateAudienceExport._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetAudienceExport: + def __hash__(self): # pragma: NO COVER + return 
NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1beta/{name=properties/*/audienceExports/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.GetAudienceExportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBetaAnalyticsDataRestTransport._BaseGetAudienceExport._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetMetadata: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1beta/{name=properties/*/metadata}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.GetMetadataRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = 
json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBetaAnalyticsDataRestTransport._BaseGetMetadata._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListAudienceExports: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1beta/{parent=properties/*}/audienceExports', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.ListAudienceExportsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBetaAnalyticsDataRestTransport._BaseListAudienceExports._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseQueryAudienceExport: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': 
'/v1beta/{name=properties/*/audienceExports/*}:query', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.QueryAudienceExportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBetaAnalyticsDataRestTransport._BaseQueryAudienceExport._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRunPivotReport: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta/{property=properties/*}:runPivotReport', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.RunPivotReportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + + query_params["$alt"] = 
"json;enum-encoding=int" + return query_params + + class _BaseRunRealtimeReport: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta/{property=properties/*}:runRealtimeReport', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.RunRealtimeReportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRunReport: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta/{property=properties/*}:runReport', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.RunReportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = 
json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + +__all__=( + '_BaseBetaAnalyticsDataRestTransport', +) diff --git a/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/types/__init__.py b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/types/__init__.py new file mode 100644 index 000000000000..0925746cfbdb --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/types/__init__.py @@ -0,0 +1,144 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .analytics_data_api import ( + AudienceDimension, + AudienceDimensionValue, + AudienceExport, + AudienceExportMetadata, + AudienceRow, + BatchRunPivotReportsRequest, + BatchRunPivotReportsResponse, + BatchRunReportsRequest, + BatchRunReportsResponse, + CheckCompatibilityRequest, + CheckCompatibilityResponse, + CreateAudienceExportRequest, + GetAudienceExportRequest, + GetMetadataRequest, + ListAudienceExportsRequest, + ListAudienceExportsResponse, + Metadata, + QueryAudienceExportRequest, + QueryAudienceExportResponse, + RunPivotReportRequest, + RunPivotReportResponse, + RunRealtimeReportRequest, + RunRealtimeReportResponse, + RunReportRequest, + RunReportResponse, +) +from .data import ( + Cohort, + CohortReportSettings, + CohortSpec, + CohortsRange, + Comparison, + ComparisonMetadata, + DateRange, + Dimension, + DimensionCompatibility, + DimensionExpression, + DimensionHeader, + DimensionMetadata, + DimensionValue, + Filter, + FilterExpression, + FilterExpressionList, + Metric, + MetricCompatibility, + MetricHeader, + MetricMetadata, + MetricValue, + MinuteRange, + NumericValue, + OrderBy, + Pivot, + PivotDimensionHeader, + PivotHeader, + PropertyQuota, + QuotaStatus, + ResponseMetaData, + Row, + SamplingMetadata, + Compatibility, + MetricAggregation, + MetricType, + RestrictedMetricType, +) + +__all__ = ( + 'AudienceDimension', + 'AudienceDimensionValue', + 'AudienceExport', + 'AudienceExportMetadata', + 'AudienceRow', + 'BatchRunPivotReportsRequest', + 'BatchRunPivotReportsResponse', + 'BatchRunReportsRequest', + 'BatchRunReportsResponse', + 'CheckCompatibilityRequest', + 'CheckCompatibilityResponse', + 'CreateAudienceExportRequest', + 'GetAudienceExportRequest', + 'GetMetadataRequest', + 'ListAudienceExportsRequest', + 'ListAudienceExportsResponse', + 'Metadata', + 'QueryAudienceExportRequest', + 'QueryAudienceExportResponse', + 'RunPivotReportRequest', + 'RunPivotReportResponse', + 'RunRealtimeReportRequest', + 'RunRealtimeReportResponse', + 
'RunReportRequest', + 'RunReportResponse', + 'Cohort', + 'CohortReportSettings', + 'CohortSpec', + 'CohortsRange', + 'Comparison', + 'ComparisonMetadata', + 'DateRange', + 'Dimension', + 'DimensionCompatibility', + 'DimensionExpression', + 'DimensionHeader', + 'DimensionMetadata', + 'DimensionValue', + 'Filter', + 'FilterExpression', + 'FilterExpressionList', + 'Metric', + 'MetricCompatibility', + 'MetricHeader', + 'MetricMetadata', + 'MetricValue', + 'MinuteRange', + 'NumericValue', + 'OrderBy', + 'Pivot', + 'PivotDimensionHeader', + 'PivotHeader', + 'PropertyQuota', + 'QuotaStatus', + 'ResponseMetaData', + 'Row', + 'SamplingMetadata', + 'Compatibility', + 'MetricAggregation', + 'MetricType', + 'RestrictedMetricType', +) diff --git a/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/types/analytics_data_api.py b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/types/analytics_data_api.py new file mode 100644 index 000000000000..787b4cc58d72 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/types/analytics_data_api.py @@ -0,0 +1,1439 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.analytics.data_v1beta.types import data +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.analytics.data.v1beta', + manifest={ + 'CheckCompatibilityRequest', + 'CheckCompatibilityResponse', + 'Metadata', + 'RunReportRequest', + 'RunReportResponse', + 'RunPivotReportRequest', + 'RunPivotReportResponse', + 'BatchRunReportsRequest', + 'BatchRunReportsResponse', + 'BatchRunPivotReportsRequest', + 'BatchRunPivotReportsResponse', + 'GetMetadataRequest', + 'RunRealtimeReportRequest', + 'RunRealtimeReportResponse', + 'GetAudienceExportRequest', + 'ListAudienceExportsRequest', + 'ListAudienceExportsResponse', + 'CreateAudienceExportRequest', + 'AudienceExport', + 'AudienceExportMetadata', + 'QueryAudienceExportRequest', + 'QueryAudienceExportResponse', + 'AudienceRow', + 'AudienceDimension', + 'AudienceDimensionValue', + }, +) + + +class CheckCompatibilityRequest(proto.Message): + r"""The request for compatibility information for a report's dimensions + and metrics. Check compatibility provides a preview of the + compatibility of a report; fields shared with the ``runReport`` + request should be the same values as in your ``runReport`` request. + + Attributes: + property (str): + A Google Analytics property identifier whose events are + tracked. To learn more, see `where to find your Property + ID `__. + ``property`` should be the same value as in your + ``runReport`` request. + + Example: properties/1234 + dimensions (MutableSequence[google.analytics.data_v1beta.types.Dimension]): + The dimensions in this report. ``dimensions`` should be the + same value as in your ``runReport`` request. + metrics (MutableSequence[google.analytics.data_v1beta.types.Metric]): + The metrics in this report. ``metrics`` should be the same + value as in your ``runReport`` request. 
+ dimension_filter (google.analytics.data_v1beta.types.FilterExpression): + The filter clause of dimensions. ``dimensionFilter`` should + be the same value as in your ``runReport`` request. + metric_filter (google.analytics.data_v1beta.types.FilterExpression): + The filter clause of metrics. ``metricFilter`` should be the + same value as in your ``runReport`` request + compatibility_filter (google.analytics.data_v1beta.types.Compatibility): + Filters the dimensions and metrics in the response to just + this compatibility. Commonly used as + ``”compatibilityFilter”: “COMPATIBLE”`` to only return + compatible dimensions & metrics. + """ + + property: str = proto.Field( + proto.STRING, + number=1, + ) + dimensions: MutableSequence[data.Dimension] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.Dimension, + ) + metrics: MutableSequence[data.Metric] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Metric, + ) + dimension_filter: data.FilterExpression = proto.Field( + proto.MESSAGE, + number=4, + message=data.FilterExpression, + ) + metric_filter: data.FilterExpression = proto.Field( + proto.MESSAGE, + number=5, + message=data.FilterExpression, + ) + compatibility_filter: data.Compatibility = proto.Field( + proto.ENUM, + number=6, + enum=data.Compatibility, + ) + + +class CheckCompatibilityResponse(proto.Message): + r"""The compatibility response with the compatibility of each + dimension & metric. + + Attributes: + dimension_compatibilities (MutableSequence[google.analytics.data_v1beta.types.DimensionCompatibility]): + The compatibility of each dimension. + metric_compatibilities (MutableSequence[google.analytics.data_v1beta.types.MetricCompatibility]): + The compatibility of each metric. 
+ """ + + dimension_compatibilities: MutableSequence[data.DimensionCompatibility] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=data.DimensionCompatibility, + ) + metric_compatibilities: MutableSequence[data.MetricCompatibility] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.MetricCompatibility, + ) + + +class Metadata(proto.Message): + r"""The dimensions, metrics and comparisons currently accepted in + reporting methods. + + Attributes: + name (str): + Resource name of this metadata. + dimensions (MutableSequence[google.analytics.data_v1beta.types.DimensionMetadata]): + The dimension descriptions. + metrics (MutableSequence[google.analytics.data_v1beta.types.MetricMetadata]): + The metric descriptions. + comparisons (MutableSequence[google.analytics.data_v1beta.types.ComparisonMetadata]): + The comparison descriptions. + """ + + name: str = proto.Field( + proto.STRING, + number=3, + ) + dimensions: MutableSequence[data.DimensionMetadata] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=data.DimensionMetadata, + ) + metrics: MutableSequence[data.MetricMetadata] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.MetricMetadata, + ) + comparisons: MutableSequence[data.ComparisonMetadata] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.ComparisonMetadata, + ) + + +class RunReportRequest(proto.Message): + r"""The request to generate a report. + + Attributes: + property (str): + A Google Analytics property identifier whose events are + tracked. Specified in the URL path and not the body. To + learn more, see `where to find your Property + ID `__. + Within a batch request, this property should either be + unspecified or consistent with the batch-level property. + + Example: properties/1234 + dimensions (MutableSequence[google.analytics.data_v1beta.types.Dimension]): + The dimensions requested and displayed. 
+ metrics (MutableSequence[google.analytics.data_v1beta.types.Metric]): + The metrics requested and displayed. + date_ranges (MutableSequence[google.analytics.data_v1beta.types.DateRange]): + Date ranges of data to read. If multiple date ranges are + requested, each response row will contain a zero based date + range index. If two date ranges overlap, the event data for + the overlapping days is included in the response rows for + both date ranges. In a cohort request, this ``dateRanges`` + must be unspecified. + dimension_filter (google.analytics.data_v1beta.types.FilterExpression): + Dimension filters let you ask for only specific dimension + values in the report. To learn more, see `Fundamentals of + Dimension + Filters `__ + for examples. Metrics cannot be used in this filter. + metric_filter (google.analytics.data_v1beta.types.FilterExpression): + The filter clause of metrics. Applied after + aggregating the report's rows, similar to SQL + having-clause. Dimensions cannot be used in this + filter. + offset (int): + The row count of the start row. The first row is counted as + row 0. + + When paging, the first request does not specify offset; or + equivalently, sets offset to 0; the first request returns + the first ``limit`` of rows. The second request sets offset + to the ``limit`` of the first request; the second request + returns the second ``limit`` of rows. + + To learn more about this pagination parameter, see + `Pagination `__. + limit (int): + The number of rows to return. If unspecified, 10,000 rows + are returned. The API returns a maximum of 250,000 rows per + request, no matter how many you ask for. ``limit`` must be + positive. + + The API can also return fewer rows than the requested + ``limit``, if there aren't as many dimension values as the + ``limit``. 
For instance, there are fewer than 300 possible + values for the dimension ``country``, so when reporting on + only ``country``, you can't get more than 300 rows, even if + you set ``limit`` to a higher value. + + To learn more about this pagination parameter, see + `Pagination `__. + metric_aggregations (MutableSequence[google.analytics.data_v1beta.types.MetricAggregation]): + Aggregation of metrics. Aggregated metric values will be + shown in rows where the dimension_values are set to + "RESERVED_(MetricAggregation)". Aggregates including both + comparisons and multiple date ranges will be aggregated + based on the date ranges. + order_bys (MutableSequence[google.analytics.data_v1beta.types.OrderBy]): + Specifies how rows are ordered in the + response. Requests including both comparisons + and multiple date ranges will have order bys + applied on the comparisons. + currency_code (str): + A currency code in ISO4217 format, such as + "AED", "USD", "JPY". If the field is empty, the + report uses the property's default currency. + cohort_spec (google.analytics.data_v1beta.types.CohortSpec): + Cohort group associated with this request. If + there is a cohort group in the request the + 'cohort' dimension must be present. + keep_empty_rows (bool): + If false or unspecified, each row with all metrics equal to + 0 will not be returned. If true, these rows will be returned + if they are not separately removed by a filter. + + Regardless of this ``keep_empty_rows`` setting, only data + recorded by the Google Analytics property can be displayed + in a report. + + For example if a property never logs a ``purchase`` event, + then a query for the ``eventName`` dimension and + ``eventCount`` metric will not have a row eventName: + "purchase" and eventCount: 0. + return_property_quota (bool): + Toggles whether to return the current state of this Google + Analytics property's quota. Quota is returned in + `PropertyQuota <#PropertyQuota>`__. 
+ comparisons (MutableSequence[google.analytics.data_v1beta.types.Comparison]): + Optional. The configuration of comparisons + requested and displayed. The request only + requires a comparisons field in order to receive + a comparison column in the response. + """ + + property: str = proto.Field( + proto.STRING, + number=1, + ) + dimensions: MutableSequence[data.Dimension] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.Dimension, + ) + metrics: MutableSequence[data.Metric] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Metric, + ) + date_ranges: MutableSequence[data.DateRange] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.DateRange, + ) + dimension_filter: data.FilterExpression = proto.Field( + proto.MESSAGE, + number=5, + message=data.FilterExpression, + ) + metric_filter: data.FilterExpression = proto.Field( + proto.MESSAGE, + number=6, + message=data.FilterExpression, + ) + offset: int = proto.Field( + proto.INT64, + number=7, + ) + limit: int = proto.Field( + proto.INT64, + number=8, + ) + metric_aggregations: MutableSequence[data.MetricAggregation] = proto.RepeatedField( + proto.ENUM, + number=9, + enum=data.MetricAggregation, + ) + order_bys: MutableSequence[data.OrderBy] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message=data.OrderBy, + ) + currency_code: str = proto.Field( + proto.STRING, + number=11, + ) + cohort_spec: data.CohortSpec = proto.Field( + proto.MESSAGE, + number=12, + message=data.CohortSpec, + ) + keep_empty_rows: bool = proto.Field( + proto.BOOL, + number=13, + ) + return_property_quota: bool = proto.Field( + proto.BOOL, + number=14, + ) + comparisons: MutableSequence[data.Comparison] = proto.RepeatedField( + proto.MESSAGE, + number=15, + message=data.Comparison, + ) + + +class RunReportResponse(proto.Message): + r"""The response report table corresponding to a request. 
+ + Attributes: + dimension_headers (MutableSequence[google.analytics.data_v1beta.types.DimensionHeader]): + Describes dimension columns. The number of + DimensionHeaders and ordering of + DimensionHeaders matches the dimensions present + in rows. + metric_headers (MutableSequence[google.analytics.data_v1beta.types.MetricHeader]): + Describes metric columns. The number of + MetricHeaders and ordering of MetricHeaders + matches the metrics present in rows. + rows (MutableSequence[google.analytics.data_v1beta.types.Row]): + Rows of dimension value combinations and + metric values in the report. + totals (MutableSequence[google.analytics.data_v1beta.types.Row]): + If requested, the totaled values of metrics. + maximums (MutableSequence[google.analytics.data_v1beta.types.Row]): + If requested, the maximum values of metrics. + minimums (MutableSequence[google.analytics.data_v1beta.types.Row]): + If requested, the minimum values of metrics. + row_count (int): + The total number of rows in the query result. ``rowCount`` + is independent of the number of rows returned in the + response, the ``limit`` request parameter, and the + ``offset`` request parameter. For example if a query returns + 175 rows and includes ``limit`` of 50 in the API request, + the response will contain ``rowCount`` of 175 but only 50 + rows. + + To learn more about this pagination parameter, see + `Pagination `__. + metadata (google.analytics.data_v1beta.types.ResponseMetaData): + Metadata for the report. + property_quota (google.analytics.data_v1beta.types.PropertyQuota): + This Google Analytics property's quota state + including this request. + kind (str): + Identifies what kind of resource this message is. This + ``kind`` is always the fixed string + "analyticsData#runReport". Useful to distinguish between + response types in JSON. 
+ """ + + dimension_headers: MutableSequence[data.DimensionHeader] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=data.DimensionHeader, + ) + metric_headers: MutableSequence[data.MetricHeader] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.MetricHeader, + ) + rows: MutableSequence[data.Row] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Row, + ) + totals: MutableSequence[data.Row] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.Row, + ) + maximums: MutableSequence[data.Row] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=data.Row, + ) + minimums: MutableSequence[data.Row] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=data.Row, + ) + row_count: int = proto.Field( + proto.INT32, + number=7, + ) + metadata: data.ResponseMetaData = proto.Field( + proto.MESSAGE, + number=8, + message=data.ResponseMetaData, + ) + property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=9, + message=data.PropertyQuota, + ) + kind: str = proto.Field( + proto.STRING, + number=10, + ) + + +class RunPivotReportRequest(proto.Message): + r"""The request to generate a pivot report. + + Attributes: + property (str): + A Google Analytics property identifier whose events are + tracked. Specified in the URL path and not the body. To + learn more, see `where to find your Property + ID `__. + Within a batch request, this property should either be + unspecified or consistent with the batch-level property. + + Example: properties/1234 + dimensions (MutableSequence[google.analytics.data_v1beta.types.Dimension]): + The dimensions requested. All defined dimensions must be + used by one of the following: dimension_expression, + dimension_filter, pivots, order_bys. + metrics (MutableSequence[google.analytics.data_v1beta.types.Metric]): + The metrics requested, at least one metric needs to be + specified. 
All defined metrics must be used by one of the + following: metric_expression, metric_filter, order_bys. + date_ranges (MutableSequence[google.analytics.data_v1beta.types.DateRange]): + The date range to retrieve event data for the report. If + multiple date ranges are specified, event data from each + date range is used in the report. A special dimension with + field name "dateRange" can be included in a Pivot's field + names; if included, the report compares between date ranges. + In a cohort request, this ``dateRanges`` must be + unspecified. + pivots (MutableSequence[google.analytics.data_v1beta.types.Pivot]): + Describes the visual format of the report's + dimensions in columns or rows. The union of the + fieldNames (dimension names) in all pivots must + be a subset of dimension names defined in + Dimensions. No two pivots can share a dimension. + A dimension is only visible if it appears in a + pivot. + dimension_filter (google.analytics.data_v1beta.types.FilterExpression): + The filter clause of dimensions. Dimensions + must be requested to be used in this filter. + Metrics cannot be used in this filter. + metric_filter (google.analytics.data_v1beta.types.FilterExpression): + The filter clause of metrics. Applied at post + aggregation phase, similar to SQL having-clause. + Metrics must be requested to be used in this + filter. Dimensions cannot be used in this + filter. + currency_code (str): + A currency code in ISO4217 format, such as + "AED", "USD", "JPY". If the field is empty, the + report uses the property's default currency. + cohort_spec (google.analytics.data_v1beta.types.CohortSpec): + Cohort group associated with this request. If + there is a cohort group in the request the + 'cohort' dimension must be present. + keep_empty_rows (bool): + If false or unspecified, each row with all metrics equal to + 0 will not be returned. If true, these rows will be returned + if they are not separately removed by a filter. 
+ + Regardless of this ``keep_empty_rows`` setting, only data + recorded by the Google Analytics property can be displayed + in a report. + + For example if a property never logs a ``purchase`` event, + then a query for the ``eventName`` dimension and + ``eventCount`` metric will not have a row eventName: + "purchase" and eventCount: 0. + return_property_quota (bool): + Toggles whether to return the current state of this Google + Analytics property's quota. Quota is returned in + `PropertyQuota <#PropertyQuota>`__. + comparisons (MutableSequence[google.analytics.data_v1beta.types.Comparison]): + Optional. The configuration of comparisons + requested and displayed. The request requires + both a comparisons field and a comparisons + dimension to receive a comparison column in the + response. + """ + + property: str = proto.Field( + proto.STRING, + number=1, + ) + dimensions: MutableSequence[data.Dimension] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.Dimension, + ) + metrics: MutableSequence[data.Metric] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Metric, + ) + date_ranges: MutableSequence[data.DateRange] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.DateRange, + ) + pivots: MutableSequence[data.Pivot] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=data.Pivot, + ) + dimension_filter: data.FilterExpression = proto.Field( + proto.MESSAGE, + number=6, + message=data.FilterExpression, + ) + metric_filter: data.FilterExpression = proto.Field( + proto.MESSAGE, + number=7, + message=data.FilterExpression, + ) + currency_code: str = proto.Field( + proto.STRING, + number=8, + ) + cohort_spec: data.CohortSpec = proto.Field( + proto.MESSAGE, + number=9, + message=data.CohortSpec, + ) + keep_empty_rows: bool = proto.Field( + proto.BOOL, + number=10, + ) + return_property_quota: bool = proto.Field( + proto.BOOL, + number=11, + ) + comparisons: MutableSequence[data.Comparison] = proto.RepeatedField( + 
proto.MESSAGE, + number=12, + message=data.Comparison, + ) + + +class RunPivotReportResponse(proto.Message): + r"""The response pivot report table corresponding to a pivot + request. + + Attributes: + pivot_headers (MutableSequence[google.analytics.data_v1beta.types.PivotHeader]): + Summarizes the columns and rows created by a pivot. Each + pivot in the request produces one header in the response. If + we have a request like this: + + :: + + "pivots": [{ + "fieldNames": ["country", + "city"] + }, + { + "fieldNames": "eventName" + }] + + We will have the following ``pivotHeaders`` in the response: + + :: + + "pivotHeaders" : [{ + "dimensionHeaders": [{ + "dimensionValues": [ + { "value": "United Kingdom" }, + { "value": "London" } + ] + }, + { + "dimensionValues": [ + { "value": "Japan" }, + { "value": "Osaka" } + ] + }] + }, + { + "dimensionHeaders": [{ + "dimensionValues": [{ "value": "session_start" }] + }, + { + "dimensionValues": [{ "value": "scroll" }] + }] + }] + dimension_headers (MutableSequence[google.analytics.data_v1beta.types.DimensionHeader]): + Describes dimension columns. The number of + DimensionHeaders and ordering of + DimensionHeaders matches the dimensions present + in rows. + metric_headers (MutableSequence[google.analytics.data_v1beta.types.MetricHeader]): + Describes metric columns. The number of + MetricHeaders and ordering of MetricHeaders + matches the metrics present in rows. + rows (MutableSequence[google.analytics.data_v1beta.types.Row]): + Rows of dimension value combinations and + metric values in the report. + aggregates (MutableSequence[google.analytics.data_v1beta.types.Row]): + Aggregation of metric values. Can be totals, minimums, or + maximums. The returned aggregations are controlled by the + metric_aggregations in the pivot. The type of aggregation + returned in each row is shown by the dimension_values which + are set to "RESERVED\_". + metadata (google.analytics.data_v1beta.types.ResponseMetaData): + Metadata for the report. 
+ property_quota (google.analytics.data_v1beta.types.PropertyQuota): + This Google Analytics property's quota state + including this request. + kind (str): + Identifies what kind of resource this message is. This + ``kind`` is always the fixed string + "analyticsData#runPivotReport". Useful to distinguish + between response types in JSON. + """ + + pivot_headers: MutableSequence[data.PivotHeader] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=data.PivotHeader, + ) + dimension_headers: MutableSequence[data.DimensionHeader] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.DimensionHeader, + ) + metric_headers: MutableSequence[data.MetricHeader] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.MetricHeader, + ) + rows: MutableSequence[data.Row] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.Row, + ) + aggregates: MutableSequence[data.Row] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=data.Row, + ) + metadata: data.ResponseMetaData = proto.Field( + proto.MESSAGE, + number=6, + message=data.ResponseMetaData, + ) + property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=7, + message=data.PropertyQuota, + ) + kind: str = proto.Field( + proto.STRING, + number=8, + ) + + +class BatchRunReportsRequest(proto.Message): + r"""The batch request containing multiple report requests. + + Attributes: + property (str): + A Google Analytics property identifier whose events are + tracked. Specified in the URL path and not the body. To + learn more, see `where to find your Property + ID `__. + This property must be specified for the batch. The property + within RunReportRequest may either be unspecified or + consistent with this property. + + Example: properties/1234 + requests (MutableSequence[google.analytics.data_v1beta.types.RunReportRequest]): + Individual requests. Each request has a + separate report response. Each batch request is + allowed up to 5 requests. 
+ """ + + property: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence['RunReportRequest'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='RunReportRequest', + ) + + +class BatchRunReportsResponse(proto.Message): + r"""The batch response containing multiple reports. + + Attributes: + reports (MutableSequence[google.analytics.data_v1beta.types.RunReportResponse]): + Individual responses. Each response has a + separate report request. + kind (str): + Identifies what kind of resource this message is. This + ``kind`` is always the fixed string + "analyticsData#batchRunReports". Useful to distinguish + between response types in JSON. + """ + + reports: MutableSequence['RunReportResponse'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='RunReportResponse', + ) + kind: str = proto.Field( + proto.STRING, + number=2, + ) + + +class BatchRunPivotReportsRequest(proto.Message): + r"""The batch request containing multiple pivot report requests. + + Attributes: + property (str): + A Google Analytics property identifier whose events are + tracked. Specified in the URL path and not the body. To + learn more, see `where to find your Property + ID `__. + This property must be specified for the batch. The property + within RunPivotReportRequest may either be unspecified or + consistent with this property. + + Example: properties/1234 + requests (MutableSequence[google.analytics.data_v1beta.types.RunPivotReportRequest]): + Individual requests. Each request has a + separate pivot report response. Each batch + request is allowed up to 5 requests. + """ + + property: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence['RunPivotReportRequest'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='RunPivotReportRequest', + ) + + +class BatchRunPivotReportsResponse(proto.Message): + r"""The batch response containing multiple pivot reports. 
+ + Attributes: + pivot_reports (MutableSequence[google.analytics.data_v1beta.types.RunPivotReportResponse]): + Individual responses. Each response has a + separate pivot report request. + kind (str): + Identifies what kind of resource this message is. This + ``kind`` is always the fixed string + "analyticsData#batchRunPivotReports". Useful to distinguish + between response types in JSON. + """ + + pivot_reports: MutableSequence['RunPivotReportResponse'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='RunPivotReportResponse', + ) + kind: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetMetadataRequest(proto.Message): + r"""Request for a property's dimension and metric metadata. + + Attributes: + name (str): + Required. The resource name of the metadata to retrieve. + This name field is specified in the URL path and not URL + parameters. Property is a numeric Google Analytics property + identifier. To learn more, see `where to find your Property + ID `__. + + Example: properties/1234/metadata + + Set the Property ID to 0 for dimensions and metrics common + to all properties. In this special mode, this method will + not return custom dimensions and metrics. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RunRealtimeReportRequest(proto.Message): + r"""The request to generate a realtime report. + + Attributes: + property (str): + A Google Analytics property identifier whose events are + tracked. Specified in the URL path and not the body. To + learn more, see `where to find your Property + ID `__. + + Example: properties/1234 + dimensions (MutableSequence[google.analytics.data_v1beta.types.Dimension]): + The dimensions requested and displayed. + metrics (MutableSequence[google.analytics.data_v1beta.types.Metric]): + The metrics requested and displayed. + dimension_filter (google.analytics.data_v1beta.types.FilterExpression): + The filter clause of dimensions. Metrics + cannot be used in this filter. 
+ metric_filter (google.analytics.data_v1beta.types.FilterExpression): + The filter clause of metrics. Applied at post + aggregation phase, similar to SQL having-clause. + Dimensions cannot be used in this filter. + limit (int): + The number of rows to return. If unspecified, 10,000 rows + are returned. The API returns a maximum of 250,000 rows per + request, no matter how many you ask for. ``limit`` must be + positive. + + The API can also return fewer rows than the requested + ``limit``, if there aren't as many dimension values as the + ``limit``. For instance, there are fewer than 300 possible + values for the dimension ``country``, so when reporting on + only ``country``, you can't get more than 300 rows, even if + you set ``limit`` to a higher value. + metric_aggregations (MutableSequence[google.analytics.data_v1beta.types.MetricAggregation]): + Aggregation of metrics. Aggregated metric values will be + shown in rows where the dimension_values are set to + "RESERVED_(MetricAggregation)". + order_bys (MutableSequence[google.analytics.data_v1beta.types.OrderBy]): + Specifies how rows are ordered in the + response. + return_property_quota (bool): + Toggles whether to return the current state of this Google + Analytics property's Realtime quota. Quota is returned in + `PropertyQuota <#PropertyQuota>`__. + minute_ranges (MutableSequence[google.analytics.data_v1beta.types.MinuteRange]): + The minute ranges of event data to read. If + unspecified, one minute range for the last 30 + minutes will be used. If multiple minute ranges + are requested, each response row will contain a + zero based minute range index. If two minute + ranges overlap, the event data for the + overlapping minutes is included in the response + rows for both minute ranges. 
+ """ + + property: str = proto.Field( + proto.STRING, + number=1, + ) + dimensions: MutableSequence[data.Dimension] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.Dimension, + ) + metrics: MutableSequence[data.Metric] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Metric, + ) + dimension_filter: data.FilterExpression = proto.Field( + proto.MESSAGE, + number=4, + message=data.FilterExpression, + ) + metric_filter: data.FilterExpression = proto.Field( + proto.MESSAGE, + number=5, + message=data.FilterExpression, + ) + limit: int = proto.Field( + proto.INT64, + number=6, + ) + metric_aggregations: MutableSequence[data.MetricAggregation] = proto.RepeatedField( + proto.ENUM, + number=7, + enum=data.MetricAggregation, + ) + order_bys: MutableSequence[data.OrderBy] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message=data.OrderBy, + ) + return_property_quota: bool = proto.Field( + proto.BOOL, + number=9, + ) + minute_ranges: MutableSequence[data.MinuteRange] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message=data.MinuteRange, + ) + + +class RunRealtimeReportResponse(proto.Message): + r"""The response realtime report table corresponding to a + request. + + Attributes: + dimension_headers (MutableSequence[google.analytics.data_v1beta.types.DimensionHeader]): + Describes dimension columns. The number of + DimensionHeaders and ordering of + DimensionHeaders matches the dimensions present + in rows. + metric_headers (MutableSequence[google.analytics.data_v1beta.types.MetricHeader]): + Describes metric columns. The number of + MetricHeaders and ordering of MetricHeaders + matches the metrics present in rows. + rows (MutableSequence[google.analytics.data_v1beta.types.Row]): + Rows of dimension value combinations and + metric values in the report. + totals (MutableSequence[google.analytics.data_v1beta.types.Row]): + If requested, the totaled values of metrics. 
+ maximums (MutableSequence[google.analytics.data_v1beta.types.Row]): + If requested, the maximum values of metrics. + minimums (MutableSequence[google.analytics.data_v1beta.types.Row]): + If requested, the minimum values of metrics. + row_count (int): + The total number of rows in the query result. ``rowCount`` + is independent of the number of rows returned in the + response and the ``limit`` request parameter. For example if + a query returns 175 rows and includes ``limit`` of 50 in the + API request, the response will contain ``rowCount`` of 175 + but only 50 rows. + property_quota (google.analytics.data_v1beta.types.PropertyQuota): + This Google Analytics property's Realtime + quota state including this request. + kind (str): + Identifies what kind of resource this message is. This + ``kind`` is always the fixed string + "analyticsData#runRealtimeReport". Useful to distinguish + between response types in JSON. + """ + + dimension_headers: MutableSequence[data.DimensionHeader] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=data.DimensionHeader, + ) + metric_headers: MutableSequence[data.MetricHeader] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.MetricHeader, + ) + rows: MutableSequence[data.Row] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Row, + ) + totals: MutableSequence[data.Row] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.Row, + ) + maximums: MutableSequence[data.Row] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=data.Row, + ) + minimums: MutableSequence[data.Row] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=data.Row, + ) + row_count: int = proto.Field( + proto.INT32, + number=7, + ) + property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=8, + message=data.PropertyQuota, + ) + kind: str = proto.Field( + proto.STRING, + number=9, + ) + + +class GetAudienceExportRequest(proto.Message): + r"""A request to retrieve 
configuration metadata about a specific + audience export. + + Attributes: + name (str): + Required. The audience export resource name. Format: + ``properties/{property}/audienceExports/{audience_export}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListAudienceExportsRequest(proto.Message): + r"""A request to list all audience exports for a property. + + Attributes: + parent (str): + Required. All audience exports for this property will be + listed in the response. Format: ``properties/{property}`` + page_size (int): + Optional. The maximum number of audience + exports to return. The service may return fewer + than this value. If unspecified, at most 200 + audience exports will be returned. The maximum + value is 1000 (higher values will be coerced to + the maximum). + page_token (str): + Optional. A page token, received from a previous + ``ListAudienceExports`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListAudienceExports`` must match the call that provided + the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAudienceExportsResponse(proto.Message): + r"""A list of all audience exports for a property. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + audience_exports (MutableSequence[google.analytics.data_v1beta.types.AudienceExport]): + Each audience export for a property. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + + This field is a member of `oneof`_ ``_next_page_token``. 
+ """ + + @property + def raw_page(self): + return self + + audience_exports: MutableSequence['AudienceExport'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='AudienceExport', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + +class CreateAudienceExportRequest(proto.Message): + r"""A request to create a new audience export. + + Attributes: + parent (str): + Required. The parent resource where this audience export + will be created. Format: ``properties/{property}`` + audience_export (google.analytics.data_v1beta.types.AudienceExport): + Required. The audience export to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + audience_export: 'AudienceExport' = proto.Field( + proto.MESSAGE, + number=2, + message='AudienceExport', + ) + + +class AudienceExport(proto.Message): + r"""An audience export is a list of users in an audience at the + time of the list's creation. One audience may have multiple + audience exports created for different days. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. The audience export resource name + assigned during creation. This resource name identifies this + ``AudienceExport``. + + Format: + ``properties/{property}/audienceExports/{audience_export}`` + audience (str): + Required. The audience resource name. This resource name + identifies the audience being listed and is shared between + the Analytics Data & Admin APIs. + + Format: ``properties/{property}/audiences/{audience}`` + audience_display_name (str): + Output only. The descriptive display name for + this audience. For example, "Purchasers". + dimensions (MutableSequence[google.analytics.data_v1beta.types.AudienceDimension]): + Required. The dimensions requested and + displayed in the query response. 
+ state (google.analytics.data_v1beta.types.AudienceExport.State): + Output only. The current state for this + AudienceExport. + + This field is a member of `oneof`_ ``_state``. + begin_creating_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when CreateAudienceExport was called + and the AudienceExport began the ``CREATING`` state. + + This field is a member of `oneof`_ ``_begin_creating_time``. + creation_quota_tokens_charged (int): + Output only. The total quota tokens charged during creation + of the AudienceExport. Because this token count is based on + activity from the ``CREATING`` state, this tokens charged + will be fixed once an AudienceExport enters the ``ACTIVE`` + or ``FAILED`` states. + row_count (int): + Output only. The total number of rows in the + AudienceExport result. + + This field is a member of `oneof`_ ``_row_count``. + error_message (str): + Output only. Error message is populated when + an audience export fails during creation. A + common reason for such a failure is quota + exhaustion. + + This field is a member of `oneof`_ ``_error_message``. + percentage_completed (float): + Output only. The percentage completed for + this audience export ranging between 0 to 100. + + This field is a member of `oneof`_ ``_percentage_completed``. + """ + class State(proto.Enum): + r"""The AudienceExport currently exists in this state. + + Values: + STATE_UNSPECIFIED (0): + Unspecified state will never be used. + CREATING (1): + The AudienceExport is currently creating and + will be available in the future. Creating occurs + immediately after the CreateAudienceExport call. + ACTIVE (2): + The AudienceExport is fully created and ready + for querying. An AudienceExport is updated to + active asynchronously from a request; this + occurs some time (for example 15 minutes) after + the initial create call. + FAILED (3): + The AudienceExport failed to be created. It + is possible that re-requesting this audience + export will succeed. 
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + FAILED = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + audience: str = proto.Field( + proto.STRING, + number=2, + ) + audience_display_name: str = proto.Field( + proto.STRING, + number=3, + ) + dimensions: MutableSequence['AudienceDimension'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='AudienceDimension', + ) + state: State = proto.Field( + proto.ENUM, + number=5, + optional=True, + enum=State, + ) + begin_creating_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message=timestamp_pb2.Timestamp, + ) + creation_quota_tokens_charged: int = proto.Field( + proto.INT32, + number=7, + ) + row_count: int = proto.Field( + proto.INT32, + number=8, + optional=True, + ) + error_message: str = proto.Field( + proto.STRING, + number=9, + optional=True, + ) + percentage_completed: float = proto.Field( + proto.DOUBLE, + number=10, + optional=True, + ) + + +class AudienceExportMetadata(proto.Message): + r"""This metadata is currently blank. + """ + + +class QueryAudienceExportRequest(proto.Message): + r"""A request to list users in an audience export. + + Attributes: + name (str): + Required. The name of the audience export to retrieve users + from. Format: + ``properties/{property}/audienceExports/{audience_export}`` + offset (int): + Optional. The row count of the start row. The first row is + counted as row 0. + + When paging, the first request does not specify offset; or + equivalently, sets offset to 0; the first request returns + the first ``limit`` of rows. The second request sets offset + to the ``limit`` of the first request; the second request + returns the second ``limit`` of rows. + + To learn more about this pagination parameter, see + `Pagination `__. + limit (int): + Optional. The number of rows to return. If unspecified, + 10,000 rows are returned. 
The API returns a maximum of + 250,000 rows per request, no matter how many you ask for. + ``limit`` must be positive. + + The API can also return fewer rows than the requested + ``limit``, if there aren't as many dimension values as the + ``limit``. + + To learn more about this pagination parameter, see + `Pagination `__. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + offset: int = proto.Field( + proto.INT64, + number=2, + ) + limit: int = proto.Field( + proto.INT64, + number=3, + ) + + +class QueryAudienceExportResponse(proto.Message): + r"""A list of users in an audience export. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + audience_export (google.analytics.data_v1beta.types.AudienceExport): + Configuration data about AudienceExport being + queried. Returned to help interpret the audience + rows in this response. For example, the + dimensions in this AudienceExport correspond to + the columns in the AudienceRows. + + This field is a member of `oneof`_ ``_audience_export``. + audience_rows (MutableSequence[google.analytics.data_v1beta.types.AudienceRow]): + Rows for each user in an audience export. The + number of rows in this response will be less + than or equal to request's page size. + row_count (int): + The total number of rows in the AudienceExport result. + ``rowCount`` is independent of the number of rows returned + in the response, the ``limit`` request parameter, and the + ``offset`` request parameter. For example if a query returns + 175 rows and includes ``limit`` of 50 in the API request, + the response will contain ``rowCount`` of 175 but only 50 + rows. + + To learn more about this pagination parameter, see + `Pagination `__. + + This field is a member of `oneof`_ ``_row_count``. 
+ """ + + audience_export: 'AudienceExport' = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message='AudienceExport', + ) + audience_rows: MutableSequence['AudienceRow'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='AudienceRow', + ) + row_count: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + + +class AudienceRow(proto.Message): + r"""Dimension value attributes for the audience user row. + + Attributes: + dimension_values (MutableSequence[google.analytics.data_v1beta.types.AudienceDimensionValue]): + Each dimension value attribute for an + audience user. One dimension value will be added + for each dimension column requested. + """ + + dimension_values: MutableSequence['AudienceDimensionValue'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='AudienceDimensionValue', + ) + + +class AudienceDimension(proto.Message): + r"""An audience dimension is a user attribute. Specific user attributed + are requested and then later returned in the + ``QueryAudienceExportResponse``. + + Attributes: + dimension_name (str): + Optional. The API name of the dimension. See the `API + Dimensions `__ + for the list of dimension names. + """ + + dimension_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class AudienceDimensionValue(proto.Message): + r"""The value of a dimension. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (str): + Value as a string if the dimension type is a + string. + + This field is a member of `oneof`_ ``one_value``. 
+ """ + + value: str = proto.Field( + proto.STRING, + number=1, + oneof='one_value', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/types/data.py b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/types/data.py new file mode 100644 index 000000000000..3c19503e466a --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/google/analytics/data_v1beta/types/data.py @@ -0,0 +1,2052 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.analytics.data.v1beta', + manifest={ + 'MetricAggregation', + 'MetricType', + 'RestrictedMetricType', + 'Compatibility', + 'DateRange', + 'MinuteRange', + 'Dimension', + 'DimensionExpression', + 'Metric', + 'Comparison', + 'FilterExpression', + 'FilterExpressionList', + 'Filter', + 'OrderBy', + 'Pivot', + 'CohortSpec', + 'Cohort', + 'CohortsRange', + 'CohortReportSettings', + 'ResponseMetaData', + 'SamplingMetadata', + 'DimensionHeader', + 'MetricHeader', + 'PivotHeader', + 'PivotDimensionHeader', + 'Row', + 'DimensionValue', + 'MetricValue', + 'NumericValue', + 'PropertyQuota', + 'QuotaStatus', + 'DimensionMetadata', + 'MetricMetadata', + 'ComparisonMetadata', + 'DimensionCompatibility', + 'MetricCompatibility', + }, +) + + +class MetricAggregation(proto.Enum): + r"""Represents aggregation of metrics. + + Values: + METRIC_AGGREGATION_UNSPECIFIED (0): + Unspecified operator. + TOTAL (1): + SUM operator. + MINIMUM (5): + Minimum operator. + MAXIMUM (6): + Maximum operator. + COUNT (4): + Count operator. + """ + METRIC_AGGREGATION_UNSPECIFIED = 0 + TOTAL = 1 + MINIMUM = 5 + MAXIMUM = 6 + COUNT = 4 + + +class MetricType(proto.Enum): + r"""A metric's value type. + + Values: + METRIC_TYPE_UNSPECIFIED (0): + Unspecified type. + TYPE_INTEGER (1): + Integer type. + TYPE_FLOAT (2): + Floating point type. + TYPE_SECONDS (4): + A duration of seconds; a special floating + point type. + TYPE_MILLISECONDS (5): + A duration in milliseconds; a special + floating point type. + TYPE_MINUTES (6): + A duration in minutes; a special floating + point type. + TYPE_HOURS (7): + A duration in hours; a special floating point + type. + TYPE_STANDARD (8): + A custom metric of standard type; a special + floating point type. + TYPE_CURRENCY (9): + An amount of money; a special floating point + type. 
+ TYPE_FEET (10): + A length in feet; a special floating point + type. + TYPE_MILES (11): + A length in miles; a special floating point + type. + TYPE_METERS (12): + A length in meters; a special floating point + type. + TYPE_KILOMETERS (13): + A length in kilometers; a special floating + point type. + """ + METRIC_TYPE_UNSPECIFIED = 0 + TYPE_INTEGER = 1 + TYPE_FLOAT = 2 + TYPE_SECONDS = 4 + TYPE_MILLISECONDS = 5 + TYPE_MINUTES = 6 + TYPE_HOURS = 7 + TYPE_STANDARD = 8 + TYPE_CURRENCY = 9 + TYPE_FEET = 10 + TYPE_MILES = 11 + TYPE_METERS = 12 + TYPE_KILOMETERS = 13 + + +class RestrictedMetricType(proto.Enum): + r"""Categories of data that you may be restricted from viewing on + certain Google Analytics properties. + + Values: + RESTRICTED_METRIC_TYPE_UNSPECIFIED (0): + Unspecified type. + COST_DATA (1): + Cost metrics such as ``adCost``. + REVENUE_DATA (2): + Revenue metrics such as ``purchaseRevenue``. + """ + RESTRICTED_METRIC_TYPE_UNSPECIFIED = 0 + COST_DATA = 1 + REVENUE_DATA = 2 + + +class Compatibility(proto.Enum): + r"""The compatibility types for a single dimension or metric. + + Values: + COMPATIBILITY_UNSPECIFIED (0): + Unspecified compatibility. + COMPATIBLE (1): + The dimension or metric is compatible. This + dimension or metric can be successfully added to + a report. + INCOMPATIBLE (2): + The dimension or metric is incompatible. This + dimension or metric cannot be successfully added + to a report. + """ + COMPATIBILITY_UNSPECIFIED = 0 + COMPATIBLE = 1 + INCOMPATIBLE = 2 + + +class DateRange(proto.Message): + r"""A contiguous set of days: ``startDate``, ``startDate + 1``, ..., + ``endDate``. Requests are allowed up to 4 date ranges. + + Attributes: + start_date (str): + The inclusive start date for the query in the format + ``YYYY-MM-DD``. Cannot be after ``end_date``. The format + ``NdaysAgo``, ``yesterday``, or ``today`` is also accepted, + and in that case, the date is inferred based on the + property's reporting time zone. 
+ end_date (str): + The inclusive end date for the query in the format + ``YYYY-MM-DD``. Cannot be before ``start_date``. The format + ``NdaysAgo``, ``yesterday``, or ``today`` is also accepted, + and in that case, the date is inferred based on the + property's reporting time zone. + name (str): + Assigns a name to this date range. The dimension + ``dateRange`` is valued to this name in a report response. + If set, cannot begin with ``date_range_`` or ``RESERVED_``. + If not set, date ranges are named by their zero based index + in the request: ``date_range_0``, ``date_range_1``, etc. + """ + + start_date: str = proto.Field( + proto.STRING, + number=1, + ) + end_date: str = proto.Field( + proto.STRING, + number=2, + ) + name: str = proto.Field( + proto.STRING, + number=3, + ) + + +class MinuteRange(proto.Message): + r"""A contiguous set of minutes: ``startMinutesAgo``, + ``startMinutesAgo + 1``, ..., ``endMinutesAgo``. Requests are + allowed up to 2 minute ranges. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + start_minutes_ago (int): + The inclusive start minute for the query as a number of + minutes before now. For example, ``"startMinutesAgo": 29`` + specifies the report should include event data from 29 + minutes ago and after. Cannot be after ``endMinutesAgo``. + + If unspecified, ``startMinutesAgo`` is defaulted to 29. + Standard Analytics properties can request up to the last 30 + minutes of event data (``startMinutesAgo <= 29``), and 360 + Analytics properties can request up to the last 60 minutes + of event data (``startMinutesAgo <= 59``). + + This field is a member of `oneof`_ ``_start_minutes_ago``. + end_minutes_ago (int): + The inclusive end minute for the query as a number of + minutes before now. Cannot be before ``startMinutesAgo``. + For example, ``"endMinutesAgo": 15`` specifies the report + should include event data from prior to 15 minutes ago. 
+ + If unspecified, ``endMinutesAgo`` is defaulted to 0. + Standard Analytics properties can request any minute in the + last 30 minutes of event data (``endMinutesAgo <= 29``), and + 360 Analytics properties can request any minute in the last + 60 minutes of event data (``endMinutesAgo <= 59``). + + This field is a member of `oneof`_ ``_end_minutes_ago``. + name (str): + Assigns a name to this minute range. The dimension + ``dateRange`` is valued to this name in a report response. + If set, cannot begin with ``date_range_`` or ``RESERVED_``. + If not set, minute ranges are named by their zero based + index in the request: ``date_range_0``, ``date_range_1``, + etc. + """ + + start_minutes_ago: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + end_minutes_ago: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3, + ) + + +class Dimension(proto.Message): + r"""Dimensions are attributes of your data. For example, the + dimension city indicates the city from which an event + originates. Dimension values in report responses are strings; + for example, the city could be "Paris" or "New York". Requests + are allowed up to 9 dimensions. + + Attributes: + name (str): + The name of the dimension. See the `API + Dimensions `__ + for the list of dimension names supported by core reporting + methods such as ``runReport`` and ``batchRunReports``. See + `Realtime + Dimensions `__ + for the list of dimension names supported by the + ``runRealtimeReport`` method. See `Funnel + Dimensions `__ + for the list of dimension names supported by the + ``runFunnelReport`` method. + + If ``dimensionExpression`` is specified, ``name`` can be any + string that you would like within the allowed character set. + For example if a ``dimensionExpression`` concatenates + ``country`` and ``city``, you could call that dimension + ``countryAndCity``. 
Dimension names that you choose must + match the regular expression ``^[a-zA-Z0-9_]$``. + + Dimensions are referenced by ``name`` in + ``dimensionFilter``, ``orderBys``, ``dimensionExpression``, + and ``pivots``. + dimension_expression (google.analytics.data_v1beta.types.DimensionExpression): + One dimension can be the result of an + expression of multiple dimensions. For example, + dimension "country, city": concatenate(country, + ", ", city). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + dimension_expression: 'DimensionExpression' = proto.Field( + proto.MESSAGE, + number=2, + message='DimensionExpression', + ) + + +class DimensionExpression(proto.Message): + r"""Used to express a dimension which is the result of a formula of + multiple dimensions. Example usages: + + 1) lower_case(dimension) + 2) concatenate(dimension1, symbol, dimension2). + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + lower_case (google.analytics.data_v1beta.types.DimensionExpression.CaseExpression): + Used to convert a dimension value to lower + case. + + This field is a member of `oneof`_ ``one_expression``. + upper_case (google.analytics.data_v1beta.types.DimensionExpression.CaseExpression): + Used to convert a dimension value to upper + case. + + This field is a member of `oneof`_ ``one_expression``. + concatenate (google.analytics.data_v1beta.types.DimensionExpression.ConcatenateExpression): + Used to combine dimension values to a single + dimension. For example, dimension "country, + city": concatenate(country, ", ", city). + + This field is a member of `oneof`_ ``one_expression``. 
+ """ + + class CaseExpression(proto.Message): + r"""Used to convert a dimension value to a single case. + + Attributes: + dimension_name (str): + Name of a dimension. The name must refer back + to a name in dimensions field of the request. + """ + + dimension_name: str = proto.Field( + proto.STRING, + number=1, + ) + + class ConcatenateExpression(proto.Message): + r"""Used to combine dimension values to a single dimension. + + Attributes: + dimension_names (MutableSequence[str]): + Names of dimensions. The names must refer + back to names in the dimensions field of the + request. + delimiter (str): + The delimiter placed between dimension names. + + Delimiters are often single characters such as "|" or "," + but can be longer strings. If a dimension value contains the + delimiter, both will be present in response with no + distinction. For example if dimension 1 value = "US,FR", + dimension 2 value = "JP", and delimiter = ",", then the + response will contain "US,FR,JP". + """ + + dimension_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + delimiter: str = proto.Field( + proto.STRING, + number=2, + ) + + lower_case: CaseExpression = proto.Field( + proto.MESSAGE, + number=4, + oneof='one_expression', + message=CaseExpression, + ) + upper_case: CaseExpression = proto.Field( + proto.MESSAGE, + number=5, + oneof='one_expression', + message=CaseExpression, + ) + concatenate: ConcatenateExpression = proto.Field( + proto.MESSAGE, + number=6, + oneof='one_expression', + message=ConcatenateExpression, + ) + + +class Metric(proto.Message): + r"""The quantitative measurements of a report. For example, the metric + ``eventCount`` is the total number of events. Requests are allowed + up to 10 metrics. + + Attributes: + name (str): + The name of the metric. See the `API + Metrics `__ + for the list of metric names supported by core reporting + methods such as ``runReport`` and ``batchRunReports``. 
See + `Realtime + Metrics `__ + for the list of metric names supported by the + ``runRealtimeReport`` method. See `Funnel + Metrics `__ + for the list of metric names supported by the + ``runFunnelReport`` method. + + If ``expression`` is specified, ``name`` can be any string + that you would like within the allowed character set. For + example if ``expression`` is ``screenPageViews/sessions``, + you could call that metric's name = ``viewsPerSession``. + Metric names that you choose must match the regular + expression ``^[a-zA-Z0-9_]$``. + + Metrics are referenced by ``name`` in ``metricFilter``, + ``orderBys``, and metric ``expression``. + expression (str): + A mathematical expression for derived metrics. For example, + the metric Event count per user is + ``eventCount/totalUsers``. + invisible (bool): + Indicates if a metric is invisible in the report response. + If a metric is invisible, the metric will not produce a + column in the response, but can be used in ``metricFilter``, + ``orderBys``, or a metric ``expression``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + expression: str = proto.Field( + proto.STRING, + number=2, + ) + invisible: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class Comparison(proto.Message): + r"""Defines an individual comparison. Most requests will include + multiple comparisons so that the report compares between the + comparisons. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Each comparison produces separate rows in the + response. In the response, this comparison is + identified by this name. If name is unspecified, + we will use the saved comparisons display name. 
+ + This field is a member of `oneof`_ ``_name``. + dimension_filter (google.analytics.data_v1beta.types.FilterExpression): + A basic comparison. + + This field is a member of `oneof`_ ``one_comparison``. + comparison (str): + A saved comparison identified by the + comparison's resource name. For example, + 'comparisons/1234'. + + This field is a member of `oneof`_ ``one_comparison``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + dimension_filter: 'FilterExpression' = proto.Field( + proto.MESSAGE, + number=2, + oneof='one_comparison', + message='FilterExpression', + ) + comparison: str = proto.Field( + proto.STRING, + number=3, + oneof='one_comparison', + ) + + +class FilterExpression(proto.Message): + r"""To express dimension or metric filters. The fields in the + same FilterExpression need to be either all dimensions or all + metrics. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + and_group (google.analytics.data_v1beta.types.FilterExpressionList): + The FilterExpressions in and_group have an AND relationship. + + This field is a member of `oneof`_ ``expr``. + or_group (google.analytics.data_v1beta.types.FilterExpressionList): + The FilterExpressions in or_group have an OR relationship. + + This field is a member of `oneof`_ ``expr``. + not_expression (google.analytics.data_v1beta.types.FilterExpression): + The FilterExpression is NOT of not_expression. + + This field is a member of `oneof`_ ``expr``. + filter (google.analytics.data_v1beta.types.Filter): + A primitive filter. In the same + FilterExpression, all of the filter's field + names need to be either all dimensions or all + metrics. 
+ + This field is a member of `oneof`_ ``expr``. + """ + + and_group: 'FilterExpressionList' = proto.Field( + proto.MESSAGE, + number=1, + oneof='expr', + message='FilterExpressionList', + ) + or_group: 'FilterExpressionList' = proto.Field( + proto.MESSAGE, + number=2, + oneof='expr', + message='FilterExpressionList', + ) + not_expression: 'FilterExpression' = proto.Field( + proto.MESSAGE, + number=3, + oneof='expr', + message='FilterExpression', + ) + filter: 'Filter' = proto.Field( + proto.MESSAGE, + number=4, + oneof='expr', + message='Filter', + ) + + +class FilterExpressionList(proto.Message): + r"""A list of filter expressions. + + Attributes: + expressions (MutableSequence[google.analytics.data_v1beta.types.FilterExpression]): + A list of filter expressions. + """ + + expressions: MutableSequence['FilterExpression'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='FilterExpression', + ) + + +class Filter(proto.Message): + r"""An expression to filter dimension or metric values. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field_name (str): + The dimension name or metric name. + + In most methods, dimensions & metrics can be + used for the first time in this field. However + in a RunPivotReportRequest, this field must be + additionally specified by name in the + RunPivotReportRequest's dimensions or metrics. + string_filter (google.analytics.data_v1beta.types.Filter.StringFilter): + Strings related filter. + + This field is a member of `oneof`_ ``one_filter``. + in_list_filter (google.analytics.data_v1beta.types.Filter.InListFilter): + A filter for in list values. + + This field is a member of `oneof`_ ``one_filter``. 
+ numeric_filter (google.analytics.data_v1beta.types.Filter.NumericFilter): + A filter for numeric or date values. + + This field is a member of `oneof`_ ``one_filter``. + between_filter (google.analytics.data_v1beta.types.Filter.BetweenFilter): + A filter for two values. + + This field is a member of `oneof`_ ``one_filter``. + empty_filter (google.analytics.data_v1beta.types.Filter.EmptyFilter): + A filter for empty values such as "(not set)" + and "" values. + + This field is a member of `oneof`_ ``one_filter``. + """ + + class StringFilter(proto.Message): + r"""The filter for string + + Attributes: + match_type (google.analytics.data_v1beta.types.Filter.StringFilter.MatchType): + The match type for this filter. + value (str): + The string value used for the matching. + case_sensitive (bool): + If true, the string value is case sensitive. + """ + class MatchType(proto.Enum): + r"""The match type of a string filter + + Values: + MATCH_TYPE_UNSPECIFIED (0): + Unspecified + EXACT (1): + Exact match of the string value. + BEGINS_WITH (2): + Begins with the string value. + ENDS_WITH (3): + Ends with the string value. + CONTAINS (4): + Contains the string value. + FULL_REGEXP (5): + Full match for the regular expression with + the string value. + PARTIAL_REGEXP (6): + Partial match for the regular expression with + the string value. + """ + MATCH_TYPE_UNSPECIFIED = 0 + EXACT = 1 + BEGINS_WITH = 2 + ENDS_WITH = 3 + CONTAINS = 4 + FULL_REGEXP = 5 + PARTIAL_REGEXP = 6 + + match_type: 'Filter.StringFilter.MatchType' = proto.Field( + proto.ENUM, + number=1, + enum='Filter.StringFilter.MatchType', + ) + value: str = proto.Field( + proto.STRING, + number=2, + ) + case_sensitive: bool = proto.Field( + proto.BOOL, + number=3, + ) + + class InListFilter(proto.Message): + r"""The result needs to be in a list of string values. + + Attributes: + values (MutableSequence[str]): + The list of string values. + Must be non-empty. 
+ case_sensitive (bool): + If true, the string value is case sensitive. + """ + + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + case_sensitive: bool = proto.Field( + proto.BOOL, + number=2, + ) + + class NumericFilter(proto.Message): + r"""Filters for numeric or date values. + + Attributes: + operation (google.analytics.data_v1beta.types.Filter.NumericFilter.Operation): + The operation type for this filter. + value (google.analytics.data_v1beta.types.NumericValue): + A numeric value or a date value. + """ + class Operation(proto.Enum): + r"""The operation applied to a numeric filter + + Values: + OPERATION_UNSPECIFIED (0): + Unspecified. + EQUAL (1): + Equal + LESS_THAN (2): + Less than + LESS_THAN_OR_EQUAL (3): + Less than or equal + GREATER_THAN (4): + Greater than + GREATER_THAN_OR_EQUAL (5): + Greater than or equal + """ + OPERATION_UNSPECIFIED = 0 + EQUAL = 1 + LESS_THAN = 2 + LESS_THAN_OR_EQUAL = 3 + GREATER_THAN = 4 + GREATER_THAN_OR_EQUAL = 5 + + operation: 'Filter.NumericFilter.Operation' = proto.Field( + proto.ENUM, + number=1, + enum='Filter.NumericFilter.Operation', + ) + value: 'NumericValue' = proto.Field( + proto.MESSAGE, + number=2, + message='NumericValue', + ) + + class BetweenFilter(proto.Message): + r"""To express that the result needs to be between two numbers + (inclusive). + + Attributes: + from_value (google.analytics.data_v1beta.types.NumericValue): + Begins with this number. + to_value (google.analytics.data_v1beta.types.NumericValue): + Ends with this number. + """ + + from_value: 'NumericValue' = proto.Field( + proto.MESSAGE, + number=1, + message='NumericValue', + ) + to_value: 'NumericValue' = proto.Field( + proto.MESSAGE, + number=2, + message='NumericValue', + ) + + class EmptyFilter(proto.Message): + r"""Filter for empty values. 
+ """ + + field_name: str = proto.Field( + proto.STRING, + number=1, + ) + string_filter: StringFilter = proto.Field( + proto.MESSAGE, + number=3, + oneof='one_filter', + message=StringFilter, + ) + in_list_filter: InListFilter = proto.Field( + proto.MESSAGE, + number=4, + oneof='one_filter', + message=InListFilter, + ) + numeric_filter: NumericFilter = proto.Field( + proto.MESSAGE, + number=5, + oneof='one_filter', + message=NumericFilter, + ) + between_filter: BetweenFilter = proto.Field( + proto.MESSAGE, + number=6, + oneof='one_filter', + message=BetweenFilter, + ) + empty_filter: EmptyFilter = proto.Field( + proto.MESSAGE, + number=8, + oneof='one_filter', + message=EmptyFilter, + ) + + +class OrderBy(proto.Message): + r"""Order bys define how rows will be sorted in the response. For + example, ordering rows by descending event count is one + ordering, and ordering rows by the event name string is a + different ordering. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + metric (google.analytics.data_v1beta.types.OrderBy.MetricOrderBy): + Sorts results by a metric's values. + + This field is a member of `oneof`_ ``one_order_by``. + dimension (google.analytics.data_v1beta.types.OrderBy.DimensionOrderBy): + Sorts results by a dimension's values. + + This field is a member of `oneof`_ ``one_order_by``. + pivot (google.analytics.data_v1beta.types.OrderBy.PivotOrderBy): + Sorts results by a metric's values within a + pivot column group. + + This field is a member of `oneof`_ ``one_order_by``. + desc (bool): + If true, sorts by descending order. + """ + + class MetricOrderBy(proto.Message): + r"""Sorts by metric values. 
+ + Attributes: + metric_name (str): + A metric name in the request to order by. + """ + + metric_name: str = proto.Field( + proto.STRING, + number=1, + ) + + class DimensionOrderBy(proto.Message): + r"""Sorts by dimension values. + + Attributes: + dimension_name (str): + A dimension name in the request to order by. + order_type (google.analytics.data_v1beta.types.OrderBy.DimensionOrderBy.OrderType): + Controls the rule for dimension value + ordering. + """ + class OrderType(proto.Enum): + r"""Rule to order the string dimension values by. + + Values: + ORDER_TYPE_UNSPECIFIED (0): + Unspecified. + ALPHANUMERIC (1): + Alphanumeric sort by Unicode code point. For + example, "2" < "A" < "X" < "b" < "z". + CASE_INSENSITIVE_ALPHANUMERIC (2): + Case insensitive alphanumeric sort by lower + case Unicode code point. For example, "2" < "A" + < "b" < "X" < "z". + NUMERIC (3): + Dimension values are converted to numbers before sorting. + For example in NUMERIC sort, "25" < "100", and in + ``ALPHANUMERIC`` sort, "100" < "25". Non-numeric dimension + values all have equal ordering value below all numeric + values. + """ + ORDER_TYPE_UNSPECIFIED = 0 + ALPHANUMERIC = 1 + CASE_INSENSITIVE_ALPHANUMERIC = 2 + NUMERIC = 3 + + dimension_name: str = proto.Field( + proto.STRING, + number=1, + ) + order_type: 'OrderBy.DimensionOrderBy.OrderType' = proto.Field( + proto.ENUM, + number=2, + enum='OrderBy.DimensionOrderBy.OrderType', + ) + + class PivotOrderBy(proto.Message): + r"""Sorts by a pivot column group. + + Attributes: + metric_name (str): + In the response to order by, order rows by + this column. Must be a metric name from the + request. + pivot_selections (MutableSequence[google.analytics.data_v1beta.types.OrderBy.PivotOrderBy.PivotSelection]): + Used to select a dimension name and value + pivot. If multiple pivot selections are given, + the sort occurs on rows where all pivot + selection dimension name and value pairs match + the row's dimension name and value pair. 
+ """ + + class PivotSelection(proto.Message): + r"""A pair of dimension names and values. Rows with this dimension pivot + pair are ordered by the metric's value. + + For example if pivots = {{"browser", "Chrome"}} and metric_name = + "Sessions", then the rows will be sorted based on Sessions in + Chrome. + + :: + + ---------|----------|----------------|----------|---------------- + | Chrome | Chrome | Safari | Safari + ---------|----------|----------------|----------|---------------- + Country | Sessions | Pages/Sessions | Sessions | Pages/Sessions + ---------|----------|----------------|----------|---------------- + US | 2 | 2 | 3 | 1 + ---------|----------|----------------|----------|---------------- + Canada | 3 | 1 | 4 | 1 + ---------|----------|----------------|----------|---------------- + + Attributes: + dimension_name (str): + Must be a dimension name from the request. + dimension_value (str): + Order by only when the named dimension is + this value. + """ + + dimension_name: str = proto.Field( + proto.STRING, + number=1, + ) + dimension_value: str = proto.Field( + proto.STRING, + number=2, + ) + + metric_name: str = proto.Field( + proto.STRING, + number=1, + ) + pivot_selections: MutableSequence['OrderBy.PivotOrderBy.PivotSelection'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='OrderBy.PivotOrderBy.PivotSelection', + ) + + metric: MetricOrderBy = proto.Field( + proto.MESSAGE, + number=1, + oneof='one_order_by', + message=MetricOrderBy, + ) + dimension: DimensionOrderBy = proto.Field( + proto.MESSAGE, + number=2, + oneof='one_order_by', + message=DimensionOrderBy, + ) + pivot: PivotOrderBy = proto.Field( + proto.MESSAGE, + number=3, + oneof='one_order_by', + message=PivotOrderBy, + ) + desc: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class Pivot(proto.Message): + r"""Describes the visible dimension columns and rows in the + report response. 
+ + Attributes: + field_names (MutableSequence[str]): + Dimension names for visible columns in the + report response. Including "dateRange" produces + a date range column; for each row in the + response, dimension values in the date range + column will indicate the corresponding date + range from the request. + order_bys (MutableSequence[google.analytics.data_v1beta.types.OrderBy]): + Specifies how dimensions are ordered in the pivot. In the + first Pivot, the OrderBys determine Row and + PivotDimensionHeader ordering; in subsequent Pivots, the + OrderBys determine only PivotDimensionHeader ordering. + Dimensions specified in these OrderBys must be a subset of + Pivot.field_names. + offset (int): + The row count of the start row. The first row + is counted as row 0. + limit (int): + The number of unique combinations of dimension values to + return in this pivot. The ``limit`` parameter is required. A + ``limit`` of 10,000 is common for single pivot requests. + + The product of the ``limit`` for each ``pivot`` in a + ``RunPivotReportRequest`` must not exceed 250,000. For + example, a two pivot request with ``limit: 1000`` in each + pivot will fail because the product is ``1,000,000``. + metric_aggregations (MutableSequence[google.analytics.data_v1beta.types.MetricAggregation]): + Aggregate the metrics by dimensions in this pivot using the + specified metric_aggregations. + """ + + field_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + order_bys: MutableSequence['OrderBy'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='OrderBy', + ) + offset: int = proto.Field( + proto.INT64, + number=3, + ) + limit: int = proto.Field( + proto.INT64, + number=4, + ) + metric_aggregations: MutableSequence['MetricAggregation'] = proto.RepeatedField( + proto.ENUM, + number=5, + enum='MetricAggregation', + ) + + +class CohortSpec(proto.Message): + r"""The specification of cohorts for a cohort report. 
+ + Cohort reports create a time series of user retention for the + cohort. For example, you could select the cohort of users that were + acquired in the first week of September and follow that cohort for + the next six weeks. Selecting the users acquired in the first week + of September cohort is specified in the ``cohort`` object. Following + that cohort for the next six weeks is specified in the + ``cohortsRange`` object. + + For examples, see `Cohort Report + Examples `__. + + The report response could show a weekly time series where say your + app has retained 60% of this cohort after three weeks and 25% of + this cohort after six weeks. These two percentages can be calculated + by the metric ``cohortActiveUsers/cohortTotalUsers`` and will be + separate rows in the report. + + Attributes: + cohorts (MutableSequence[google.analytics.data_v1beta.types.Cohort]): + Defines the selection criteria to group users + into cohorts. + Most cohort reports define only a single cohort. + If multiple cohorts are specified, each cohort + can be recognized in the report by their name. + cohorts_range (google.analytics.data_v1beta.types.CohortsRange): + Cohort reports follow cohorts over an + extended reporting date range. This range + specifies an offset duration to follow the + cohorts over. + cohort_report_settings (google.analytics.data_v1beta.types.CohortReportSettings): + Optional settings for a cohort report. + """ + + cohorts: MutableSequence['Cohort'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Cohort', + ) + cohorts_range: 'CohortsRange' = proto.Field( + proto.MESSAGE, + number=2, + message='CohortsRange', + ) + cohort_report_settings: 'CohortReportSettings' = proto.Field( + proto.MESSAGE, + number=3, + message='CohortReportSettings', + ) + + +class Cohort(proto.Message): + r"""Defines a cohort selection criteria. A cohort is a group of users + who share a common characteristic. 
For example, users with the same + ``firstSessionDate`` belong to the same cohort. + + Attributes: + name (str): + Assigns a name to this cohort. The dimension ``cohort`` is + valued to this name in a report response. If set, cannot + begin with ``cohort_`` or ``RESERVED_``. If not set, cohorts + are named by their zero based index ``cohort_0``, + ``cohort_1``, etc. + dimension (str): + Dimension used by the cohort. Required and only supports + ``firstSessionDate``. + date_range (google.analytics.data_v1beta.types.DateRange): + The cohort selects users whose first touch date is between + start date and end date defined in the ``dateRange``. This + ``dateRange`` does not specify the full date range of event + data that is present in a cohort report. In a cohort report, + this ``dateRange`` is extended by the granularity and offset + present in the ``cohortsRange``; event data for the extended + reporting date range is present in a cohort report. + + In a cohort request, this ``dateRange`` is required and the + ``dateRanges`` in the ``RunReportRequest`` or + ``RunPivotReportRequest`` must be unspecified. + + This ``dateRange`` should generally be aligned with the + cohort's granularity. If ``CohortsRange`` uses daily + granularity, this ``dateRange`` can be a single day. If + ``CohortsRange`` uses weekly granularity, this ``dateRange`` + can be aligned to a week boundary, starting at Sunday and + ending Saturday. If ``CohortsRange`` uses monthly + granularity, this ``dateRange`` can be aligned to a month, + starting at the first and ending on the last day of the + month. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + dimension: str = proto.Field( + proto.STRING, + number=2, + ) + date_range: 'DateRange' = proto.Field( + proto.MESSAGE, + number=3, + message='DateRange', + ) + + +class CohortsRange(proto.Message): + r"""Configures the extended reporting date range for a cohort + report. Specifies an offset duration to follow the cohorts over. 
+ + Attributes: + granularity (google.analytics.data_v1beta.types.CohortsRange.Granularity): + Required. The granularity used to interpret the + ``startOffset`` and ``endOffset`` for the extended reporting + date range for a cohort report. + start_offset (int): + ``startOffset`` specifies the start date of the extended + reporting date range for a cohort report. ``startOffset`` is + commonly set to 0 so that reports contain data from the + acquisition of the cohort forward. + + If ``granularity`` is ``DAILY``, the ``startDate`` of the + extended reporting date range is ``startDate`` of the cohort + plus ``startOffset`` days. + + If ``granularity`` is ``WEEKLY``, the ``startDate`` of the + extended reporting date range is ``startDate`` of the cohort + plus ``startOffset * 7`` days. + + If ``granularity`` is ``MONTHLY``, the ``startDate`` of the + extended reporting date range is ``startDate`` of the cohort + plus ``startOffset * 30`` days. + end_offset (int): + Required. ``endOffset`` specifies the end date of the + extended reporting date range for a cohort report. + ``endOffset`` can be any positive integer but is commonly + set to 5 to 10 so that reports contain data on the cohort + for the next several granularity time periods. + + If ``granularity`` is ``DAILY``, the ``endDate`` of the + extended reporting date range is ``endDate`` of the cohort + plus ``endOffset`` days. + + If ``granularity`` is ``WEEKLY``, the ``endDate`` of the + extended reporting date range is ``endDate`` of the cohort + plus ``endOffset * 7`` days. + + If ``granularity`` is ``MONTHLY``, the ``endDate`` of the + extended reporting date range is ``endDate`` of the cohort + plus ``endOffset * 30`` days. + """ + class Granularity(proto.Enum): + r"""The granularity used to interpret the ``startOffset`` and + ``endOffset`` for the extended reporting date range for a cohort + report. + + Values: + GRANULARITY_UNSPECIFIED (0): + Should never be specified. + DAILY (1): + Daily granularity. 
Commonly used if the cohort's + ``dateRange`` is a single day and the request contains + ``cohortNthDay``. + WEEKLY (2): + Weekly granularity. Commonly used if the cohort's + ``dateRange`` is a week in duration (starting on Sunday and + ending on Saturday) and the request contains + ``cohortNthWeek``. + MONTHLY (3): + Monthly granularity. Commonly used if the cohort's + ``dateRange`` is a month in duration and the request + contains ``cohortNthMonth``. + """ + GRANULARITY_UNSPECIFIED = 0 + DAILY = 1 + WEEKLY = 2 + MONTHLY = 3 + + granularity: Granularity = proto.Field( + proto.ENUM, + number=1, + enum=Granularity, + ) + start_offset: int = proto.Field( + proto.INT32, + number=2, + ) + end_offset: int = proto.Field( + proto.INT32, + number=3, + ) + + +class CohortReportSettings(proto.Message): + r"""Optional settings of a cohort report. + + Attributes: + accumulate (bool): + If true, accumulates the result from first touch day to the + end day. Not supported in ``RunReportRequest``. + """ + + accumulate: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class ResponseMetaData(proto.Message): + r"""Response's metadata carrying additional information about the + report content. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + data_loss_from_other_row (bool): + If true, indicates some buckets of dimension combinations + are rolled into "(other)" row. This can happen for high + cardinality reports. + + The metadata parameter dataLossFromOtherRow is populated + based on the aggregated data table used in the report. The + parameter will be accurately populated regardless of the + filters and limits in the report. + + For example, the (other) row could be dropped from the + report because the request contains a filter on + sessionSource = google. This parameter will still be + populated if data loss from other row was present in the + input aggregate data used to generate this report. 
+
+            To learn more, see `About the (other) row and data
+            sampling <https://support.google.com/analytics/answer/13331292>`__.
+        schema_restriction_response (google.analytics.data_v1beta.types.ResponseMetaData.SchemaRestrictionResponse):
+            Describes the schema restrictions actively enforced in
+            creating this report. To learn more, see `Access and
+            data-restriction
+            management <https://support.google.com/analytics/answer/10851388>`__.
+
+            This field is a member of `oneof`_ ``_schema_restriction_response``.
+        currency_code (str):
+            The currency code used in this report. Intended to be used
+            in formatting currency metrics like ``purchaseRevenue`` for
+            visualization. If currency_code was specified in the
+            request, this response parameter will echo the request
+            parameter; otherwise, this response parameter is the
+            property's current currency_code.
+
+            Currency codes are string encodings of currency types from
+            the ISO 4217 standard
+            (https://en.wikipedia.org/wiki/ISO_4217); for example "USD",
+            "EUR", "JPY". To learn more, see
+            https://support.google.com/analytics/answer/9796179.
+
+            This field is a member of `oneof`_ ``_currency_code``.
+        time_zone (str):
+            The property's current timezone. Intended to be used to
+            interpret time-based dimensions like ``hour`` and
+            ``minute``. Formatted as strings from the IANA Time Zone
+            database (https://www.iana.org/time-zones); for example
+            "America/New_York" or "Asia/Tokyo".
+
+            This field is a member of `oneof`_ ``_time_zone``.
+        empty_reason (str):
+            If empty reason is specified, the report is
+            empty for this reason.
+
+            This field is a member of `oneof`_ ``_empty_reason``.
+        subject_to_thresholding (bool):
+            If ``subjectToThresholding`` is true, this report is subject
+            to thresholding and only returns data that meets the minimum
+            aggregation thresholds. It is possible for a request to be
+            subject to thresholding and no data is absent
+            from the report, and this happens when all data is above the
+            thresholds. To learn more, see `Data
+            thresholds <https://support.google.com/analytics/answer/9383630>`__.
+ + This field is a member of `oneof`_ ``_subject_to_thresholding``. + sampling_metadatas (MutableSequence[google.analytics.data_v1beta.types.SamplingMetadata]): + If this report results is + `sampled `__, + this describes the percentage of events used in this report. + One ``samplingMetadatas`` is populated for each date range. + Each ``samplingMetadatas`` corresponds to a date range in + order that date ranges were specified in the request. + + However if the results are not sampled, this field will not + be defined. + """ + + class SchemaRestrictionResponse(proto.Message): + r"""The schema restrictions actively enforced in creating this report. + To learn more, see `Access and data-restriction + management `__. + + Attributes: + active_metric_restrictions (MutableSequence[google.analytics.data_v1beta.types.ResponseMetaData.SchemaRestrictionResponse.ActiveMetricRestriction]): + All restrictions actively enforced in creating the report. + For example, ``purchaseRevenue`` always has the restriction + type ``REVENUE_DATA``. However, this active response + restriction is only populated if the user's custom role + disallows access to ``REVENUE_DATA``. + """ + + class ActiveMetricRestriction(proto.Message): + r"""A metric actively restricted in creating the report. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + metric_name (str): + The name of the restricted metric. + + This field is a member of `oneof`_ ``_metric_name``. + restricted_metric_types (MutableSequence[google.analytics.data_v1beta.types.RestrictedMetricType]): + The reason for this metric's restriction. 
+ """ + + metric_name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + restricted_metric_types: MutableSequence['RestrictedMetricType'] = proto.RepeatedField( + proto.ENUM, + number=2, + enum='RestrictedMetricType', + ) + + active_metric_restrictions: MutableSequence['ResponseMetaData.SchemaRestrictionResponse.ActiveMetricRestriction'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='ResponseMetaData.SchemaRestrictionResponse.ActiveMetricRestriction', + ) + + data_loss_from_other_row: bool = proto.Field( + proto.BOOL, + number=3, + ) + schema_restriction_response: SchemaRestrictionResponse = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message=SchemaRestrictionResponse, + ) + currency_code: str = proto.Field( + proto.STRING, + number=5, + optional=True, + ) + time_zone: str = proto.Field( + proto.STRING, + number=6, + optional=True, + ) + empty_reason: str = proto.Field( + proto.STRING, + number=7, + optional=True, + ) + subject_to_thresholding: bool = proto.Field( + proto.BOOL, + number=8, + optional=True, + ) + sampling_metadatas: MutableSequence['SamplingMetadata'] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message='SamplingMetadata', + ) + + +class SamplingMetadata(proto.Message): + r"""If this report results is + `sampled `__, + this describes the percentage of events used in this report. + Sampling is the practice of analyzing a subset of all data in order + to uncover the meaningful information in the larger data set. + + Attributes: + samples_read_count (int): + The total number of events read in this + sampled report for a date range. This is the + size of the subset this property's data that was + analyzed in this report. + sampling_space_size (int): + The total number of events present in this property's data + that could have been analyzed in this report for a date + range. 
Sampling uncovers the meaningful information about + the larger data set, and this is the size of the larger data + set. + + To calculate the percentage of available data that was used + in this report, compute + ``samplesReadCount/samplingSpaceSize``. + """ + + samples_read_count: int = proto.Field( + proto.INT64, + number=1, + ) + sampling_space_size: int = proto.Field( + proto.INT64, + number=2, + ) + + +class DimensionHeader(proto.Message): + r"""Describes a dimension column in the report. Dimensions + requested in a report produce column entries within rows and + DimensionHeaders. However, dimensions used exclusively within + filters or expressions do not produce columns in a report; + correspondingly, those dimensions do not produce headers. + + Attributes: + name (str): + The dimension's name. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class MetricHeader(proto.Message): + r"""Describes a metric column in the report. Visible metrics + requested in a report produce column entries within rows and + MetricHeaders. However, metrics used exclusively within filters + or expressions do not produce columns in a report; + correspondingly, those metrics do not produce headers. + + Attributes: + name (str): + The metric's name. + type_ (google.analytics.data_v1beta.types.MetricType): + The metric's data type. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'MetricType' = proto.Field( + proto.ENUM, + number=2, + enum='MetricType', + ) + + +class PivotHeader(proto.Message): + r"""Dimensions' values in a single pivot. + + Attributes: + pivot_dimension_headers (MutableSequence[google.analytics.data_v1beta.types.PivotDimensionHeader]): + The size is the same as the cardinality of + the corresponding dimension combinations. + row_count (int): + The cardinality of the pivot. The total number of rows for + this pivot's fields regardless of how the parameters + ``offset`` and ``limit`` are specified in the request. 
+ """ + + pivot_dimension_headers: MutableSequence['PivotDimensionHeader'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='PivotDimensionHeader', + ) + row_count: int = proto.Field( + proto.INT32, + number=2, + ) + + +class PivotDimensionHeader(proto.Message): + r"""Summarizes dimension values from a row for this pivot. + + Attributes: + dimension_values (MutableSequence[google.analytics.data_v1beta.types.DimensionValue]): + Values of multiple dimensions in a pivot. + """ + + dimension_values: MutableSequence['DimensionValue'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DimensionValue', + ) + + +class Row(proto.Message): + r"""Report data for each row. For example if RunReportRequest contains: + + .. code:: none + + "dimensions": [ + { + "name": "eventName" + }, + { + "name": "countryId" + } + ], + "metrics": [ + { + "name": "eventCount" + } + ] + + One row with 'in_app_purchase' as the eventName, 'JP' as the + countryId, and 15 as the eventCount, would be: + + .. code:: none + + "dimensionValues": [ + { + "value": "in_app_purchase" + }, + { + "value": "JP" + } + ], + "metricValues": [ + { + "value": "15" + } + ] + + Attributes: + dimension_values (MutableSequence[google.analytics.data_v1beta.types.DimensionValue]): + List of requested dimension values. In a PivotReport, + dimension_values are only listed for dimensions included in + a pivot. + metric_values (MutableSequence[google.analytics.data_v1beta.types.MetricValue]): + List of requested visible metric values. + """ + + dimension_values: MutableSequence['DimensionValue'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DimensionValue', + ) + metric_values: MutableSequence['MetricValue'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='MetricValue', + ) + + +class DimensionValue(proto.Message): + r"""The value of a dimension. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (str): + Value as a string if the dimension type is a + string. + + This field is a member of `oneof`_ ``one_value``. + """ + + value: str = proto.Field( + proto.STRING, + number=1, + oneof='one_value', + ) + + +class MetricValue(proto.Message): + r"""The value of a metric. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (str): + Measurement value. See MetricHeader for type. + + This field is a member of `oneof`_ ``one_value``. + """ + + value: str = proto.Field( + proto.STRING, + number=4, + oneof='one_value', + ) + + +class NumericValue(proto.Message): + r"""To represent a number. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + int64_value (int): + Integer value + + This field is a member of `oneof`_ ``one_value``. + double_value (float): + Double value + + This field is a member of `oneof`_ ``one_value``. + """ + + int64_value: int = proto.Field( + proto.INT64, + number=1, + oneof='one_value', + ) + double_value: float = proto.Field( + proto.DOUBLE, + number=2, + oneof='one_value', + ) + + +class PropertyQuota(proto.Message): + r"""Current state of all quotas for this Analytics Property. If + any quota for a property is exhausted, all requests to that + property will return Resource Exhausted errors. + + Attributes: + tokens_per_day (google.analytics.data_v1beta.types.QuotaStatus): + Standard Analytics Properties can use up to + 200,000 tokens per day; Analytics 360 Properties + can use 2,000,000 tokens per day. Most requests + consume fewer than 10 tokens. 
+ tokens_per_hour (google.analytics.data_v1beta.types.QuotaStatus): + Standard Analytics Properties can use up to + 40,000 tokens per hour; Analytics 360 Properties + can use 400,000 tokens per hour. An API request + consumes a single number of tokens, and that + number is deducted from all of the hourly, + daily, and per project hourly quotas. + concurrent_requests (google.analytics.data_v1beta.types.QuotaStatus): + Standard Analytics Properties can send up to + 10 concurrent requests; Analytics 360 Properties + can use up to 50 concurrent requests. + server_errors_per_project_per_hour (google.analytics.data_v1beta.types.QuotaStatus): + Standard Analytics Properties and cloud + project pairs can have up to 10 server errors + per hour; Analytics 360 Properties and cloud + project pairs can have up to 50 server errors + per hour. + potentially_thresholded_requests_per_hour (google.analytics.data_v1beta.types.QuotaStatus): + Analytics Properties can send up to 120 + requests with potentially thresholded dimensions + per hour. In a batch request, each report + request is individually counted for this quota + if the request contains potentially thresholded + dimensions. + tokens_per_project_per_hour (google.analytics.data_v1beta.types.QuotaStatus): + Analytics Properties can use up to 35% of + their tokens per project per hour. This amounts + to standard Analytics Properties can use up to + 14,000 tokens per project per hour, and + Analytics 360 Properties can use 140,000 tokens + per project per hour. An API request consumes a + single number of tokens, and that number is + deducted from all of the hourly, daily, and per + project hourly quotas. 
+ """ + + tokens_per_day: 'QuotaStatus' = proto.Field( + proto.MESSAGE, + number=1, + message='QuotaStatus', + ) + tokens_per_hour: 'QuotaStatus' = proto.Field( + proto.MESSAGE, + number=2, + message='QuotaStatus', + ) + concurrent_requests: 'QuotaStatus' = proto.Field( + proto.MESSAGE, + number=3, + message='QuotaStatus', + ) + server_errors_per_project_per_hour: 'QuotaStatus' = proto.Field( + proto.MESSAGE, + number=4, + message='QuotaStatus', + ) + potentially_thresholded_requests_per_hour: 'QuotaStatus' = proto.Field( + proto.MESSAGE, + number=5, + message='QuotaStatus', + ) + tokens_per_project_per_hour: 'QuotaStatus' = proto.Field( + proto.MESSAGE, + number=6, + message='QuotaStatus', + ) + + +class QuotaStatus(proto.Message): + r"""Current state for a particular quota group. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + consumed (int): + Quota consumed by this request. + + This field is a member of `oneof`_ ``_consumed``. + remaining (int): + Quota remaining after this request. + + This field is a member of `oneof`_ ``_remaining``. + """ + + consumed: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + remaining: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + + +class DimensionMetadata(proto.Message): + r"""Explains a dimension. + + Attributes: + api_name (str): + This dimension's name. Useable in + `Dimension <#Dimension>`__'s ``name``. For example, + ``eventName``. + ui_name (str): + This dimension's name within the Google Analytics user + interface. For example, ``Event name``. + description (str): + Description of how this dimension is used and + calculated. + deprecated_api_names (MutableSequence[str]): + Still usable but deprecated names for this dimension. If + populated, this dimension is available by either ``apiName`` + or one of ``deprecatedApiNames`` for a period of time. 
After + the deprecation period, the dimension will be available only + by ``apiName``. + custom_definition (bool): + True if the dimension is custom to this + property. This includes user, event, & item + scoped custom dimensions; to learn more about + custom dimensions, see + https://support.google.com/analytics/answer/14240153. + This also include custom channel groups; to + learn more about custom channel groups, see + https://support.google.com/analytics/answer/13051316. + category (str): + The display name of the category that this + dimension belongs to. Similar dimensions and + metrics are categorized together. + """ + + api_name: str = proto.Field( + proto.STRING, + number=1, + ) + ui_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + deprecated_api_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + custom_definition: bool = proto.Field( + proto.BOOL, + number=5, + ) + category: str = proto.Field( + proto.STRING, + number=7, + ) + + +class MetricMetadata(proto.Message): + r"""Explains a metric. + + Attributes: + api_name (str): + A metric name. Useable in `Metric <#Metric>`__'s ``name``. + For example, ``eventCount``. + ui_name (str): + This metric's name within the Google Analytics user + interface. For example, ``Event count``. + description (str): + Description of how this metric is used and + calculated. + deprecated_api_names (MutableSequence[str]): + Still usable but deprecated names for this metric. If + populated, this metric is available by either ``apiName`` or + one of ``deprecatedApiNames`` for a period of time. After + the deprecation period, the metric will be available only by + ``apiName``. + type_ (google.analytics.data_v1beta.types.MetricType): + The type of this metric. + expression (str): + The mathematical expression for this derived metric. Can be + used in `Metric <#Metric>`__'s ``expression`` field for + equivalent reports. 
Most metrics are not expressions, and + for non-expressions, this field is empty. + custom_definition (bool): + True if the metric is a custom metric for + this property. + blocked_reasons (MutableSequence[google.analytics.data_v1beta.types.MetricMetadata.BlockedReason]): + If reasons are specified, your access is blocked to this + metric for this property. API requests from you to this + property for this metric will succeed; however, the report + will contain only zeros for this metric. API requests with + metric filters on blocked metrics will fail. If reasons are + empty, you have access to this metric. + + To learn more, see `Access and data-restriction + management `__. + category (str): + The display name of the category that this + metrics belongs to. Similar dimensions and + metrics are categorized together. + """ + class BlockedReason(proto.Enum): + r"""Justifications for why this metric is blocked. + + Values: + BLOCKED_REASON_UNSPECIFIED (0): + Will never be specified in API response. + NO_REVENUE_METRICS (1): + If present, your access is blocked to revenue + related metrics for this property, and this + metric is revenue related. + NO_COST_METRICS (2): + If present, your access is blocked to cost + related metrics for this property, and this + metric is cost related. 
+ """ + BLOCKED_REASON_UNSPECIFIED = 0 + NO_REVENUE_METRICS = 1 + NO_COST_METRICS = 2 + + api_name: str = proto.Field( + proto.STRING, + number=1, + ) + ui_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + deprecated_api_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + type_: 'MetricType' = proto.Field( + proto.ENUM, + number=5, + enum='MetricType', + ) + expression: str = proto.Field( + proto.STRING, + number=6, + ) + custom_definition: bool = proto.Field( + proto.BOOL, + number=7, + ) + blocked_reasons: MutableSequence[BlockedReason] = proto.RepeatedField( + proto.ENUM, + number=8, + enum=BlockedReason, + ) + category: str = proto.Field( + proto.STRING, + number=10, + ) + + +class ComparisonMetadata(proto.Message): + r"""The metadata for a single comparison. + + Attributes: + api_name (str): + This comparison's resource name. Useable in + `Comparison <#Comparison>`__'s ``comparison`` field. For + example, 'comparisons/1234'. + ui_name (str): + This comparison's name within the Google + Analytics user interface. + description (str): + This comparison's description. + """ + + api_name: str = proto.Field( + proto.STRING, + number=1, + ) + ui_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DimensionCompatibility(proto.Message): + r"""The compatibility for a single dimension. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + dimension_metadata (google.analytics.data_v1beta.types.DimensionMetadata): + The dimension metadata contains the API name + for this compatibility information. The + dimension metadata also contains other helpful + information like the UI name and description. + + This field is a member of `oneof`_ ``_dimension_metadata``. 
+ compatibility (google.analytics.data_v1beta.types.Compatibility): + The compatibility of this dimension. If the + compatibility is COMPATIBLE, this dimension can + be successfully added to the report. + + This field is a member of `oneof`_ ``_compatibility``. + """ + + dimension_metadata: 'DimensionMetadata' = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message='DimensionMetadata', + ) + compatibility: 'Compatibility' = proto.Field( + proto.ENUM, + number=2, + optional=True, + enum='Compatibility', + ) + + +class MetricCompatibility(proto.Message): + r"""The compatibility for a single metric. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + metric_metadata (google.analytics.data_v1beta.types.MetricMetadata): + The metric metadata contains the API name for + this compatibility information. The metric + metadata also contains other helpful information + like the UI name and description. + + This field is a member of `oneof`_ ``_metric_metadata``. + compatibility (google.analytics.data_v1beta.types.Compatibility): + The compatibility of this metric. If the + compatibility is COMPATIBLE, this metric can be + successfully added to the report. + + This field is a member of `oneof`_ ``_compatibility``. 
+ """ + + metric_metadata: 'MetricMetadata' = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message='MetricMetadata', + ) + compatibility: 'Compatibility' = proto.Field( + proto.ENUM, + number=2, + optional=True, + enum='Compatibility', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-analytics-data/v1beta/mypy.ini b/owl-bot-staging/google-analytics-data/v1beta/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/google-analytics-data/v1beta/noxfile.py b/owl-bot-staging/google-analytics-data/v1beta/noxfile.py new file mode 100644 index 000000000000..bde0c5f2f1b4 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/noxfile.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import re +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = 'google-analytics-data' + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.13" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "prerelease_deps", +] + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def unit(session, protobuf_implementation): + """Run the unit test suite.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. 
+ if protobuf_implementation == "cpp": + session.install("protobuf<4") + + session.run( + 'py.test', + '--quiet', + '--cov=google/analytics/data_v1beta/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + +@nox.session(python=ALL_PYTHON[-1]) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def prerelease_deps(session, protobuf_implementation): + """Run the unit test suite against pre-release versions of dependencies.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install test environment dependencies + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/analytics/data_v1beta/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '-p', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_batch_run_pivot_reports_async.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_batch_run_pivot_reports_async.py new file mode 100644 index 000000000000..d1d39b06a65b --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_batch_run_pivot_reports_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchRunPivotReports +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_BatchRunPivotReports_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +async def sample_batch_run_pivot_reports(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.BatchRunPivotReportsRequest( + ) + + # Make the request + response = await client.batch_run_pivot_reports(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_BatchRunPivotReports_async] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_batch_run_pivot_reports_sync.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_batch_run_pivot_reports_sync.py new file mode 100644 index 000000000000..9229bdb748a1 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_batch_run_pivot_reports_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchRunPivotReports +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_BatchRunPivotReports_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +def sample_batch_run_pivot_reports(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.BatchRunPivotReportsRequest( + ) + + # Make the request + response = client.batch_run_pivot_reports(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_BatchRunPivotReports_sync] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_batch_run_reports_async.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_batch_run_reports_async.py new file mode 100644 index 000000000000..3250aedf6037 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_batch_run_reports_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchRunReports +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_BatchRunReports_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +async def sample_batch_run_reports(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.BatchRunReportsRequest( + ) + + # Make the request + response = await client.batch_run_reports(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_BatchRunReports_async] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_batch_run_reports_sync.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_batch_run_reports_sync.py new file mode 100644 index 000000000000..28ba99d6b104 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_batch_run_reports_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchRunReports +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_BatchRunReports_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +def sample_batch_run_reports(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.BatchRunReportsRequest( + ) + + # Make the request + response = client.batch_run_reports(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_BatchRunReports_sync] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_check_compatibility_async.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_check_compatibility_async.py new file mode 100644 index 000000000000..cc377e84064b --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_check_compatibility_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CheckCompatibility +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_CheckCompatibility_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +async def sample_check_compatibility(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.CheckCompatibilityRequest( + ) + + # Make the request + response = await client.check_compatibility(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_CheckCompatibility_async] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_check_compatibility_sync.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_check_compatibility_sync.py new file mode 100644 index 000000000000..42791e2179d7 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_check_compatibility_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CheckCompatibility +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_CheckCompatibility_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +def sample_check_compatibility(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.CheckCompatibilityRequest( + ) + + # Make the request + response = client.check_compatibility(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_CheckCompatibility_sync] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_async.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_async.py new file mode 100644 index 000000000000..c5592f4e2ec4 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAudienceExport +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_CreateAudienceExport_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +async def sample_create_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + audience_export = data_v1beta.AudienceExport() + audience_export.audience = "audience_value" + + request = data_v1beta.CreateAudienceExportRequest( + parent="parent_value", + audience_export=audience_export, + ) + + # Make the request + operation = client.create_audience_export(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_CreateAudienceExport_async] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_sync.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_sync.py new file mode 100644 index 000000000000..c994111afefe --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 
Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAudienceExport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_CreateAudienceExport_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +def sample_create_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + audience_export = data_v1beta.AudienceExport() + audience_export.audience = "audience_value" + + request = data_v1beta.CreateAudienceExportRequest( + parent="parent_value", + audience_export=audience_export, + ) + + # Make the request + operation = client.create_audience_export(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_CreateAudienceExport_sync] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_async.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_async.py new file mode 100644 index 000000000000..b2ce39b0d40a --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAudienceExport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_GetAudienceExport_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +async def sample_get_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.GetAudienceExportRequest( + name="name_value", + ) + + # Make the request + response = await client.get_audience_export(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_GetAudienceExport_async] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_sync.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_sync.py new file mode 100644 index 000000000000..6a9ad4ea28f4 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAudienceExport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_GetAudienceExport_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +def sample_get_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.GetAudienceExportRequest( + name="name_value", + ) + + # Make the request + response = client.get_audience_export(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_GetAudienceExport_sync] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_async.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_async.py new file mode 100644 index 000000000000..032097498e6b --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_GetMetadata_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +async def sample_get_metadata(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.GetMetadataRequest( + name="name_value", + ) + + # Make the request + response = await client.get_metadata(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_GetMetadata_async] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_sync.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_sync.py new file mode 100644 index 000000000000..7cbb4a811e76 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_GetMetadata_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +def sample_get_metadata(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.GetMetadataRequest( + name="name_value", + ) + + # Make the request + response = client.get_metadata(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_GetMetadata_sync] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_async.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_async.py new file mode 100644 index 000000000000..a5a3a942268c --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAudienceExports +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_ListAudienceExports_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +async def sample_list_audience_exports(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.ListAudienceExportsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_audience_exports(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_ListAudienceExports_async] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_sync.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_sync.py new file mode 100644 index 000000000000..8da1d43a8960 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAudienceExports +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_ListAudienceExports_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +def sample_list_audience_exports(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.ListAudienceExportsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_audience_exports(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_ListAudienceExports_sync] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_async.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_async.py new file mode 100644 index 000000000000..1a6f32ef3634 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for QueryAudienceExport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_QueryAudienceExport_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +async def sample_query_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.QueryAudienceExportRequest( + name="name_value", + ) + + # Make the request + response = await client.query_audience_export(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_QueryAudienceExport_async] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_sync.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_sync.py new file mode 100644 index 000000000000..9ab764dca23a --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryAudienceExport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_QueryAudienceExport_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +def sample_query_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.QueryAudienceExportRequest( + name="name_value", + ) + + # Make the request + response = client.query_audience_export(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_QueryAudienceExport_sync] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_pivot_report_async.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_pivot_report_async.py new file mode 100644 index 000000000000..5b008b29a4ee --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_pivot_report_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunPivotReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_RunPivotReport_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +async def sample_run_pivot_report(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.RunPivotReportRequest( + ) + + # Make the request + response = await client.run_pivot_report(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_RunPivotReport_async] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_pivot_report_sync.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_pivot_report_sync.py new file mode 100644 index 000000000000..81d3a8733afb --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_pivot_report_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunPivotReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_RunPivotReport_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +def sample_run_pivot_report(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.RunPivotReportRequest( + ) + + # Make the request + response = client.run_pivot_report(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_RunPivotReport_sync] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_realtime_report_async.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_realtime_report_async.py new file mode 100644 index 000000000000..f0e80eaf8e77 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_realtime_report_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunRealtimeReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_RunRealtimeReport_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +async def sample_run_realtime_report(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.RunRealtimeReportRequest( + ) + + # Make the request + response = await client.run_realtime_report(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_RunRealtimeReport_async] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_realtime_report_sync.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_realtime_report_sync.py new file mode 100644 index 000000000000..8463f5f86c12 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_realtime_report_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunRealtimeReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_RunRealtimeReport_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +def sample_run_realtime_report(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.RunRealtimeReportRequest( + ) + + # Make the request + response = client.run_realtime_report(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_RunRealtimeReport_sync] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_report_async.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_report_async.py new file mode 100644 index 000000000000..943dd84ca66e --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_report_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_RunReport_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +async def sample_run_report(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.RunReportRequest( + ) + + # Make the request + response = await client.run_report(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_RunReport_async] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_report_sync.py b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_report_sync.py new file mode 100644 index 000000000000..54286b755915 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_run_report_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_RunReport_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +def sample_run_report(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.RunReportRequest( + ) + + # Make the request + response = client.run_report(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_RunReport_sync] diff --git a/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json new file mode 100644 index 000000000000..29e86a085403 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json @@ -0,0 +1,1746 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.analytics.data.v1beta", + "version": "v1beta" + } + ], + "language": "PYTHON", + "name": "google-analytics-data", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient", + "shortName": "BetaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient.batch_run_pivot_reports", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.BatchRunPivotReports", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "BatchRunPivotReports" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.BatchRunPivotReportsRequest" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.types.BatchRunPivotReportsResponse", + "shortName": "batch_run_pivot_reports" + }, + "description": "Sample for BatchRunPivotReports", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_batch_run_pivot_reports_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_BatchRunPivotReports_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_batch_run_pivot_reports_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient", + "shortName": "BetaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient.batch_run_pivot_reports", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.BatchRunPivotReports", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "BatchRunPivotReports" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.BatchRunPivotReportsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.analytics.data_v1beta.types.BatchRunPivotReportsResponse", + "shortName": "batch_run_pivot_reports" + }, + "description": "Sample for BatchRunPivotReports", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_batch_run_pivot_reports_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_BatchRunPivotReports_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_batch_run_pivot_reports_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient", + "shortName": "BetaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient.batch_run_reports", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.BatchRunReports", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "BatchRunReports" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.BatchRunReportsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.types.BatchRunReportsResponse", + "shortName": "batch_run_reports" + }, + "description": "Sample for BatchRunReports", + "file": 
"analyticsdata_v1beta_generated_beta_analytics_data_batch_run_reports_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_BatchRunReports_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_batch_run_reports_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient", + "shortName": "BetaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient.batch_run_reports", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.BatchRunReports", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "BatchRunReports" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.BatchRunReportsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.types.BatchRunReportsResponse", + "shortName": "batch_run_reports" + }, + "description": "Sample for BatchRunReports", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_batch_run_reports_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_BatchRunReports_sync", + "segments": [ + { + "end": 50, + "start": 27, + 
"type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_batch_run_reports_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient", + "shortName": "BetaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient.check_compatibility", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.CheckCompatibility", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "CheckCompatibility" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.CheckCompatibilityRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.types.CheckCompatibilityResponse", + "shortName": "check_compatibility" + }, + "description": "Sample for CheckCompatibility", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_check_compatibility_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_CheckCompatibility_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_check_compatibility_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient", + "shortName": "BetaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient.check_compatibility", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.CheckCompatibility", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "CheckCompatibility" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.CheckCompatibilityRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.types.CheckCompatibilityResponse", + "shortName": "check_compatibility" + }, + "description": "Sample for CheckCompatibility", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_check_compatibility_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_CheckCompatibility_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_check_compatibility_sync.py" + }, + { + "canonical": true, 
+ "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient", + "shortName": "BetaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient.create_audience_export", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.CreateAudienceExport", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "CreateAudienceExport" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.CreateAudienceExportRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "audience_export", + "type": "google.analytics.data_v1beta.types.AudienceExport" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_audience_export" + }, + "description": "Sample for CreateAudienceExport", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_CreateAudienceExport_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.analytics.data_v1beta.BetaAnalyticsDataClient", + "shortName": "BetaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient.create_audience_export", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.CreateAudienceExport", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "CreateAudienceExport" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.CreateAudienceExportRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "audience_export", + "type": "google.analytics.data_v1beta.types.AudienceExport" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_audience_export" + }, + "description": "Sample for CreateAudienceExport", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_CreateAudienceExport_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient", + "shortName": 
"BetaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient.get_audience_export", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.GetAudienceExport", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "GetAudienceExport" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.GetAudienceExportRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.types.AudienceExport", + "shortName": "get_audience_export" + }, + "description": "Sample for GetAudienceExport", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_GetAudienceExport_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient", + "shortName": "BetaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient.get_audience_export", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.GetAudienceExport", 
+ "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "GetAudienceExport" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.GetAudienceExportRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.types.AudienceExport", + "shortName": "get_audience_export" + }, + "description": "Sample for GetAudienceExport", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_GetAudienceExport_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient", + "shortName": "BetaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient.get_metadata", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.GetMetadata", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "GetMetadata" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.analytics.data_v1beta.types.GetMetadataRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.types.Metadata", + "shortName": "get_metadata" + }, + "description": "Sample for GetMetadata", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_GetMetadata_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient", + "shortName": "BetaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient.get_metadata", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.GetMetadata", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "GetMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.GetMetadataRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + 
], + "resultType": "google.analytics.data_v1beta.types.Metadata", + "shortName": "get_metadata" + }, + "description": "Sample for GetMetadata", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_GetMetadata_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient", + "shortName": "BetaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient.list_audience_exports", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.ListAudienceExports", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "ListAudienceExports" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.ListAudienceExportsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.services.beta_analytics_data.pagers.ListAudienceExportsAsyncPager", + "shortName": "list_audience_exports" + }, + "description": "Sample for ListAudienceExports", + "file": 
"analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_ListAudienceExports_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient", + "shortName": "BetaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient.list_audience_exports", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.ListAudienceExports", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "ListAudienceExports" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.ListAudienceExportsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.services.beta_analytics_data.pagers.ListAudienceExportsPager", + "shortName": "list_audience_exports" + }, + "description": "Sample for ListAudienceExports", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"analyticsdata_v1beta_generated_BetaAnalyticsData_ListAudienceExports_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient", + "shortName": "BetaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient.query_audience_export", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.QueryAudienceExport", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "QueryAudienceExport" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.QueryAudienceExportRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.types.QueryAudienceExportResponse", + "shortName": "query_audience_export" + }, + "description": "Sample for QueryAudienceExport", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_QueryAudienceExport_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, 
+ "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient", + "shortName": "BetaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient.query_audience_export", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.QueryAudienceExport", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "QueryAudienceExport" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.QueryAudienceExportRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.types.QueryAudienceExportResponse", + "shortName": "query_audience_export" + }, + "description": "Sample for QueryAudienceExport", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_QueryAudienceExport_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, 
+ "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient", + "shortName": "BetaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient.run_pivot_report", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.RunPivotReport", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "RunPivotReport" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.RunPivotReportRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.types.RunPivotReportResponse", + "shortName": "run_pivot_report" + }, + "description": "Sample for RunPivotReport", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_run_pivot_report_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_RunPivotReport_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_run_pivot_report_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient", + "shortName": "BetaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient.run_pivot_report", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.RunPivotReport", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "RunPivotReport" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.RunPivotReportRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.types.RunPivotReportResponse", + "shortName": "run_pivot_report" + }, + "description": "Sample for RunPivotReport", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_run_pivot_report_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_RunPivotReport_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_run_pivot_report_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient", + "shortName": "BetaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient.run_realtime_report", + "method": { + 
"fullName": "google.analytics.data.v1beta.BetaAnalyticsData.RunRealtimeReport", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "RunRealtimeReport" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.RunRealtimeReportRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.types.RunRealtimeReportResponse", + "shortName": "run_realtime_report" + }, + "description": "Sample for RunRealtimeReport", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_run_realtime_report_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_RunRealtimeReport_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_run_realtime_report_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient", + "shortName": "BetaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient.run_realtime_report", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.RunRealtimeReport", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "RunRealtimeReport" + }, + "parameters": [ + 
{ + "name": "request", + "type": "google.analytics.data_v1beta.types.RunRealtimeReportRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.types.RunRealtimeReportResponse", + "shortName": "run_realtime_report" + }, + "description": "Sample for RunRealtimeReport", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_run_realtime_report_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_RunRealtimeReport_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_run_realtime_report_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient", + "shortName": "BetaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient.run_report", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.RunReport", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "RunReport" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.RunReportRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
Sequence[Tuple[str, str]]
"analyticsdata_v1beta_generated_BetaAnalyticsData_RunReport_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_run_report_sync.py" + } + ] +} diff --git a/owl-bot-staging/google-analytics-data/v1beta/scripts/fixup_data_v1beta_keywords.py b/owl-bot-staging/google-analytics-data/v1beta/scripts/fixup_data_v1beta_keywords.py new file mode 100644 index 000000000000..7363c0b9a5be --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/scripts/fixup_data_v1beta_keywords.py @@ -0,0 +1,186 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class dataCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'batch_run_pivot_reports': ('property', 'requests', ), + 'batch_run_reports': ('property', 'requests', ), + 'check_compatibility': ('property', 'dimensions', 'metrics', 'dimension_filter', 'metric_filter', 'compatibility_filter', ), + 'create_audience_export': ('parent', 'audience_export', ), + 'get_audience_export': ('name', ), + 'get_metadata': ('name', ), + 'list_audience_exports': ('parent', 'page_size', 'page_token', ), + 'query_audience_export': ('name', 'offset', 'limit', ), + 'run_pivot_report': ('property', 'dimensions', 'metrics', 'date_ranges', 'pivots', 'dimension_filter', 'metric_filter', 'currency_code', 'cohort_spec', 'keep_empty_rows', 'return_property_quota', 'comparisons', ), + 'run_realtime_report': ('property', 'dimensions', 'metrics', 'dimension_filter', 'metric_filter', 'limit', 'metric_aggregations', 'order_bys', 'return_property_quota', 'minute_ranges', ), + 'run_report': ('property', 'dimensions', 'metrics', 'date_ranges', 'dimension_filter', 'metric_filter', 'offset', 'limit', 'metric_aggregations', 'order_bys', 'currency_code', 'cohort_spec', 'keep_empty_rows', 'return_property_quota', 'comparisons', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from 
the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=dataCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. 
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the data client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-analytics-data/v1beta/setup.py 
b/owl-bot-staging/google-analytics-data/v1beta/setup.py new file mode 100644 index 000000000000..bf63952e379c --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/setup.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-analytics-data' + + +description = "Google Analytics Data API client library" + +version = None + +with open(os.path.join(package_root, 'google/analytics/data/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +extras = { +} +url = 
"https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.10.txt b/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.11.txt b/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.12.txt b/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.13.txt b/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.13.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.13.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.7.txt b/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.7.txt new file mode 100644 index 000000000000..fc812592b0ee --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.8.txt b/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.9.txt b/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-analytics-data/v1beta/tests/__init__.py b/owl-bot-staging/google-analytics-data/v1beta/tests/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-analytics-data/v1beta/tests/unit/__init__.py b/owl-bot-staging/google-analytics-data/v1beta/tests/unit/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-analytics-data/v1beta/tests/unit/gapic/__init__.py b/owl-bot-staging/google-analytics-data/v1beta/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-analytics-data/v1beta/tests/unit/gapic/data_v1beta/__init__.py b/owl-bot-staging/google-analytics-data/v1beta/tests/unit/gapic/data_v1beta/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/tests/unit/gapic/data_v1beta/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-analytics-data/v1beta/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py b/owl-bot-staging/google-analytics-data/v1beta/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py new file mode 100644 index 000000000000..b22bec686cb6 --- /dev/null +++ b/owl-bot-staging/google-analytics-data/v1beta/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py @@ -0,0 +1,7777 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.analytics.data_v1beta.services.beta_analytics_data import BetaAnalyticsDataAsyncClient +from google.analytics.data_v1beta.services.beta_analytics_data import BetaAnalyticsDataClient +from google.analytics.data_v1beta.services.beta_analytics_data import pagers +from google.analytics.data_v1beta.services.beta_analytics_data import transports +from google.analytics.data_v1beta.types import analytics_data_api +from google.analytics.data_v1beta.types import data +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning 
import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert BetaAnalyticsDataClient._get_default_mtls_endpoint(None) is None + assert BetaAnalyticsDataClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert BetaAnalyticsDataClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert BetaAnalyticsDataClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert BetaAnalyticsDataClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert BetaAnalyticsDataClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert BetaAnalyticsDataClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert BetaAnalyticsDataClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert BetaAnalyticsDataClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + BetaAnalyticsDataClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert BetaAnalyticsDataClient._read_environment_variables() == (False, "never", 
None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert BetaAnalyticsDataClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert BetaAnalyticsDataClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + BetaAnalyticsDataClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert BetaAnalyticsDataClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert BetaAnalyticsDataClient._get_client_cert_source(None, False) is None + assert BetaAnalyticsDataClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert BetaAnalyticsDataClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert BetaAnalyticsDataClient._get_client_cert_source(None, True) is mock_default_cert_source + assert BetaAnalyticsDataClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(BetaAnalyticsDataClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BetaAnalyticsDataClient)) +@mock.patch.object(BetaAnalyticsDataAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BetaAnalyticsDataAsyncClient)) +def 
test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = BetaAnalyticsDataClient._DEFAULT_UNIVERSE + default_endpoint = BetaAnalyticsDataClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = BetaAnalyticsDataClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert BetaAnalyticsDataClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert BetaAnalyticsDataClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == BetaAnalyticsDataClient.DEFAULT_MTLS_ENDPOINT + assert BetaAnalyticsDataClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert BetaAnalyticsDataClient._get_api_endpoint(None, None, default_universe, "always") == BetaAnalyticsDataClient.DEFAULT_MTLS_ENDPOINT + assert BetaAnalyticsDataClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == BetaAnalyticsDataClient.DEFAULT_MTLS_ENDPOINT + assert BetaAnalyticsDataClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert BetaAnalyticsDataClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + BetaAnalyticsDataClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
+ + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert BetaAnalyticsDataClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert BetaAnalyticsDataClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert BetaAnalyticsDataClient._get_universe_domain(None, None) == BetaAnalyticsDataClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + BetaAnalyticsDataClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize("client_class,transport_name", [ + (BetaAnalyticsDataClient, "grpc"), + (BetaAnalyticsDataAsyncClient, "grpc_asyncio"), + (BetaAnalyticsDataClient, "rest"), +]) +def test_beta_analytics_data_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'analyticsdata.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://analyticsdata.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.BetaAnalyticsDataGrpcTransport, "grpc"), + (transports.BetaAnalyticsDataGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.BetaAnalyticsDataRestTransport, "rest"), +]) +def test_beta_analytics_data_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = 
transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (BetaAnalyticsDataClient, "grpc"), + (BetaAnalyticsDataAsyncClient, "grpc_asyncio"), + (BetaAnalyticsDataClient, "rest"), +]) +def test_beta_analytics_data_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'analyticsdata.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://analyticsdata.googleapis.com' + ) + + +def test_beta_analytics_data_client_get_transport_class(): + transport = BetaAnalyticsDataClient.get_transport_class() + available_transports = [ + transports.BetaAnalyticsDataGrpcTransport, + transports.BetaAnalyticsDataRestTransport, + ] + assert transport in available_transports + + transport = BetaAnalyticsDataClient.get_transport_class("grpc") + assert transport == transports.BetaAnalyticsDataGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (BetaAnalyticsDataClient, transports.BetaAnalyticsDataGrpcTransport, "grpc"), + 
    (BetaAnalyticsDataAsyncClient, transports.BetaAnalyticsDataGrpcAsyncIOTransport, "grpc_asyncio"),
    (BetaAnalyticsDataClient, transports.BetaAnalyticsDataRestTransport, "rest"),
])
@mock.patch.object(BetaAnalyticsDataClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BetaAnalyticsDataClient))
@mock.patch.object(BetaAnalyticsDataAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BetaAnalyticsDataAsyncClient))
def test_beta_analytics_data_client_client_options(client_class, transport_class, transport_name):
    # Verifies how explicit transports, endpoint overrides, and the
    # GOOGLE_API_USE_MTLS_ENDPOINT / GOOGLE_API_USE_CLIENT_CERTIFICATE env
    # vars shape the arguments passed to the transport constructor.

    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(BetaAnalyticsDataClient, 'get_transport_class') as gtc:
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(BetaAnalyticsDataClient, 'get_transport_class') as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError) as excinfo:
            client = client_class(transport=transport_name)
    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError) as excinfo:
            client = client_class(transport=transport_name)
    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
    # Check the case api_audience is provided
    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience="https://language.googleapis.com"
        )

@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
    (BetaAnalyticsDataClient, transports.BetaAnalyticsDataGrpcTransport, "grpc", "true"),
    (BetaAnalyticsDataAsyncClient, transports.BetaAnalyticsDataGrpcAsyncIOTransport, "grpc_asyncio", "true"),
    (BetaAnalyticsDataClient,
transports.BetaAnalyticsDataGrpcTransport, "grpc", "false"),
    (BetaAnalyticsDataAsyncClient, transports.BetaAnalyticsDataGrpcAsyncIOTransport, "grpc_asyncio", "false"),
    (BetaAnalyticsDataClient, transports.BetaAnalyticsDataRestTransport, "rest", "true"),
    (BetaAnalyticsDataClient, transports.BetaAnalyticsDataRestTransport, "rest", "false"),
])
@mock.patch.object(BetaAnalyticsDataClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BetaAnalyticsDataClient))
@mock.patch.object(BetaAnalyticsDataAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BetaAnalyticsDataAsyncClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_beta_analytics_data_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
    # Three cert discovery paths are covered below: an explicit
    # client_cert_source in options, an ADC-provided default cert, and no
    # cert at all.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)

            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
                    if use_client_cert_env == "false":
                        expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                        api_audience=None,
                    )

    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                    api_audience=None,
                )


@pytest.mark.parametrize("client_class", [
    BetaAnalyticsDataClient, BetaAnalyticsDataAsyncClient
])
@mock.patch.object(BetaAnalyticsDataClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BetaAnalyticsDataClient))
@mock.patch.object(BetaAnalyticsDataAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BetaAnalyticsDataAsyncClient))
def test_beta_analytics_data_client_get_mtls_endpoint_and_cert_source(client_class):
    # Exercises the classmethod that resolves (endpoint, cert_source) from
    # ClientOptions plus the two GOOGLE_API_USE_* environment variables.
    mock_client_cert_source = mock.Mock()

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        mock_client_cert_source = mock.Mock()
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        # Cert source is dropped when client certificates are disabled.
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False):
            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
            assert api_endpoint == client_class.DEFAULT_ENDPOINT
            assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
            with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source):
                api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
                assert cert_source == mock_client_cert_source

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError) as excinfo:
            client_class.get_mtls_endpoint_and_cert_source()

        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError) as excinfo:
            client_class.get_mtls_endpoint_and_cert_source()

        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"

@pytest.mark.parametrize("client_class", [
    BetaAnalyticsDataClient, BetaAnalyticsDataAsyncClient
])
@mock.patch.object(BetaAnalyticsDataClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BetaAnalyticsDataClient))
@mock.patch.object(BetaAnalyticsDataAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BetaAnalyticsDataAsyncClient))
def test_beta_analytics_data_client_client_api_endpoint(client_class):
    # Validates api_endpoint / universe_domain resolution on a constructed
    # client across override, env-var, and universe-domain scenarios.
    mock_client_cert_source = client_cert_source_callback
    api_override = "foo.com"
    default_universe = BetaAnalyticsDataClient._DEFAULT_UNIVERSE
    default_endpoint = BetaAnalyticsDataClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
    mock_universe = "bar.com"
    mock_endpoint = BetaAnalyticsDataClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)

    # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true",
    # use ClientOptions.api_endpoint as the api endpoint regardless.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"):
            options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override)
            client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
            assert client.api_endpoint == api_override

    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never",
    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        client = client_class(credentials=ga_credentials.AnonymousCredentials())
        assert client.api_endpoint == default_endpoint

    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always",
    # use the DEFAULT_MTLS_ENDPOINT as the api endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        client = client_class(credentials=ga_credentials.AnonymousCredentials())
        assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT

    # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default),
    # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist,
    # and ClientOptions.universe_domain="bar.com",
    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint.
    # NOTE: the hasattr() guard keeps this test working against older
    # google-api-core releases whose ClientOptions lacks universe_domain.
    options = client_options.ClientOptions()
    universe_exists = hasattr(options, "universe_domain")
    if universe_exists:
        options = client_options.ClientOptions(universe_domain=mock_universe)
        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
    else:
        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
    assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint)
    assert client.universe_domain == (mock_universe if universe_exists else default_universe)

    # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never",
    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
    options = client_options.ClientOptions()
    if hasattr(options, "universe_domain"):
        delattr(options, "universe_domain")
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
        assert client.api_endpoint == default_endpoint


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (BetaAnalyticsDataClient, transports.BetaAnalyticsDataGrpcTransport, "grpc"),
    (BetaAnalyticsDataAsyncClient, transports.BetaAnalyticsDataGrpcAsyncIOTransport, "grpc_asyncio"),
    (BetaAnalyticsDataClient, transports.BetaAnalyticsDataRestTransport, "rest"),
])
def test_beta_analytics_data_client_client_options_scopes(client_class, transport_class, transport_name):
    # Scopes given in ClientOptions must be forwarded to the transport.
    # Check the case scopes are provided.
    options = client_options.ClientOptions(
        scopes=["1", "2"],
    )
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
    (BetaAnalyticsDataClient, transports.BetaAnalyticsDataGrpcTransport, "grpc", grpc_helpers),
    (BetaAnalyticsDataAsyncClient, transports.BetaAnalyticsDataGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
    (BetaAnalyticsDataClient, transports.BetaAnalyticsDataRestTransport, "rest", None),
])
def test_beta_analytics_data_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
    # A credentials_file in ClientOptions must be forwarded verbatim to the
    # transport instead of eagerly loading credentials in the client.
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(
        credentials_file="credentials.json"
    )

    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

def test_beta_analytics_data_client_client_options_from_dict():
    # ClientOptions may be passed as a plain dict; it must behave like a
    # ClientOptions instance.
    with mock.patch('google.analytics.data_v1beta.services.beta_analytics_data.transports.BetaAnalyticsDataGrpcTransport.__init__') as grpc_transport:
        grpc_transport.return_value = None
        client = BetaAnalyticsDataClient(
            client_options={'api_endpoint': 'squid.clam.whelk'}
        )
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )


@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
    (BetaAnalyticsDataClient, transports.BetaAnalyticsDataGrpcTransport, "grpc", grpc_helpers),
    (BetaAnalyticsDataAsyncClient, transports.BetaAnalyticsDataGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
])
def test_beta_analytics_data_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(
        credentials_file="credentials.json"
    )

    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # test that the credentials from file are saved and used as the credentials.
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel"
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        file_creds = ga_credentials.AnonymousCredentials()
        load_creds.return_value = (file_creds, None)
        adc.return_value = (creds, None)
        client = client_class(client_options=options, transport=transport_name)
        # The channel must be built with the file-loaded credentials, the
        # service's default OAuth scopes, and unlimited gRPC message sizes.
        create_channel.assert_called_with(
            "analyticsdata.googleapis.com:443",
            credentials=file_creds,
            credentials_file=None,
            quota_project_id=None,
            default_scopes=(
                'https://www.googleapis.com/auth/analytics',
                'https://www.googleapis.com/auth/analytics.readonly',
),
            scopes=None,
            default_host="analyticsdata.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )


@pytest.mark.parametrize("request_type", [
    analytics_data_api.RunReportRequest,
    dict,
])
def test_run_report(request_type, transport: str = 'grpc'):
    client = BetaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.run_report),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = analytics_data_api.RunReportResponse(
            row_count=992,
            kind='kind_value',
        )
        response = client.run_report(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        request = analytics_data_api.RunReportRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, analytics_data_api.RunReportResponse)
    assert response.row_count == 992
    assert response.kind == 'kind_value'


def test_run_report_non_empty_request_with_auto_populated_field():
    # This test is a coverage failsafe to make sure that UUID4 fields are
    # automatically populated, according to AIP-4235, with non-empty requests.
    client = BetaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Populate all string fields in the request which are not UUID4
    # since we want to check that UUID4 are populated automatically
    # if they meet the requirements of AIP 4235.
    request = analytics_data_api.RunReportRequest(
        property='property_value',
        currency_code='currency_code_value',
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.run_report),
            '__call__') as call:
        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client.run_report(request=request)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_data_api.RunReportRequest(
            property='property_value',
            currency_code='currency_code_value',
        )

def test_run_report_use_cached_wrapped_rpc():
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = BetaAnalyticsDataClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.run_report in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.run_report] = mock_rpc
        request = {}
        client.run_report(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.run_report(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2

@pytest.mark.asyncio
async def test_run_report_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
        client = BetaAnalyticsDataAsyncClient(
            credentials=async_anonymous_credentials(),
            transport=transport,
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._client._transport.run_report in client._client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[client._client._transport.run_report] = mock_rpc

        request = {}
        await client.run_report(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        await client.run_report(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2

@pytest.mark.asyncio
async def test_run_report_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.RunReportRequest):
    client = BetaAnalyticsDataAsyncClient(
        credentials=async_anonymous_credentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.run_report),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        # (generator emits no space after '='; kept byte-identical — cosmetic only)
        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RunReportResponse(
            row_count=992,
            kind='kind_value',
        ))
        response = await client.run_report(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        request = analytics_data_api.RunReportRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, analytics_data_api.RunReportResponse)
    assert response.row_count == 992
    assert response.kind == 'kind_value'


@pytest.mark.asyncio
async def test_run_report_async_from_dict():
    await test_run_report_async(request_type=dict)

def test_run_report_field_headers():
    client = BetaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_data_api.RunReportRequest()

    request.property = 'property_value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.run_report),
            '__call__') as call:
        call.return_value = analytics_data_api.RunReportResponse()
        client.run_report(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'property=property_value',
    ) in kw['metadata']


@pytest.mark.asyncio
async def test_run_report_field_headers_async():
    client = BetaAnalyticsDataAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_data_api.RunReportRequest()

    request.property = 'property_value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.run_report),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RunReportResponse())
        await client.run_report(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'property=property_value',
    ) in kw['metadata']


@pytest.mark.parametrize("request_type", [
    analytics_data_api.RunPivotReportRequest,
    dict,
])
def test_run_pivot_report(request_type, transport: str = 'grpc'):
    client = BetaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.run_pivot_report),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = analytics_data_api.RunPivotReportResponse(
            kind='kind_value',
        )
        response = client.run_pivot_report(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        request = analytics_data_api.RunPivotReportRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, analytics_data_api.RunPivotReportResponse)
    assert response.kind == 'kind_value'


def test_run_pivot_report_non_empty_request_with_auto_populated_field():
    # This test is a coverage failsafe to make sure that UUID4 fields are
    # automatically populated, according to AIP-4235, with non-empty requests.
    client = BetaAnalyticsDataClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Populate all string fields in the request which are not UUID4
    # since we want to check that UUID4 are populated automatically
    # if they meet the requirements of AIP 4235.
    request = analytics_data_api.RunPivotReportRequest(
        property='property_value',
        currency_code='currency_code_value',
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.run_pivot_report),
            '__call__') as call:
        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client.run_pivot_report(request=request)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_data_api.RunPivotReportRequest(
            property='property_value',
            currency_code='currency_code_value',
        )

def test_run_pivot_report_use_cached_wrapped_rpc():
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = BetaAnalyticsDataClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.run_pivot_report in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.run_pivot_report] = mock_rpc
        request = {}
        client.run_pivot_report(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.run_pivot_report(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2

@pytest.mark.asyncio
async def test_run_pivot_report_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
        client = BetaAnalyticsDataAsyncClient(
            credentials=async_anonymous_credentials(),
            transport=transport,
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._client._transport.run_pivot_report in client._client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[client._client._transport.run_pivot_report] = mock_rpc

        request = {}
        await client.run_pivot_report(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        await client.run_pivot_report(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2

@pytest.mark.asyncio
async def test_run_pivot_report_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.RunPivotReportRequest):
    client = BetaAnalyticsDataAsyncClient(
        credentials=async_anonymous_credentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.run_pivot_report), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RunPivotReportResponse( + kind='kind_value', + )) + response = await client.run_pivot_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.RunPivotReportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RunPivotReportResponse) + assert response.kind == 'kind_value' + + +@pytest.mark.asyncio +async def test_run_pivot_report_async_from_dict(): + await test_run_pivot_report_async(request_type=dict) + +def test_run_pivot_report_field_headers(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.RunPivotReportRequest() + + request.property = 'property_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_pivot_report), + '__call__') as call: + call.return_value = analytics_data_api.RunPivotReportResponse() + client.run_pivot_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'property=property_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_run_pivot_report_field_headers_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.RunPivotReportRequest() + + request.property = 'property_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_pivot_report), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RunPivotReportResponse()) + await client.run_pivot_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'property=property_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.BatchRunReportsRequest, + dict, +]) +def test_batch_run_reports(request_type, transport: str = 'grpc'): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_run_reports), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = analytics_data_api.BatchRunReportsResponse( + kind='kind_value', + ) + response = client.batch_run_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.BatchRunReportsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.BatchRunReportsResponse) + assert response.kind == 'kind_value' + + +def test_batch_run_reports_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.BatchRunReportsRequest( + property='property_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_run_reports), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.batch_run_reports(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.BatchRunReportsRequest( + property='property_value', + ) + +def test_batch_run_reports_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_run_reports in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.batch_run_reports] = mock_rpc + request = {} + client.batch_run_reports(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_run_reports(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_batch_run_reports_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.batch_run_reports in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.batch_run_reports] = mock_rpc + + request = {} + await client.batch_run_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.batch_run_reports(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_batch_run_reports_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.BatchRunReportsRequest): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_run_reports), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.BatchRunReportsResponse( + kind='kind_value', + )) + response = await client.batch_run_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.BatchRunReportsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.BatchRunReportsResponse) + assert response.kind == 'kind_value' + + +@pytest.mark.asyncio +async def test_batch_run_reports_async_from_dict(): + await test_batch_run_reports_async(request_type=dict) + +def test_batch_run_reports_field_headers(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.BatchRunReportsRequest() + + request.property = 'property_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_run_reports), + '__call__') as call: + call.return_value = analytics_data_api.BatchRunReportsResponse() + client.batch_run_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'property=property_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_batch_run_reports_field_headers_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.BatchRunReportsRequest() + + request.property = 'property_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_run_reports), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.BatchRunReportsResponse()) + await client.batch_run_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'property=property_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.BatchRunPivotReportsRequest, + dict, +]) +def test_batch_run_pivot_reports(request_type, transport: str = 'grpc'): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_run_pivot_reports), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = analytics_data_api.BatchRunPivotReportsResponse( + kind='kind_value', + ) + response = client.batch_run_pivot_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.BatchRunPivotReportsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.BatchRunPivotReportsResponse) + assert response.kind == 'kind_value' + + +def test_batch_run_pivot_reports_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.BatchRunPivotReportsRequest( + property='property_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_run_pivot_reports), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.batch_run_pivot_reports(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.BatchRunPivotReportsRequest( + property='property_value', + ) + +def test_batch_run_pivot_reports_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_run_pivot_reports in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.batch_run_pivot_reports] = mock_rpc + request = {} + client.batch_run_pivot_reports(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_run_pivot_reports(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_batch_run_pivot_reports_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.batch_run_pivot_reports in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.batch_run_pivot_reports] = mock_rpc + + request = {} + await client.batch_run_pivot_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.batch_run_pivot_reports(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_batch_run_pivot_reports_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.BatchRunPivotReportsRequest): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_run_pivot_reports), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.BatchRunPivotReportsResponse( + kind='kind_value', + )) + response = await client.batch_run_pivot_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.BatchRunPivotReportsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.BatchRunPivotReportsResponse) + assert response.kind == 'kind_value' + + +@pytest.mark.asyncio +async def test_batch_run_pivot_reports_async_from_dict(): + await test_batch_run_pivot_reports_async(request_type=dict) + +def test_batch_run_pivot_reports_field_headers(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.BatchRunPivotReportsRequest() + + request.property = 'property_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_run_pivot_reports), + '__call__') as call: + call.return_value = analytics_data_api.BatchRunPivotReportsResponse() + client.batch_run_pivot_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'property=property_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_batch_run_pivot_reports_field_headers_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.BatchRunPivotReportsRequest() + + request.property = 'property_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_run_pivot_reports), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.BatchRunPivotReportsResponse()) + await client.batch_run_pivot_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'property=property_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.GetMetadataRequest, + dict, +]) +def test_get_metadata(request_type, transport: str = 'grpc'): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = analytics_data_api.Metadata( + name='name_value', + ) + response = client.get_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.GetMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.Metadata) + assert response.name == 'name_value' + + +def test_get_metadata_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.GetMetadataRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_metadata(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.GetMetadataRequest( + name='name_value', + ) + +def test_get_metadata_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_metadata in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_metadata] = mock_rpc + request = {} + client.get_metadata(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_metadata_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_metadata in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_metadata] = mock_rpc + + request = {} + await client.get_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_metadata_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.GetMetadataRequest): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_metadata), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.Metadata( + name='name_value', + )) + response = await client.get_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.GetMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.Metadata) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_metadata_async_from_dict(): + await test_get_metadata_async(request_type=dict) + +def test_get_metadata_field_headers(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.GetMetadataRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata), + '__call__') as call: + call.return_value = analytics_data_api.Metadata() + client.get_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_metadata_field_headers_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = analytics_data_api.GetMetadataRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.Metadata()) + await client.get_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_metadata_flattened(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.Metadata() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_metadata( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_metadata_flattened_error(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_metadata( + analytics_data_api.GetMetadataRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_metadata_flattened_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.Metadata() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.Metadata()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_metadata( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_metadata_flattened_error_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_metadata( + analytics_data_api.GetMetadataRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.RunRealtimeReportRequest, + dict, +]) +def test_run_realtime_report(request_type, transport: str = 'grpc'): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.run_realtime_report), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.RunRealtimeReportResponse( + row_count=992, + kind='kind_value', + ) + response = client.run_realtime_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.RunRealtimeReportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RunRealtimeReportResponse) + assert response.row_count == 992 + assert response.kind == 'kind_value' + + +def test_run_realtime_report_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.RunRealtimeReportRequest( + property='property_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_realtime_report), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.run_realtime_report(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.RunRealtimeReportRequest( + property='property_value', + ) + +def test_run_realtime_report_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_realtime_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.run_realtime_report] = mock_rpc + request = {} + client.run_realtime_report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.run_realtime_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_run_realtime_report_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.run_realtime_report in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.run_realtime_report] = mock_rpc + + request = {} + await client.run_realtime_report(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.run_realtime_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_run_realtime_report_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.RunRealtimeReportRequest): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.run_realtime_report), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RunRealtimeReportResponse( + row_count=992, + kind='kind_value', + )) + response = await client.run_realtime_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.RunRealtimeReportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RunRealtimeReportResponse) + assert response.row_count == 992 + assert response.kind == 'kind_value' + + +@pytest.mark.asyncio +async def test_run_realtime_report_async_from_dict(): + await test_run_realtime_report_async(request_type=dict) + +def test_run_realtime_report_field_headers(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.RunRealtimeReportRequest() + + request.property = 'property_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_realtime_report), + '__call__') as call: + call.return_value = analytics_data_api.RunRealtimeReportResponse() + client.run_realtime_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'property=property_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_run_realtime_report_field_headers_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.RunRealtimeReportRequest() + + request.property = 'property_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_realtime_report), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RunRealtimeReportResponse()) + await client.run_realtime_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'property=property_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.CheckCompatibilityRequest, + dict, +]) +def test_check_compatibility(request_type, transport: str = 'grpc'): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_compatibility), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = analytics_data_api.CheckCompatibilityResponse( + ) + response = client.check_compatibility(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.CheckCompatibilityRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.CheckCompatibilityResponse) + + +def test_check_compatibility_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.CheckCompatibilityRequest( + property='property_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_compatibility), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.check_compatibility(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.CheckCompatibilityRequest( + property='property_value', + ) + +def test_check_compatibility_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.check_compatibility in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.check_compatibility] = mock_rpc + request = {} + client.check_compatibility(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.check_compatibility(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_check_compatibility_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.check_compatibility in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.check_compatibility] = mock_rpc + + request = {} + await client.check_compatibility(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.check_compatibility(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_check_compatibility_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.CheckCompatibilityRequest): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.check_compatibility), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.CheckCompatibilityResponse( + )) + response = await client.check_compatibility(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.CheckCompatibilityRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.CheckCompatibilityResponse) + + +@pytest.mark.asyncio +async def test_check_compatibility_async_from_dict(): + await test_check_compatibility_async(request_type=dict) + +def test_check_compatibility_field_headers(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.CheckCompatibilityRequest() + + request.property = 'property_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_compatibility), + '__call__') as call: + call.return_value = analytics_data_api.CheckCompatibilityResponse() + client.check_compatibility(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'property=property_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_check_compatibility_field_headers_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.CheckCompatibilityRequest() + + request.property = 'property_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_compatibility), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.CheckCompatibilityResponse()) + await client.check_compatibility(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'property=property_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.CreateAudienceExportRequest, + dict, +]) +def test_create_audience_export(request_type, transport: str = 'grpc'): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_export), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.CreateAudienceExportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_audience_export_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.CreateAudienceExportRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_export), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_audience_export(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.CreateAudienceExportRequest( + parent='parent_value', + ) + +def test_create_audience_export_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_audience_export in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_audience_export] = mock_rpc + request = {} + client.create_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_audience_export(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_audience_export_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_audience_export in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_audience_export] = mock_rpc + + request = {} + await client.create_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_audience_export(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_audience_export_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.CreateAudienceExportRequest): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_export), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.CreateAudienceExportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_audience_export_async_from_dict(): + await test_create_audience_export_async(request_type=dict) + +def test_create_audience_export_field_headers(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = analytics_data_api.CreateAudienceExportRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_export), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_audience_export_field_headers_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.CreateAudienceExportRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_export), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_audience_export_flattened(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_audience_export), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_audience_export( + parent='parent_value', + audience_export=analytics_data_api.AudienceExport(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].audience_export + mock_val = analytics_data_api.AudienceExport(name='name_value') + assert arg == mock_val + + +def test_create_audience_export_flattened_error(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_audience_export( + analytics_data_api.CreateAudienceExportRequest(), + parent='parent_value', + audience_export=analytics_data_api.AudienceExport(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_audience_export_flattened_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_export), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_audience_export( + parent='parent_value', + audience_export=analytics_data_api.AudienceExport(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].audience_export + mock_val = analytics_data_api.AudienceExport(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_audience_export_flattened_error_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_audience_export( + analytics_data_api.CreateAudienceExportRequest(), + parent='parent_value', + audience_export=analytics_data_api.AudienceExport(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.QueryAudienceExportRequest, + dict, +]) +def test_query_audience_export(request_type, transport: str = 'grpc'): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_export), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.QueryAudienceExportResponse( + row_count=992, + ) + response = client.query_audience_export(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.QueryAudienceExportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.QueryAudienceExportResponse) + assert response.row_count == 992 + + +def test_query_audience_export_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.QueryAudienceExportRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_export), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.query_audience_export(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.QueryAudienceExportRequest( + name='name_value', + ) + +def test_query_audience_export_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.query_audience_export in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.query_audience_export] = mock_rpc + request = {} + client.query_audience_export(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.query_audience_export(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_query_audience_export_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.query_audience_export in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.query_audience_export] = mock_rpc + + request = {} + await client.query_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.query_audience_export(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_query_audience_export_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.QueryAudienceExportRequest): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.query_audience_export), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.QueryAudienceExportResponse( + row_count=992, + )) + response = await client.query_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.QueryAudienceExportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.QueryAudienceExportResponse) + assert response.row_count == 992 + + +@pytest.mark.asyncio +async def test_query_audience_export_async_from_dict(): + await test_query_audience_export_async(request_type=dict) + +def test_query_audience_export_field_headers(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.QueryAudienceExportRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_export), + '__call__') as call: + call.return_value = analytics_data_api.QueryAudienceExportResponse() + client.query_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_query_audience_export_field_headers_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.QueryAudienceExportRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_export), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.QueryAudienceExportResponse()) + await client.query_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_query_audience_export_flattened(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_export), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.QueryAudienceExportResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.query_audience_export( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_query_audience_export_flattened_error(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.query_audience_export( + analytics_data_api.QueryAudienceExportRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_query_audience_export_flattened_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_export), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.QueryAudienceExportResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.QueryAudienceExportResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.query_audience_export( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_query_audience_export_flattened_error_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.query_audience_export( + analytics_data_api.QueryAudienceExportRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.GetAudienceExportRequest, + dict, +]) +def test_get_audience_export(request_type, transport: str = 'grpc'): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_export), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.AudienceExport( + name='name_value', + audience='audience_value', + audience_display_name='audience_display_name_value', + state=analytics_data_api.AudienceExport.State.CREATING, + creation_quota_tokens_charged=3070, + row_count=992, + error_message='error_message_value', + percentage_completed=0.2106, + ) + response = client.get_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.GetAudienceExportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, analytics_data_api.AudienceExport) + assert response.name == 'name_value' + assert response.audience == 'audience_value' + assert response.audience_display_name == 'audience_display_name_value' + assert response.state == analytics_data_api.AudienceExport.State.CREATING + assert response.creation_quota_tokens_charged == 3070 + assert response.row_count == 992 + assert response.error_message == 'error_message_value' + assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) + + +def test_get_audience_export_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.GetAudienceExportRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_export), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_audience_export(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.GetAudienceExportRequest( + name='name_value', + ) + +def test_get_audience_export_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_audience_export in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_audience_export] = mock_rpc + request = {} + client.get_audience_export(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_audience_export(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_audience_export_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_audience_export in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_audience_export] = mock_rpc + + request = {} + await client.get_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_audience_export(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_audience_export_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.GetAudienceExportRequest): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_audience_export), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.AudienceExport( + name='name_value', + audience='audience_value', + audience_display_name='audience_display_name_value', + state=analytics_data_api.AudienceExport.State.CREATING, + creation_quota_tokens_charged=3070, + row_count=992, + error_message='error_message_value', + percentage_completed=0.2106, + )) + response = await client.get_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.GetAudienceExportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.AudienceExport) + assert response.name == 'name_value' + assert response.audience == 'audience_value' + assert response.audience_display_name == 'audience_display_name_value' + assert response.state == analytics_data_api.AudienceExport.State.CREATING + assert response.creation_quota_tokens_charged == 3070 + assert response.row_count == 992 + assert response.error_message == 'error_message_value' + assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) + + +@pytest.mark.asyncio +async def test_get_audience_export_async_from_dict(): + await test_get_audience_export_async(request_type=dict) + +def test_get_audience_export_field_headers(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.GetAudienceExportRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_audience_export), + '__call__') as call: + call.return_value = analytics_data_api.AudienceExport() + client.get_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_audience_export_field_headers_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.GetAudienceExportRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_export), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.AudienceExport()) + await client.get_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_audience_export_flattened(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_export), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = analytics_data_api.AudienceExport() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_audience_export( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_audience_export_flattened_error(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_audience_export( + analytics_data_api.GetAudienceExportRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_audience_export_flattened_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_export), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.AudienceExport() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.AudienceExport()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_audience_export( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_audience_export_flattened_error_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_audience_export( + analytics_data_api.GetAudienceExportRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.ListAudienceExportsRequest, + dict, +]) +def test_list_audience_exports(request_type, transport: str = 'grpc'): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListAudienceExportsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_audience_exports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.ListAudienceExportsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAudienceExportsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_audience_exports_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.ListAudienceExportsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_audience_exports(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListAudienceExportsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_audience_exports_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_audience_exports in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_audience_exports] = mock_rpc + request = {} + client.list_audience_exports(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_audience_exports(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_audience_exports_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_audience_exports in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_audience_exports] = mock_rpc + + request = {} + await client.list_audience_exports(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_audience_exports(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_audience_exports_async(transport: str = 'grpc_asyncio', request_type=analytics_data_api.ListAudienceExportsRequest): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_audience_exports), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ListAudienceExportsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_audience_exports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.ListAudienceExportsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAudienceExportsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_audience_exports_async_from_dict(): + await test_list_audience_exports_async(request_type=dict) + +def test_list_audience_exports_field_headers(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.ListAudienceExportsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), + '__call__') as call: + call.return_value = analytics_data_api.ListAudienceExportsResponse() + client.list_audience_exports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_audience_exports_field_headers_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.ListAudienceExportsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ListAudienceExportsResponse()) + await client.list_audience_exports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_audience_exports_flattened(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListAudienceExportsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_audience_exports( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_audience_exports_flattened_error(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_audience_exports( + analytics_data_api.ListAudienceExportsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_audience_exports_flattened_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListAudienceExportsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ListAudienceExportsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_audience_exports( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_audience_exports_flattened_error_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_audience_exports( + analytics_data_api.ListAudienceExportsRequest(), + parent='parent_value', + ) + + +def test_list_audience_exports_pager(transport_name: str = "grpc"): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + next_page_token='abc', + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[], + next_page_token='def', + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_audience_exports(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, analytics_data_api.AudienceExport) + for i in results) +def test_list_audience_exports_pages(transport_name: str = "grpc"): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake 
the request. + with mock.patch.object( + type(client.transport.list_audience_exports), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + next_page_token='abc', + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[], + next_page_token='def', + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + ), + RuntimeError, + ) + pages = list(client.list_audience_exports(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_audience_exports_async_pager(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + next_page_token='abc', + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[], + next_page_token='def', + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_audience_exports(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, analytics_data_api.AudienceExport) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_audience_exports_async_pages(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + next_page_token='abc', + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[], + next_page_token='def', + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_audience_exports(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_run_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.run_report] = mock_rpc + + request = {} + client.run_report(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.run_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_run_pivot_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_pivot_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.run_pivot_report] = mock_rpc + + request = {} + client.run_pivot_report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.run_pivot_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_batch_run_reports_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_run_reports in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.batch_run_reports] = mock_rpc + + request = {} + client.batch_run_reports(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_run_reports(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_batch_run_pivot_reports_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_run_pivot_reports in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.batch_run_pivot_reports] = mock_rpc + + request = {} + client.batch_run_pivot_reports(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_run_pivot_reports(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_metadata_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_metadata in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_metadata] = mock_rpc + + request = {} + client.get_metadata(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_metadata_rest_required_fields(request_type=analytics_data_api.GetMetadataRequest): + transport_class = transports.BetaAnalyticsDataRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_metadata._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_metadata._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.Metadata() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = analytics_data_api.Metadata.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.get_metadata(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_metadata_rest_unset_required_fields():
+    # NOTE: credentials must be an instance, not the class object.
+    transport = transports.BetaAnalyticsDataRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.get_metadata._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_get_metadata_rest_flattened():
+    client = BetaAnalyticsDataClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = analytics_data_api.Metadata() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'properties/sample1/metadata'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.Metadata.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_metadata(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta/{name=properties/*/metadata}" % client.transport._host, args[1]) + + +def test_get_metadata_rest_flattened_error(transport: str = 'rest'): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_metadata( + analytics_data_api.GetMetadataRequest(), + name='name_value', + ) + + +def test_run_realtime_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_realtime_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.run_realtime_report] = mock_rpc + + request = {} + client.run_realtime_report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.run_realtime_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_check_compatibility_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.check_compatibility in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.check_compatibility] = mock_rpc + + request = {} + client.check_compatibility(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.check_compatibility(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_audience_export_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_audience_export in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_audience_export] = mock_rpc + + request = {} + client.create_audience_export(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_audience_export(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_audience_export_rest_required_fields(request_type=analytics_data_api.CreateAudienceExportRequest): + transport_class = transports.BetaAnalyticsDataRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_audience_export._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_audience_export._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.create_audience_export(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_create_audience_export_rest_unset_required_fields():
+    # NOTE: credentials must be an instance, not the class object.
+    transport = transports.BetaAnalyticsDataRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.create_audience_export._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("parent", "audienceExport", )))
+
+
+def test_create_audience_export_rest_flattened():
+    client = BetaAnalyticsDataClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'properties/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + audience_export=analytics_data_api.AudienceExport(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_audience_export(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta/{parent=properties/*}/audienceExports" % client.transport._host, args[1]) + + +def test_create_audience_export_rest_flattened_error(transport: str = 'rest'): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_audience_export( + analytics_data_api.CreateAudienceExportRequest(), + parent='parent_value', + audience_export=analytics_data_api.AudienceExport(name='name_value'), + ) + + +def test_query_audience_export_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.query_audience_export in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.query_audience_export] = mock_rpc + + request = {} + client.query_audience_export(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.query_audience_export(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_query_audience_export_rest_required_fields(request_type=analytics_data_api.QueryAudienceExportRequest): + transport_class = transports.BetaAnalyticsDataRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_audience_export._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_audience_export._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.QueryAudienceExportResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = analytics_data_api.QueryAudienceExportResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.query_audience_export(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_query_audience_export_rest_unset_required_fields():
+    # NOTE: credentials must be an instance, not the class object.
+    transport = transports.BetaAnalyticsDataRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.query_audience_export._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_query_audience_export_rest_flattened():
+    client = BetaAnalyticsDataClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = analytics_data_api.QueryAudienceExportResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'properties/sample1/audienceExports/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.QueryAudienceExportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.query_audience_export(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta/{name=properties/*/audienceExports/*}:query" % client.transport._host, args[1]) + + +def test_query_audience_export_rest_flattened_error(transport: str = 'rest'): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.query_audience_export( + analytics_data_api.QueryAudienceExportRequest(), + name='name_value', + ) + + +def test_get_audience_export_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_audience_export in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_audience_export] = mock_rpc + + request = {} + client.get_audience_export(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_audience_export(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_audience_export_rest_required_fields(request_type=analytics_data_api.GetAudienceExportRequest): + transport_class = transports.BetaAnalyticsDataRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_audience_export._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_audience_export._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.AudienceExport() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = analytics_data_api.AudienceExport.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.get_audience_export(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_audience_export_rest_unset_required_fields():
+    # NOTE: credentials must be an instance, not the class object.
+    transport = transports.BetaAnalyticsDataRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.get_audience_export._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_get_audience_export_rest_flattened():
+    client = BetaAnalyticsDataClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = analytics_data_api.AudienceExport() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'properties/sample1/audienceExports/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.AudienceExport.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_audience_export(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta/{name=properties/*/audienceExports/*}" % client.transport._host, args[1]) + + +def test_get_audience_export_rest_flattened_error(transport: str = 'rest'): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_audience_export( + analytics_data_api.GetAudienceExportRequest(), + name='name_value', + ) + + +def test_list_audience_exports_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_audience_exports in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_audience_exports] = mock_rpc + + request = {} + client.list_audience_exports(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_audience_exports(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_audience_exports_rest_required_fields(request_type=analytics_data_api.ListAudienceExportsRequest): + transport_class = transports.BetaAnalyticsDataRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_audience_exports._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_audience_exports._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.ListAudienceExportsResponse() + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = analytics_data_api.ListAudienceExportsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.list_audience_exports(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_audience_exports_rest_unset_required_fields():
+    # NOTE: credentials must be an instance, not the class object.
+    transport = transports.BetaAnalyticsDataRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list_audience_exports._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_audience_exports_rest_flattened():
+    client = BetaAnalyticsDataClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = analytics_data_api.ListAudienceExportsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'properties/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.ListAudienceExportsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_audience_exports(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta/{parent=properties/*}/audienceExports" % client.transport._host, args[1]) + + +def test_list_audience_exports_rest_flattened_error(transport: str = 'rest'): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_audience_exports( + analytics_data_api.ListAudienceExportsRequest(), + parent='parent_value', + ) + + +def test_list_audience_exports_rest_pager(transport: str = 'rest'): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + next_page_token='abc', + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[], + next_page_token='def', + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + ], + next_page_token='ghi', + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(analytics_data_api.ListAudienceExportsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'properties/sample1'} + + pager = client.list_audience_exports(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, analytics_data_api.AudienceExport) + for i in results) + + pages = list(client.list_audience_exports(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.BetaAnalyticsDataGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.BetaAnalyticsDataGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BetaAnalyticsDataClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.BetaAnalyticsDataGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BetaAnalyticsDataClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BetaAnalyticsDataClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.BetaAnalyticsDataGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BetaAnalyticsDataClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.BetaAnalyticsDataGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = BetaAnalyticsDataClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.BetaAnalyticsDataGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.BetaAnalyticsDataGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.BetaAnalyticsDataGrpcTransport, + transports.BetaAnalyticsDataGrpcAsyncIOTransport, + transports.BetaAnalyticsDataRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = BetaAnalyticsDataClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_report_empty_call_grpc(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.run_report), + '__call__') as call: + call.return_value = analytics_data_api.RunReportResponse() + client.run_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.RunReportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_pivot_report_empty_call_grpc(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_pivot_report), + '__call__') as call: + call.return_value = analytics_data_api.RunPivotReportResponse() + client.run_pivot_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.RunPivotReportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_run_reports_empty_call_grpc(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_run_reports), + '__call__') as call: + call.return_value = analytics_data_api.BatchRunReportsResponse() + client.batch_run_reports(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.BatchRunReportsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_batch_run_pivot_reports_empty_call_grpc(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_run_pivot_reports), + '__call__') as call: + call.return_value = analytics_data_api.BatchRunPivotReportsResponse() + client.batch_run_pivot_reports(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.BatchRunPivotReportsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_metadata_empty_call_grpc(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata), + '__call__') as call: + call.return_value = analytics_data_api.Metadata() + client.get_metadata(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetMetadataRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_realtime_report_empty_call_grpc(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.run_realtime_report), + '__call__') as call: + call.return_value = analytics_data_api.RunRealtimeReportResponse() + client.run_realtime_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.RunRealtimeReportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_check_compatibility_empty_call_grpc(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.check_compatibility), + '__call__') as call: + call.return_value = analytics_data_api.CheckCompatibilityResponse() + client.check_compatibility(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.CheckCompatibilityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_audience_export_empty_call_grpc(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_export), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_audience_export(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.CreateAudienceExportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_query_audience_export_empty_call_grpc(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_export), + '__call__') as call: + call.return_value = analytics_data_api.QueryAudienceExportResponse() + client.query_audience_export(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.QueryAudienceExportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_audience_export_empty_call_grpc(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_export), + '__call__') as call: + call.return_value = analytics_data_api.AudienceExport() + client.get_audience_export(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetAudienceExportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_audience_exports_empty_call_grpc(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), + '__call__') as call: + call.return_value = analytics_data_api.ListAudienceExportsResponse() + client.list_audience_exports(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.ListAudienceExportsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = BetaAnalyticsDataAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_run_report_empty_call_grpc_asyncio(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_report), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RunReportResponse( + row_count=992, + kind='kind_value', + )) + await client.run_report(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.RunReportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_run_pivot_report_empty_call_grpc_asyncio(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_pivot_report), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RunPivotReportResponse( + kind='kind_value', + )) + await client.run_pivot_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.RunPivotReportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_batch_run_reports_empty_call_grpc_asyncio(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_run_reports), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.BatchRunReportsResponse( + kind='kind_value', + )) + await client.batch_run_reports(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.BatchRunReportsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_batch_run_pivot_reports_empty_call_grpc_asyncio(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_run_pivot_reports), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.BatchRunPivotReportsResponse( + kind='kind_value', + )) + await client.batch_run_pivot_reports(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.BatchRunPivotReportsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_metadata_empty_call_grpc_asyncio(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.Metadata( + name='name_value', + )) + await client.get_metadata(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetMetadataRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_run_realtime_report_empty_call_grpc_asyncio(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_realtime_report), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.RunRealtimeReportResponse( + row_count=992, + kind='kind_value', + )) + await client.run_realtime_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.RunRealtimeReportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_check_compatibility_empty_call_grpc_asyncio(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.check_compatibility), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.CheckCompatibilityResponse( + )) + await client.check_compatibility(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.CheckCompatibilityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_audience_export_empty_call_grpc_asyncio(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_export), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_audience_export(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.CreateAudienceExportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_query_audience_export_empty_call_grpc_asyncio(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_export), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.QueryAudienceExportResponse( + row_count=992, + )) + await client.query_audience_export(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.QueryAudienceExportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_audience_export_empty_call_grpc_asyncio(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_export), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.AudienceExport( + name='name_value', + audience='audience_value', + audience_display_name='audience_display_name_value', + state=analytics_data_api.AudienceExport.State.CREATING, + creation_quota_tokens_charged=3070, + row_count=992, + error_message='error_message_value', + percentage_completed=0.2106, + )) + await client.get_audience_export(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetAudienceExportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_audience_exports_empty_call_grpc_asyncio(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analytics_data_api.ListAudienceExportsResponse( + next_page_token='next_page_token_value', + )) + await client.list_audience_exports(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.ListAudienceExportsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = BetaAnalyticsDataClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_run_report_rest_bad_request(request_type=analytics_data_api.RunReportRequest): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'property': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.run_report(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.RunReportRequest, + dict, +]) +def test_run_report_rest_call_success(request_type): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'property': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.RunReportResponse( + row_count=992, + kind='kind_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.RunReportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.run_report(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RunReportResponse) + assert response.row_count == 992 + assert response.kind == 'kind_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_report_rest_interceptors(null_interceptor): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BetaAnalyticsDataRestInterceptor(), + ) + client = BetaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "post_run_report") as post, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "pre_run_report") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.RunReportRequest.pb(analytics_data_api.RunReportRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = 
analytics_data_api.RunReportResponse.to_json(analytics_data_api.RunReportResponse()) + req.return_value.content = return_value + + request = analytics_data_api.RunReportRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.RunReportResponse() + + client.run_report(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_pivot_report_rest_bad_request(request_type=analytics_data_api.RunPivotReportRequest): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'property': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.run_pivot_report(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.RunPivotReportRequest, + dict, +]) +def test_run_pivot_report_rest_call_success(request_type): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'property': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.RunPivotReportResponse( + kind='kind_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.RunPivotReportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.run_pivot_report(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RunPivotReportResponse) + assert response.kind == 'kind_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_pivot_report_rest_interceptors(null_interceptor): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BetaAnalyticsDataRestInterceptor(), + ) + client = BetaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "post_run_pivot_report") as post, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "pre_run_pivot_report") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.RunPivotReportRequest.pb(analytics_data_api.RunPivotReportRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.RunPivotReportResponse.to_json(analytics_data_api.RunPivotReportResponse()) + req.return_value.content = return_value + + request = analytics_data_api.RunPivotReportRequest() + 
metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.RunPivotReportResponse() + + client.run_pivot_report(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_run_reports_rest_bad_request(request_type=analytics_data_api.BatchRunReportsRequest): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'property': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.batch_run_reports(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.BatchRunReportsRequest, + dict, +]) +def test_batch_run_reports_rest_call_success(request_type): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'property': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.BatchRunReportsResponse( + kind='kind_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.BatchRunReportsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.batch_run_reports(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.BatchRunReportsResponse) + assert response.kind == 'kind_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_run_reports_rest_interceptors(null_interceptor): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BetaAnalyticsDataRestInterceptor(), + ) + client = BetaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "post_batch_run_reports") as post, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "pre_batch_run_reports") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.BatchRunReportsRequest.pb(analytics_data_api.BatchRunReportsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.BatchRunReportsResponse.to_json(analytics_data_api.BatchRunReportsResponse()) + req.return_value.content = return_value + + request = 
analytics_data_api.BatchRunReportsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.BatchRunReportsResponse() + + client.batch_run_reports(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_run_pivot_reports_rest_bad_request(request_type=analytics_data_api.BatchRunPivotReportsRequest): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'property': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.batch_run_pivot_reports(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.BatchRunPivotReportsRequest, + dict, +]) +def test_batch_run_pivot_reports_rest_call_success(request_type): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'property': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.BatchRunPivotReportsResponse( + kind='kind_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.BatchRunPivotReportsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.batch_run_pivot_reports(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.BatchRunPivotReportsResponse) + assert response.kind == 'kind_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_run_pivot_reports_rest_interceptors(null_interceptor): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BetaAnalyticsDataRestInterceptor(), + ) + client = BetaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "post_batch_run_pivot_reports") as post, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "pre_batch_run_pivot_reports") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.BatchRunPivotReportsRequest.pb(analytics_data_api.BatchRunPivotReportsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.BatchRunPivotReportsResponse.to_json(analytics_data_api.BatchRunPivotReportsResponse()) + req.return_value.content = 
return_value + + request = analytics_data_api.BatchRunPivotReportsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.BatchRunPivotReportsResponse() + + client.batch_run_pivot_reports(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_metadata_rest_bad_request(request_type=analytics_data_api.GetMetadataRequest): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'properties/sample1/metadata'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_metadata(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.GetMetadataRequest, + dict, +]) +def test_get_metadata_rest_call_success(request_type): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'properties/sample1/metadata'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.Metadata( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.Metadata.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_metadata(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.Metadata) + assert response.name == 'name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_metadata_rest_interceptors(null_interceptor): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BetaAnalyticsDataRestInterceptor(), + ) + client = BetaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "post_get_metadata") as post, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "pre_get_metadata") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.GetMetadataRequest.pb(analytics_data_api.GetMetadataRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.Metadata.to_json(analytics_data_api.Metadata()) + req.return_value.content = return_value + + request = analytics_data_api.GetMetadataRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, 
metadata + post.return_value = analytics_data_api.Metadata() + + client.get_metadata(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_realtime_report_rest_bad_request(request_type=analytics_data_api.RunRealtimeReportRequest): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'property': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.run_realtime_report(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.RunRealtimeReportRequest, + dict, +]) +def test_run_realtime_report_rest_call_success(request_type): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'property': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.RunRealtimeReportResponse( + row_count=992, + kind='kind_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.RunRealtimeReportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.run_realtime_report(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RunRealtimeReportResponse) + assert response.row_count == 992 + assert response.kind == 'kind_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_realtime_report_rest_interceptors(null_interceptor): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BetaAnalyticsDataRestInterceptor(), + ) + client = BetaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "post_run_realtime_report") as post, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "pre_run_realtime_report") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.RunRealtimeReportRequest.pb(analytics_data_api.RunRealtimeReportRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.RunRealtimeReportResponse.to_json(analytics_data_api.RunRealtimeReportResponse()) + 
req.return_value.content = return_value + + request = analytics_data_api.RunRealtimeReportRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.RunRealtimeReportResponse() + + client.run_realtime_report(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_check_compatibility_rest_bad_request(request_type=analytics_data_api.CheckCompatibilityRequest): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'property': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.check_compatibility(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.CheckCompatibilityRequest, + dict, +]) +def test_check_compatibility_rest_call_success(request_type): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'property': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.CheckCompatibilityResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.CheckCompatibilityResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.check_compatibility(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.CheckCompatibilityResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_check_compatibility_rest_interceptors(null_interceptor): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BetaAnalyticsDataRestInterceptor(), + ) + client = BetaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "post_check_compatibility") as post, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "pre_check_compatibility") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.CheckCompatibilityRequest.pb(analytics_data_api.CheckCompatibilityRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.CheckCompatibilityResponse.to_json(analytics_data_api.CheckCompatibilityResponse()) + req.return_value.content = return_value + + request = analytics_data_api.CheckCompatibilityRequest() + metadata =[ + 
("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.CheckCompatibilityResponse() + + client.check_compatibility(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_audience_export_rest_bad_request(request_type=analytics_data_api.CreateAudienceExportRequest): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_audience_export(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.CreateAudienceExportRequest, + dict, +]) +def test_create_audience_export_rest_call_success(request_type): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'properties/sample1'} + request_init["audience_export"] = {'name': 'name_value', 'audience': 'audience_value', 'audience_display_name': 'audience_display_name_value', 'dimensions': [{'dimension_name': 'dimension_name_value'}], 'state': 1, 'begin_creating_time': {'seconds': 751, 'nanos': 543}, 'creation_quota_tokens_charged': 3070, 'row_count': 992, 'error_message': 'error_message_value', 'percentage_completed': 0.2106} + # The version of a generated dependency at test 
runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_data_api.CreateAudienceExportRequest.meta.fields["audience_export"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["audience_export"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample 
request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["audience_export"][field])): + del request_init["audience_export"][field][i][subfield] + else: + del request_init["audience_export"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_audience_export(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_audience_export_rest_interceptors(null_interceptor): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BetaAnalyticsDataRestInterceptor(), + ) + client = BetaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "post_create_audience_export") as post, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "pre_create_audience_export") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.CreateAudienceExportRequest.pb(analytics_data_api.CreateAudienceExportRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = analytics_data_api.CreateAudienceExportRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_audience_export(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_query_audience_export_rest_bad_request(request_type=analytics_data_api.QueryAudienceExportRequest): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy 
transcoding + request_init = {'name': 'properties/sample1/audienceExports/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.query_audience_export(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.QueryAudienceExportRequest, + dict, +]) +def test_query_audience_export_rest_call_success(request_type): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'properties/sample1/audienceExports/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.QueryAudienceExportResponse( + row_count=992, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.QueryAudienceExportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.query_audience_export(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, analytics_data_api.QueryAudienceExportResponse) + assert response.row_count == 992 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_query_audience_export_rest_interceptors(null_interceptor): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BetaAnalyticsDataRestInterceptor(), + ) + client = BetaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "post_query_audience_export") as post, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "pre_query_audience_export") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.QueryAudienceExportRequest.pb(analytics_data_api.QueryAudienceExportRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.QueryAudienceExportResponse.to_json(analytics_data_api.QueryAudienceExportResponse()) + req.return_value.content = return_value + + request = analytics_data_api.QueryAudienceExportRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.QueryAudienceExportResponse() + + client.query_audience_export(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_audience_export_rest_bad_request(request_type=analytics_data_api.GetAudienceExportRequest): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # 
send a request that will satisfy transcoding + request_init = {'name': 'properties/sample1/audienceExports/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_audience_export(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.GetAudienceExportRequest, + dict, +]) +def test_get_audience_export_rest_call_success(request_type): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'properties/sample1/audienceExports/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.AudienceExport( + name='name_value', + audience='audience_value', + audience_display_name='audience_display_name_value', + state=analytics_data_api.AudienceExport.State.CREATING, + creation_quota_tokens_charged=3070, + row_count=992, + error_message='error_message_value', + percentage_completed=0.2106, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.AudienceExport.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_audience_export(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.AudienceExport) + assert response.name == 'name_value' + assert response.audience == 'audience_value' + assert response.audience_display_name == 'audience_display_name_value' + assert response.state == analytics_data_api.AudienceExport.State.CREATING + assert response.creation_quota_tokens_charged == 3070 + assert response.row_count == 992 + assert response.error_message == 'error_message_value' + assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_audience_export_rest_interceptors(null_interceptor): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BetaAnalyticsDataRestInterceptor(), + ) + client = BetaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "post_get_audience_export") as 
post, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "pre_get_audience_export") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.GetAudienceExportRequest.pb(analytics_data_api.GetAudienceExportRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.AudienceExport.to_json(analytics_data_api.AudienceExport()) + req.return_value.content = return_value + + request = analytics_data_api.GetAudienceExportRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.AudienceExport() + + client.get_audience_export(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_audience_exports_rest_bad_request(request_type=analytics_data_api.ListAudienceExportsRequest): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_audience_exports(request) + + +@pytest.mark.parametrize("request_type", [ + analytics_data_api.ListAudienceExportsRequest, + dict, +]) +def test_list_audience_exports_rest_call_success(request_type): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'properties/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.ListAudienceExportsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.ListAudienceExportsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_audience_exports(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAudienceExportsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_audience_exports_rest_interceptors(null_interceptor): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BetaAnalyticsDataRestInterceptor(), + ) + client = BetaAnalyticsDataClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "post_list_audience_exports") as post, \ + mock.patch.object(transports.BetaAnalyticsDataRestInterceptor, "pre_list_audience_exports") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.ListAudienceExportsRequest.pb(analytics_data_api.ListAudienceExportsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = analytics_data_api.ListAudienceExportsResponse.to_json(analytics_data_api.ListAudienceExportsResponse()) + req.return_value.content = return_value + + request = analytics_data_api.ListAudienceExportsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.ListAudienceExportsResponse() + + client.list_audience_exports(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + +def test_initialize_client_w_rest(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage 
failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_report_empty_call_rest(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_report), + '__call__') as call: + client.run_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.RunReportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_pivot_report_empty_call_rest(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_pivot_report), + '__call__') as call: + client.run_pivot_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.RunPivotReportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_run_reports_empty_call_rest(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_run_reports), + '__call__') as call: + client.batch_run_reports(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.BatchRunReportsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_run_pivot_reports_empty_call_rest(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_run_pivot_reports), + '__call__') as call: + client.batch_run_pivot_reports(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.BatchRunPivotReportsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_metadata_empty_call_rest(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata), + '__call__') as call: + client.get_metadata(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetMetadataRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_realtime_report_empty_call_rest(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.run_realtime_report), + '__call__') as call: + client.run_realtime_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.RunRealtimeReportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_check_compatibility_empty_call_rest(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.check_compatibility), + '__call__') as call: + client.check_compatibility(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.CheckCompatibilityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_audience_export_empty_call_rest(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_export), + '__call__') as call: + client.create_audience_export(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.CreateAudienceExportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_query_audience_export_empty_call_rest(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_export), + '__call__') as call: + client.query_audience_export(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.QueryAudienceExportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_audience_export_empty_call_rest(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_export), + '__call__') as call: + client.get_audience_export(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetAudienceExportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_audience_exports_empty_call_rest(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), + '__call__') as call: + client.list_audience_exports(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.ListAudienceExportsRequest() + + assert args[0] == request_msg + + +def test_beta_analytics_data_rest_lro_client(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, +operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.BetaAnalyticsDataGrpcTransport, + ) + +def test_beta_analytics_data_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.BetaAnalyticsDataTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_beta_analytics_data_base_transport(): + # Instantiate the base transport. + with mock.patch('google.analytics.data_v1beta.services.beta_analytics_data.transports.BetaAnalyticsDataTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.BetaAnalyticsDataTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'run_report', + 'run_pivot_report', + 'batch_run_reports', + 'batch_run_pivot_reports', + 'get_metadata', + 'run_realtime_report', + 'check_compatibility', + 'create_audience_export', + 'query_audience_export', + 'get_audience_export', + 'list_audience_exports', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_beta_analytics_data_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.analytics.data_v1beta.services.beta_analytics_data.transports.BetaAnalyticsDataTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BetaAnalyticsDataTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/analytics', + 'https://www.googleapis.com/auth/analytics.readonly', +), + quota_project_id="octopus", + ) + + +def test_beta_analytics_data_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.analytics.data_v1beta.services.beta_analytics_data.transports.BetaAnalyticsDataTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BetaAnalyticsDataTransport() + adc.assert_called_once() + + +def test_beta_analytics_data_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + BetaAnalyticsDataClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/analytics', + 'https://www.googleapis.com/auth/analytics.readonly', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BetaAnalyticsDataGrpcTransport, + transports.BetaAnalyticsDataGrpcAsyncIOTransport, + ], +) +def test_beta_analytics_data_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/analytics', 'https://www.googleapis.com/auth/analytics.readonly',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BetaAnalyticsDataGrpcTransport, + transports.BetaAnalyticsDataGrpcAsyncIOTransport, + transports.BetaAnalyticsDataRestTransport, + ], +) +def test_beta_analytics_data_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.BetaAnalyticsDataGrpcTransport, grpc_helpers), + (transports.BetaAnalyticsDataGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_beta_analytics_data_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "analyticsdata.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/analytics', + 'https://www.googleapis.com/auth/analytics.readonly', +), + scopes=["1", "2"], + default_host="analyticsdata.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.BetaAnalyticsDataGrpcTransport, transports.BetaAnalyticsDataGrpcAsyncIOTransport]) +def test_beta_analytics_data_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_beta_analytics_data_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.BetaAnalyticsDataRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_beta_analytics_data_host_no_port(transport_name): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='analyticsdata.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'analyticsdata.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://analyticsdata.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_beta_analytics_data_host_with_port(transport_name): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='analyticsdata.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'analyticsdata.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://analyticsdata.googleapis.com:8000' + ) + 
+@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_beta_analytics_data_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = BetaAnalyticsDataClient( + credentials=creds1, + transport=transport_name, + ) + client2 = BetaAnalyticsDataClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.run_report._session + session2 = client2.transport.run_report._session + assert session1 != session2 + session1 = client1.transport.run_pivot_report._session + session2 = client2.transport.run_pivot_report._session + assert session1 != session2 + session1 = client1.transport.batch_run_reports._session + session2 = client2.transport.batch_run_reports._session + assert session1 != session2 + session1 = client1.transport.batch_run_pivot_reports._session + session2 = client2.transport.batch_run_pivot_reports._session + assert session1 != session2 + session1 = client1.transport.get_metadata._session + session2 = client2.transport.get_metadata._session + assert session1 != session2 + session1 = client1.transport.run_realtime_report._session + session2 = client2.transport.run_realtime_report._session + assert session1 != session2 + session1 = client1.transport.check_compatibility._session + session2 = client2.transport.check_compatibility._session + assert session1 != session2 + session1 = client1.transport.create_audience_export._session + session2 = client2.transport.create_audience_export._session + assert session1 != session2 + session1 = client1.transport.query_audience_export._session + session2 = client2.transport.query_audience_export._session + assert session1 != session2 + session1 = client1.transport.get_audience_export._session + session2 = client2.transport.get_audience_export._session + assert session1 != session2 + session1 = client1.transport.list_audience_exports._session + session2 = 
client2.transport.list_audience_exports._session + assert session1 != session2 +def test_beta_analytics_data_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.BetaAnalyticsDataGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_beta_analytics_data_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.BetaAnalyticsDataGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.BetaAnalyticsDataGrpcTransport, transports.BetaAnalyticsDataGrpcAsyncIOTransport]) +def test_beta_analytics_data_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.BetaAnalyticsDataGrpcTransport, transports.BetaAnalyticsDataGrpcAsyncIOTransport]) +def test_beta_analytics_data_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_beta_analytics_data_grpc_lro_client(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_beta_analytics_data_grpc_lro_async_client(): + client = BetaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_audience_export_path(): + property = "squid" + audience_export = "clam" + expected = "properties/{property}/audienceExports/{audience_export}".format(property=property, audience_export=audience_export, ) + actual = BetaAnalyticsDataClient.audience_export_path(property, audience_export) + assert expected == actual + + +def test_parse_audience_export_path(): + expected = { + "property": "whelk", + "audience_export": "octopus", + } + path = BetaAnalyticsDataClient.audience_export_path(**expected) + + # Check that the path construction is reversible. + actual = BetaAnalyticsDataClient.parse_audience_export_path(path) + assert expected == actual + +def test_metadata_path(): + property = "oyster" + expected = "properties/{property}/metadata".format(property=property, ) + actual = BetaAnalyticsDataClient.metadata_path(property) + assert expected == actual + + +def test_parse_metadata_path(): + expected = { + "property": "nudibranch", + } + path = BetaAnalyticsDataClient.metadata_path(**expected) + + # Check that the path construction is reversible. + actual = BetaAnalyticsDataClient.parse_metadata_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = BetaAnalyticsDataClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = BetaAnalyticsDataClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BetaAnalyticsDataClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format(folder=folder, ) + actual = BetaAnalyticsDataClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = BetaAnalyticsDataClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = BetaAnalyticsDataClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format(organization=organization, ) + actual = BetaAnalyticsDataClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = BetaAnalyticsDataClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = BetaAnalyticsDataClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format(project=project, ) + actual = BetaAnalyticsDataClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = BetaAnalyticsDataClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BetaAnalyticsDataClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = BetaAnalyticsDataClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = BetaAnalyticsDataClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = BetaAnalyticsDataClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.BetaAnalyticsDataTransport, '_prep_wrapped_messages') as prep: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.BetaAnalyticsDataTransport, '_prep_wrapped_messages') as prep: + transport_class = BetaAnalyticsDataClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close_grpc(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = BetaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + 
async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (BetaAnalyticsDataClient, transports.BetaAnalyticsDataGrpcTransport), + (BetaAnalyticsDataAsyncClient, transports.BetaAnalyticsDataGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + )