diff --git a/README.md b/README.md
index c57708d276ca..aa704ef3ed86 100644
--- a/README.md
+++ b/README.md
@@ -62,16 +62,12 @@ Workflow activity is tracked and can be monitored with a self-hosted [Prefect se
## Getting started
-Prefect requires Python 3.10+. To [install the latest version of Prefect](https://docs.prefect.io/v3/get-started/install), run one of the following commands:
+Prefect requires Python 3.10+. To [install the latest version of Prefect](https://docs.prefect.io/v3/get-started/install), run the following command:
```bash
pip install -U prefect
```
-```bash
-uv add prefect
-```
-
Then create and run a Python file that uses Prefect `flow` and `task` decorators to orchestrate and observe your workflow - in this case, a simple script that fetches the number of GitHub stars from a repository:
```python
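# NOTE: the hunk is truncated here, so the example script itself is not shown.
# What follows is a hedged sketch of such a script, not the repository's actual
# example: the function names, the use of `httpx`, and the repo list are
# assumptions for illustration.
import httpx
from prefect import flow, task


@task
def get_stars(repo: str) -> int:
    """Fetch the stargazer count for one GitHub repository."""
    url = f"https://api.github.com/repos/{repo}"
    return httpx.get(url).json()["stargazers_count"]


@flow(log_prints=True)
def show_stars(repos: list[str]) -> None:
    """Log the star count for each repository."""
    for repo in repos:
        print(f"{repo}: {get_stars(repo)} stars")


if __name__ == "__main__":
    show_stars(["PrefectHQ/prefect"])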
diff --git a/docs/v3/api-ref/python/prefect-server-api-flow_runs.mdx b/docs/v3/api-ref/python/prefect-server-api-flow_runs.mdx
index feea1bb3a6c8..7cd099f3717d 100644
--- a/docs/v3/api-ref/python/prefect-server-api-flow_runs.mdx
+++ b/docs/v3/api-ref/python/prefect-server-api-flow_runs.mdx
@@ -60,14 +60,14 @@ Query for average flow-run lateness in seconds.
### `flow_run_history`
```python
-flow_run_history(history_start: DateTime = Body(..., description="The history's start time."), history_end: DateTime = Body(..., description="The history's end time."), history_interval: float = Body(..., description='The size of each history interval, in seconds. Must be at least 1 second.', json_schema_extra={'format': 'time-delta'}, alias='history_interval_seconds'), flows: Optional[schemas.filters.FlowFilter] = None, flow_runs: Optional[schemas.filters.FlowRunFilter] = None, task_runs: Optional[schemas.filters.TaskRunFilter] = None, deployments: Optional[schemas.filters.DeploymentFilter] = None, work_pools: Optional[schemas.filters.WorkPoolFilter] = None, work_queues: Optional[schemas.filters.WorkQueueFilter] = None, db: PrefectDBInterface = Depends(provide_database_interface)) -> List[schemas.responses.HistoryResponse]
+flow_run_history(history_start: DateTime = Body(..., description="The history's start time."), history_end: DateTime = Body(..., description="The history's end time."), history_interval_seconds: float = Body(..., description='The size of each history interval, in seconds. Must be at least 1 second.', json_schema_extra={'format': 'time-delta'}), flows: Optional[schemas.filters.FlowFilter] = None, flow_runs: Optional[schemas.filters.FlowRunFilter] = None, task_runs: Optional[schemas.filters.TaskRunFilter] = None, deployments: Optional[schemas.filters.DeploymentFilter] = None, work_pools: Optional[schemas.filters.WorkPoolFilter] = None, work_queues: Optional[schemas.filters.WorkQueueFilter] = None, db: PrefectDBInterface = Depends(provide_database_interface)) -> List[schemas.responses.HistoryResponse]
```
Query for flow run history data across a given range and interval.
-### `read_flow_run`
+### `read_flow_run`
```python
read_flow_run(flow_run_id: UUID = Path(..., description='The flow run id', alias='id'), db: PrefectDBInterface = Depends(provide_database_interface)) -> schemas.responses.FlowRunResponse
@@ -77,7 +77,7 @@ read_flow_run(flow_run_id: UUID = Path(..., description='The flow run id', alias
Get a flow run by id.
-### `read_flow_run_graph_v1`
+### `read_flow_run_graph_v1`
```python
read_flow_run_graph_v1(flow_run_id: UUID = Path(..., description='The flow run id', alias='id'), db: PrefectDBInterface = Depends(provide_database_interface)) -> List[DependencyResult]
@@ -87,7 +87,7 @@ read_flow_run_graph_v1(flow_run_id: UUID = Path(..., description='The flow run i
Get a task run dependency map for a given flow run.
-### `read_flow_run_graph_v2`
+### `read_flow_run_graph_v2`
```python
read_flow_run_graph_v2(flow_run_id: UUID = Path(..., description='The flow run id', alias='id'), since: datetime.datetime = Query(default=jsonable_encoder(earliest_possible_datetime()), description='Only include runs that start or end after this time.'), db: PrefectDBInterface = Depends(provide_database_interface)) -> Graph
@@ -97,7 +97,7 @@ read_flow_run_graph_v2(flow_run_id: UUID = Path(..., description='The flow run i
Get a graph of the tasks and subflow runs for the given flow run.
-### `resume_flow_run`
+### `resume_flow_run`
```python
resume_flow_run(response: Response, flow_run_id: UUID = Path(..., description='The flow run id', alias='id'), db: PrefectDBInterface = Depends(provide_database_interface), run_input: Optional[dict[str, Any]] = Body(default=None, embed=True), flow_policy: type[FlowRunOrchestrationPolicy] = Depends(orchestration_dependencies.provide_flow_policy), task_policy: type[TaskRunOrchestrationPolicy] = Depends(orchestration_dependencies.provide_task_policy), orchestration_parameters: Dict[str, Any] = Depends(orchestration_dependencies.provide_flow_orchestration_parameters), api_version: str = Depends(dependencies.provide_request_api_version), client_version: Optional[str] = Depends(dependencies.get_prefect_client_version)) -> OrchestrationResult
@@ -107,7 +107,7 @@ resume_flow_run(response: Response, flow_run_id: UUID = Path(..., description='T
Resume a paused flow run.
-### `read_flow_runs`
+### `read_flow_runs`
```python
read_flow_runs(sort: schemas.sorting.FlowRunSort = Body(schemas.sorting.FlowRunSort.ID_DESC), limit: int = dependencies.LimitBody(), offset: int = Body(0, ge=0), flows: Optional[schemas.filters.FlowFilter] = None, flow_runs: Optional[schemas.filters.FlowRunFilter] = None, task_runs: Optional[schemas.filters.TaskRunFilter] = None, deployments: Optional[schemas.filters.DeploymentFilter] = None, work_pools: Optional[schemas.filters.WorkPoolFilter] = None, work_pool_queues: Optional[schemas.filters.WorkQueueFilter] = None, db: PrefectDBInterface = Depends(provide_database_interface)) -> List[schemas.responses.FlowRunResponse]
@@ -117,7 +117,7 @@ read_flow_runs(sort: schemas.sorting.FlowRunSort = Body(schemas.sorting.FlowRunS
Query for flow runs.
-### `delete_flow_run`
+### `delete_flow_run`
```python
delete_flow_run(docket: dependencies.Docket, flow_run_id: UUID = Path(..., description='The flow run id', alias='id'), db: PrefectDBInterface = Depends(provide_database_interface)) -> None
@@ -127,13 +127,13 @@ delete_flow_run(docket: dependencies.Docket, flow_run_id: UUID = Path(..., descr
Delete a flow run by id.
-### `delete_flow_run_logs`
+### `delete_flow_run_logs`
```python
delete_flow_run_logs() -> None
```
-### `set_flow_run_state`
+### `set_flow_run_state`
```python
set_flow_run_state(response: Response, flow_run_id: UUID = Path(..., description='The flow run id', alias='id'), state: schemas.actions.StateCreate = Body(..., description='The intended state.'), force: bool = Body(False, description='If false, orchestration rules will be applied that may alter or prevent the state transition. If True, orchestration rules are not applied.'), db: PrefectDBInterface = Depends(provide_database_interface), flow_policy: type[FlowRunOrchestrationPolicy] = Depends(orchestration_dependencies.provide_flow_policy), orchestration_parameters: Dict[str, Any] = Depends(orchestration_dependencies.provide_flow_orchestration_parameters), api_version: str = Depends(dependencies.provide_request_api_version), client_version: Optional[str] = Depends(dependencies.get_prefect_client_version)) -> OrchestrationResult
@@ -143,7 +143,7 @@ set_flow_run_state(response: Response, flow_run_id: UUID = Path(..., description
Set a flow run state, invoking any orchestration rules.
-### `create_flow_run_input`
+### `create_flow_run_input`
```python
create_flow_run_input(flow_run_id: UUID = Path(..., description='The flow run id', alias='id'), key: str = Body(..., description='The input key'), value: bytes = Body(..., description='The value of the input'), sender: Optional[str] = Body(None, description='The sender of the input'), db: PrefectDBInterface = Depends(provide_database_interface)) -> None
@@ -153,7 +153,7 @@ create_flow_run_input(flow_run_id: UUID = Path(..., description='The flow run id
Create a key/value input for a flow run.
-### `filter_flow_run_input`
+### `filter_flow_run_input`
```python
filter_flow_run_input(flow_run_id: UUID = Path(..., description='The flow run id', alias='id'), prefix: str = Body(..., description='The input key prefix', embed=True), limit: int = Body(1, description='The maximum number of results to return', embed=True), exclude_keys: List[str] = Body([], description='Exclude inputs with these keys', embed=True), db: PrefectDBInterface = Depends(provide_database_interface)) -> List[schemas.core.FlowRunInput]
@@ -163,7 +163,7 @@ filter_flow_run_input(flow_run_id: UUID = Path(..., description='The flow run id
Filter flow run inputs by key prefix.
-### `read_flow_run_input`
+### `read_flow_run_input`
```python
read_flow_run_input(flow_run_id: UUID = Path(..., description='The flow run id', alias='id'), key: str = Path(..., description='The input key', alias='key'), db: PrefectDBInterface = Depends(provide_database_interface)) -> PlainTextResponse
@@ -173,7 +173,7 @@ read_flow_run_input(flow_run_id: UUID = Path(..., description='The flow run id',
Read a value from a flow run input.
-### `delete_flow_run_input`
+### `delete_flow_run_input`
```python
delete_flow_run_input(flow_run_id: UUID = Path(..., description='The flow run id', alias='id'), key: str = Path(..., description='The input key', alias='key'), db: PrefectDBInterface = Depends(provide_database_interface)) -> None
@@ -183,7 +183,7 @@ delete_flow_run_input(flow_run_id: UUID = Path(..., description='The flow run id
Delete a flow run input.
-### `paginate_flow_runs`
+### `paginate_flow_runs`
```python
paginate_flow_runs(sort: schemas.sorting.FlowRunSort = Body(schemas.sorting.FlowRunSort.ID_DESC), limit: int = dependencies.LimitBody(), page: int = Body(1, ge=1), flows: Optional[schemas.filters.FlowFilter] = None, flow_runs: Optional[schemas.filters.FlowRunFilter] = None, task_runs: Optional[schemas.filters.TaskRunFilter] = None, deployments: Optional[schemas.filters.DeploymentFilter] = None, work_pools: Optional[schemas.filters.WorkPoolFilter] = None, work_pool_queues: Optional[schemas.filters.WorkQueueFilter] = None, db: PrefectDBInterface = Depends(provide_database_interface)) -> FlowRunPaginationResponse
@@ -193,7 +193,7 @@ paginate_flow_runs(sort: schemas.sorting.FlowRunSort = Body(schemas.sorting.Flow
Pagination query for flow runs.
-### `download_logs`
+### `download_logs`
```python
download_logs(flow_run_id: UUID = Path(..., description='The flow run id', alias='id'), db: PrefectDBInterface = Depends(provide_database_interface)) -> StreamingResponse
@@ -203,7 +203,7 @@ download_logs(flow_run_id: UUID = Path(..., description='The flow run id', alias
Download all flow run logs as a CSV file, collecting logs in batches until none remain.
-### `update_flow_run_labels`
+### `update_flow_run_labels`
```python
update_flow_run_labels(flow_run_id: UUID = Path(..., description='The flow run id', alias='id'), labels: Dict[str, Any] = Body(..., description='The labels to update'), db: PrefectDBInterface = Depends(provide_database_interface)) -> None
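# The hunks above rename the `history_interval` parameter to
# `history_interval_seconds` (dropping the alias); per the schema.json changes
# later in this diff, the request body field changes the same way. A hedged
# sketch of calling the renamed endpoint on a self-hosted server; the base URL,
# the one-hour window, and the response fields printed are assumptions.
from datetime import datetime, timedelta, timezone

import httpx

API_URL = "http://127.0.0.1:4200/api"  # assumed default self-hosted address

end = datetime.now(timezone.utc)
start = end - timedelta(hours=1)

response = httpx.post(
    f"{API_URL}/flow_runs/history",
    json={
        "history_start": start.isoformat(),
        "history_end": end.isoformat(),
        # Renamed in this diff: clients that previously sent
        # "history_interval" must now send "history_interval_seconds".
        "history_interval_seconds": 60.0,  # one bucket per minute
    },
)
response.raise_for_status()
for bucket in response.json():
    print(bucket["interval_start"], bucket["states"])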
diff --git a/docs/v3/api-ref/python/prefect-server-api-task_runs.mdx b/docs/v3/api-ref/python/prefect-server-api-task_runs.mdx
index 56840333b009..ef42b0495f80 100644
--- a/docs/v3/api-ref/python/prefect-server-api-task_runs.mdx
+++ b/docs/v3/api-ref/python/prefect-server-api-task_runs.mdx
@@ -51,14 +51,14 @@ Count task runs.
### `task_run_history`
```python
-task_run_history(history_start: DateTime = Body(..., description="The history's start time."), history_end: DateTime = Body(..., description="The history's end time."), history_interval: float = Body(..., description='The size of each history interval, in seconds. Must be at least 1 second.', json_schema_extra={'format': 'time-delta'}, alias='history_interval_seconds'), flows: schemas.filters.FlowFilter = None, flow_runs: schemas.filters.FlowRunFilter = None, task_runs: schemas.filters.TaskRunFilter = None, deployments: schemas.filters.DeploymentFilter = None, db: PrefectDBInterface = Depends(provide_database_interface)) -> List[schemas.responses.HistoryResponse]
+task_run_history(history_start: DateTime = Body(..., description="The history's start time."), history_end: DateTime = Body(..., description="The history's end time."), history_interval_seconds: float = Body(..., description='The size of each history interval, in seconds. Must be at least 1 second.', json_schema_extra={'format': 'time-delta'}), flows: schemas.filters.FlowFilter = None, flow_runs: schemas.filters.FlowRunFilter = None, task_runs: schemas.filters.TaskRunFilter = None, deployments: schemas.filters.DeploymentFilter = None, db: PrefectDBInterface = Depends(provide_database_interface)) -> List[schemas.responses.HistoryResponse]
```
Query for task run history data across a given range and interval.
-### `read_task_run`
+### `read_task_run`
```python
read_task_run(task_run_id: UUID = Path(..., description='The task run id', alias='id'), db: PrefectDBInterface = Depends(provide_database_interface)) -> schemas.core.TaskRun
@@ -68,7 +68,7 @@ read_task_run(task_run_id: UUID = Path(..., description='The task run id', alias
Get a task run by id.
-### `read_task_runs`
+### `read_task_runs`
```python
read_task_runs(sort: schemas.sorting.TaskRunSort = Body(schemas.sorting.TaskRunSort.ID_DESC), limit: int = dependencies.LimitBody(), offset: int = Body(0, ge=0), flows: Optional[schemas.filters.FlowFilter] = None, flow_runs: Optional[schemas.filters.FlowRunFilter] = None, task_runs: Optional[schemas.filters.TaskRunFilter] = None, deployments: Optional[schemas.filters.DeploymentFilter] = None, db: PrefectDBInterface = Depends(provide_database_interface)) -> List[schemas.core.TaskRun]
@@ -78,7 +78,7 @@ read_task_runs(sort: schemas.sorting.TaskRunSort = Body(schemas.sorting.TaskRunS
Query for task runs.
-### `paginate_task_runs`
+### `paginate_task_runs`
```python
paginate_task_runs(sort: schemas.sorting.TaskRunSort = Body(schemas.sorting.TaskRunSort.ID_DESC), limit: int = dependencies.LimitBody(), page: int = Body(1, ge=1), flows: Optional[schemas.filters.FlowFilter] = None, flow_runs: Optional[schemas.filters.FlowRunFilter] = None, task_runs: Optional[schemas.filters.TaskRunFilter] = None, deployments: Optional[schemas.filters.DeploymentFilter] = None, db: PrefectDBInterface = Depends(provide_database_interface)) -> TaskRunPaginationResponse
@@ -88,7 +88,7 @@ paginate_task_runs(sort: schemas.sorting.TaskRunSort = Body(schemas.sorting.Task
Pagination query for task runs.
-### `delete_task_run`
+### `delete_task_run`
```python
delete_task_run(docket: dependencies.Docket, task_run_id: UUID = Path(..., description='The task run id', alias='id'), db: PrefectDBInterface = Depends(provide_database_interface)) -> None
@@ -98,13 +98,13 @@ delete_task_run(docket: dependencies.Docket, task_run_id: UUID = Path(..., descr
Delete a task run by id.
-### `delete_task_run_logs`
+### `delete_task_run_logs`
```python
delete_task_run_logs() -> None
```
-### `set_task_run_state`
+### `set_task_run_state`
```python
set_task_run_state(task_run_id: UUID = Path(..., description='The task run id', alias='id'), state: schemas.actions.StateCreate = Body(..., description='The intended state.'), force: bool = Body(False, description='If false, orchestration rules will be applied that may alter or prevent the state transition. If True, orchestration rules are not applied.'), db: PrefectDBInterface = Depends(provide_database_interface), response: Response = None, task_policy: TaskRunOrchestrationPolicy = Depends(orchestration_dependencies.provide_task_policy), orchestration_parameters: Dict[str, Any] = Depends(orchestration_dependencies.provide_task_orchestration_parameters)) -> OrchestrationResult
@@ -114,7 +114,7 @@ set_task_run_state(task_run_id: UUID = Path(..., description='The task run id',
Set a task run state, invoking any orchestration rules.
-### `scheduled_task_subscription`
+### `scheduled_task_subscription`
```python
scheduled_task_subscription(websocket: WebSocket) -> None
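# The task run history endpoint gets the same `history_interval` ->
# `history_interval_seconds` rename. A hedged sketch of the request body only;
# the timestamps are placeholders.
body = {
    "history_start": "2024-01-01T00:00:00Z",
    "history_end": "2024-01-01T01:00:00Z",
    "history_interval_seconds": 300.0,  # was "history_interval"; 5-minute buckets
}
# e.g. httpx.post(f"{API_URL}/task_runs/history", json=body)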
diff --git a/docs/v3/api-ref/python/prefect-workers-server.mdx b/docs/v3/api-ref/python/prefect-workers-server.mdx
index 204fc7adc679..976bb6c8ab6e 100644
--- a/docs/v3/api-ref/python/prefect-workers-server.mdx
+++ b/docs/v3/api-ref/python/prefect-workers-server.mdx
@@ -7,7 +7,7 @@ sidebarTitle: server
## Functions
-### `build_healthcheck_server`
+### `build_healthcheck_server`
```python
build_healthcheck_server(worker: BaseWorker[Any, Any, Any], query_interval_seconds: float, log_level: str = 'error') -> uvicorn.Server
@@ -21,7 +21,7 @@ Build a healthcheck FastAPI server for a worker.
- `log_level`: the log level to use for the server
-### `start_healthcheck_server`
+### `start_healthcheck_server`
```python
start_healthcheck_server(worker: BaseWorker[Any, Any, Any], query_interval_seconds: float, log_level: str = 'error') -> None
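# A hedged sketch of wiring the healthcheck server to a worker; `ProcessWorker`,
# the work pool name, and running in a daemon thread are assumptions, on the
# reading that `start_healthcheck_server` blocks while serving.
import threading

from prefect.workers.process import ProcessWorker
from prefect.workers.server import start_healthcheck_server

worker = ProcessWorker(work_pool_name="my-pool")  # hypothetical pool name
threading.Thread(
    target=start_healthcheck_server,
    kwargs={"worker": worker, "query_interval_seconds": 30.0},
    daemon=True,
).start()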
diff --git a/docs/v3/api-ref/rest-api/server/schema.json b/docs/v3/api-ref/rest-api/server/schema.json
index 026d96d6ae39..5d969afd7009 100644
--- a/docs/v3/api-ref/rest-api/server/schema.json
+++ b/docs/v3/api-ref/rest-api/server/schema.json
@@ -12932,10 +12932,10 @@
"title": "History End",
"description": "The history's end time."
},
- "history_interval": {
+ "history_interval_seconds": {
"type": "number",
"format": "time-delta",
- "title": "History Interval",
+ "title": "History Interval Seconds",
"description": "The size of each history interval, in seconds. Must be at least 1 second."
},
"flows": {
@@ -13003,7 +13003,7 @@
"required": [
"history_start",
"history_end",
- "history_interval"
+ "history_interval_seconds"
],
"title": "Body_flow_run_history_flow_runs_history_post"
},
@@ -14451,10 +14451,10 @@
"title": "History End",
"description": "The history's end time."
},
- "history_interval": {
+ "history_interval_seconds": {
"type": "number",
"format": "time-delta",
- "title": "History Interval",
+ "title": "History Interval Seconds",
"description": "The size of each history interval, in seconds. Must be at least 1 second."
},
"flows": {
@@ -14474,7 +14474,7 @@
"required": [
"history_start",
"history_end",
- "history_interval"
+ "history_interval_seconds"
],
"title": "Body_task_run_history_task_runs_history_post"
},