feat(worker): support workers to run natively on windows [WIP] #4446

Open
wants to merge 22 commits into base: main
Changes from 19 commits
44 changes: 44 additions & 0 deletions .github/workflows/build_windows_worker.yml
@@ -0,0 +1,44 @@
name: Build Windows Worker

on:
  push:
    branches:
      - "alp/build_windows"
    paths:
      - "backend/**"
      - ".github/workflows/build_windows_worker.yml"
  # pull_request:
  #   types: [opened, synchronize, reopened]
  #   paths:
  #     - "backend/**"
  #     - ".github/workflows/backend-test.yml"

env:
  CARGO_INCREMENTAL: 0
  DATABASE_URL: postgres://postgres:changeme@postgres:5432/windmill
  DISABLE_EMBEDDING: true
  RUST_LOG: info

jobs:
  cargo_build_windows:
    runs-on: windows-latest
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_DB: windmill
          POSTGRES_PASSWORD: changeme
        options: >-
          --health-cmd pg_isready --health-interval 10s --health-timeout 5s
          --health-retries 5
    steps:
      - uses: actions/checkout@v4
      - name: cargo build windows
        timeout-minutes: 15
        run: cargo build --release
        # mkdir frontend/build && cd backend && touch
        # windmill-api/openapi-deref.yaml &&


10 changes: 3 additions & 7 deletions backend/parsers/windmill-parser-yaml/src/lib.rs
@@ -399,15 +399,11 @@ fn parse_ansible_options(opts: &Vec<Yaml>) -> AnsiblePlaybookOptions {
if c > 0 && c <= 6 {
ret.verbosity = Some("v".repeat(c.min(6)));
}

}
}
_ => ()

_ => (),
}
}


}
}

@@ -422,10 +418,10 @@ fn count_consecutive_vs(s: &str) -> usize {
if c == 'v' {
current_count += 1;
if current_count == 6 {
return 6; // Stop early if we reach 6
return 6; // Stop early if we reach 6
}
} else {
current_count = 0; // Reset count if the character is not 'v'
current_count = 0; // Reset count if the character is not 'v'
}
max_count = max_count.max(current_count);
}
12 changes: 10 additions & 2 deletions backend/src/main.rs
@@ -373,8 +373,16 @@ async fn windmill_main() -> anyhow::Result<()> {
let is_agent = mode == Mode::Agent;

if !is_agent {
// migration code to avoid break
windmill_api::migrate_db(&db).await?;
let skip_migration = std::env::var("SKIP_MIGRATION")
.map(|val| val == "true")
.unwrap_or(false);

if !skip_migration {
// migration code to avoid break
windmill_api::migrate_db(&db).await?;
} else {
tracing::info!("SKIP_MIGRATION set, skipping db migration...")
}
}

let (killpill_tx, mut killpill_rx) = tokio::sync::broadcast::channel::<()>(2);
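
The change above gates database migrations behind an environment variable, so a worker that should not run migrations, such as a natively running Windows worker pointed at an already-migrated database, can opt out. A minimal sketch of the same flag pattern in isolation (only the variable name SKIP_MIGRATION comes from the diff; the helper and main are illustrative):

```rust
// Boolean env-var gate: only the exact string "true" enables the skip;
// an unset or different value falls back to running migrations.
fn env_flag(name: &str) -> bool {
    std::env::var(name)
        .map(|val| val == "true")
        .unwrap_or(false)
}

fn main() {
    if env_flag("SKIP_MIGRATION") {
        println!("SKIP_MIGRATION set, skipping db migration...");
    } else {
        println!("running db migrations");
    }
}
```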
18 changes: 7 additions & 11 deletions backend/windmill-api/src/jobs.rs
@@ -11,14 +11,14 @@ use axum::http::HeaderValue;
use quick_cache::sync::Cache;
use serde_json::value::RawValue;
use sqlx::Pool;
use windmill_common::error::JsonResult;
use std::collections::HashMap;
#[cfg(feature = "prometheus")]
use std::sync::atomic::Ordering;
use tokio::io::AsyncReadExt;
#[cfg(feature = "prometheus")]
use tokio::time::Instant;
use tower::ServiceBuilder;
use windmill_common::error::JsonResult;
use windmill_common::flow_status::{JobResult, RestartedFrom};
use windmill_common::jobs::{
format_completed_job_result, format_result, CompletedJobWithFormattedResult, FormattedResult,
@@ -78,6 +78,7 @@ use windmill_common::s3_helpers::OBJECT_STORE_CACHE_SETTINGS;
#[cfg(feature = "prometheus")]
use windmill_common::{METRICS_DEBUG_ENABLED, METRICS_ENABLED};

use windmill_common::utils::paginate_without_limits;
use windmill_common::{get_latest_deployed_hash_for_path, BASE_URL};
use windmill_queue::{
cancel_job, get_queued_job, get_result_by_id_from_running_flow, job_is_complete, push,
@@ -293,11 +294,8 @@ pub fn workspace_unauthed_service() -> Router {

pub fn global_root_service() -> Router {
Router::new()
.route("/db_clock", get(get_db_clock))
.route(
"/completed/count_by_tag",
get(count_by_tag),
)
.route("/db_clock", get(get_db_clock))
.route("/completed/count_by_tag", get(count_by_tag))
}

#[derive(Deserialize)]
@@ -4683,8 +4681,8 @@ async fn get_job_update(
.fetch_optional(&db)
.await?;

let progress: Option<i32> = if get_progress == Some(true){
sqlx::query_scalar!(
let progress: Option<i32> = if get_progress == Some(true) {
sqlx::query_scalar!(
"SELECT scalar_int FROM job_stats WHERE workspace_id = $1 AND job_id = $2 AND metric_id = $3",
&w_id,
job_id,
@@ -5115,8 +5113,6 @@ async fn get_completed_job_result(
Ok(Json(result).into_response())
}



#[derive(Deserialize)]
struct CountByTagQuery {
horizon_secs: Option<i64>,
@@ -5130,7 +5126,7 @@ struct TagCount {
}

async fn count_by_tag(
ApiAuthed { email, ..}: ApiAuthed,
ApiAuthed { email, .. }: ApiAuthed,
Extension(db): Extension<DB>,
Query(query): Query<CountByTagQuery>,
) -> JsonResult<Vec<TagCount>> {
5 changes: 4 additions & 1 deletion backend/windmill-api/src/resources.rs
@@ -58,7 +58,10 @@ pub fn workspaced_service() -> Router {
.route("/type/exists/:name", get(exists_resource_type))
.route("/type/update/:name", post(update_resource_type))
.route("/type/delete/:name", delete(delete_resource_type))
.route("/file_resource_type_to_file_ext_map", get(file_resource_ext_to_resource_type))
.route(
"/file_resource_type_to_file_ext_map",
get(file_resource_ext_to_resource_type),
)
.route("/type/create", post(create_resource_type))
}

2 changes: 1 addition & 1 deletion backend/windmill-api/src/settings.rs
@@ -24,9 +24,9 @@ use axum::{
#[cfg(feature = "enterprise")]
use axum::extract::Query;

use serde::Deserialize;
#[cfg(feature = "enterprise")]
use windmill_common::ee::{send_critical_alert, CriticalAlertKind, CriticalErrorChannel};
use serde::Deserialize;
use windmill_common::{
error::{self, JsonResult, Result},
global_settings::{
3 changes: 3 additions & 0 deletions backend/windmill-common/src/worker.rs
@@ -449,10 +449,13 @@ pub async fn save_cache(
fn write_binary_file(main_path: &str, byts: &mut bytes::Bytes) -> error::Result<()> {
use std::fs::{File, Permissions};
use std::io::Write;

#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;

let mut file = File::create(main_path)?;
file.write_all(byts)?;
#[cfg(unix)]
file.set_permissions(Permissions::from_mode(0o755))?;
file.flush()?;
Ok(())
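
With the cfg attributes above, write_binary_file compiles on Windows because the Unix-only chmod is dropped at compile time. A standalone sketch of the resulting cross-platform shape (signature simplified to plain std types; the real function takes bytes::Bytes and windmill's error type):

```rust
use std::fs::File;
use std::io::Write;
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;

// Write the payload and, on Unix only, mark the file executable.
// On Windows both the trait import and the permission call are compiled out.
fn write_binary_file(main_path: &str, byts: &[u8]) -> std::io::Result<()> {
    let mut file = File::create(main_path)?;
    file.write_all(byts)?;
    #[cfg(unix)]
    file.set_permissions(std::fs::Permissions::from_mode(0o755))?;
    file.flush()?;
    Ok(())
}

fn main() -> std::io::Result<()> {
    write_binary_file("main.bin", b"binary payload")
}
```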
2 changes: 1 addition & 1 deletion backend/windmill-indexer/src/indexer_ee.rs
@@ -1,6 +1,6 @@
use anyhow::anyhow;
use sqlx::{Pool, Postgres};
use windmill_common::error::Error;
use anyhow::anyhow;

#[derive(Clone)]
pub struct IndexReader;
9 changes: 9 additions & 0 deletions backend/windmill-worker/src/ansible_executor.rs
@@ -1,10 +1,18 @@
#[cfg(unix)]
use std::{
collections::HashMap,
os::unix::fs::PermissionsExt,
path::{Path, PathBuf},
process::Stdio,
};

#[cfg(windows)]
use std::{
collections::HashMap,
path::{Path, PathBuf},
process::Stdio,
};

use anyhow::anyhow;
use itertools::Itertools;
use serde_json::value::RawValue;
@@ -378,6 +386,7 @@ fi

let file = write_file(job_dir, "wrapper.sh", &wrapper)?;

#[cfg(unix)]
file.metadata()?.permissions().set_mode(0o777);
// let mut nsjail_cmd = Command::new(NSJAIL_PATH.as_str());
let mut nsjail_cmd = Command::new(NSJAIL_PATH.as_str());
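
The two import blocks above differ only in the Unix permission trait, and the later set_mode call is likewise Unix-only. A hypothetical consolidation of the same idea keeps one shared import list and cfg-gates just the trait and the chmod (the wrapper-file name mirrors the diff; the function itself is illustrative, not the executor's real code):

```rust
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::{fs::File, io::Write, path::Path};

// Shared imports stay unconditional; only the Unix trait and the permission
// change are gated, so the same code compiles untouched on Windows.
fn write_wrapper(dir: &Path, content: &str) -> std::io::Result<()> {
    let path = dir.join("wrapper.sh");
    let mut file = File::create(&path)?;
    file.write_all(content.as_bytes())?;
    #[cfg(unix)]
    {
        let mut perms = file.metadata()?.permissions();
        perms.set_mode(0o777);
        file.set_permissions(perms)?;
    }
    Ok(())
}

fn main() -> std::io::Result<()> {
    write_wrapper(Path::new("."), "#!/bin/sh\necho wrapped\n")
}
```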