-
Notifications
You must be signed in to change notification settings - Fork 445
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
1 parent
3fac66a
commit 654c6bf
Showing
12 changed files
with
288 additions
and
376 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,59 @@ | ||
import json | ||
import matplotlib.pyplot as plt | ||
|
||
# Helper: read a JSON document from disk.
def load_json_data(filepath):
    """Open *filepath*, parse it as JSON, and return the resulting object."""
    with open(filepath, 'r') as file:
        return json.load(file)
|
||
# Function to plot two arrays of subarrays with tuples (step_name, duration)
def plot_two_arrays_of_subarrays(arrays1, arrays2):
    """Plot total duration per profiling step for two runs as stacked subplots.

    Each argument is a list of iterations, where an iteration is a list of
    ``(step_name, duration)`` tuples. The top subplot shows *arrays1*
    ("Main Loop"), the bottom one *arrays2* ("Result Processor").
    Displays the figure with ``plt.show()``; returns None.
    """

    # Sum durations per step across all iterations.
    # BUGFIX: the original seeded the step set from arrays[0] only, which
    # raised KeyError for steps appearing only in later iterations and
    # IndexError on empty input. Accumulate over every subarray instead,
    # preserving first-seen order (dicts keep insertion order).
    def calculate_sums(arrays):
        sums = {}
        for subarray in arrays:
            for step_name, duration in subarray:
                sums[step_name] = sums.get(step_name, 0) + duration
        # Two parallel lists are what matplotlib's plot() expects.
        return list(sums.keys()), list(sums.values())

    # Calculate sums for both arrays of subarrays
    step_names1, sums1 = calculate_sums(arrays1)
    step_names2, sums2 = calculate_sums(arrays2)

    # Create two subplots, one on top of the other
    fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(10, 8))

    # First plot (top) for the first array of subarrays
    ax1.plot(step_names1, sums1, marker='o', linestyle='-', color='b')
    ax1.set_title('Total Duration per Step - Main Loop')
    ax1.set_xlabel('Step Name')
    ax1.set_ylabel('Total Duration')
    ax1.grid(True)

    # Second plot (bottom) for the second array of subarrays
    ax2.plot(step_names2, sums2, marker='o', linestyle='-', color='r')
    ax2.set_title('Total Duration per Step - Result Processor')
    ax2.set_xlabel('Step Name')
    ax2.set_ylabel('Total Duration')
    ax2.grid(True)

    # Adjust layout so the plots don't overlap
    plt.tight_layout()

    # Display the plot
    plt.show()
|
||
# Run only when executed as a script, so importing this module for its
# helpers does not trigger file reads or open a plot window.
if __name__ == "__main__":
    # Load arrays from the JSON files
    arrays1 = load_json_data('/tmp/windmill/profiling_main.json')
    arrays2 = load_json_data('/tmp/windmill/profiling_result_processor.json')

    # Plot the data
    plot_two_arrays_of_subarrays(arrays1, arrays2)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -37,8 +37,7 @@ use ulid::Ulid; | |
use uuid::Uuid; | ||
use windmill_audit::audit_ee::{audit_log, AuditAuthor}; | ||
use windmill_audit::ActionKind; | ||
#[cfg(not(feature = "enterprise"))] | ||
use windmill_common::worker::PriorityTags; | ||
|
||
use windmill_common::{ | ||
auth::{fetch_authed_from_permissioned_as, permissioned_as_to_username}, | ||
db::{Authed, UserDB}, | ||
|
@@ -62,9 +61,12 @@ use windmill_common::{ | |
to_raw_value, DEFAULT_TAGS_PER_WORKSPACE, DEFAULT_TAGS_WORKSPACES, NO_LOGS, WORKER_CONFIG, | ||
WORKER_PULL_QUERIES, WORKER_SUSPENDED_PULL_QUERY, | ||
}, | ||
BASE_URL, DB, METRICS_ENABLED, | ||
DB, METRICS_ENABLED, | ||
}; | ||
|
||
#[cfg(feature = "enterprise")] | ||
use windmill_common::BASE_URL; | ||
|
||
#[cfg(feature = "cloud")] | ||
use windmill_common::users::SUPERADMIN_SYNC_EMAIL; | ||
|
||
|
@@ -125,10 +127,12 @@ const MAX_FREE_CONCURRENT_RUNS: i32 = 30; | |
|
||
const ERROR_HANDLER_USERNAME: &str = "error_handler"; | ||
const SCHEDULE_ERROR_HANDLER_USERNAME: &str = "schedule_error_handler"; | ||
#[cfg(feature = "enterprise")] | ||
const SCHEDULE_RECOVERY_HANDLER_USERNAME: &str = "schedule_recovery_handler"; | ||
const ERROR_HANDLER_USER_GROUP: &str = "g/error_handler"; | ||
const ERROR_HANDLER_USER_EMAIL: &str = "[email protected]"; | ||
const SCHEDULE_ERROR_HANDLER_USER_EMAIL: &str = "[email protected]"; | ||
#[cfg(feature = "enterprise")] | ||
const SCHEDULE_RECOVERY_HANDLER_USER_EMAIL: &str = "[email protected]"; | ||
|
||
#[derive(Clone, Debug)] | ||
|
@@ -477,6 +481,7 @@ where | |
|
||
#[derive(Deserialize)] | ||
struct RawFlowFailureModule { | ||
#[cfg(feature = "enterprise")] | ||
failure_module: Option<Box<RawValue>>, | ||
} | ||
|
||
|
@@ -671,6 +676,7 @@ pub async fn add_completed_job< | |
} | ||
} | ||
// tracing::error!("Added completed job {:#?}", queued_job); | ||
#[cfg(feature = "enterprise")] | ||
let mut skip_downstream_error_handlers = false; | ||
tx = delete_job(tx, &queued_job.workspace_id, job_id).await?; | ||
// tracing::error!("3 {:?}", start.elapsed()); | ||
|
@@ -716,7 +722,10 @@ pub async fn add_completed_job< | |
.await?; | ||
|
||
if let Some(schedule) = schedule { | ||
skip_downstream_error_handlers = schedule.ws_error_handler_muted; | ||
#[cfg(feature = "enterprise")] | ||
{ | ||
skip_downstream_error_handlers = schedule.ws_error_handler_muted; | ||
} | ||
|
||
// script or flow that failed on start and might not have been rescheduled | ||
let schedule_next_tick = !queued_job.is_flow() | ||
|
@@ -749,6 +758,7 @@ pub async fn add_completed_job< | |
}; | ||
} | ||
|
||
#[cfg(feature = "enterprise")] | ||
if let Err(err) = apply_schedule_handlers( | ||
rsmq.clone(), | ||
db, | ||
|
@@ -1324,6 +1334,8 @@ struct CompletedJobSubset { | |
result: Option<sqlx::types::Json<Box<RawValue>>>, | ||
started_at: chrono::DateTime<chrono::Utc>, | ||
} | ||
|
||
#[cfg(feature = "enterprise")] | ||
async fn apply_schedule_handlers< | ||
'a, | ||
'c, | ||
|
@@ -1342,7 +1354,6 @@ async fn apply_schedule_handlers< | |
job_priority: Option<i16>, | ||
) -> windmill_common::error::Result<()> { | ||
if !success { | ||
#[cfg(feature = "enterprise")] | ||
if let Some(on_failure_path) = schedule.on_failure.clone() { | ||
let times = schedule.on_failure_times.unwrap_or(1).max(1); | ||
let exact = schedule.on_failure_exact.unwrap_or(false); | ||
|
@@ -1392,7 +1403,6 @@ async fn apply_schedule_handlers< | |
.await?; | ||
} | ||
} else { | ||
#[cfg(feature = "enterprise")] | ||
if let Some(ref on_success_path) = schedule.on_success { | ||
handle_successful_schedule( | ||
db, | ||
|
@@ -1410,7 +1420,6 @@ async fn apply_schedule_handlers< | |
.await?; | ||
} | ||
|
||
#[cfg(feature = "enterprise")] | ||
if let Some(ref on_recovery_path) = schedule.on_recovery.clone() { | ||
let tx: QueueTransaction<'_, R> = (rsmq.clone(), db.begin().await?).into(); | ||
let times = schedule.on_recovery_times.unwrap_or(1).max(1); | ||
|
@@ -1579,6 +1588,7 @@ fn sanitize_result<T: Serialize + Send + Sync>(result: Json<&T>) -> HashMap<Stri | |
// is_flow: boolean, | ||
// extra_args: serde_json::Value | ||
// } | ||
#[cfg(feature = "enterprise")] | ||
async fn handle_recovered_schedule< | ||
'a, | ||
'c, | ||
|
@@ -1671,6 +1681,7 @@ async fn handle_recovered_schedule< | |
Ok(()) | ||
} | ||
|
||
#[cfg(feature = "enterprise")] | ||
async fn handle_successful_schedule< | ||
'a, | ||
'c, | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,111 @@ | ||
use serde::Serialize; | ||
use tokio::time::Instant; | ||
use windmill_common::{ | ||
worker::{write_file, TMP_DIR}, | ||
DB, | ||
}; | ||
|
||
/// Accumulated profiling data for one benchmark run: one `BenchmarkIter`
/// (a list of named step durations) per loop iteration.
pub struct BenchmarkInfo {
    // Number of iterations recorded; kept in lockstep with `timings.len()`
    // by `add_iter`. Not included in the serialized output.
    iters: u64,
    timings: Vec<BenchmarkIter>,
}
|
||
impl Serialize for BenchmarkInfo { | ||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> | ||
where | ||
S: serde::Serializer, | ||
{ | ||
let timings: Vec<Vec<(String, u32)>> = self | ||
.timings | ||
.iter() | ||
.map(|x| x.timings.clone()) | ||
.collect::<Vec<Vec<(String, u32)>>>(); | ||
//serialize timings as vec of vec of tuples | ||
timings.serialize(serializer) | ||
} | ||
} | ||
|
||
impl BenchmarkInfo { | ||
pub fn new() -> Self { | ||
BenchmarkInfo { iters: 0, timings: vec![] } | ||
} | ||
|
||
pub fn add_iter(&mut self, bench: BenchmarkIter) { | ||
self.iters += 1; | ||
self.timings.push(bench); | ||
} | ||
|
||
pub fn write_to_file(&self, path: &str) -> anyhow::Result<()> { | ||
println!("Writing benchmark {path}"); | ||
write_file(TMP_DIR, path, &serde_json::to_string(&self).unwrap()).expect("write profiling"); | ||
Ok(()) | ||
} | ||
} | ||
|
||
/// Per-iteration step timer: records the elapsed time between successive
/// `add_timing` checkpoints.
pub struct BenchmarkIter {
    // Instant of the previous checkpoint (or of construction, before the
    // first `add_timing` call).
    last_instant: Instant,
    // Ordered `(step_name, elapsed_nanoseconds)` pairs, one per checkpoint.
    timings: Vec<(String, u32)>,
}
|
||
impl BenchmarkIter {
    /// Start a new iteration timer; the first `add_timing` call measures
    /// from this point.
    pub fn new() -> Self {
        BenchmarkIter { last_instant: Instant::now(), timings: vec![] }
    }

    /// Record the time elapsed since the previous checkpoint under `name`,
    /// then reset the checkpoint to now.
    pub fn add_timing(&mut self, name: &str) {
        // NOTE(review): nanoseconds are truncated to u32, which wraps for
        // steps longer than ~4.29 s — presumably benchmark steps are always
        // far shorter; confirm before relying on these numbers for long steps.
        let elapsed = self.last_instant.elapsed().as_nanos() as u32;
        self.timings.push((name.to_string(), elapsed));
        self.last_instant = Instant::now();
    }
}
|
||
/// Seed the job queue with `BENCHMARK_JOBS_AT_INIT` jobs (default 5000) so a
/// freshly started worker has a full queue to benchmark against.
///
/// When `is_dedicated_worker` is true, inserts Bun script jobs pointing at the
/// pre-existing `f/benchmarks/dedicated` script in the `admins` workspace;
/// otherwise inserts no-op Deno jobs. Panics on a malformed env var, a missing
/// dedicated script, or a failed insert — this only runs at startup in
/// benchmark mode, so failing fast is the intended behavior.
pub async fn benchmark_init(is_dedicated_worker: bool, db: &DB) {
    use windmill_common::{jobs::JobKind, scripts::ScriptLang};

    // How many jobs to enqueue; overridable via BENCHMARK_JOBS_AT_INIT.
    let benchmark_jobs: i32 = std::env::var("BENCHMARK_JOBS_AT_INIT")
        .unwrap_or("5000".to_string())
        .parse::<i32>()
        .unwrap();
    if is_dedicated_worker {
        // you need to create the script first, check https://github.com/windmill-labs/windmill/blob/b76a92cfe454c686f005c65f534e29e039f3c706/benchmarks/lib.ts#L47
        let hash = sqlx::query_scalar!(
            "SELECT hash FROM script WHERE path = $1 AND workspace_id = $2",
            "f/benchmarks/dedicated",
            "admins"
        )
        .fetch_one(db)
        .await
        .unwrap_or_else(|_e| panic!("failed to insert dedicated jobs"));
        // Bulk-insert via generate_series: one statement enqueues all
        // `benchmark_jobs` rows, each with a fresh uuid.
        sqlx::query!("INSERT INTO queue (id, script_hash, script_path, job_kind, language, tag, created_by, permissioned_as, email, scheduled_for, workspace_id) (SELECT gen_random_uuid(), $1, $2, $3, $4, $5, $6, $7, $8, $9, $10 FROM generate_series(1, $11))",
            hash,
            "f/benchmarks/dedicated",
            JobKind::Script as JobKind,
            ScriptLang::Bun as ScriptLang,
            "admins:f/benchmarks/dedicated",
            "admin",
            "u/admin",
            "[email protected]",
            chrono::Utc::now(),
            "admins",
            benchmark_jobs
        )
        .execute(db)
        .await.unwrap_or_else(|_e| panic!("failed to insert dedicated jobs"));
    } else {
        // Non-dedicated path: enqueue Noop jobs (no script hash/path) tagged
        // "deno" so any worker pulling that tag can drain them.
        sqlx::query!("INSERT INTO queue (id, script_hash, script_path, job_kind, language, tag, created_by, permissioned_as, email, scheduled_for, workspace_id) (SELECT gen_random_uuid(), $1, $2, $3, $4, $5, $6, $7, $8, $9, $10 FROM generate_series(1, $11))",
            None::<i64>,
            None::<String>,
            JobKind::Noop as JobKind,
            ScriptLang::Deno as ScriptLang,
            "deno",
            "admin",
            "u/admin",
            "[email protected]",
            chrono::Utc::now(),
            "admins",
            benchmark_jobs
        )
        .execute(db)
        .await.unwrap_or_else(|_e| panic!("failed to insert noop jobs"));
    }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.