From b1e924645c62aa3bdc255688708edb86e3d91157 Mon Sep 17 00:00:00 2001
From: eduardocerqueira
Date: Thu, 12 Sep 2024 17:12:36 +0000
Subject: [PATCH] 2024-09-12 17:12:36.710489 new snippets

---
 seeker/report.txt                             |  23 +++
 seeker/snippet/CuteBot.py                     | 139 ------------------
 .../snippet/Generic 4-Quadrant Regression.py  | 127 ----------------
 seeker/snippet/a.sh                           |  32 ----
 seeker/snippet/circular_heatexchange.py       |  92 ------------
 .../snippet/dumb-random-number-generator.go   |  79 ----------
 seeker/snippet/impress.py                     |  59 ++++++++
 seeker/snippet/main.py                        |  24 ++-
 seeker/snippet/resize.py                      |  79 ----------
 9 files changed, 98 insertions(+), 556 deletions(-)
 delete mode 100644 seeker/snippet/CuteBot.py
 delete mode 100644 seeker/snippet/Generic 4-Quadrant Regression.py
 delete mode 100644 seeker/snippet/a.sh
 delete mode 100644 seeker/snippet/circular_heatexchange.py
 delete mode 100644 seeker/snippet/dumb-random-number-generator.go
 create mode 100644 seeker/snippet/impress.py
 delete mode 100644 seeker/snippet/resize.py

diff --git a/seeker/report.txt b/seeker/report.txt
index a995b92c..81653155 100644
--- a/seeker/report.txt
+++ b/seeker/report.txt
@@ -1,3 +1,26 @@
+--------------------------------------------------------------------------------
+ 2024-09-12 17:12:36.710489
+--------------------------------------------------------------------------------
+ On branch main
+Your branch is up to date with 'origin/main'.
+
+Changes not staged for commit:
+  (use "git add/rm <file>..." to update what will be committed)
+  (use "git restore <file>..." to discard changes in working directory)
+	deleted:    snippet/CuteBot.py
+	deleted:    snippet/Generic 4-Quadrant Regression.py
+	deleted:    snippet/a.sh
+	deleted:    snippet/circular_heatexchange.py
+	deleted:    snippet/dumb-random-number-generator.go
+	modified:   snippet/main.py
+	deleted:    snippet/resize.py
+
+Untracked files:
+  (use "git add <file>..." to include in what will be committed)
+	snippet/impress.py
+
+no changes added to commit (use "git add" and/or "git commit -a")
+
 --------------------------------------------------------------------------------
  2024-09-11 17:11:33.860081
 --------------------------------------------------------------------------------
diff --git a/seeker/snippet/CuteBot.py b/seeker/snippet/CuteBot.py
deleted file mode 100644
index 3f4b2121..00000000
--- a/seeker/snippet/CuteBot.py
+++ /dev/null
@@ -1,139 +0,0 @@
-#date: 2024-09-10T16:45:57Z
-#url: https://api.github.com/gists/0150766320516a0cfcfda5d94c2d18fe
-#owner: https://api.github.com/users/MattieOF
-
-from telethon import *
-import asyncio
-import json
-import os
-import time
-import dataclasses
-import sys
-import datetime
-from dataclasses import dataclass
-
-# Thanks to https://stackoverflow.com/a/54769644
-def dataclass_from_dict(klass, d):
-    try:
-        fieldtypes = {f.name:f.type for f in dataclasses.fields(klass)}
-        return klass(**{f:dataclass_from_dict(fieldtypes[f],d[f]) for f in d})
-    except:
-        return d # Not a dataclass field
-
-api_id = 69420 # get your own at: https://my.telegram.org/apps
-api_hash = "nuh uh, you're not getting this one :3"
-
-client = TelegramClient('CuteBot', api_id, api_hash)
-
-@dataclass
-class ScheduledMessage:
-    username: str
-    message: str
-    interval: int
-    last_sent: float
-
-scheduledMessages = []
-if (os.path.exists("scheduledMessages.json")):
-    with open("scheduledMessages.json", "r") as file:
-        loadedMessages = json.load(file)
-        scheduledMessages = [dataclass_from_dict(ScheduledMessage, msg) for msg in loadedMessages]
-
-def save_messages():
-    with open("scheduledMessages.json", "w") as file:
-        scheduledMessagesAsDict = list(map(lambda msg: dataclasses.asdict(msg), scheduledMessages))
-        json.dump(scheduledMessagesAsDict, file)
-
-async def loop():
-    logOut = False
-
-    async def check_messages():
-        didEdit = False
-        for msg in scheduledMessages:
-            if time.time() - msg.last_sent >= msg.interval:
-                try:
-                    await client.send_message(msg.username, msg.message)
-                    msg.last_sent = time.time()
-                    print(f"Sent message to {msg.username}! Next message in {msg.interval} seconds (at {datetime.datetime.fromtimestamp(time.time() + msg.interval).strftime('%Y-%m-%d %H:%M:%S')})")
-                    didEdit = True
-                except Exception as error:
-                    print(f"Failed to send message to {msg.username}! Due to {error}")
-        if didEdit:
-            save_messages()
-
-    if "autorun" in sys.argv:
-        print("Running!")
-        while True:
-            await check_messages()
-            await asyncio.sleep(1)
-
-    while True:
-        cmd = input("Enter command: ")
-        if cmd == "exit":
-            break
-        elif cmd == "logout":
-            logOut = True
-            break
-
-        cmd = cmd.split(" ")
-        if cmd[0] == "list":
-            # List all scheduled messages
-            # Format: list
-            if len(scheduledMessages) == 0:
-                print("No scheduled messages yet :( Use the 'add' command to add one!")
-                continue
-
-            for i, msg in enumerate(scheduledMessages):
-                print(f"{i}: To {msg.username}, \"{msg.message}\" (every {msg.interval} seconds)")
-        elif cmd[0] == "add":
-            # Add a scheduled message
-            # Format: add <username> <interval> <message>
-            if len(cmd) < 4:
-                print("Not enough parameters! Format: add <username> <interval> <message>")
-                continue
-
-            try:
-                interval = float(cmd[2])
-                if interval < 1:
-                    raise ValueError
-            except ValueError:
-                print("Invalid interval! Must be an integer above 1.")
-                continue
-
-            try:
-                await client.send_message(cmd[1], "new scheduled message added :3")
-            except:
-                print("Invalid username! Make sure you have the correct username and that you've sent a message to the user at least once.")
-                continue
-
-            scheduledMessages.append(ScheduledMessage(username=cmd[1], interval=interval, message=" ".join(cmd[3:]), last_sent=time.time()))
-            save_messages()
-        elif cmd[0] == "remove":
-            # Remove a scheduled message by index from the list
-            # Format: remove <index>
-            if len(cmd) < 2:
-                print("Not enough parameters! Format: remove <index>")
-                continue
-
-            try:
-                index = int(cmd[1])
-                if index < 0 or index >= len(scheduledMessages):
-                    raise ValueError
-            except ValueError:
-                print("Invalid index! Must be an integer within the range of the list.")
-                continue
-
-            scheduledMessages.pop(index)
-            save_messages()
-        elif cmd[0] == "run":
-            while True:
-                await check_messages()
-                await asyncio.sleep(1)
-
-    if logOut:
-        await client.log_out()
-        print("Logged out!")
-
-with client:
-    client.loop.run_until_complete(loop())
-
-print("Bye!")
diff --git a/seeker/snippet/Generic 4-Quadrant Regression.py b/seeker/snippet/Generic 4-Quadrant Regression.py
deleted file mode 100644
index 9d59c0a6..00000000
--- a/seeker/snippet/Generic 4-Quadrant Regression.py
+++ /dev/null
@@ -1,127 +0,0 @@
-#date: 2024-09-10T16:44:44Z
-#url: https://api.github.com/gists/01e1f2f2fd5a413db7dbd68df088489a
-#owner: https://api.github.com/users/srkim
-
-import pandas as pd
-import statsmodels.api as sm
-import matplotlib.pyplot as plt
-import seaborn as sns
-from sklearn.metrics import mutual_info_score
-import scipy.stats as stats
-from scipy.stats import linregress
-from sklearn.linear_model import LinearRegression
-from sklearn.model_selection import train_test_split
-import numpy as np
-
-# Set the aesthetic style of the plots
-sns.set(style="whitegrid")
-
-# Read data from an Excel file and store it in a Pandas DataFrame
-df = pd.read_excel('CorrelationsHedge2.xlsx')
-
-# Define the response and explanatory variables
-y = df['Y']
-x = df['X']
-
-# Calculate medians and IQRs for x and y
-median_x = np.median(x)
-median_y = np.median(y)
-q1_x, q3_x = np.percentile(x, [25, 75])
-q1_y, q3_y = np.percentile(y, [25, 75])
-iqr_x = q3_x - q1_x
-iqr_y = q3_y - q1_y
-
-# Identify outliers
-outliers = (x < q1_x - 1.5 * iqr_x) | (x > q3_x + 1.5 * iqr_x) | (y < q1_y - 1.5 * iqr_y) | (y > q3_y + 1.5 * iqr_y)
-
-# Calculate the slope, intercept, and R-squared value of the regression line
-slope, intercept, r_value, _, _ = linregress(x, y)
-r_squared = r_value**2
-
-# Generate the regression line
-regression_line = slope * x + intercept
-
-# Divide data into quadrants counterclockwise from the top right
-q1 = (x >= 0) & (y >= 0)  # Top right
-q2 = (x <= 0) & (y >= 0)  # Top left
-q3 = (x <= 0) & (y <= 0)  # Bottom left
-q4 = (x >= 0) & (y <= 0)  # Bottom right
-
-# Calculate correlations for each quadrant
-corr_q1 = np.corrcoef(x[q1], y[q1])[0, 1]
-corr_q2 = np.corrcoef(x[q2], y[q2])[0, 1]
-corr_q3 = np.corrcoef(x[q3], y[q3])[0, 1]
-corr_q4 = np.corrcoef(x[q4], y[q4])[0, 1]
-
-# Calculate overall correlation
-overall_corr = np.corrcoef(x, y)[0, 1]
-
-# Calculate the number of points in each quadrant and the percentage of total points
-total_points = len(x)
-q1_count = np.sum(q1)
-q2_count = np.sum(q2)
-q3_count = np.sum(q3)
-q4_count = np.sum(q4)
-
-q1_percentage = (q1_count / total_points) * 100
-q2_percentage = (q2_count / total_points) * 100
-q3_percentage = (q3_count / total_points) * 100
-q4_percentage = (q4_count / total_points) * 100
-
-# Calculate the number of points where x is greater than y in each quadrant
-q1_x_greater_y = np.sum((q1) & (x > y))
-q2_x_greater_y = np.sum((q2) & (x > y))
-q3_x_greater_y = np.sum((q3) & (x > y))
-q4_x_greater_y = np.sum((q4) & (x > y))
-
-# Calculate the percentage of points where x is greater than y in each quadrant
-q1_x_greater_y_percentage = (q1_x_greater_y / q1_count) * 100
-q2_x_greater_y_percentage = (q2_x_greater_y / q2_count) * 100
-q3_x_greater_y_percentage = (q3_x_greater_y / q3_count) * 100
-q4_x_greater_y_percentage = (q4_x_greater_y / q4_count) * 100
-
-# Print summary statistics
-print(f"Sum of correlations in quadrants: {corr_q1 + corr_q2 + corr_q3 + corr_q4:.4f}")
-print(f"Overall correlation: {overall_corr:.4f}")
-print('________________________________________________')
-
-# Print the number of points in each quadrant and the percentage of points from the total points
-print(f"Number of points in Q1: {q1_count} ({q1_percentage:.2f}%)")
-print(f"Number of points in Q2: {q2_count} ({q2_percentage:.2f}%)")
-print(f"Number of points in Q3: {q3_count} ({q3_percentage:.2f}%)")
-print(f"Number of points in Q4: {q4_count} ({q4_percentage:.2f}%)")
-print('________________________________________________')
-
-# Print the percentage of points where x is greater than y in each quadrant
-print(f"Percentage of points where x > y in Q1: {q1_x_greater_y_percentage:.2f}%")
-print(f"Percentage of points where x > y in Q2: {q2_x_greater_y_percentage:.2f}%")
-print(f"Percentage of points where x > y in Q3: {q3_x_greater_y_percentage:.2f}%")
-print(f"Percentage of points where x > y in Q4: {q4_x_greater_y_percentage:.2f}%")
-
-# Create scatter plot with colored quadrants and 'x' marker for outliers
-plt.figure(figsize=(12, 8))
-plt.scatter(x[q1 & ~outliers], y[q1 & ~outliers], alpha=.6, s=50, label=f'Q1: {corr_q1:.4f}')
-plt.scatter(x[q2 & ~outliers], y[q2 & ~outliers], alpha=.6, s=50, label=f'Q2: {corr_q2:.4f}')
-plt.scatter(x[q3 & ~outliers], y[q3 & ~outliers], alpha=.6, s=50, label=f'Q3: {corr_q3:.4f}')
-plt.scatter(x[q4 & ~outliers], y[q4 & ~outliers], alpha=.6, s=50, label=f'Q4: {corr_q4:.4f}')
-
-# Plot outliers with 'x' marker with the same color as their respective quadrants
-plt.scatter(x[q1 & outliers], y[q1 & outliers], s=40, color='C0', marker='x')
-plt.scatter(x[q2 & outliers], y[q2 & outliers], s=40, color='C1', marker='x')
-plt.scatter(x[q3 & outliers], y[q3 & outliers], s=40, color='C2', marker='x')
-plt.scatter(x[q4 & outliers], y[q4 & outliers], s=40, color='C3', marker='x')
-
-# Plot regression line and axis lines
-plt.plot(x, regression_line, color="gray", label="Regression Line")
-plt.axvline(x=0, color='gray', linestyle='--')
-plt.axhline(y=0, color='gray', linestyle='--')
-
-plt.title(f'Y vs. X\nOverall Correlation: {overall_corr:.4f}\nR-squared: {r_squared:.4f}\nSlope: {slope:.4f}: x are Outliers', fontsize=14)
-plt.xlabel('X')
-plt.ylabel('Y')
-plt.legend()
-
-# Save the plot as an SVG file
-plt.savefig('Correlation.svg')
-
-plt.show()
\ No newline at end of file
diff --git a/seeker/snippet/a.sh b/seeker/snippet/a.sh
deleted file mode 100644
index 9f322e57..00000000
--- a/seeker/snippet/a.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#date: 2024-09-10T16:56:58Z
-#url: https://api.github.com/gists/b5462dcec2d47ef4e4bdcdf05d3e4303
-#owner: https://api.github.com/users/justin2004
-
-# first download https://dlcdn.apache.org/jena/binaries/apache-jena-5.1.0.zip
-# then unzip it
-
-% mkdir /tmp/db1
-% ls ~/Downloads/labels.ttl
-/Users/justin/Downloads/labels.ttl
-% ~/Downloads/apache-jena-5.1.0/bin/tdb2.tdbloader --loader=parallel --loc /tmp/db1 ~/Downloads/labels.ttl
-11:52:38 INFO loader :: Loader = LoaderParallel
-11:52:38 INFO loader :: Start: /Users/justin/Downloads/labels.ttl
-11:52:38 INFO loader :: Finished: /Users/justin/Downloads/labels.ttl: 5 tuples in 0.11s (Avg: 46)
-11:52:38 INFO loader :: Finish - index OSP
-11:52:38 INFO loader :: Finish - index POS
-11:52:38 INFO loader :: Finish - index SPO
-% cat /tmp/some.rq
-select * where {
-?s ?p ?o
-} limit 2
-% ~/Downloads/apache-jena-5.1.0/bin/tdb2.tdbquery --loc /tmp/db1 --query /tmp/some.rq
--------------------------------------------------------------------------------------------------------------------------
-| s                                           | p                                            | o                       |
-=========================================================================================================================
-| <http://www.wikidata.org/entity/Q6553274>   | <http://www.w3.org/2000/01/rdf-schema#label> | "line number"@en        |
-| <http://www.wikidata.org/entity/Q113515824> | <http://www.w3.org/2000/01/rdf-schema#label> | "contiguous lines"@en   |
--------------------------------------------------------------------------------------------------------------------------
-% ~/Downloads/apache-jena-5.1.0/bin/tdb2.tdbquery --results=csv --loc /tmp/db1 --query /tmp/some.rq
-s,p,o
-http://www.wikidata.org/entity/Q6553274,http://www.w3.org/2000/01/rdf-schema#label,line number
-http://www.wikidata.org/entity/Q113515824,http://www.w3.org/2000/01/rdf-schema#label,contiguous lines
diff --git a/seeker/snippet/circular_heatexchange.py b/seeker/snippet/circular_heatexchange.py
deleted file mode 100644
index 694b8204..00000000
--- a/seeker/snippet/circular_heatexchange.py
+++ /dev/null
@@ -1,92 +0,0 @@
-#date: 2024-09-10T16:41:04Z
-#url: https://api.github.com/gists/03a3e74d6a0637229ae515d69928eb1d
-#owner: https://api.github.com/users/thomasahle
-
-from manim import *
-import numpy as np
-
-class DualRingHeatTransferAnimation(Scene):
-    def construct(self):
-        # Configuration
-        num_blocks = 20
-        num_around = 39
-        block_size = 0.4
-        outer_radius = 3.3
-        inner_radius = 2.9
-        step_time = 0.3
-        pause_time = 0.1
-        color_change_time = 0.4
-        num_steps = 2 * num_around
-        num_steps = 13
-        step_angle = TAU / num_around
-
-        # Create outer and inner rings of blocks
-        outer_blocks = VGroup(*[Square(side_length=block_size, stroke_width=1) for _ in range(num_blocks)])
-        inner_blocks = VGroup(*[Square(side_length=block_size, stroke_width=1) for _ in range(num_blocks)])
-
-        # Position blocks in circles
-        def position_blocks(blocks, radius, sign=1, start_angle=0):
-            for i, block in enumerate(blocks):
-                angle = i * step_angle * sign + start_angle
-                block.move_to([radius, 0, 0])
-                block.rotate(angle, about_point=ORIGIN)
-
-        position_blocks(outer_blocks, outer_radius, -1)
-        position_blocks(inner_blocks, inner_radius, +1, step_angle)
-
-        # Set initial temperatures and colors
-        outer_temps = [1.0] * num_blocks
-        inner_temps = [0.0] * num_blocks
-
-        def get_color(temp):
-            return color_gradient((BLACK, RED), 101)[int(temp * 100)]
-
-        def update_colors(blocks, temps):
-            for block, temp in zip(blocks, temps):
-                block.set_fill(color=get_color(temp), opacity=1)
-
-        update_colors(outer_blocks, outer_temps)
-        update_colors(inner_blocks, inner_temps)
-
-        # Add blocks to the scene
-        self.add(outer_blocks, inner_blocks)
-
-        # Animate the movement and heat transfer
-        for offset in range(num_steps):
-            # Move rings
-            move_animations = [
-                outer_blocks.animate.rotate(step_angle/2, about_point=ORIGIN),
-                inner_blocks.animate.rotate(-step_angle/2, about_point=ORIGIN),
-            ]
-            self.play(*move_animations, run_time=step_time)
-
-            # Pause movement
-            self.wait(pause_time)
-
-            # Perform heat transfer and prepare color change animations
-            color_animations = []
-            for i in range(num_blocks):
-                outer_index = i % num_around
-                inner_index = (offset - i) % num_around
-                if inner_index >= num_blocks or outer_index >= num_blocks:
-                    continue
-
-                avg_temp = (outer_temps[outer_index] + inner_temps[inner_index]) / 2
-                outer_temps[outer_index] = avg_temp
-                inner_temps[inner_index] = avg_temp
-
-                color_animations += [
-                    outer_blocks[outer_index].animate.set_fill(color=get_color(avg_temp)),
-                    inner_blocks[inner_index].animate.set_fill(color=get_color(avg_temp)),
-                ]
-
-
-            # Animate all color changes concurrently
-            if color_animations:
-                self.play(*color_animations, run_time=color_change_time)
-
-            # Pause movement
-            self.wait(pause_time)
-
-        # Final wait
-        self.wait(1)
diff --git a/seeker/snippet/dumb-random-number-generator.go b/seeker/snippet/dumb-random-number-generator.go
deleted file mode 100644
index 602fe545..00000000
--- a/seeker/snippet/dumb-random-number-generator.go
+++ /dev/null
@@ -1,79 +0,0 @@
-//date: 2024-09-10T17:10:57Z
-//url: https://api.github.com/gists/1ca01da35b1115bf90f0484c4562d401
-//owner: https://api.github.com/users/timmattison
-
-func DumbGenerateRandomNumber(length int) string {
-	output := ""
-
-	var channels []chan struct{}
-	var results []int
-	ctx, cancelFunc := context.WithCancelCause(context.Background())
-
-	for range 10 {
-		channels = append(channels, make(chan struct{}, 100))
-
-		for range 100 {
-			channels[len(channels)-1] <- struct{}{}
-		}
-	}
-
-	for range 10 {
-		results = append(results, 0)
-	}
-
-	outputDigitsChannel := make(chan rune)
-
-	for i := range 10 {
-		go func() {
-			defer close(channels[i])
-			select {
-			case <-ctx.Done():
-				return
-			case channels[i] <- struct{}{}:
-			}
-		}()
-	}
-
-	go func() {
-		defer close(outputDigitsChannel)
-
-		for {
-			select {
-			case <-ctx.Done():
-				return
-			case <-channels[0]:
-				outputDigitsChannel <- '0'
-			case <-channels[1]:
-				outputDigitsChannel <- '1'
-			case <-channels[2]:
-				outputDigitsChannel <- '2'
-			case <-channels[3]:
-				outputDigitsChannel <- '3'
-			case <-channels[4]:
-				outputDigitsChannel <- '4'
-			case <-channels[5]:
-				outputDigitsChannel <- '5'
-			case <-channels[6]:
-				outputDigitsChannel <- '6'
-			case <-channels[7]:
-				outputDigitsChannel <- '7'
-			case <-channels[8]:
-				outputDigitsChannel <- '8'
-			case <-channels[9]:
-				outputDigitsChannel <- '9'
-			}
-		}
-	}()
-
-	for outputDigit := range outputDigitsChannel {
-		results[outputDigit-'0']++
-		output += string(outputDigit)
-
-		if len(output) == length {
-			cancelFunc(nil)
-			break
-		}
-	}
-
-	return output
-}
\ No newline at end of file
diff --git a/seeker/snippet/impress.py b/seeker/snippet/impress.py
new file mode 100644
index 00000000..b4a036fd
--- /dev/null
+++ b/seeker/snippet/impress.py
@@ -0,0 +1,59 @@
+#date: 2024-09-12T17:09:29Z
+#url: https://api.github.com/gists/cea1753cdb2140862f5f4982dfac312a
+#owner: https://api.github.com/users/rk9777
+
+li = range(100)
+# multiply each item in a list by 2
+[x*2 for x in li]
+
+# sum a list
+sum(li)
+
+# verify if exists in a string.
+word_list = ["iterate", "comprehension", "for", "yield", "Guido"]
+
+tweet_yes = "Guido said to iterate over the comprehension"
+
+any(w in tweet_yes for w in word_list)
+
+# read in a file
+open(__file__).read()
+# lines
+open(__file__).readlines() or list(open(__file__))
+# iterator
+open(__file__)
+
+# happy birthday
+print "\n".join("happy birthday %s" % ("dear NAME" if i == 3 else "to you") \
+    for i in range(1, 4))
+
+# filter a list of number
+over_60 = [x for x in range(10, 100, 10) if x > 60]
+
+# find max / min of a list
+max(li)
+min(li)
+
+# xml
+import urllib
+import xml.etree.ElementTree as et
+xml = urllib.urlopen('http://search.twitter.com/search.atom?&q=python').read()
+print et.parse(xml)
+
+
+
+# parallel processing
+import multiprocessing as mp
+def x2(v): return v * 2
+mp.Pool(3).map(x2, li)
+
+# prime number generation. (http://wiki.python.org/moin/SimplePrograms)
+import itertools
+def pgen():
+    numbers = itertools.count(2)
+    while True:
+        prime = numbers.next()
+        yield prime
+        numbers = itertools.ifilter(prime.__rmod__, numbers)
+
+itertools.islice(pgen(), 40)
\ No newline at end of file
diff --git a/seeker/snippet/main.py b/seeker/snippet/main.py
index d1f9b5ad..507a1af8 100644
--- a/seeker/snippet/main.py
+++ b/seeker/snippet/main.py
@@ -1,12 +1,20 @@
-#date: 2024-09-11T17:10:14Z
-#url: https://api.github.com/gists/dd08b2806979cb4b7228e7565046f675
+#date: 2024-09-12T17:06:45Z
+#url: https://api.github.com/gists/90f6017582cecd5f00eb132d45a73172
 #owner: https://api.github.com/users/mypy-play
 
-def handle(err: tuple[type[ValueError] | type[TypeError], ...]):
-    try:
-        return None
-    except err:
-        pass
+from dataclasses import asdict
+import dataclasses
 
 
-handle((ValueError, TypeError))
+@dataclasses.dataclass
+class Foo:
+    a_string: int = 1
+    a_float: str = "Hey"
+    invalid_param: None = None
+
+
+def bar(a_string: str, a_float: float) -> None:
+    ...
+
+foo = Foo()
+bar(**asdict(foo))
\ No newline at end of file
diff --git a/seeker/snippet/resize.py b/seeker/snippet/resize.py
deleted file mode 100644
index 5415c65f..00000000
--- a/seeker/snippet/resize.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#date: 2024-09-10T16:42:12Z
-#url: https://api.github.com/gists/f8c671c358e45348b2e6fbbd523a78c5
-#owner: https://api.github.com/users/samhaswon
-
-import argparse
-import multiprocessing
-import os
-import re
-import time
-
-from PIL import Image
-from PIL.Image import Image as PILImage
-
-
-def resize_image(img_path: str, max_size: int) -> None:
-    """
-    Resize a given image if it is greater than the specified size.
-    :param img_path: The path to the image to resize.
-    :param max_size: The maximum side length of the image.
-    :return: None
-    """
-    img: PILImage = Image.open(img_path)
-    if max(img.size) > max_size:
-        scale_factor = max_size / max(img.size)
-        img = img.resize((int(img.size[0] * scale_factor), int(img.size[1] * scale_factor)))
-        img.save(img_path)
-    img.close()
-
-
-if __name__ == '__main__':
-    start = time.perf_counter()
-    parser = argparse.ArgumentParser(
-        description="A script to convert all of the images in a directory to a given max side length."
-    )
-
-    parser.add_argument(
-        "-s",
-        "--size",
-        type=int,
-        default=1024,
-        help="The maximum side length of processed images"
-    )
-    parser.add_argument(
-        "-d",
-        "--directory",
-        type=str,
-        default=".",
-        help="The directory with images to process"
-    )
-
-    args = parser.parse_args()
-
-    assert os.path.isdir(args.directory), f"Input directory `{args.directory}` is not a directory"
-
-    print(f"Using path {args.directory} and size {args.size}")
-
-    # Get the list of image files
-    file_list = [
-        x.path
-        for x in os.scandir(args.directory)
-        if os.path.isfile(x.path) and re.search(r"\.(png|jpe?g|bmp|webp|jfif)$", x.path, re.IGNORECASE)
-    ]
-
-    print(f"Found {len(file_list)} images")
-
-    # Initialization of pool
-    num_processes = multiprocessing.cpu_count() - 1
-    pool = multiprocessing.Pool(processes=num_processes)
-
-    # Add files to the pool
-    for file in file_list:
-        pool.apply_async(resize_image, args=(file, args.size,))
-
-    # Start, do the work, and wait for results
-    pool.close()
-    pool.join()
-    end = time.perf_counter()
-    print("Done")
-    print(f"Took {end - start:.3f} seconds")