Started work on generating report data
commit 9f60da8 (1 parent: bc22aaa)
Showing 11 changed files with 201 additions and 16 deletions.
@@ -1,13 +1,19 @@
{
    "name": "rubedo",
    "build": {
        "dockerfile":"Dockerfile",
        "dockerfile": "Dockerfile",
        "args": {
            "USERNAME": "vscode",
            "BUILDKIT_INLINE_CACHE": "0"
        }
    }
    },
    "runArgs": ["--device=/dev/video2"],
    "extensions": ["ms-python.python", "076923.python-image-preview"]
    "runArgs": [
        "--device=/dev/video2"
    ],
    "customizations": {
        "extensions": [
            "ms-python.python",
            "076923.python-image-preview"
        ]
    }
}
@@ -0,0 +1,58 @@
from pprint import pprint
import klipper.gcode as g
from main import generate_pa_results_for_pattern, PRINT_START
from pa import *
from pa_result import PaResult
import pickle


def main():
    patterns: list[PatternInfo] = []
    for x in range(20, 286, 31):
        for y in range(20, 130, 45):
            patterns.append(
                PatternInfo(
                    0, 0.06,
                    x, y,
                    10,
                    30, 4
                ))

    # g.send_gcode(PRINT_START)
    # g.send_gcode("M109 S255")
    # g.send_gcode("CLEAN_NOZZLE")
    # for pattern in patterns:
    # g.send_gcode(generate_pa_tune_gcode(pattern, False))
    # g.send_gcode("G90;")
    # g.send_gcode(f"G1 X{FINISHED_X} Y{FINISHED_Y} F30000")
    # g.wait_until_printer_at_location(FINISHED_X, FINISHED_Y)
    # g.send_gcode("M104 S0; let the hotend cool")

    pa_scans: list[PaResult] = []

    for pattern in patterns:
        pa_scans.extend(
            zip(pattern.pa_values,
                generate_pa_results_for_pattern(
                    pattern
                ))
        )
        break

    with open("testing_adjustments.pkl", "wb") as f:
        pickle.dump(pa_scans, f)

    # results = generate_pa_results_for_pattern(calibration_pattern)

    # sorted_results = list(sorted(zip(results, calibration_pattern.pa_values), key=lambda x: x[0].score))
    # sorted_results = list([(x.score, y) for x, y in sorted_results])

    # best_pa_value = sorted_results[0][1]
    # print()
    # pprint(sorted_results)
    # print()
    # print(f"Recommended PA Value: {best_pa_value}, with a score of {sorted_results[0][0]}")
    # print()
    # g.send_gcode(f"SET_PRESSURE_ADVANCE ADVANCE={best_pa_value}")


if __name__=="__main__":
    main()
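The nested loops above build a grid of candidate pattern positions (x in range(20, 286, 31), y in range(20, 130, 45)), and the zip pairs each pattern's pa_values with the results returned by generate_pa_results_for_pattern, so despite the list[PaResult] annotation, pa_scans actually holds (pa_value, PaResult) tuples; note also that the scan loop currently breaks after the first pattern, so only that pattern's results are pickled. A minimal sketch of reloading the pickle for a quick sanity check, assuming pa_result.PaResult is importable at unpickling time and that each result exposes the score attribute used later in this commit:

# Hypothetical inspection snippet, not part of the commit.
import pickle

from pa_result import PaResult  # needed so pickle can reconstruct the stored objects

with open("testing_adjustments.pkl", "rb") as f:
    scans = pickle.load(f)

# Each entry is a (pa_value, PaResult) pair for one tested pressure-advance value.
for pa_value, result in scans:
    print(pa_value, result.score)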
@@ -0,0 +1,57 @@
import pickle
import matplotlib.pyplot as plt
from pprint import pprint
import numpy as np
from pa_result import PaResult

with open("testing_adjustments.pkl", "rb") as f:
    data: list[PaResult] = pickle.load(f)

pa_values = list([x[0] for x in data[:10]])

data_clean = list([(x, y.score) for x, y in data])
pprint(list(sorted(data_clean, key=lambda x: x[1])))
x, y = list(zip(*data_clean))
p = np.polyfit(x, y, 3)
plt.plot(pa_values, np.poly1d(p)(pa_values))
plt.scatter(x, y)
plt.plot(pa_values, np.poly1d(p)(pa_values))


from matplotlib.colors import LinearSegmentedColormap
from scipy.stats import gaussian_kde
from collections import Counter


# Calculate the point density
# xy = np.vstack([x,y])
# z = gaussian_kde(xy)(xy)

# fig, ax = plt.subplots()
# ax.set_xlabel("PA Value")
# ax.set_ylabel("Score")
# ax.scatter(x, y, c=z, s=100)
# ax.plot(pa_values, np.poly1d(p)(pa_values))


winning_results = []

for i in range(0, len(data_clean), 10):
    x = i
    individual_scan = list(sorted(data_clean[x:x+10], key=lambda x: x[1]))
    pprint(individual_scan[0])
    winning_results.append(individual_scan[0][0])
    # pprint(data_clean[x:x+10])

counter = Counter(winning_results)
print(counter)
fig, ax = plt.subplots()
ax.set_ylabel("Winning Frequency")
ax.set_xlabel("PA Value")
ax.bar(counter.keys(), counter.values(), width=0.06/10)

from visualization import generate_color_map, generate_3d_height_map
generate_color_map(data[3][1])
generate_3d_height_map(data[3][1])

plt.show()
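The cubic fit is only plotted above; a natural next step would be to read a single recommendation off the fitted curve. A small sketch of that idea, continuing from the p and pa_values variables defined in this script and assuming, as the per-scan "winning" selection above does, that a lower score is better:

# Hypothetical extension, not part of the commit: evaluate the fitted cubic on a
# dense grid of PA values and report the value that minimises the predicted score.
fit = np.poly1d(p)
dense_pa = np.linspace(min(pa_values), max(pa_values), 500)
best_pa = dense_pa[np.argmin(fit(dense_pa))]
print(f"PA value minimising the fitted score curve: {best_pa:.4f}")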
@@ -0,0 +1,29 @@
import numpy as np
control = \
[186.56256372513926,
 236.4280909963605,
 194.8965990884127,
 186.02849956667927,
 201.05698347607975,
 201.70169943918023,
 195.83328724309604,
 236.14796974386718,
 224.61775628475698,
 443.5180396174067
]
print("Average deviation of lines in control pattern")
print(np.average(control))
calibrated = \
[
 35.745380947164946,
 42.096965823872175,
 45.43428879223724,
 41.415640249952666,
 52.08084270611824,
 50.53732451711894,
 44.22630732805901,
 42.33189729658413,
 52.967477038659496
]
print("Average deviation of lines in calibrated pattern")
print(np.average(calibrated))
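Both lists hold per-line deviation values in the same units, so the comparison can be summarised as a relative reduction in one extra step; a small sketch continuing from the arrays above:

# Hypothetical follow-up, not part of the commit: express the calibrated result
# as a percentage reduction relative to the control pattern.
improvement = 1 - np.average(calibrated) / np.average(control)
print(f"Calibration reduced the average deviation by {improvement:.1%}")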