From 9cd6cc837522ed5409a81c8f544e6fca9812642f Mon Sep 17 00:00:00 2001 From: eduardocerqueira Date: Mon, 15 Jul 2024 17:11:39 +0000 Subject: [PATCH] 2024-07-15 17:11:39.721584 new snippets --- seeker/report.txt | 45 ++ seeker/snippet/A29.java | 72 -- seeker/snippet/CalcSLM.java | 669 ------------------ seeker/snippet/Dockerfile | 24 + seeker/snippet/GPA.py | 14 - seeker/snippet/bukuserver.sh | 64 -- seeker/snippet/buttsnatcher.sh | 25 - ...ot-generate-inventory-data-python-excel.py | 29 + ...lot-python-excel-customer-churn-dataset.py | 34 + seeker/snippet/copilot-python-fake-scores.py | 38 + seeker/snippet/copy_files | 72 -- seeker/snippet/create-socket-server.sh | 88 +++ seeker/snippet/deep_neural_network.py | 496 +++++++++++++ seeker/snippet/devbox | 75 -- seeker/snippet/enc.py | 58 -- seeker/snippet/example_mssql.py | 62 -- seeker/snippet/fetch_chrome_bypass.py | 19 - seeker/snippet/gistfile1.txt | 452 ------------ seeker/snippet/main.py | 30 - seeker/snippet/netip.Prefix_to_net.IPNet.go | 16 - seeker/snippet/pres.py | 62 -- ...on-script-100-fake-customers-excel-SEED.py | 32 + seeker/snippet/record_camera_stream.sh | 28 - seeker/snippet/s3.sh | 24 - seeker/snippet/shell | 574 --------------- seeker/snippet/spark-local.py | 24 - seeker/snippet/test.py | 81 +++ seeker/snippet/timer.py | 12 - seeker/snippet/us_inflation.py | 28 - seeker/snippet/us_unemployment.py | 30 - seeker/snippet/vxlan_hp.py | 24 - 31 files changed, 867 insertions(+), 2434 deletions(-) delete mode 100644 seeker/snippet/A29.java delete mode 100644 seeker/snippet/CalcSLM.java create mode 100644 seeker/snippet/Dockerfile delete mode 100644 seeker/snippet/GPA.py delete mode 100644 seeker/snippet/bukuserver.sh delete mode 100644 seeker/snippet/buttsnatcher.sh create mode 100644 seeker/snippet/copilot-generate-inventory-data-python-excel.py create mode 100644 seeker/snippet/copilot-python-excel-customer-churn-dataset.py create mode 100644 seeker/snippet/copilot-python-fake-scores.py delete mode 100644 seeker/snippet/copy_files create mode 100644 seeker/snippet/create-socket-server.sh create mode 100644 seeker/snippet/deep_neural_network.py delete mode 100644 seeker/snippet/devbox delete mode 100644 seeker/snippet/enc.py delete mode 100644 seeker/snippet/example_mssql.py delete mode 100644 seeker/snippet/fetch_chrome_bypass.py delete mode 100644 seeker/snippet/gistfile1.txt delete mode 100644 seeker/snippet/main.py delete mode 100644 seeker/snippet/netip.Prefix_to_net.IPNet.go delete mode 100644 seeker/snippet/pres.py create mode 100644 seeker/snippet/python-script-100-fake-customers-excel-SEED.py delete mode 100644 seeker/snippet/record_camera_stream.sh delete mode 100644 seeker/snippet/s3.sh delete mode 100644 seeker/snippet/shell delete mode 100644 seeker/snippet/spark-local.py create mode 100644 seeker/snippet/test.py delete mode 100644 seeker/snippet/timer.py delete mode 100644 seeker/snippet/us_inflation.py delete mode 100644 seeker/snippet/us_unemployment.py delete mode 100644 seeker/snippet/vxlan_hp.py diff --git a/seeker/report.txt b/seeker/report.txt index e9d70fd2..e6da4893 100644 --- a/seeker/report.txt +++ b/seeker/report.txt @@ -1,3 +1,48 @@ +-------------------------------------------------------------------------------- + 2024-07-15 17:11:39.721584 +-------------------------------------------------------------------------------- + On branch main +Your branch is up to date with 'origin/main'. + +Changes not staged for commit: + (use "git add/rm ..." to update what will be committed) + (use "git restore ..." 
 to discard changes in working directory)
+	deleted: snippet/A29.java
+	deleted: snippet/CalcSLM.java
+	deleted: snippet/GPA.py
+	deleted: snippet/bukuserver.sh
+	deleted: snippet/buttsnatcher.sh
+	deleted: snippet/copy_files
+	deleted: snippet/devbox
+	deleted: snippet/enc.py
+	deleted: snippet/example_mssql.py
+	deleted: snippet/fetch_chrome_bypass.py
+	deleted: snippet/gistfile1.txt
+	deleted: snippet/main.py
+	deleted: snippet/netip.Prefix_to_net.IPNet.go
+	deleted: snippet/pres.py
+	deleted: snippet/record_camera_stream.sh
+	deleted: snippet/s3.sh
+	deleted: snippet/shell
+	deleted: snippet/spark-local.py
+	deleted: snippet/timer.py
+	deleted: snippet/us_inflation.py
+	deleted: snippet/us_unemployment.py
+	deleted: snippet/vxlan_hp.py
+
+Untracked files:
+ (use "git add ..." to include in what will be committed)
+	snippet/Dockerfile
+	snippet/copilot-generate-inventory-data-python-excel.py
+	snippet/copilot-python-excel-customer-churn-dataset.py
+	snippet/copilot-python-fake-scores.py
+	snippet/create-socket-server.sh
+	snippet/deep_neural_network.py
+	snippet/python-script-100-fake-customers-excel-SEED.py
+	snippet/test.py
+
+no changes added to commit (use "git add" and/or "git commit -a")
+
 --------------------------------------------------------------------------------
  2024-07-12 17:12:24.016161
 --------------------------------------------------------------------------------
 diff --git a/seeker/snippet/A29.java b/seeker/snippet/A29.java deleted file mode 100644 index 55ecbef9..00000000 --- a/seeker/snippet/A29.java +++ /dev/null @@ -1,72 +0,0 @@ -//date: 2024-07-12T16:48:43Z -//url: https://api.github.com/gists/30efee98745cf1a8dde390307da4d6a6 -//owner: https://api.github.com/users/avii-7 - -// Max sum in sub-arrays ( 2 ) - -// Problem Link: https://www.geeksforgeeks.org/problems/max-sum-in-sub-arrays0824/0 - -// Brute Force Approach - -// TC -> O(n * n) -// SC -> O(1) - -// Thought Process -// 1. I will generate all the sub-arrays. -// 2. Whenever a new element enters the range, compare it against the smallest and second smallest numbers seen so far. -// 3. After accounting for the new number, if the pair sum is greater than maxSum then maxSum is replaced with that sum. - -public static long pairWithMaxSum(long arr[], long N) -{ - long maxSum = 0; - - for (int i = 0; i < N; i++) { - - // seed the window with arr[i]; a pair needs at least two elements - long s = arr[i], ss = Long.MAX_VALUE; - - for (int j = i + 1; j < N; j++) { - - if(arr[j] < s) { - ss = s; - s = arr[j]; - } - else if (arr[j] < ss) { - ss = arr[j]; - } - - long tSum = s + ss; - - if (tSum > maxSum) { - maxSum = tSum; - } - } - } - - return maxSum; -} - -// Optimal Approach - -// TC-> O(n) -// SC-> O(1) - -// Observation -// 1. I found that the smallest and second smallest numbers within a range i...j (where i < j) -// with the maximum sum are always contiguous (next to one another). - -// Thought Process -// 1. According to this observation, we only need to find the adjacent pair whose sum is greater than that of every other adjacent pair.
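-// Example: arr = {3, 7, 9, 2} -> adjacent pair sums are 10, 16 and 11; the answer is 16, from the contiguous pair (7, 9).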
- -public static long pairWithMaxSum(long arr[], long N) -{ - long maxSum = 0; - - for (int i = 0; i <= N - 2; i++) { - long tSum = arr[i] + arr[i + 1]; - if(tSum > maxSum) { - maxSum = tSum; - } - } - - return maxSum; -} \ No newline at end of file diff --git a/seeker/snippet/CalcSLM.java b/seeker/snippet/CalcSLM.java deleted file mode 100644 index bccc1691..00000000 --- a/seeker/snippet/CalcSLM.java +++ /dev/null @@ -1,669 +0,0 @@ -//date: 2024-07-11T16:52:08Z -//url: https://api.github.com/gists/e16d9842d2ed420d0832a880641a965d -//owner: https://api.github.com/users/voidregreso - -import java.util.ArrayList; -import java.util.Calendar; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; - -public class CalcSLM extends Thread { - private double[] AudioData; - private double[] AudioDataBn; - private double[] AudioDataLowPass; - private double[] AudioDataSampled; - private double[] AudioDataSampledPipe; - private double[] AudioDataW; - private double[][] Bn_Filters; - private int[] BufferBn; - private int BufferSize; - private double[] CoefWeightingSLM; - private double[] LBn_Filter; - private double[] L_Slm; - private double[] L_SlmMax; - private double[] L_SlmMin; - private double[] LevelBF; - private double[] LevelSlm; - private double[] LevelSlmMax; - private double[] LevelSlmMin; - private Thread Phase1_Thread; - private int SAMPLE_RATE; - private ArrayList SaveLBn_Filter; - private ArrayList SaveL_Slm; - private ArrayList Save_overload; - private double Tf; - private int[] indLowPass; - private boolean isCalculating; - private boolean isStart; - private double meanPerf; - private int[] orderBn; - private int orderBnMax; - private double sensitivity; - private boolean statReady; - private Calendar t0_Date; - private static final double[] TWeighting = {1.0d, 0.125d, 1.0d, 0.035d, 1.5d}; - private static final double[] AWeighting = {0.637605921940775d, -1.940217422347462d, 1.996000069128823d, -0.7221615386020247d, 0.02916470248461033d, -3.933847323141513E-4d, 1.652131944801045E-6d, 1.0d, -3.550748857713154d, 4.943978609188165d, -3.53015441273966d, 1.501577219173829d, -0.4338578237572062d, 0.06921004766942955d}; - private static final double[] CWeighting = {0.5422220786334342d, -1.296252058209947d, 0.9935212085773927d, -0.2682289334358095d, 0.03008915353445947d, -0.001368622564076657d, 1.721685882204256E-5d, 1.0d, -3.011599123196967d, 3.54393337432347d, -2.248441217721788d, 0.9681778734815181d, -0.3066016398682175d, 0.05453514142542087d}; - private static final double[][] LowPass_Filter = {new double[]{4.891921476299063E-4d, 0.001558712942757707d, 0.003350238749207854d, 0.005012408463830158d, 0.005731976022467833d, 0.005012408463830157d, 0.003350238749207854d, 0.001558712942757707d, 4.891921476299062E-4d, 1.0d, -4.643635213368573d, 10.61698225763713d, -15.12012474139609d, 14.49844180083628d, -9.516275430943946d, 4.159947555232781d, -1.105576882887453d, 0.1368549466917421d}, new double[]{2.42160288557058E-5d, -1.876847373837585E-4d, 6.420485962173066E-4d, -0.001266484471383131d, 0.001575809184486534d, -0.001266484471383131d, 6.420485962173066E-4d, -1.876847373837585E-4d, 2.42160288557058E-5d, 1.0d, -7.867587558439409d, 27.08841602770628d, -53.3100641358524d, 65.58970769582444d, -51.66094283071723d, 25.43832206749055d, -7.159709002407171d, 0.8818577364120841d}}; - private static final int[] orderB1 = {6, 6, 6, 6, 6, 6, 6, 6, 8}; - private static final double[][] B1_Filters = {new double[]{1.186808907920508E-4d, 0.0d, 
-3.560426723761523E-4d, 0.0d, 3.560426723761523E-4d, 0.0d, -1.186808907920508E-4d, 0.0d, 0.0d, 1.0d, -5.736720561981201d, 13.77312657265598d, -17.71344093094615d, 12.87054816043125d, -5.009573499062782d, 0.8160683512750361d, 0.0d, 0.0d}, new double[]{8.583990300664719E-4d, 0.0d, -0.002575197090199416d, 0.0d, 0.002575197090199416d, 0.0d, -8.583990300664719E-4d, 0.0d, 0.0d, 1.0d, -5.364585518836975d, 12.20957962613471d, -15.08141755377497d, 10.66187005129953d, -4.091247086987422d, 0.6662582471549228d, 0.0d, 0.0d}, new double[]{0.005733959336021465d, 0.0d, -0.0172018780080644d, 0.0d, 0.0172018780080644d, 0.0d, -0.005733959336021465d, 0.0d, 0.0d, 1.0d, -4.351979374885559d, 8.563496296075726d, -9.628727083802332d, 6.520965611831645d, -2.523748002161262d, 0.4428190358067736d, 0.0d, 0.0d}, new double[]{0.0338794161018783d, 0.0d, -0.1016382483056349d, 0.0d, 0.1016382483056349d, 0.0d, -0.0338794161018783d, 0.0d, 0.0d, 1.0d, -1.67687154293526d, 2.385273648341682d, -1.933224356495606d, 1.416329842942955d, -0.5478129902514082d, 0.1885499246096933d, 0.0d, 0.0d}, new double[]{1.154560921011158E-4d, 0.0d, -3.463682763033474E-4d, 0.0d, 3.463682763033474E-4d, 0.0d, -1.154560921011158E-4d, 0.0d, 0.0d, 1.0d, -5.739747285842896d, 13.78651129794173d, -17.7370659997152d, 12.89131543863709d, -5.018643048196507d, 0.8176372494634873d, 0.0d, 0.0d}, new double[]{8.357577994062452E-4d, 0.0d, -0.002507273398218736d, 0.0d, 0.002507273398218736d, 0.0d, -8.357577994062452E-4d, 0.0d, 0.0d, 1.0d, -5.372561573982239d, 12.24160225265874d, -15.13315663940789d, 10.70360360693644d, -4.107880713713312d, 0.6688264223231669d, 0.0d, 0.0d}, new double[]{0.005590466789926769d, 0.0d, -0.01677140036978031d, 0.0d, 0.01677140036978031d, 0.0d, -0.005590466789926769d, 0.0d, 0.0d, 1.0d, -4.373937487602234d, 8.634425601343864d, -9.72745758338127d, 6.592176697956385d, -2.549830587506132d, 0.4462880346405692d, 0.0d, 0.0d}, new double[]{0.03309486714290591d, 0.0d, -0.09928460142871774d, 0.0d, 0.09928460142871774d, 0.0d, -0.03309486714290591d, 0.0d, 0.0d, 1.0d, -1.731230748817325d, 2.462915791967629d, -2.02291715483654d, 1.466704249750132d, -0.5719930544512775d, 0.1917918830801162d, 0.0d, 0.0d}, new double[]{0.01073553644049072d, 0.0d, -0.0429421457619629d, 0.0d, 0.06441321864294436d, 0.0d, -0.0429421457619629d, 0.0d, 0.01073553644049072d, 1.0d, -2.335352343507111d, 4.027699429508178d, -4.467295165177466d, 4.102496696125923d, -2.645320442985966d, 1.400015277793566d, -0.4595816325049209d, 0.1163159043625799d}}; - private static final int[] orderB3 = {6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 8, 8}; - private static final double[][] B3_Filters = {new double[]{2.247531075955384E-6d, 0.0d, -6.74259322786615E-6d, 0.0d, 6.74259322786615E-6d, 0.0d, -2.247531075955384E-6d, 0.0d, 0.0d, 1.0d, -5.908185958862305d, 14.5831495269493d, -19.2483129456045d, 14.32844929968648d, -5.703612667735593d, 0.9485149359622779d, 0.0d, 0.0d}, new double[]{4.454284185062785E-6d, 0.0d, -1.336285255518836E-5d, 0.0d, 1.336285255518836E-5d, 0.0d, -4.454284185062785E-6d, 0.0d, 0.0d, 1.0d, -5.871894001960754d, 14.42721209706313d, -18.98454862126826d, 14.11070270751768d, -5.617084272777452d, 0.9356207363462664d, 0.0d, 0.0d}, new double[]{8.812644364217943E-6d, 0.0d, -2.643793309265383E-5d, 0.0d, 2.643793309265383E-5d, 0.0d, -8.812644364217943E-6d, 0.0d, 0.0d, 1.0d, -5.819034099578857d, 14.20438317484201d, -18.61519977140609d, 13.81317705686606d, -5.50292818176776d, 0.9196358437719515d, 0.0d, 0.0d}, new double[]{1.73982807553103E-5d, 0.0d, 
-5.219484226593089E-5d, 0.0d, 5.219484226593089E-5d, 0.0d, -1.73982807553103E-5d, 0.0d, 0.0d, 1.0d, -5.741295695304871d, 13.88373104480404d, -18.09480807401687d, 13.40404079819452d, -5.351432382856329d, 0.8998976841700376d, 0.0d, 0.0d}, new double[]{3.425711262592332E-5d, 0.0d, -1.0277133787777E-4d, 0.0d, 1.0277133787777E-4d, 0.0d, -3.425711262592332E-5d, 0.0d, 0.0d, 1.0d, -5.626070737838745d, 13.42071249583357d, -17.35992591322977d, 12.83952356749229d, -5.149365645023486d, 0.8756462280193738d, 0.0d, 0.0d}, new double[]{6.722960901370481E-5d, 0.0d, -2.016888270411144E-4d, 0.0d, 2.016888270411144E-4d, 0.0d, -6.722960901370481E-5d, 0.0d, 0.0d, 1.0d, -5.454339742660523d, 12.75332483134355d, -16.32590943314617d, 12.06185322797567d, -4.878952103408239d, 0.8460348265420987d, 0.0d, 0.0d}, new double[]{1.314002783813929E-4d, 0.0d, -3.942008351441787E-4d, 0.0d, 3.942008351441787E-4d, 0.0d, -1.314002783813929E-4d, 0.0d, 0.0d, 1.0d, -5.197742342948914d, 11.80065680747373d, -14.88910948266056d, 11.0006963140012d, -4.516967514955222d, 0.8101614870724939d, 0.0d, 0.0d}, new double[]{2.555337288123694E-4d, 0.0d, -7.666011864371083E-4d, 0.0d, 7.666011864371083E-4d, 0.0d, -2.555337288123694E-4d, 0.0d, 0.0d, 1.0d, -4.814867854118347d, 10.4703548538738d, -12.94353414845231d, 9.584461238380177d, -4.034580271982686d, 0.7671269172053471d, 0.0d, 0.0d}, new double[]{4.938881894279072E-4d, 0.0d, -0.001481664568283722d, 0.0d, 0.001481664568283722d, 0.0d, -4.938881894279072E-4d, 0.0d, 0.0d, 1.0d, -4.24744188785553d, 8.689575411579455d, -10.42652421992687d, 7.773995140410766d, -3.399347105028015d, 0.7161331571043735d, 0.0d, 0.0d}, new double[]{9.474716690347286E-4d, 0.0d, -0.002842415007104186d, 0.0d, 0.002842415007104186d, 0.0d, -9.474716690347286E-4d, 0.0d, 0.0d, 1.0d, -3.41880214214325d, 6.486257841196675d, -7.402982023928352d, 5.637895618659915d, -2.582069960277061d, 0.6566321328498096d, 0.0d, 0.0d}, new double[]{0.001801388710961576d, 0.0d, -0.005404166132884728d, 0.0d, 0.005404166132884728d, 0.0d, -0.001801388710961576d, 0.0d, 0.0d, 1.0d, -2.241030097007752d, 4.153248198352895d, -4.146587508476247d, 3.482636992008473d, -1.572979333731574d, 0.5885295013225172d, 0.0d, 0.0d}, new double[]{0.003388669643023482d, 0.0d, -0.01016600892907045d, 0.0d, 0.01016600892907045d, 0.0d, -0.003388669643023482d, 0.0d, 0.0d, 1.0d, -0.6450859904289246d, 2.476953038568834d, -1.028821979559386d, 1.988405959631875d, -0.4126526440600777d, 0.512435665485136d, 0.0d, 0.0d}, new double[]{2.185006467979256E-6d, 0.0d, -6.555019403937767E-6d, 0.0d, 6.555019403937767E-6d, 0.0d, -2.185006467979256E-6d, 0.0d, 0.0d, 1.0d, -5.90941321849823d, 14.58847471587961d, -19.2574184270465d, 14.33606663366243d, -5.706696117514818d, 0.9489884833621957d, 0.0d, 0.0d}, new double[]{4.330640939262228E-6d, 0.0d, -1.299192281778668E-5d, 0.0d, 1.299192281778668E-5d, 0.0d, -4.330640939262228E-6d, 0.0d, 0.0d, 1.0d, -5.873671054840088d, 14.43478497719779d, -18.99724314517697d, 14.12106719333461d, -5.621138628261475d, 0.9362088276512939d, 0.0d, 0.0d}, new double[]{8.56869148186766E-6d, 0.0d, -2.570607444560298E-5d, 0.0d, 2.570607444560298E-5d, 0.0d, -8.56869148186766E-6d, 0.0d, 0.0d, 1.0d, -5.821634292602539d, 14.21524359946336d, -18.63303373007432d, 13.82738450903581d, -5.508291517242435d, 0.9203635889828207d, 0.0d, 0.0d}, new double[]{1.691831217931189E-5d, 0.0d, -5.075493653793567E-5d, 0.0d, 5.075493653793567E-5d, 0.0d, -1.691831217931189E-5d, 0.0d, 0.0d, 1.0d, -5.745134353637695d, 13.89939452902026d, -18.11998100732892d, 13.42362074783273d, -5.358568356560257d, 
0.900794528211528d, 0.0d, 0.0d}, new double[]{3.331609987025772E-5d, 0.0d, -9.994829961077316E-5d, 0.0d, 9.994829961077316E-5d, 0.0d, -3.331609987025772E-5d, 0.0d, 0.0d, 1.0d, -5.631776928901672d, 13.44333549744678d, -17.39545765995817d, 12.8665469242259d, -5.158901239475906d, 0.8767451193332473d, 0.0d, 0.0d}, new double[]{6.539271571118053E-5d, 0.0d, -1.961781471335416E-4d, 0.0d, 1.961781471335416E-4d, 0.0d, -6.539271571118053E-5d, 0.0d, 0.0d, 1.0d, -5.462858557701111d, 12.78584249658385d, -16.37571033639785d, 12.0989801152722d, -4.891723000549375d, 0.8473722003635806d, 0.0d, 0.0d}, new double[]{1.278338211668953E-4d, 0.0d, -3.835014635006857E-4d, 0.0d, 3.835014635006857E-4d, 0.0d, -1.278338211668953E-4d, 0.0d, 0.0d, 1.0d, -5.210472583770752d, 11.84673145256947d, -14.95768533730357d, 11.05097986535167d, -4.534044903986594d, 0.8117749908448659d, 0.0d, 0.0d}, new double[]{2.486547323161881E-4d, 0.0d, -7.459641969485643E-4d, 0.0d, 7.459641969485643E-4d, 0.0d, -2.486547323161881E-4d, 0.0d, 0.0d, 1.0d, -4.833824992179871d, 10.53374049040153d, -13.03483687577073d, 9.65054994003544d, -4.057247749772237d, 0.7690522689636592d, 0.0d, 0.0d}, new double[]{4.807263118290098E-4d, 0.0d, -0.00144217893548703d, 0.0d, 0.00144217893548703d, 0.0d, -4.807263118290098E-4d, 0.0d, 0.0d, 1.0d, -4.275396227836609d, 8.77207944698992d, -10.54130781077834d, 7.856072987563412d, -3.42894873851875d, 0.718399585039945d, 0.0d, 0.0d}, new double[]{9.225320882914392E-4d, 0.0d, -0.002767596264874317d, 0.0d, 0.002767596264874317d, 0.0d, -9.225320882914392E-4d, 0.0d, 0.0d, 1.0d, -3.459233403205872d, 6.582942436731614d, -7.534759836936225d, 5.729497179961133d, -2.619578017564804d, 0.6592547878122365d, 0.0d, 0.0d}, new double[]{0.001754676550589477d, 0.0d, -0.005264029651768433d, 0.0d, 0.005264029651768433d, 0.0d, -0.001754676550589477d, 0.0d, 0.0d, 1.0d, -2.297515153884888d, 4.243851158357998d, -4.280097096758611d, 3.564417506291639d, -1.61807231896086d, 0.5915002072154247d, 0.0d, 0.0d}, new double[]{0.003302357833713145d, 0.0d, -0.009907073501139434d, 0.0d, 0.009907073501139434d, 0.0d, -0.003302357833713145d, 0.0d, 0.0d, 1.0d, -0.7193038631230593d, 2.517093053687256d, -1.152646750327178d, 2.024601855606834d, -0.4621009175241297d, 0.515711909010813d, 0.0d, 0.0d}, new double[]{9.163981698758372E-4d, 0.0d, -0.002749194509627512d, 0.0d, 0.002749194509627512d, 0.0d, -9.163981698758372E-4d, 0.0d, 0.0d, 1.0d, -3.469249248504639d, 6.607064461673431d, -7.567635882985316d, 5.752380082997649d, -2.628903058590546d, 0.659908107063422d, 0.0d, 0.0d}, new double[]{0.001743184111712447d, 0.0d, -0.005229552335137342d, 0.0d, 0.005229552335137342d, 0.0d, -0.001743184111712447d, 0.0d, 0.0d, 1.0d, -2.311524033546448d, 4.266652970796883d, -4.313518143349423d, 3.585022561850007d, -1.629302779958307d, 0.5922407511197738d, 0.0d, 0.0d}, new double[]{0.003281114335270523d, 0.0d, -0.009843343005811567d, 0.0d, 0.009843343005811567d, 0.0d, -0.003281114335270523d, 0.0d, 0.0d, 1.0d, -0.7377489320933819d, 2.52764094448969d, -1.18364273332719d, 2.034078779999315d, -0.4744544473687313d, 0.5165292102769342d, 0.0d, 0.0d}, new double[]{0.001126491637756248d, 0.0d, -0.004505966551024993d, 0.0d, 0.00675894982653749d, 0.0d, -0.004505966551024993d, 0.0d, 0.001126491637756248d, 1.0d, 1.645266555249691d, 3.94309722642277d, 3.940206302206695d, 4.871859807643263d, 3.004812381014974d, 2.295940652475144d, 0.7248825420574899d, 0.3359411160777301d}, new double[]{0.00253383237425594d, 0.0d, -0.01013532949702376d, 0.0d, 0.01520299424553564d, 0.0d, -0.01013532949702376d, 0.0d, 
0.00253383237425594d, 1.0d, 4.442366659641266d, 10.11584021855619d, 14.65311957326574d, 14.70720468941874d, 10.37912621362843d, 5.069909436670455d, 1.572990921433747d, 0.2515307456097444d}}; - private boolean isSLM = true; - private boolean isOverload = false; - private boolean pauseCalc = true; - private boolean isRealTime = true; - private int cpt = 0; - private boolean isBnFilters = false; - private boolean isExpW = true; - private int FreqWselect = 1; - private int BnFilter = 0; - private double tW = 0.025d; - private final Object mPauseLock = new Object(); - private boolean isPause = true; - - public CalcSLM(boolean z, int i, int i2, double d) { - this.isStart = z; - this.SAMPLE_RATE = i; - this.BufferSize = i2; - this.sensitivity = d; - this.isCalculating = false; - initBuffers(); - } - - /** OK! **/ - public void initBuffers() { - pauseCalc = true; - while (isCalculating) { - try { - Thread.sleep(10L); - } catch (Exception e) { - e.printStackTrace(); - } - } - LevelSlm = new double[12]; - LevelSlmMax = new double[9]; - LevelSlmMin = new double[]{1.0E20d, 1.0E20d, 1.0E20d, 1.0E20d, 1.0E20d, 1.0E20d, 1.0E20d, 1.0E20d, 1.0E20d}; - L_Slm = new double[16]; - L_SlmMax = new double[9]; - L_SlmMin = new double[9]; - CoefWeightingSLM = new double[5]; - CoefWeightingSLM[1] = 1.0d; - for (int i = 1; i < 5; i++) { - CoefWeightingSLM[i] = Math.exp(((-1.0d) / SAMPLE_RATE) / TWeighting[i]); - } - isOverload = false; - AudioData = new double[BufferSize]; - AudioDataW = new double[(BufferSize + 6) * 3]; - cpt = 0; - statReady = false; - orderBnMax = 0; - orderBn = BnFilter == 1 ? orderB1.clone() : orderB3.clone(); - for (int ord : orderBn) { - if (orderBnMax < ord) orderBnMax = ord; - } - indLowPass = BnFilter == 1 ? new int[]{4, 8} : new int[]{12, 24}; - BufferBn = BnFilter == 1 ? new int[]{BufferSize / 32, BufferSize / 32, BufferSize / 32, - BufferSize / 32, BufferSize / 2, BufferSize / 2, BufferSize / 2, BufferSize / 2, BufferSize} : - new int[]{BufferSize / 32, BufferSize / 32, BufferSize / 32, BufferSize / 32, BufferSize / 32, BufferSize / 32, - BufferSize / 32, BufferSize / 32, BufferSize / 32, BufferSize / 32, BufferSize / 32, BufferSize / 32, - BufferSize / 2, BufferSize / 2, BufferSize / 2, BufferSize / 2, BufferSize / 2, BufferSize / 2, - BufferSize / 2, BufferSize / 2, BufferSize / 2, BufferSize / 2, BufferSize / 2, BufferSize / 2, - BufferSize, BufferSize, BufferSize, BufferSize, BufferSize}; - Bn_Filters = BnFilter == 1 ? B1_Filters.clone() : B3_Filters.clone(); - AudioDataBn = BnFilter == 1 ? new double[((BufferSize / 32) * 4) + ((BufferSize / 2) * 4) + BufferSize + 24 + 24 + 8] : - new double[((BufferSize / 32) * 12) + ((BufferSize / 2) * 12) + (BufferSize * 5) + 72 + 72 + 18 + 16]; - AudioDataLowPass = new double[(BufferSize + 8) * 3]; - AudioDataSampled = new double[(BufferSize / 2) + BufferSize + (BufferSize / 32) + (orderBnMax * 3)]; - AudioDataSampledPipe = new double[(BufferSize / 2) + BufferSize + (BufferSize / 32) + (orderBnMax * 3)]; - LevelBF = new double[orderBn.length + 1]; - LBn_Filter = new double[orderBn.length + 1]; - Tf = (1.0d / SAMPLE_RATE) * BufferSize; - pauseCalc = false; - SaveLBn_Filter = new ArrayList<>(); - SaveL_Slm = new ArrayList<>(); - t0_Date = Calendar.getInstance(); - } - - public void onPause() { - synchronized (this.mPauseLock) { - this.isPause = true; - } - } - - public void onResume() { - synchronized (this.mPauseLock) { - this.isPause = false; - this.mPauseLock.notify(); - } - } - - public void onStop() { - this.isStart = false; - } - - /** OK! 
**/ - public void global_filters() { - double d = 0.0d; - for (int i : this.orderBn) { - d += this.LevelBF[i]; - } - this.LBn_Filter[this.orderBn.length] = Math.log10(d) * 10.0d; - if (this.isExpW) { - return; - } - this.LBn_Filter[this.orderBn.length] -= Math.log10(this.Tf * this.cpt) * 10.0d; - } - - /** OK! **/ - @Override - public void run() { - double[] dArr = new double[25]; - while (this.isStart) { - synchronized (this.mPauseLock) { - while (this.isPause) { - try { - this.mPauseLock.wait(); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - } - this.isPause = true; - long nanoTime = System.nanoTime(); - if (!this.pauseCalc) { - this.isCalculating = true; - if (this.isBnFilters) { - BandFilter(); - } else { - FreqWeightings(); - IntegrateSLM(); - } - if (this.cpt > 0) { - this.statReady = true; - saveData(); - } - this.cpt++; - } else { - this.isCalculating = false; - } - long nanoTime2 = System.nanoTime(); - double d = 0.0d; - for (int i = 0; i < 24; i++) { - dArr[i] = dArr[i + 1]; - d += dArr[i]; - } - dArr[24] = (nanoTime2 - nanoTime) * 1.0E-6d; - this.meanPerf = (((d + dArr[24]) / 25.0d) / (((1.0d / this.SAMPLE_RATE) * this.BufferSize) * 1000.0d)) * 100.0d; - this.isRealTime = this.meanPerf <= 100.0d; - } - } - - public void saveData() { - if (this.isBnFilters) { - this.SaveLBn_Filter.add(this.LBn_Filter.clone()); - } else { - this.SaveL_Slm.add(this.L_Slm.clone()); - } - } - - public void setAudioData(double[] dArr) { - this.AudioData = dArr; - } - - /** OK! **/ - public void FreqWeightings() { - for (int i = 0; i < 6; i++) { - AudioDataW[i] = AudioDataW[i + BufferSize]; - AudioDataW[BufferSize + 6 + i] = AudioDataW[BufferSize + 6 + i + BufferSize]; - AudioDataW[(BufferSize + 6) * 2 + i] = AudioDataW[(BufferSize + 6) * 2 + i + BufferSize]; - } - int overloadCount = 0; - for (int i = 6; i < BufferSize + 6; i++) { - AudioDataW[i] = AudioData[i - 6] / sensitivity; - if (AudioData[i - 6] > 32767.0 || AudioData[i - 6] < -32767.0) { - overloadCount++; - if (overloadCount > 3) { - isOverload = true; - } - } else { - overloadCount = 0; - } - AudioDataW[i + BufferSize + 6] = AWeighting[0] * AudioDataW[i]; - AudioDataW[(BufferSize + 6) * 2 + i] = CWeighting[0] * AudioDataW[i]; - for (int j = 1; j <= 6; j++) { - int index = i + BufferSize + 6; - AudioDataW[index] += (AWeighting[j] * AudioDataW[i - j]) - (AWeighting[j + 6 + 1] * AudioDataW[index - j]); - int cIndex = (BufferSize + 6) * 2 + i; - AudioDataW[cIndex] += (CWeighting[j] * AudioDataW[i - j]) - (CWeighting[j + 6 + 1] * AudioDataW[cIndex - j]); - } - } - } - - /** OK! **/ - public void IntegrateSLM() { - double[] tempSum = new double[3]; - - // First pass: calculate temporary sums - for (int i = 0; i < BufferSize; i++) { - for (int j = 0; j < 3; j++) { - int baseIndex = ((j + 1) * 6 + BufferSize * j) + i; - double energySum = calculateEnergy(AudioDataW[baseIndex - 1], AudioDataW[baseIndex]); - - if (cpt > 0) { - LevelSlm[j * 4] += energySum; - } - tempSum[j] += energySum; - } - } - - // Second pass: update levels - for (int i = 0; i < BufferSize; i++) { - for (int j = 0; j < 3; j++) { - int baseIndex = ((j + 1) * 6 + BufferSize * j) + i; - double energy = calculateEnergy(AudioDataW[baseIndex - 1], AudioDataW[baseIndex]); - - updateLevels(j, energy, tempSum[j]); - } - } - - // Final calculations - calculateFinalLevels(); - } - - /** OK! **/ - private double calculateEnergy(double sample1, double sample2) { - return ((1.0 / SAMPLE_RATE) * 2.5E9 * (sample1 * sample1 + sample2 * sample2)) / 2.0; - } - - /** OK! 
**/ - private void updateLevels(int index, double energy, double tempSum) { - for (int k = 1; k < TWeighting.length - 1; k++) { - int levelIndex = index * 4 + k; - double weightedEnergy = energy / TWeighting[k]; - - if (k == 3) { - double threshold = tempSum / Tf; - if (threshold > LevelSlm[levelIndex]) { - LevelSlm[levelIndex] = LevelSlm[levelIndex] * CoefWeightingSLM[k] + weightedEnergy; - } else { - LevelSlm[levelIndex] = LevelSlm[levelIndex] * CoefWeightingSLM[4] + weightedEnergy / TWeighting[4]; - } - } else if (cpt > 0) { - LevelSlm[levelIndex] = LevelSlm[levelIndex] * CoefWeightingSLM[k] + weightedEnergy; - } else { - LevelSlm[levelIndex] = LevelSlm[levelIndex] * CoefWeightingSLM[3] + energy / TWeighting[3]; - } - - updateMaxMinLevels(index, k, levelIndex); - } - } - - /** OK! **/ - private void updateMaxMinLevels(int index, int k, int levelIndex) { - if (statReady) { - int statIndex = (k - 1) + (index * 3); - if (LevelSlmMax[statIndex] < LevelSlm[levelIndex]) { - LevelSlmMax[statIndex] = LevelSlm[levelIndex]; - } - if (LevelSlmMin[statIndex] > LevelSlm[levelIndex]) { - LevelSlmMin[statIndex] = LevelSlm[levelIndex]; - } - } - } - - /** OK! **/ - private void calculateFinalLevels() { - for (int i = 0; i < 3; i++) { - int baseIndex = i * 5; - if (cpt > 0) { - L_Slm[baseIndex] = 10.0 * Math.log10(LevelSlm[i * 4]); - L_Slm[baseIndex + 1] = L_Slm[baseIndex] - 10.0 * Math.log10(Tf * cpt); - } - for (int j = 2; j < 5; j++) { - L_Slm[baseIndex + j] = 10.0 * Math.log10(LevelSlm[(j - 1) + (i * 4)]); - int statIndex = (j - 2) + (i * 3); - L_SlmMax[statIndex] = 10.0 * Math.log10(LevelSlmMax[statIndex]); - L_SlmMin[statIndex] = 10.0 * Math.log10(LevelSlmMin[statIndex]); - } - } - if (cpt > 0) { - L_Slm[15] = L_Slm[6] + 10.0 * Math.log10((Tf * cpt) / 28800.0); - } - } - - public boolean getIsCalculating() { - return this.isCalculating; - } - - public void setIsSLM(boolean z) { - this.isSLM = z; - } - - public void setpauseCalc(boolean z) { - this.pauseCalc = z; - } - - public double getMeanPerf() { - return this.meanPerf; - } - - public double[] getL_Slm() { - return this.L_Slm; - } - - public double[] getL_SlmMax() { - return this.L_SlmMax; - } - - public double[] getL_SlmMin() { - return this.L_SlmMin; - } - - public void resetSLM() { - initBuffers(); - } - - public boolean getisOverload() { - return this.isOverload; - } - - public double getSensitivity() { - return this.sensitivity; - } - - public void setSensitivity(double d) { - this.sensitivity = d; - } - - public void setisOverload(boolean z) { - this.isOverload = z; - } - - public boolean getiSRealTime() { - return this.isRealTime; - } - - public ArrayList getSaveL_Slm() { - return this.SaveL_Slm; - } - - public ArrayList getSaveLBn_Filter() { - return this.SaveLBn_Filter; - } - - public ArrayList getSave_overload() { - return this.Save_overload; - } - - public Calendar getT0_Date() { - return this.t0_Date; - } - - public int getCpt() { - return this.cpt - 1; - } - - public double getTf() { - return this.Tf; - } - - /** OK! 
**/ - public void BandFilter() { - // Phase1 Thread - this.Phase1_Thread = new Thread(new Runnable() { - @Override - public void run() { - CalcSLM.this.FreqWeightings(); - CalcSLM.this.LowPassFilter(CalcSLM.this.FreqWselect); - CalcSLM.this.SampledBuffer(); - } - }, "Phase1 Thread"); - this.Phase1_Thread.start(); - - // ExecutorService for BnFilters - ExecutorService executorService1 = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()); - for (int i = 0; i < this.orderBn.length; i++) { - executorService1.execute(new BnFilters(i)); - } - executorService1.shutdown(); - - // Wait for phase1Thread and executorService1 to complete - try { - this.Phase1_Thread.join(); - executorService1.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } - - // Copy AudioDataSampled to AudioDataSampledPipe - System.arraycopy(this.AudioDataSampled, 0, this.AudioDataSampledPipe, 0, this.AudioDataSampled.length); - - // ExecutorService for IntegrateFilters - ExecutorService executorService2 = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()); - for (int i = 0; i < this.orderBn.length; i++) { - executorService2.execute(new IntegrateFilters(i)); - } - executorService2.shutdown(); - - // Wait for executorService2 to complete - try { - executorService2.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } - // Final global filters - global_filters(); - } - - /** OK! **/ - public void LowPassFilter(int n) { - int bufferSize = this.BufferSize; - for (int i = 0; i < 8; ++i) { - AudioDataLowPass[i] = AudioDataLowPass[i + bufferSize]; - AudioDataLowPass[bufferSize + 8 + i] = AudioDataLowPass[bufferSize + 8 + i + bufferSize]; - AudioDataLowPass[(bufferSize + 8) * 2 + i] = AudioDataLowPass[(bufferSize + 8) * 2 + i + bufferSize]; - } - for (int n2 = 8; n2 < bufferSize + 8; n2++) { - AudioDataLowPass[n2] = AudioDataW[(bufferSize + 6) * n + n2 - 2]; - AudioDataLowPass[n2 + bufferSize + 8] = LowPass_Filter[0][0] * AudioDataLowPass[n2]; - AudioDataLowPass[(bufferSize + 8) * 2 + n2] = LowPass_Filter[1][0] * AudioDataLowPass[n2]; - for (int j = 1; j <= 8; j++) { - int n3 = n2 + bufferSize + 8; - int n10 = (bufferSize + 8) * 2 + n2; - int n6 = n2 - j; - int n9 = j + 1 + 8; - AudioDataLowPass[n3] += LowPass_Filter[0][j] * AudioDataLowPass[n6] - LowPass_Filter[0][n9] * AudioDataLowPass[n3 - j]; - AudioDataLowPass[n10] += LowPass_Filter[1][j] * AudioDataLowPass[n6] - LowPass_Filter[1][n9] * AudioDataLowPass[n10 - j]; - } - } - } - - /** OK! 
**/ - public void SampledBuffer() { - int halfBufferSize = this.BufferSize / 2; - int oneThirtySecondBufferSize = this.BufferSize / 32; - - for (int i = 0; i < this.orderBnMax; i++) { - double[] dArr = this.AudioDataSampled; - int i4 = this.BufferSize; - dArr[i] = dArr[i + i4]; - dArr[i4 + this.orderBnMax + i] = dArr[i4 + this.orderBnMax + i + halfBufferSize]; - dArr[i4 + this.orderBnMax + halfBufferSize + this.orderBnMax + i] = dArr[i4 + this.orderBnMax + halfBufferSize + this.orderBnMax + i + oneThirtySecondBufferSize]; - } - - System.arraycopy(this.AudioDataLowPass, 8, this.AudioDataSampled, this.orderBnMax, this.BufferSize); - - for (int i = 0; i < halfBufferSize; i++) { - this.AudioDataSampled[i + this.BufferSize + (this.orderBnMax * 2)] = this.AudioDataLowPass[this.BufferSize + 16 + (i * 2)]; - } - - for (int i = 0; i < oneThirtySecondBufferSize; i++) { - this.AudioDataSampled[i + this.BufferSize + (this.orderBnMax * 3) + halfBufferSize] = this.AudioDataLowPass[this.BufferSize + 24 + this.BufferSize + (i * 32)]; - } - } - - /** OK! **/ - public class BnFilters implements Runnable { - private int m; - - public BnFilters(int i) { - this.m = i; - } - - @Override - public void run() { - int totalOrderBn = 0; - for (int i = 0; i < m; i++) { - totalOrderBn += CalcSLM.this.BufferBn[i] + CalcSLM.this.orderBn[i]; - } - - for (int i = 0; i < CalcSLM.this.orderBn[m]; i++) { - CalcSLM.this.AudioDataBn[i + totalOrderBn] = CalcSLM.this.AudioDataBn[CalcSLM.this.BufferBn[m] + i + totalOrderBn]; - } - - int bufferOffset = CalcSLM.this.BufferSize + CalcSLM.this.orderBnMax * 3 + CalcSLM.this.BufferSize / 2; - int orderOffset = CalcSLM.this.orderBn[m] + totalOrderBn; - - if (m < CalcSLM.this.indLowPass[0]) { - processAudioData(bufferOffset, orderOffset, 0); - } else if (m < CalcSLM.this.indLowPass[1]) { - processAudioData(CalcSLM.this.BufferSize + CalcSLM.this.orderBnMax * 2, orderOffset, 0); - } else { - processAudioData(CalcSLM.this.orderBnMax, orderOffset, 6); - } - } - - private void processAudioData(int bufferOffset, int orderOffset, int extraFilterCount) { - double[] filters = CalcSLM.this.Bn_Filters[m]; - for (int i = 0; i < CalcSLM.this.BufferBn[m]; i++) { - double result = calculateFilteredValue(filters, bufferOffset, orderOffset, i); - - if (extraFilterCount > 0) { - result += calculateExtraFilteredValue(filters, bufferOffset, orderOffset, i); - } - - CalcSLM.this.AudioDataBn[orderOffset + i] = result; - } - } - - private double calculateFilteredValue(double[] filters, int bufferOffset, int orderOffset, int index) { - return filters[0] * CalcSLM.this.AudioDataSampledPipe[bufferOffset + index] - - filters[10] * CalcSLM.this.AudioDataBn[orderOffset + index - 1] - + filters[2] * CalcSLM.this.AudioDataSampledPipe[bufferOffset + index - 2] - - filters[11] * CalcSLM.this.AudioDataBn[orderOffset + index - 2] - - filters[12] * CalcSLM.this.AudioDataBn[orderOffset + index - 3] - + filters[4] * CalcSLM.this.AudioDataSampledPipe[bufferOffset + index - 4] - - filters[13] * CalcSLM.this.AudioDataBn[orderOffset + index - 4] - - filters[14] * CalcSLM.this.AudioDataBn[orderOffset + index - 5] - + filters[6] * CalcSLM.this.AudioDataSampledPipe[bufferOffset + index - 6] - - filters[15] * CalcSLM.this.AudioDataBn[orderOffset + index - 6]; - } - - private double calculateExtraFilteredValue(double[] filters, int bufferOffset, int orderOffset, int index) { - return -filters[16] * CalcSLM.this.AudioDataBn[orderOffset + index - 7] - + filters[8] * CalcSLM.this.AudioDataSampledPipe[bufferOffset + index - 8] - - 
filters[17] * CalcSLM.this.AudioDataBn[orderOffset + index - 8];
- }
- }
-
- /** OK! **/
- public class IntegrateFilters implements Runnable {
- private int m;
-
- public IntegrateFilters(int m) {
- this.m = m;
- }
-
- @Override
- public void run() {
- int n;
- double timeStep;
- if (m < indLowPass[0]) {
- n = 32;
- timeStep = 32.0 / SAMPLE_RATE;
- } else if (m < indLowPass[1]) {
- n = 2;
- timeStep = 2.0 / SAMPLE_RATE;
- } else {
- n = 1;
- timeStep = 1.0 / SAMPLE_RATE;
- }
-
- double exp, exp2;
- if (isExpW) {
- double negTimeStep = -timeStep;
- exp = Math.exp(negTimeStep / tW);
- exp2 = Math.exp(negTimeStep / 1.5);
- } else {
- tW = 1.0;
- exp = exp2 = 1.0;
- }
-
- int offset = 0;
- for (int i = 0; i < m; i++) {
- offset += BufferBn[i] + orderBn[i];
- }
-
- double integral = 0.0;
- if (tW == 0.035) {
- for (int j = 0; j < BufferSize / n; j++) {
- int index = orderBn[m] + offset + j;
- integral += 2.5E9 * timeStep * (Math.pow(AudioDataBn[index - 1], 2) + Math.pow(AudioDataBn[index], 2)) / 2.0;
- }
- }
-
- for (int k = 0; k < BufferSize / n; k++) {
- int index = orderBn[m] + offset + k;
- double squaredSum = Math.pow(AudioDataBn[index - 1], 2) + Math.pow(AudioDataBn[index], 2);
- if (tW == 0.035) {
- if (integral / Tf > LevelBF[m]) {
- LevelBF[m] = LevelBF[m] * exp + 1.0 / (tW * 4.0E-10) * timeStep * squaredSum / 2.0;
- } else {
- LevelBF[m] = LevelBF[m] * exp2 + 1.6666666666666665E9 * timeStep * squaredSum / 2.0;
- }
- } else {
- LevelBF[m] = LevelBF[m] * exp + 1.0 / (tW * 4.0E-10) * timeStep * squaredSum / 2.0;
- }
- }
-
- LBn_Filter[m] = 10.0 * Math.log10(LevelBF[m]);
- if (!isExpW) {
- LBn_Filter[m] -= 10.0 * Math.log10(Tf * cpt);
- }
- }
- }
-
- public void setIsBnFilters(boolean z) {
- this.isBnFilters = z;
- }
-
- public void setFreqWselect(int i) {
- this.FreqWselect = i;
- }
-
- public void settW(double d) {
- this.tW = d;
- }
-
- public void setisExpW(boolean z) {
- this.isExpW = z;
- }
-
- public void setBnFilter(int i) {
- this.BnFilter = i;
- }
-
- public int getBnFilter() {
- return this.BnFilter;
- }
-
- public double[] getLBn_Filters() {
- return this.LBn_Filter;
- }
-
- public int getNbrFilters() {
- return this.orderBn.length;
- }
-} \ No newline at end of file diff --git a/seeker/snippet/Dockerfile b/seeker/snippet/Dockerfile new file mode 100644 index 00000000..2fca7e4a --- /dev/null +++ b/seeker/snippet/Dockerfile @@ -0,0 +1,24 @@ +#date: 2024-07-15T16:41:00Z +#url: https://api.github.com/gists/a45166a36c2eb652bd7c02e2e5bd4a7b +#owner: https://api.github.com/users/itzzjb
+
+# We will start from an Alpine version of the Node.js base image
+FROM node:lts-alpine
+
+# Create a directory to hold the application code inside the image
+WORKDIR /usr/src/app
+
+# Copy package.json and package-lock.json to the image
+COPY ./package*.json /usr/src/app
+
+# Installing all the node_modules according to the package.json file
+RUN npm install
+
+# Copy the rest of the application code to the image
+COPY ./ /usr/src/app
+
+# Expose the port that the app runs on
+EXPOSE 3000
+
+# Start the application
+CMD ["npm","start"] \ No newline at end of file diff --git a/seeker/snippet/GPA.py b/seeker/snippet/GPA.py deleted file mode 100644 index 16516ce6..00000000 --- a/seeker/snippet/GPA.py +++ /dev/null @@ -1,14 +0,0 @@ -#date: 2024-07-12T16:43:12Z -#url: https://api.github.com/gists/7de081cf9ae2073b9603e77eadb242c3 -#owner: https://api.github.com/users/MaXVoLD - -students = {'Johnny', 'Bilbo', 'Steve', 'Khendrik', 'Aaron'} -grades = [[5, 3, 3, 5, 4], [2, 2, 2, 3], [4, 5, 5, 2], [4, 4, 3], [5,
5, 5, 4, 5]] -gpa = [sum(grades[0]) / len(grades[0]) , - sum(grades[1]) / len(grades[1]) , - sum(grades[2]) / len(grades[2]) , - sum(grades[3]) / len(grades[3]) , - sum(grades[4]) / len(grades[4]) ,] #For each list of grades, computed the average and built a new list of the results. -sorted_student = sorted(students) #Sorted the names in A-Z order; returns the set's values as a list. -dict_gpa = dict(zip(sorted_student , gpa)) #Zipped the two lists together and combined them into a dictionary. -print(dict_gpa) \ No newline at end of file diff --git a/seeker/snippet/bukuserver.sh b/seeker/snippet/bukuserver.sh deleted file mode 100644 index e0593960..00000000 --- a/seeker/snippet/bukuserver.sh +++ /dev/null @@ -1,64 +0,0 @@ -#date: 2024-07-12T16:49:09Z -#url: https://api.github.com/gists/ef44453f2da72d2f55492b7d4c955d8d -#owner: https://api.github.com/users/LeXofLeviafan
-
-#!/bin/bash
-# Usage: `bukuserver` starts up the server, `bukuserver --stop` sends TERM to the already running server
-# On startup, the user is offered to choose or create a new DB (cancel both to exit)
-# After the server stops (via CTRL+C or `bukuserver --stop`), the choice is given again
-# NOTE: requires Zenity to work
-
-: "${BUKUSERVER=$HOME/Work/buku/}" # path to executable, or directory to run from source
-: "${VENV:=$HOME/.local/share/bukuserver/venv}" # alternatively, can be set to $BUKUSERVER/venv
-BUKU_CONFIG_DIR=~/.local/share/buku
-
-# specify Web-UI settings here
-export BUKUSERVER_THEME=slate
-export BUKUSERVER_DISABLE_FAVICON=false
-export BUKUSERVER_OPEN_IN_NEW_TAB=true
-
-
-function _settermtitle { echo -en "\033]2;$1\007"; } # changes terminal title
-
-function _select-db {
- FILES=( )
- while read FILE; do
- [ -e "$FILE" ] && FILES+=( "$(basename "${FILE%.db}")" )
- done < <(ls -1 "$BUKU_CONFIG_DIR"/{,.}*.db 2>/dev/null | sort)
- FILE=
- if [ ${#FILES[@]} != 0 ]; then
- FILE=`zenity --list --title="Choose DB" --text="(or click Cancel to create new DB)" --column="Name" -- "${FILES[@]}"`
- [ "$FILE" ] && echo "$BUKU_CONFIG_DIR/$FILE.db" && return
- fi
- while true; do
- FILE=`zenity --entry --title="Create new DB?" --text="DB name (cannot contain '/'):" --entry-text="bookmarks"`
- ! [ "$FILE" ] && echo "No name given, quitting" >&2 && return
- [[ "$FILE" == *'/'* ]] && zenity --error --text="DB name cannot contain '/'!" && continue
- [ -e "$BUKU_CONFIG_DIR/$FILE.db" ] && ! zenity --question --text="'$FILE' exists already. Open anyway?" && continue
- echo "$BUKU_CONFIG_DIR/$FILE.db"
- return
- done
-}
-
-
-if [ "$1" == '--stop' ]; then
- PID=`ps -afu "$USER" | grep '/python[^ ]* .*/bukuserver run$' | awk '{print $2}'`
- [ "$PID" ] && kill "$PID"
- exit
-fi
-
-_settermtitle 'bukuserver'
-
-if [ -d "$BUKUSERVER" ]; then
- cd "$BUKUSERVER"
- python -m venv "$VENV"
- . 
"$VENV/bin/activate" - pip install .[server] - BUKUSERVER='bukuserver' -fi - -export BUKUSERVER_DB_FILE=`_select-db` -while [ "$BUKUSERVER_DB_FILE" ]; do - "$BUKUSERVER" run - export BUKUSERVER_DB_FILE=`_select-db` -done diff --git a/seeker/snippet/buttsnatcher.sh b/seeker/snippet/buttsnatcher.sh deleted file mode 100644 index 34ca509f..00000000 --- a/seeker/snippet/buttsnatcher.sh +++ /dev/null @@ -1,25 +0,0 @@ -#date: 2024-07-11T16:47:55Z -#url: https://api.github.com/gists/6311b18793679e976a44f92ef9fa7c41 -#owner: https://api.github.com/users/themactep - -#!/bin/sh -# IPC button catcher -# Paul Philippov -# 2024-07-01: Initial release - -GPIO_MAX=95 -EXCLUDE="10 16 17 18 49 54 55 56 57 58" - -for i in $(seq 0 $GPIO_MAX); do - echo $EXCLUDE | grep -e "\b$i\b" >/dev/null && continue - echo gpio input $i -done - -gpio list > /tmp/old -while :; do - gpio list > /tmp/new - diff /tmp/old /tmp/new - sleep 1 -done - -exit 0 \ No newline at end of file diff --git a/seeker/snippet/copilot-generate-inventory-data-python-excel.py b/seeker/snippet/copilot-generate-inventory-data-python-excel.py new file mode 100644 index 00000000..24d79cf7 --- /dev/null +++ b/seeker/snippet/copilot-generate-inventory-data-python-excel.py @@ -0,0 +1,29 @@ +#date: 2024-07-15T16:58:40Z +#url: https://api.github.com/gists/56abe14f64be817384c90b579f147268 +#owner: https://api.github.com/users/summerofgeorge + +import pandas as pd +import numpy as np +from faker import Faker + +# Set the random seed +np.random.seed(1234) + +# Initialize the faker generator +fake = Faker() + +# Generate inventory data +inventory_data = { + 'Item ID': [fake.unique.random_number(digits=6) for _ in range(2000)], + 'Category': [fake.random_element(['Electronics', 'Clothing', 'Home Goods', 'Sports Equipment', 'Toys']) for _ in range(2000)], + 'Stock Level': np.random.normal(loc=100, scale=30, size=2000), + 'Reorder Level': np.random.uniform(low=20, high=50, size=2000), + 'Lead Time': np.random.exponential(scale=1/0.05, size=2000) +} + +# Create a DataFrame +df = pd.DataFrame(inventory_data) + +# Save to an Excel file +df.to_excel('warehouse_inventory.xlsx', index=False) +print("Excel workbook 'warehouse_inventory.xlsx' created successfully!") diff --git a/seeker/snippet/copilot-python-excel-customer-churn-dataset.py b/seeker/snippet/copilot-python-excel-customer-churn-dataset.py new file mode 100644 index 00000000..39ad462d --- /dev/null +++ b/seeker/snippet/copilot-python-excel-customer-churn-dataset.py @@ -0,0 +1,34 @@ +#date: 2024-07-15T17:02:17Z +#url: https://api.github.com/gists/6218180cf42c9e2297493947befc7031 +#owner: https://api.github.com/users/summerofgeorge + +import pandas as pd +import numpy as np +from faker import Faker + +# Set the random seed +np.random.seed(1234) + +# Initialize the faker generator +fake = Faker() + +# Generate customer churn data +customer_data = { + 'Customer ID': [fake.unique.random_number(digits=6) for _ in range(5000)], + 'Age': np.random.normal(loc=35, scale=10, size=5000), + 'Tenure': np.random.uniform(low=1, high=72, size=5000), + 'Monthly Charges': np.random.normal(loc=70, scale=20, size=5000), +} + +# Calculate Total Charges +customer_data['Total Charges'] = customer_data['Monthly Charges'] * customer_data['Tenure'] + +# Generate churn (0 or 1) based on probability +customer_data['Churn'] = np.random.choice([0, 1], size=5000, p=[0.8, 0.2]) + +# Create a DataFrame +df = pd.DataFrame(customer_data) + +# Save to an Excel file +df.to_excel('telecom_customer_churn.xlsx', index=False) +print("Excel 
workbook 'telecom_customer_churn.xlsx' created successfully!") diff --git a/seeker/snippet/copilot-python-fake-scores.py b/seeker/snippet/copilot-python-fake-scores.py new file mode 100644 index 00000000..f8571dcc --- /dev/null +++ b/seeker/snippet/copilot-python-fake-scores.py @@ -0,0 +1,38 @@ +#date: 2024-07-15T16:55:28Z +#url: https://api.github.com/gists/48fff68259a7c7dd0e406cbad5d86e37 +#owner: https://api.github.com/users/summerofgeorge
+
+# Use Python and the faker package to create a synthetic Excel dataset for an organization's employee performance review for 500 employees with the following details:
+
+# - Columns: Employee ID, Department, Performance Score, Salary, Years of Experience
+# - Performance Score: Normally distributed with a mean of 70 and a standard deviation of 10
+# - Salary: Log-normally distributed with a mean of $50,000 and a standard deviation of $15,000
+# - Years of Experience: Exponentially distributed with a lambda of 0.1
+# - Department: Randomly chosen from 'Sales', 'HR', 'IT', 'Marketing', 'Finance'
+# - Random seed: Set to 1234.
+
+import pandas as pd
+import numpy as np
+from faker import Faker
+
+# Set the random seed
+np.random.seed(1234)
+
+# Initialize the faker generator
+fake = Faker()
+
+# Generate employee data
+employee_data = {
+    'Employee ID': [fake.unique.random_number(digits=6) for _ in range(500)],
+    'Department': [fake.random_element(['Sales', 'HR', 'IT', 'Marketing', 'Finance']) for _ in range(500)],
+    'Performance Score': np.random.normal(loc=70, scale=10, size=500),
+    # lognormal mu/sigma derived so the distribution itself has mean ~$50,000 and sd ~$15,000
+    'Salary': np.random.lognormal(mean=np.log(50000) - 0.5 * np.log(1 + (15000 / 50000) ** 2), sigma=np.sqrt(np.log(1 + (15000 / 50000) ** 2)), size=500),
+    'Years of Experience': np.random.exponential(scale=1/0.1, size=500)
+}
+
+# Create a DataFrame
+df = pd.DataFrame(employee_data)
+
+# Save to an Excel file
+df.to_excel('employee_performance.xlsx', index=False)
+print("Excel workbook 'employee_performance.xlsx' created successfully!") diff --git a/seeker/snippet/copy_files b/seeker/snippet/copy_files deleted file mode 100644 index 51f54149..00000000 --- a/seeker/snippet/copy_files +++ /dev/null @@ -1,72 +0,0 @@ -#date: 2024-07-12T16:53:46Z -#url: https://api.github.com/gists/b1e92b1eb4241818a13d9929fa88021a -#owner: https://api.github.com/users/nyabongo
-
-#!/bin/bash
-
-show_help() {
- echo "Usage: $(basename "$0") [-h] [-i pattern] [-e pattern] [-v]"
- echo
- echo "Options:"
- echo " -h Show this help message"
- echo " -i pattern Ignore files matching the pattern (e.g., '*.g.dart')"
- echo " -e pattern Only include files matching the pattern (e.g., 'index.*')"
- echo " -v Enable verbose mode to print files not ignored"
-}
-
-ignore_pattern=""
-include_pattern=""
-verbose=false
-
-while getopts "hi:e:v" opt; do
- case $opt in
- h)
- show_help
- exit 0
- ;;
- i)
- ignore_pattern="$OPTARG"
- ;;
- e)
- include_pattern="$OPTARG"
- ;;
- v)
- verbose=true
- ;;
- \?)
- show_help
- exit 1
- ;;
- esac
-done
-
-output=""
-while IFS= read -r file; do
- # Check if the file matches the ignore pattern
- if [[ -n "$ignore_pattern" && "$file" == $ignore_pattern ]]; then
- continue
- fi
-
- # Check if the file matches the include pattern
- if [[ -n "$include_pattern" && !
"$file" == $include_pattern ]]; then - continue - fi - - if $verbose; then - echo "Processing file: $file" - fi - - file_type=$(file --mime-type -b "$file") - output+="File: $file"$'\n' - output+="Path: $(realpath --relative-to="$(pwd)" "$file")"$'\n' - output+="Contents:"$'\n' - if [[ "$file_type" == text/* ]]; then - output+="\`\`\`"$'\n' - output+="$(cat "$file")"$'\n' - output+="\`\`\`"$'\n' - else - output+=""$'\n' - fi - output+=$'\n'$'\n' -done < <(git ls-files) -echo "$output" | xclip -selection clipboard diff --git a/seeker/snippet/create-socket-server.sh b/seeker/snippet/create-socket-server.sh new file mode 100644 index 00000000..d8240808 --- /dev/null +++ b/seeker/snippet/create-socket-server.sh @@ -0,0 +1,88 @@ +#date: 2024-07-15T17:00:06Z +#url: https://api.github.com/gists/c0553e536ea31f64d3911a5c11dcc572 +#owner: https://api.github.com/users/rohitranjan-2702 + + #!/bin/bash + # Create a new directory for the project + PROJECT_DIR="simple-socket-server" + mkdir -p $PROJECT_DIR + cd $PROJECT_DIR + + # Initialize a new Node.js project + echo "Initializing Node.js project..." + npm init -y + + # Install necessary packages + echo "Installing socket.io..." + npm install socket.io express + npm install -D nodemon + + # Create a .gitignore + cat << 'EOF' > .gitignore + node_modules/ + .env + .git + EOF + + # Create a simple socket server file + cat << 'EOF' > index.js + const express = require("express"); + const { createServer } = require("node:http"); + const { Server } = require("socket.io"); + + const app = express(); + const server = createServer(app); + const io = new Server(server); + + app.get("/", (req, res) => { + res.send("
<h1>Socket Server is running 🚀</h1>
"); + }); + + io.on("connection", (socket) => { + console.log("User connected", socket.id); + + socket.on("message", (message) => { + const msg = JSON.parse(message); + console.log(msg); + }); + + socket.on("disconnect", () => { + console.log("User Disconnected", socket.id); + }); + }); + + server.listen(3000, () => { + console.log("server running at http://localhost:3000"); + }); + + EOF + + # Add scripts to package.json + echo "Adding dev and start scripts to package.json..." + node -e " + const fs = require('fs'); + const packageJson = JSON.parse(fs.readFileSync('package.json')); + packageJson.scripts = { + ...packageJson.scripts, + dev: 'nodemon index.js', + start: 'node index.js' + }; + fs.writeFileSync('package.json', JSON.stringify(packageJson, null, 2)); + " + + # Create a .gitignore + cat << 'EOF' > READme.md + # Simple Socket Server + + ## Setup + + - `cd simple-socket-server` + - `npm run dev` : for development mode + - `npm start` : for production mode + EOF + + # Instructions to run the server + echo "Setup complete. To run the server, execute the following commands:" + echo "cd $PROJECT_DIR" + echo "npm run dev # For development mode" + echo "npm start # For production mode" diff --git a/seeker/snippet/deep_neural_network.py b/seeker/snippet/deep_neural_network.py new file mode 100644 index 00000000..49159b4d --- /dev/null +++ b/seeker/snippet/deep_neural_network.py @@ -0,0 +1,496 @@ +#date: 2024-07-15T16:52:59Z +#url: https://api.github.com/gists/fa201f1e481c48b28cc879ce362cd2e4 +#owner: https://api.github.com/users/kodejuice + +import numpy as np +import os +import json + + +def sigmoid(x): + x = np.clip(x, -709, 709) # Clip input to avoid overflow + return 1 / (1 + np.exp(-x)) + + +def d_sigmoid(x): + return sigmoid(x) * (1 - sigmoid(x)) + + +def ReLu(v): + return np.maximum(0, v) + + +def d_ReLu(x): + return np.where(x > 0, 1, 0) + + +def leaky_ReLu(x, alpha=0.01): + v = np.maximum(alpha * x, x) + v = np.clip(v, 1e-15, 1 - 1e-15) + return v + + +def d_leaky_ReLu(x, alpha=0.01): + return np.where(x > 0, 1, alpha) + + +def tanh(x): + return np.tanh(x) + + +def d_tanh(x): + return 1 - np.tanh(x) ** 2 + + +def linear(x): + x = np.clip(x, 1e-15, 1 - 1e-15) + return x + + +def d_linear(x): + return 1 + + +def softmax(x, T=1): + clip_value = 10.0 + x = x - x.max(axis=0) + x = np.clip(x, -clip_value, clip_value) + exp_xrel = np.exp(x / T) + return exp_xrel / exp_xrel.sum(axis=0) + + +class BatchNormLayer: + def __init__(self, size, epsilon=1e-5, momentum=0.9): + self.epsilon = epsilon + self.momentum = momentum + self.size = size + self.gamma = np.ones((size, 1)) + self.beta = np.zeros((size, 1)) + self.running_mean = np.zeros((size, 1)) + self.running_var = np.ones((size, 1)) + + def forward(self, Z, training=True): + if training: + self.Z = Z + self.mu = np.mean(Z, axis=1, keepdims=True) + self.var = np.var(Z, axis=1, keepdims=True) + self.Z_norm = (Z - self.mu) / np.sqrt(self.var + self.epsilon) + self.Z_out = self.gamma * self.Z_norm + self.beta + + # Update running mean and variance + self.running_mean = self.momentum * \ + self.running_mean + (1 - self.momentum) * self.mu + self.running_var = self.momentum * \ + self.running_var + (1 - self.momentum) * self.var + else: + Z_norm = (Z - self.running_mean) / \ + np.sqrt(self.running_var + self.epsilon) + self.Z_out = self.gamma * Z_norm + self.beta + + # print(f"BatchNorm: {self.Z_out}") + return self.Z_out + + def backward(self, dZ, learning_rate): + m = dZ.shape[1] + + dgamma = np.sum(dZ * self.Z_norm, axis=1, keepdims=True) 
+ dbeta = np.sum(dZ, axis=1, keepdims=True) + + dZ_norm = dZ * self.gamma + dvar = np.sum(dZ_norm * (self.Z - self.mu) * -0.5 * + (self.var + self.epsilon) ** (-1.5), axis=1, keepdims=True) + dmu = np.sum(dZ_norm * -1 / np.sqrt(self.var + self.epsilon), axis=1, + keepdims=True) + dvar * np.mean(-2 * (self.Z - self.mu), axis=1, keepdims=True) + dZ = dZ_norm / np.sqrt(self.var + self.epsilon) + \ + dvar * 2 * (self.Z - self.mu) / m + dmu / m + + # Update gamma and beta + self.gamma -= learning_rate * dgamma + self.beta -= learning_rate * dbeta + + return dZ + + +class NNLayer: + def __init__(self, input_size, output_size, activation='relu', network_loss=None, keep_prob=1, batch_norm=False): + self.input_size = input_size + self.output_size = output_size + self.activation = activation + self.__network_loss = network_loss + self.keep_prob = keep_prob + self.batch_norm = batch_norm + self.init_weights() + if self.batch_norm: + self.batch_norm_layer = BatchNormLayer(self.output_size) + + def init_weights(self): + k = 1. + if self.activation == 'relu': + k = 2. + + # initialize weights with random values from normal distribution + self.W = np.random.randn( + self.output_size, self.input_size) * np.sqrt(k / self.input_size) + self.b = np.zeros((self.output_size, 1)) + + # initialize weights for momentum + self.vdW = np.zeros((self.output_size, self.input_size)) + self.vdb = np.zeros((self.output_size, 1)) + + # initialize weights for Adam + self.sdW = np.zeros((self.output_size, self.input_size)) + self.sdb = np.zeros((self.output_size, 1)) + + def forward(self, A_prev, training=False): + self.A_prev = A_prev + self.Z = np.dot(self.W, A_prev) + self.b + + if self.batch_norm: + self.Z = self.batch_norm_layer.forward(self.Z, training) + + self.A = self.activation_fn(self.Z) + + if training and self.keep_prob < 1: + # apply dropout to the activations of the previous layer + self.A = self.A * np.random.binomial( + 1, self.keep_prob, size=self.A.shape) + # scale the activations + self.A = self.A / self.keep_prob + + return self.A + + def gradient_descent_update(self, dW, db, learning_rate, L2_reg=0, beta=0.9, beta2=0.999, train_iteration=1, optimization='gd'): + eps = 1e-8 + if optimization == 'gd': + self.W -= learning_rate * (dW + L2_reg * self.W) + self.b -= learning_rate * db + elif optimization == 'adam': + self.vdW = beta * self.vdW + (1 - beta) * dW + self.vdb = beta * self.vdb + (1 - beta) * db + self.sdW = beta2 * self.sdW + (1 - beta2) * dW ** 2 + self.sdb = beta2 * self.sdb + (1 - beta2) * db ** 2 + # bias correction + vdW = self.vdW / (1 - beta ** train_iteration) + vdb = self.vdb / (1 - beta ** train_iteration) + sdW = self.sdW / (1 - beta2 ** train_iteration) + sdb = self.sdb / (1 - beta2 ** train_iteration) + # update weights + self.W = self.W - learning_rate * vdW / np.sqrt(sdW + eps) + self.b = self.b - learning_rate * vdb / np.sqrt(sdb + eps) + elif optimization == 'rmsprop': + self.vdW = beta * self.vdW + (1 - beta) * dW ** 2 + self.vdb = beta * self.vdb + (1 - beta) * db ** 2 + # update weights + self.W = self.W - learning_rate * dW / np.sqrt(self.vdW + eps) + self.b = self.b - learning_rate * db / np.sqrt(self.vdb + eps) + elif optimization == 'momentum': + self.vdW = beta * self.vdW + (1 - beta) * dW + self.vdb = beta * self.vdb + (1 - beta) * db + self.W -= learning_rate * self.vdW + self.b -= learning_rate * self.vdb + else: + raise ValueError(f"Unsupported optimization method: {optimization}") + + def backward(self, dA, learning_rate, L2_reg=0, beta1=0.9, beta2=0.999, 
train_iteration=0, optimization='gd'):
+        m = self.A_prev.shape[1]
+
+        if self.activation == 'softmax' or self.__network_loss == 'binary_cross_entropy':
+            # we already computed the derivative in the neural network backward pass method
+            dZ = dA
+        else:
+            dZ = dA * self.activation_fn(self.Z, derivative=True)
+
+        if self.batch_norm:
+            dZ = self.batch_norm_layer.backward(dZ, learning_rate)
+
+        dW = 1 / m * np.dot(dZ, self.A_prev.T)
+        db = 1 / m * np.sum(dZ, axis=1, keepdims=True)
+        dA_prev = np.dot(self.W.T, dZ)
+
+        self.gradient_descent_update(
+            dW, db, learning_rate,
+            L2_reg=L2_reg,
+            beta=beta1,
+            beta2=beta2,
+            train_iteration=train_iteration + 1,
+            optimization=optimization
+        )
+
+        return dA_prev
+
+    def activation_fn(self, x, derivative=False):
+        a = {
+            'relu': [ReLu, d_ReLu],
+            'leaky_relu': [leaky_ReLu, d_leaky_ReLu],
+            'tanh': [tanh, d_tanh],
+            'sigmoid': [sigmoid, d_sigmoid],
+            'linear': [linear, d_linear],
+            'softmax': [softmax, None],
+        }
+        return a[self.activation][derivative](x)
+
+
+class Layer:
+    def __init__(self, neurons: int, activation='relu', keep_prob=1.0, batch_norm=False):
+        self.neurons = neurons
+        self.activation = activation
+        self.keep_prob = keep_prob
+        self.batch_norm = batch_norm
+
+
+class OutputLayer(Layer):
+    def __init__(self, classes: int, activation='linear', keep_prob=1.0, batch_norm=False):
+        super().__init__(classes, activation, keep_prob, batch_norm)
+
+
+def connect_layers(layers: list[Layer], loss=None):
+    assert len(layers) > 1, "At least 2 layers are required"
+    nn_layers = [
+        NNLayer(input_size=layers[0].neurons, output_size=layers[1].neurons,
+                activation=layers[1].activation, keep_prob=layers[0].keep_prob),
+    ]
+    for i in range(1, len(layers) - 1):
+        nn_layers.append(
+            NNLayer(
+                input_size=layers[i].neurons,
+                output_size=layers[i + 1].neurons,
+                activation=layers[i + 1].activation,
+                network_loss=loss,
+                keep_prob=layers[i].keep_prob,
+                batch_norm=layers[i].batch_norm
+            )
+        )
+    return nn_layers
+
+
+class DeepNeuralNetwork:
+    def __init__(self, layers: list[Layer], loss='mse', L2_reg=0.0, beta1=0.9, beta2=0.999, optimization='gd', model_file=None):
+        self.loss_fn = loss
+        self.layers = connect_layers(layers, loss)
+        self.assertions()
+        self.training = False
+        self.L2_reg = L2_reg
+        self.beta1 = beta1
+        self.beta2 = beta2
+        self.optimization = optimization
+        self.model_file_name = model_file or 'model_weights.json'
+        self.load_weights_from_file()
+
+    def assertions(self):
+        if self.loss_fn == 'binary_cross_entropy':
+            assert self.layers[-1].activation == 'sigmoid', \
+                'Last layer must be sigmoid for binary cross entropy'
+            assert self.layers[-1].output_size == 1, \
+                'Last layer must have 1 neuron for binary cross entropy'
+
+        for layer in self.layers:
+            assert layer.activation in [
+                'relu',
+                'leaky_relu',
+                'tanh',
+                'sigmoid',
+                'linear',
+                'softmax',
+            ], \
+                f"Unsupported activation function: '{layer.activation}'"
+
+        assert self.loss_fn in ['cross_entropy', 'mse', 'binary_cross_entropy'], \
+            f"Unsupported loss function: '{self.loss_fn}'"
+
+    def cost(self, X, Y):
+        """cost over all examples in a batch"""
+        A_L = self.full_forward_pass(X)
+        Y = np.array(Y).T
+
+        assert A_L.shape == Y.shape, \
+            "Invalid shapes, A_L: %s, Y: %s" % (A_L.shape, Y.shape)
+
+        cost = 0
+        if self.loss_fn == 'mse':
+            cost = np.mean(np.mean(np.square(A_L - Y), axis=0))
+        elif self.loss_fn == 'cross_entropy':
+            A_L = np.clip(A_L, 1e-15, 1 - 1e-15)
+            cost = np.mean(-np.sum(Y * np.log(A_L), axis=0))
+        elif self.loss_fn == 'binary_cross_entropy':
+            A_L = np.clip(A_L, 1e-15, 1 - 1e-15)
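+            # the clip keeps A_L strictly inside (0, 1) so neither log()
+            # term below can produce -inf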
+            cost = np.mean(-np.sum(Y * np.log(A_L) + (1 - Y)
+                           * np.log(1 - A_L), axis=0))
+
+        # add L2 regularization
+        if self.L2_reg > 0 and self.optimization in ['gd']:
+            l2_reg = 0
+            for layer in self.layers:
+                l2_reg += np.sum(np.square(layer.W))
+            cost += (self.L2_reg / 2) * l2_reg
+
+        return cost
+
+    def predict(self, X):
+        return self.single_forward_pass(X)
+
+    def single_forward_pass(self, X):
+        """Forward pass for a single input"""
+        X = np.array(X).reshape((self.layers[0].input_size, 1))
+        A = X
+        for layer in self.layers:
+            A = layer.forward(A)
+        return A
+
+    def full_forward_pass(self, X):
+        """Forward pass for a batch of inputs"""
+        # X_T = np.array(X).T  # make all examples be arranged in a column
+        # """
+        # X_T = [
+        #   [example1_a, example2_a, ..., exampleN_a],
+        #   [example1_b, example2_b, ..., exampleN_b],
+        #   [example1_c, example2_c, ..., exampleN_c],
+        # ]
+        # """
+        X = np.array(X).T
+        A = X
+        for layer in self.layers:
+            A = layer.forward(A, self.training)
+        return A
+
+    def backward_pass(self, Y, learning_rate, iteration=0):
+        # we must have run a forward pass before calling this method
+
+        Y = np.array(Y)
+        Y_T = Y.T  # reshape training labels to be arranged in a column
+        A_L = self.layers[-1].A
+
+        if self.layers[-1].activation == 'softmax' and self.loss_fn == 'cross_entropy':
+            dA = A_L - Y_T
+        elif self.layers[-1].activation == 'sigmoid' and self.loss_fn == 'binary_cross_entropy':
+            dA = A_L - Y_T
+        else:
+            assert self.loss_fn == 'mse', 'Expected mse loss'
+            assert self.layers[-1].activation != 'softmax', 'Use an activation other than softmax here'
+
+            dA = 2 * (A_L - Y_T) * \
+                self.layers[-1].activation_fn(self.layers[-1].Z, derivative=True)
+
+        for layer in reversed(self.layers):
+            dA = layer.backward(
+                dA, learning_rate,
+                L2_reg=self.L2_reg,
+                beta1=self.beta1,
+                beta2=self.beta2,
+                train_iteration=iteration,
+                optimization=self.optimization
+            )
+
+    def train(self, X, Y, epochs=900000, initial_learning_rate=0.01, batch_size=64, decay_rate=0.0001, generate_dataset_fn=None, periodic_callback=None):
+        print('Initial cost:', self.cost(X, Y))
+        if periodic_callback:
+            periodic_callback()
+        print('')
+
+        if any(l.keep_prob < 1 for l in self.layers):
+            print('Applying Dropout to some layers')
+        if self.L2_reg > 0 and self.optimization in ['gd']:
+            print('Applying L2 regularization')
+
+        for i in range(1, epochs):
+            # decay learning rate
+            learning_rate = initial_learning_rate / (1 + decay_rate * i)
+
+            # Mini-batch gradient descent
+            for j in range(0, len(X), batch_size):
+                X_batch = X[j:j + batch_size]
+                Y_batch = Y[j:j + batch_size]
+
+                self.training = True
+                self.full_forward_pass(X_batch)
+                self.backward_pass(Y_batch, learning_rate, iteration=j)
+                self.training = False
+
+            if i % 10 == 0:
+                self.output_weights_to_file()
+                loss = self.cost(X, Y)
+                print(f'Epoch {i}, Loss: {loss:.6f}, LR: {learning_rate:.6f}')
+
+            if i % 70 == 0:
+                if periodic_callback:
+                    periodic_callback()
+
+            if i % 100 == 0:
+                if generate_dataset_fn:
+                    X, Y = generate_dataset_fn()
+                else:
+                    # shuffle dataset
+                    XY = list(zip(X, Y))
+                    np.random.shuffle(XY)
+                    X, Y = zip(*XY)
+
+        print('Final cost:', self.cost(X, Y))
+
+    def nn_layers_params(self):
+        s = ''
+        for layer in self.layers:
+            s += f'({layer.input_size}x{layer.output_size}, {layer.activation}) -> '
+        return s
+
+    def output_weights_to_file(self):
+        layers_params = self.nn_layers_params()
+        weights = {'network_params_hash': layers_params, 'weights': []}
+        model_file_name = self.model_file_name.replace('.json', '')
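+        # strip any '.json' suffix so the extension below is appended
+        # exactly once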
+
+        with open(f'{model_file_name}.json', 'w') as f:
+            for i, layer in enumerate(self.layers):
+                params = {
+                    'W': layer.W.tolist(),
+                    'b': layer.b.tolist(),
+                }
+                if layer.batch_norm:
+                    params['batch_norm_params'] = {
+                        'gamma': layer.batch_norm_layer.gamma.tolist(),
+                        'beta': layer.batch_norm_layer.beta.tolist(),
+                        'running_mean': layer.batch_norm_layer.running_mean.tolist(),
+                        'running_var': layer.batch_norm_layer.running_var.tolist(),
+                    }
+
+                weights['weights'] += [params]
+            f.write(json.dumps(weights, indent=1))
+
+    def load_weights_from_file(self):
+        model_file_name = self.model_file_name.replace('.json', '')
+        if os.path.exists(f'{model_file_name}.json'):
+            print('Loading weights from file...')
+        else:
+            return
+
+        with open(f'{model_file_name}.json', 'r+') as f:
+            try:
+                model_weights = json.loads(f.read())
+            except json.JSONDecodeError:
+                print('Error: weights file is not valid JSON')
+                f.seek(0)
+                f.truncate()
+                f.write('{}')
+                return
+
+            if 'network_params_hash' not in model_weights:
+                print('Error: weights file has no network_params_hash')
+                os.rename(f'{model_file_name}.json', f'{model_file_name}_old.json')
+                return
+
+            if model_weights['network_params_hash'] != self.nn_layers_params():
+                print('Error: weights file and current network layers hash do not match, ignoring')
+                # rename old weights file
+                os.rename(f'{model_file_name}.json', f'{model_file_name}_old.json')
+                return
+
+            weights = model_weights['weights']
+            for i, layer in enumerate(self.layers):
+                layer.W = np.array(weights[i]['W'])
+                layer.b = np.array(weights[i]['b'])
+                if layer.batch_norm and 'batch_norm_params' in weights[i]:
+                    Wi = weights[i]
+                    layer.batch_norm_layer.gamma = np.array(
+                        Wi['batch_norm_params']['gamma'])
+                    layer.batch_norm_layer.beta = np.array(
+                        Wi['batch_norm_params']['beta'])
+                    layer.batch_norm_layer.running_mean = np.array(
+                        Wi['batch_norm_params']['running_mean'])
+                    layer.batch_norm_layer.running_var = np.array(
+                        Wi['batch_norm_params']['running_var'])
+
diff --git a/seeker/snippet/devbox b/seeker/snippet/devbox
deleted file mode 100644
index 263eb07f..00000000
--- a/seeker/snippet/devbox
+++ /dev/null
@@ -1,75 +0,0 @@
-#date: 2024-07-12T17:01:35Z
-#url: https://api.github.com/gists/aba9e766f881d47aea64f3f38c7c2118
-#owner: https://api.github.com/users/pythoninthegrass
-
-#!/usr/bin/env bash
-
-# shellcheck disable=SC1091,SC2317
-
-# shift shim behind real binary
-export PATH="/usr/local/bin/:${PATH}"
-
-# env vars
-git_root="$(git rev-parse --show-toplevel 2>/dev/null)"
-script_dir=$(dirname "$(readlink -f "$0")")
-if [ -n "$git_root" ]; then
-    tld="$(git rev-parse --show-toplevel)"
-else
-    tld="${script_dir}"
-fi
-env_file="${tld}/.env"
-
-# $USER
-[[ -n $(logname >/dev/null 2>&1) ]] && logged_in_user=$(logname) || logged_in_user=$(whoami)
-
-# $HOME
-logged_in_home=$(eval echo "~${logged_in_user}")
-
-install_shim() {
-    mkdir -p "${logged_in_home}/.local/bin"
-    ln -s "${script_dir}/devbox" "${logged_in_home}/.local/bin/devbox"
-}
-
-uninstall_shim() {
-    if [ -f "${logged_in_home}/.local/bin/devbox" ]; then
-        echo "Removing devbox shim"
-        rm -f "${logged_in_home}/.local/bin/devbox"
-    else
-        echo "devbox shim not found"
-    fi
-}
-
-main() {
-    # cap infinite recursion by limiting SHLVL
-    if [ "${SHLVL}" -gt 10 ]; then
-        echo "Error: Too many nested shells, stopping to prevent infinite loop."
- exit 1 - fi - - if [ $# -eq 0 ]; then - devbox - else - case $1 in - -i|--install) - install_shim - ;; - -u|--uninstall) - uninstall_shim - ;; - shell|run) - devbox "$@" - ;; - services) - # append `--env-file` to `devbox services` command - if [ -f "${env_file}" ] && [ -s "${env_file}" ]; then - set -- "$1" "--env-file" "${env_file}" "${@:2}" - fi - devbox "$@" - ;; - *) - devbox "$@" - ;; - esac - fi -} -main "$@" diff --git a/seeker/snippet/enc.py b/seeker/snippet/enc.py deleted file mode 100644 index 0f91890b..00000000 --- a/seeker/snippet/enc.py +++ /dev/null @@ -1,58 +0,0 @@ -#date: 2024-07-12T16:56:38Z -#url: https://api.github.com/gists/96229aa4f23117c03b4f05e6abfdeb65 -#owner: https://api.github.com/users/KunYi - -#!/usr/bin/python3 - -from Crypto.Cipher import AES -import binascii - -def get_uid(uid_string): - """ - Convert the UID string to lowercase and then to a byte array of specified length. - - Parameters: - uid_string (str): The UID string to be converted. - - Returns: - bytes: The converted byte array. - """ - # Convert the string to lowercase - lowercase_uid = uid_string.lower() - - # Take the required number of characters - shortened_uid = lowercase_uid[:8] - print(shortened_uid) - - # Convert the shortened string to a byte array - byte_array_uid = shortened_uid.encode('ascii') - return byte_array_uid - - -# my device uid '10063F1FB910E0DC' -#uid = b'10063f1f' -#uid = b'10093f30' -uid = get_uid('10063F1FB910E0DC') - -# AES key "59494F4754fff00" store in firmware -key = b'59494F4754fff00\0' - -aes = AES.new(key, AES.MODE_ECB) - -dat = aes.encrypt(uid + uid) - -otp = dat[:8].hex().encode('ascii').hex() -print(otp) - -binotp = binascii.unhexlify(otp) -print(binotp) - -buff = bytearray([0xFF] * 1024) -buff[0:16] = binotp -buff[256:256+15] = binotp[1:] -buff[512:512+14] = binotp[2:] -buff[768:768+13] = binotp[3:] - -with open('otp.bin', 'wb') as f: - f.write(buff) -print("Binary data written to otp.bin") \ No newline at end of file diff --git a/seeker/snippet/example_mssql.py b/seeker/snippet/example_mssql.py deleted file mode 100644 index 1bd63ec4..00000000 --- a/seeker/snippet/example_mssql.py +++ /dev/null @@ -1,62 +0,0 @@ -#date: 2024-07-12T17:00:36Z -#url: https://api.github.com/gists/a444dabf34d6e1da5e05da9cf4a92408 -#owner: https://api.github.com/users/DavidRueter - -from pymssql import _mssql - -class SQLSettings: - def __init__(self, server= "**********"=1433, user='someuser', password='somepassword', - database='somedatabase', appname='someapp', max_conns=10, sql_timeout=120, - ): - self.server = server - self.port = port - self.user = user - self.password = "**********" - self.database = database - self.appname = appname - self.max_conns = max_conns - self.sql_timeout = sql_timeout - - _mssql.set_max_connections(max_conns) - -#set the required SQL Server connection information -sql_settings = "**********"='localhost', database='master', user='sa', password='mypassword') - -sql_conn = _mssql.connect( - server=sql_settings.server, - port=sql_settings.port, - user=sql_settings.user, - password= "**********" - database=sql_settings.database, - appname=sql_settings.appname -) - -sql_str = 'SELECT TOP 10 * FROM sys.objects' -# Note: you can concatenate your own value for sql_str however you want - -sql_conn.execute_query(sql_str) - -resultsets = [] # query may return multiple resultsets -first_resultset = [] - -# get the first resultset and store a list of the rows in this_resultset -this_resultset = [row for row in sql_conn] - -# append this_resultset to our list of 
resultsets (in case the query returns multiple resultsets) -resultsets.append(this_resultset) - -# repeat for each additional resultset -have_next_resultset = sql_conn.nextresult() - -while have_next_resultset: - this_resultset = [row for row in sql_conn] - resultsets.append(this_resultset) - have_next_resultset = sql_conn.nextresult() - -# the resultset is just a list of rows -for row in resultsets[0]: - # loop through the resultset and do whatever we want - print(row['object_id'], row['name']) - -# note that if there are multiple resultsets you can do -# the same kind of thing with resultsets[1], resultset[2], etc.sultset[2], etc. \ No newline at end of file diff --git a/seeker/snippet/fetch_chrome_bypass.py b/seeker/snippet/fetch_chrome_bypass.py deleted file mode 100644 index dad8b45d..00000000 --- a/seeker/snippet/fetch_chrome_bypass.py +++ /dev/null @@ -1,19 +0,0 @@ -#date: 2024-07-12T17:08:46Z -#url: https://api.github.com/gists/321ac7189d0a8f30057eea5a8241940b -#owner: https://api.github.com/users/tdoylend - -# Kudos to the Chromium devs for needlessly obfuscating how to access a site with an invalid HTTPS cert. /s - -import requests -import re -from base64 import b64decode - -data = requests.get('https://chromium.googlesource.com/chromium/src/+/refs/heads/main/components/security_interstitials/core/browser/resources/interstitial_large.js?format=TEXT').content - -data = b64decode(data) -#print(data) -#open('t','wb').write(data) - -r = re.search(r"BYPASS_SEQUENCE.*?'(.*?)'".encode('ascii'), data) - -print('Current Chromium bypass: '+b64decode(r.group(1)).decode('ascii')) diff --git a/seeker/snippet/gistfile1.txt b/seeker/snippet/gistfile1.txt deleted file mode 100644 index 5457d8b5..00000000 --- a/seeker/snippet/gistfile1.txt +++ /dev/null @@ -1,452 +0,0 @@ -#date: 2024-07-11T16:45:12Z -#url: https://api.github.com/gists/0b2db4a2701d7b71fc8ea0c18efa2f28 -#owner: https://api.github.com/users/Nienelames - -#!/bin/bash - - -rootDir=/opt/mopidy - - - -# Install deps - -sudo apt install -y build-essential \ - python3-gi \ - python3-venv \ - python3-dev \ - python3-pip \ - pkg-config \ - gir1.2-gst-plugins-base-1.0 \ - gir1.2-gstreamer-1.0 \ - gstreamer1.0-plugins-good \ - gstreamer1.0-plugins-ugly \ - gstreamer1.0-tools \ - gstreamer1.0-pipewire \ - libxt-dev \ - libcairo2-dev \ - libgirepository1.0-dev \ - python3-gst-1.0 \ - tmux \ - snapserver \ - snapclient - - -# Install mopidy - -sudo mkdir $rootDir -sudo chown -R $USER:$USER $rootDir - -python3 -m venv "${rootDir}/.venv" -. "${rootDir}/.venv/bin/activate" -cd $rootDir - -pip install PyGObject mopidy mopidy-local mopidy-iris mopidy-mobile mopidy-youtube "yt-dlp[default]" - - -# Configure Mopidy - -mkdir -p ~/.config/mopidy -touch ~/.config/mopidy/mopidy.conf -bash -c "cat > ~/.config/mopidy/mopidy.conf" << EOF -[core] -#cache_dir = $XDG_CACHE_DIR/mopidy -#config_dir = $XDG_CONFIG_DIR/mopidy -#data_dir = $XDG_DATA_DIR/mopidy -max_tracklist_length = 10000 -restore_state = true - -[logging] -verbosity = 0 -format = %(levelname)-8s %(asctime)s [%(process)d:%(threadName)s] %(name)s\n %(message)s -color = true -config_file = - -[audio] -#mixer = software -#mixer_volume = -output = audioresample quality=10 ! audioconvert ! audio/x-raw,rate=48000,channels=2,format=S16LE ! 
tcpclientsink host=127.0.0.1 port=9000 -#buffer_time = - -[proxy] -#scheme = -#hostname = -#port = -#username = - "**********"# "**********"p "**********"a "**********"s "**********"s "**********"w "**********"o "**********"r "**********"d "**********" "**********"= "**********" - -[file] -enabled = true -media_dirs = - $HOME/Music -# ~/|Home -excluded_file_extensions = - .directory - .html - .jpeg - .jpg - .log - .nfo - .pdf - .png - .txt - .zip -show_dotfiles = false -follow_symlinks = false -metadata_timeout = 1000 - -[http] -enabled = true -hostname = :: -port = 6680 -zeroconf = Mopidy HTTP server on $hostname -#allowed_origins = -csrf_protection = true -default_app = mopidy - -[m3u] -enabled = true -base_dir = $XDG_MUSIC_DIR -default_encoding = latin-1 -default_extension = .m3u8 -#playlists_dir = - -[softwaremixer] -enabled = true - -[stream] -enabled = true -protocols = - http - https - mms - rtmp - rtmps - rtsp -metadata_blacklist = -timeout = 5000 - -[iris] -enabled = true -country = LT -locale = en_US -verify_certificates = true -snapcast_enabled = true -snapcast_host = $(ip route get 1 | awk '{print $NF;exit}') -snapcast_port = 1780 -snapcast_ssl = false -snapcast_stream = Default -spotify_authorization_url = https://jamesbarnsley.co.nz/iris/auth_spotify.php -lastfm_authorization_url = https://jamesbarnsley.co.nz/iris/auth_lastfm.php -genius_authorization_url = https://jamesbarnsley.co.nz/iris/auth_genius.php - -[local] -enabled = true -max_search_results = 100 -media_dir = $HOME/Music -scan_timeout = 1000 -scan_flush_threshold = 100 -scan_follow_symlinks = false -#included_file_extensions = -excluded_file_extensions = - .cue - .directory - .html - .jpeg - .jpg - .log - .nfo - .pdf - .png - .txt - .zip -#directories = -# Albums local:directory?type=album -# Artists local:directory?type=artist -# Composers local:directory?type=artist&role=composer -# Genres local:directory?type=genre -# Performers local:directory?type=artist&role=performer -# Release Years local:directory?type=date&format=%25Y -# Tracks local:directory?type=track -# Last Week's Updates local:directory?max-age=604800 -# Last Month's Updates local:directory?max-age=2592000 -timeout = 10 -use_artist_sortname = false -album_art_files = - *.jpg - *.jpeg - *.png - -[youtube] -enabled = true -allow_cache = true -#youtube_api_key = -search_results = 10 -playlist_max_videos = 20 -api_enabled = false -#channel_id = -musicapi_enabled = false -#musicapi_cookie = -autoplay_enabled = false -strict_autoplay = false -max_autoplay_length = 600 -max_degrees_of_separation = 3 -youtube_dl_package = yt_dlp -EOF - - -# Configure Snapcast - -sudo bash -c "cat > /etc/default/snapserver" << EOF -START_SNAPSERVER=true -SNAPSERVER_OPTS="" -EOF - -sudo bash -c "cat > /etc/default/snapclient" << EOF -START_SNAPCLIENT=true -SNAPCLIENT_OPTS="" -EOF - -sudo bash -c "cat > /lib/systemd/system/snapclient.service" << EOF -[Unit] -Description=Snapcast client -Documentation=man:snapclient(1) -Wants=network-online.target avahi-daemon.service -After=network-online.target time-sync.target sound.target avahi-daemon.service - -[Service] -EnvironmentFile=-/etc/default/snapclient -ExecStart="tmux new-session -d -s snapclient 'snapclient'" -User=$USER -Group=$USER -# very noisy on stdout -StandardOutput=null -Restart=on-failure - -[Install] -WantedBy=multi-user.target -EOF - -sudo bash -c "cat > /etc/snapserver.conf" << EOF -############################################################################### -# ______ # -# / _____) # -# ( (____ ____ _____ ____ 
___ _____ ____ _ _ _____ ____ # -# \____ \ | _ \ (____ || _ \ /___)| ___ | / ___)| | | || ___ | / ___) # -# _____) )| | | |/ ___ || |_| ||___ || ____|| | \ V / | ____|| | # -# (______/ |_| |_|\_____|| __/ (___/ |_____)|_| \_/ |_____)|_| # -# |_| # -# # -# Snapserver config file # -# # -############################################################################### - -# default values are commented -# uncomment and edit to change them - -# Settings can be overwritten on command line with: -# "--
.=", e.g. --server.threads=4 - - -# General server settings ##################################################### -# -[server] -# Number of additional worker threads to use -# - For values < 0 the number of threads will be 2 (on single and dual cores) -# or 4 (for quad and more cores) -# - 0 will utilize just the processes main thread and might cause audio drops -# in case there are a couple of longer running tasks, such as encoding -# multiple audio streams -threads = -1 - -# the pid file when running as daemon -#pidfile = /var/run/snapserver/pid - -# the user to run as when daemonized -#user = snapserver -# the group to run as when daemonized -#group = snapserver - -# directory where persistent data is stored (server.json) -# if empty, data dir will be -# - "/var/lib/snapserver/" when running as daemon -# - "$HOME/.config/snapserver/" when not running as daemon -#datadir = - -# -############################################################################### - - -# HTTP RPC #################################################################### -# -[http] -# enable HTTP Json RPC (HTTP POST and websockets) -enabled = true - -# address to listen on, can be specified multiple times -# use "0.0.0.0" to bind to any IPv4 address or :: to bind to any IPv6 address -# or "127.0.0.1" or "::1" to bind to localhost IPv4 or IPv6, respectively -# use the address of a specific network interface to just listen to and accept -# connections from that interface -bind_to_address = :: - -# which port the server should listen to -port = 1780 - -# serve a website from the doc_root location -# disabled if commented or empty -doc_root = /usr/share/snapserver/snapweb - -# Hostname or IP under which clients can reach this host -# used to serve cached cover art -# use as placeholder for your actual host name -#host = - -# -############################################################################### - - -# TCP RPC ##################################################################### -# -[tcp] -# enable TCP Json RPC -enabled = true - -# address to listen on, can be specified multiple times -# use "0.0.0.0" to bind to any IPv4 address or :: to bind to any IPv6 address -# or "127.0.0.1" or "::1" to bind to localhost IPv4 or IPv6, respectively -# use the address of a specific network interface to just listen to and accept -# connections from that interface -bind_to_address = :: - -# which port the server should listen to -port = 1705 - - -# -############################################################################### - - -# Stream settings ############################################################# -# -[stream] -# address to listen on, can be specified multiple times -# use "0.0.0.0" to bind to any IPv4 address or :: to bind to any IPv6 address -# or "127.0.0.1" or "::1" to bind to localhost IPv4 or IPv6, respectively -# use the address of a specific network interface to just listen to and accept -# connections from that interface -bind_to_address = :: - -# which port the server should listen to -port = 1704 - -# source URI of the PCM input stream, can be configured multiple times -# The following notation is used in this paragraph: -# : the whole expression must be replaced with your specific setting -# [square brackets]: the whole expression is optional and can be left out -# [key=value]: if you leave this option out, "value" will be the default for "key" -# -# Format: TYPE://host/path?name=[&codec=][&sampleformat=][&chunk_ms=][&controlscript=[&controlscriptparams=]] -# parameters have the form "key=value", they 
are concatenated with an "&" character -# parameter "name" is mandatory for all sources, while codec, sampleformat and chunk_ms are optional -# and will override the default codec, sampleformat or chunk_ms settings -# Available types are: -# pipe: pipe:///?name=[&mode=create], mode can be "create" or "read" -# "**********": "**********":///?name=[&username=&password=][&devicename=Snapcast][&bitrate=320][&wd_timeout=7800][&volume=100][&onevent=""][&nomalize=false][&autoplay=false][¶ms=] -# note that you need to have the librespot binary on your machine -# sampleformat will be set to "44100:16:2" -# file: file:///?name= -# process: process:///?name=[&wd_timeout=0][&log_stderr=false][¶ms=] -# airplay: airplay:///?name=[&port=5000] -# note that you need to have the airplay binary on your machine -# sampleformat will be set to "44100:16:2" -# tcp server: tcp://:?name=[&mode=server] -# tcp client: tcp://:?name=&mode=client -# alsa: alsa:///?name=&device=[&send_silence=false][&idle_threshold=100][&silence_threshold_percent=0.0] -# meta: meta://///.../?name= -source = tcp://127.0.0.1:9000?name=Mopidy - -# Default sample format: :: -#sampleformat = 48000:16:2 - -# Default transport codec -# (flac|ogg|opus|pcm)[:options] -# Start Snapserver with "--stream:codec=:?" to get codec specific options -codec = pcm - -# Default source stream read chunk size [ms]. -# The server will continously read this number of milliseconds from the source into buffer and pass this buffer to the encoder. -# The encoded buffer is sent to the clients. Some codecs have a higher latency and will need more data, e.g. Flac will need ~26ms chunks -#chunk_ms = 20 - -# Buffer [ms] -# The end-to-end latency, from capturing a sample on the server until the sample is played-out on the client -buffer = 750 - -# Send audio to muted clients -#send_to_muted = false -# - - -# Streaming client options #################################################### -# -[streaming_client] - -# Volume assigned to new snapclients [percent] -# Defaults to 100 if unset -#initial_volume = 100 -# -############################################################################### - - -# Logging options ############################################################# -# -[logging] - -# log sink [null,system,stdout,stderr,file:] -# when left empty: if running as daemon "system" else "stdout" -#sink = - -# log filter :[,:]* -# with tag = * or and level = [trace,debug,info,notice,warning,error,fatal] -#filter = *:info -# -############################################################################### -EOF - - -# Write service - -sudo touch /etc/systemd/system/mopidy.service -sudo bash -c "cat > /etc/systemd/system/mopidy.service" << EOF -[Unit] -Description=Mopidy music server -After=network.target remote-fs.target -Wants=named.service alsa-utils.service avahi-daemon.service dbus.service pipewire.service - -[Service] -Type=simple -User=$USER -Group=audio -PermissionsStartOnly=true -ExecStart=/opt/mopidy/.venv/bin/mopidy --config $HOME/.config/mopidy/mopidy.conf -Restart=on-failure -Environment=PATH=/sbin:/usr/sbin:/bin:/usr/bin:/opt/mopidy/.venv/bin - -[Install] -WantedBy=multi-user.target -EOF - - - -sudo systemctl daemon-reload -sudo systemctl enable --now snapserver -sudo systemctl enable --now snapclient -sleep 10 -sudo systemctl enable --now mopidy -sleep 3 -sudo systemctl restart snapserver - - - -systemctl status mopidy diff --git a/seeker/snippet/main.py b/seeker/snippet/main.py deleted file mode 100644 index 9c7706c7..00000000 --- a/seeker/snippet/main.py +++ 
/dev/null
@@ -1,30 +0,0 @@
-#date: 2024-07-11T16:49:14Z
-#url: https://api.github.com/gists/6cd650b97c5589b9fe4944d96e5d4bab
-#owner: https://api.github.com/users/devniel
-
-from playwright.sync_api import sync_playwright
-
-with sync_playwright() as p:
-    browser = p.chromium.connect_over_cdp("http://localhost:9222")
-    default_context = browser.contexts[0]
-    page = default_context.pages[0]
-    page.goto("https://x.com/devniel/followers")
-    page.wait_for_timeout(1000)
-    items = page.get_by_test_id("cellInnerDiv").all()
-    print(len(items))
-    # Get all followers in the page
-    users = []
-    for item in items:
-        links = item.get_by_role("link").all()
-        name = links[0].text_content()
-        username = links[1].text_content().lstrip("@")
-        print(f"{name} | {username}")
-        users.append({
-            "name": name,
-            "username": username
-        })
-    # Visit all followers profile page
-    for user in users:
-        print(user)
-        page.goto(f"https://x.com/{user["username"]}")
-
diff --git a/seeker/snippet/netip.Prefix_to_net.IPNet.go b/seeker/snippet/netip.Prefix_to_net.IPNet.go
deleted file mode 100644
index abbd0696..00000000
--- a/seeker/snippet/netip.Prefix_to_net.IPNet.go
+++ /dev/null
@@ -1,16 +0,0 @@
-//date: 2024-07-11T16:42:54Z
-//url: https://api.github.com/gists/9e9eff0e622982a82a672a20beb08e66
-//owner: https://api.github.com/users/b4nst
-
-import (
-    "net"
-    "net/netip"
-)
-
-func PrefixToIPNet(p netip.Prefix) net.IPNet {
-    ip := p.Masked().Addr().AsSlice()
-    return net.IPNet{
-        IP:   ip,
-        Mask: net.CIDRMask(p.Bits(), len(ip)*8),
-    }
-}
\ No newline at end of file
diff --git a/seeker/snippet/pres.py b/seeker/snippet/pres.py
deleted file mode 100644
index 83ca60cd..00000000
--- a/seeker/snippet/pres.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#date: 2024-07-11T17:01:47Z
-#url: https://api.github.com/gists/b992a51ca8966e27610498985ddb9fe8
-#owner: https://api.github.com/users/openbrian
-
-import sys
-from random import random
-
-# [electoral votes, chance biden, chance trump
-state = {
-    "wi": [10, 46, 48],
-    "mi": [15, 45, 47],
-    "pa": [19, 44, 48],
-    "nv": [ 6, 43, 49],
-    "az": [11, 43, 48],
-    "ga": [16, 43, 49],
-    "nc": [16, 43, 48],
-}
-
-
-biden_base = 226
-trump_base = 219
-
-
-def vote():
-    bi = biden_base
-    tr = trump_base
-    for st in state:
-        ev = state[st][0]
-        total = state[st][1] + state[st][2]
-#        total = 100
-        biden_chance = state[st][1] / total
-        trump_chance = (state[st][2] / total)
-        trump_chance = biden_chance + trump_chance
-        r = random()
-        if r < biden_chance:
-            bi += ev
-        elif r < trump_chance:
-            tr += ev
-    if bi > 270:
-        return 'b'
-    if tr > 270:
-        return 't'
-    return 'o'
-
-
-
-pres = {
-    "b": 0,
-    "t": 0,
-    "o": 0,
-}
-
-n = int(sys.argv[1])
-for i in range(n):
-    pres[vote()] += 1
-
-print()
-print()
-print(pres)
-
-for p in pres:
-    print(f"{p} {pres[p]/n}")
\ No newline at end of file
diff --git a/seeker/snippet/python-script-100-fake-customers-excel-SEED.py b/seeker/snippet/python-script-100-fake-customers-excel-SEED.py
new file mode 100644
index 00000000..7b7d1356
--- /dev/null
+++ b/seeker/snippet/python-script-100-fake-customers-excel-SEED.py
@@ -0,0 +1,32 @@
+#date: 2024-07-15T16:46:46Z
+#url: https://api.github.com/gists/2c1f39e2f487c29c0e4a51822e4282df
+#owner: https://api.github.com/users/summerofgeorge

+import openpyxl
+import random
+from faker import Faker
+
+# Set the random seed
+random.seed(1234)
+# Faker keeps its own random instance, so seed it too or the
+# generated names/addresses will not be reproducible
+Faker.seed(1234)
+
+# Initialize Faker for generating fake data
+fake = Faker()
+
+# Create a new workbook
+wb = openpyxl.Workbook()
+
+# Get the active sheet
+ws = wb.active
+
+# Add headers to the sheet
+ws.append(["Name", "Address"]) + +# Generate 100 fake customer records +for _ in range(100): + name = fake.name() + address = fake.address().replace("\n", ", ") + ws.append([name, address]) + +# Save the workbook +wb.save("FakeCustomers.xlsx") +print("Excel workbook 'FakeCustomers.xlsx' created successfully!") diff --git a/seeker/snippet/record_camera_stream.sh b/seeker/snippet/record_camera_stream.sh deleted file mode 100644 index 742d2a03..00000000 --- a/seeker/snippet/record_camera_stream.sh +++ /dev/null @@ -1,28 +0,0 @@ -#date: 2024-07-12T17:11:47Z -#url: https://api.github.com/gists/91fb40ef648bdc173cce793145b7deb6 -#owner: https://api.github.com/users/lebedev-a - -#!/bin/bash -# Bash Unofficial strict mode -set -euo pipefail - -CAMERA_HOST="$1" - -#find out where the script is located iself -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null; pwd)" -#read password from file, the same for all the cameras -PASS=$( < "${SCRIPT_DIR}"/pwd) - -#2024-07-20 -TODAY=$(date +'%Y-%m-%d') - -TARGET_DIR=/opt/cameras/"${CAMERA_HOST}" - -#ensure directory exists -mkdir -p "${TARGET_DIR}"/"${TODAY}" - -#Save stream into todays folder -ffmpeg -i "rtsp://admin:${PASS}@${CAMERA_HOST}:8554/1080p?video=all&audio=all" \ - -err_detect aggressive -c copy -f segment -segment_time 59 \ - -reset_timestamps 1 -strftime 1 \ - -segment_format mp4 "${TARGET_DIR}"/"%Y-%m-%d/%Y-%m-%d-%H-%M-%S.mp4" diff --git a/seeker/snippet/s3.sh b/seeker/snippet/s3.sh deleted file mode 100644 index ef0bb354..00000000 --- a/seeker/snippet/s3.sh +++ /dev/null @@ -1,24 +0,0 @@ -#date: 2024-07-11T16:42:35Z -#url: https://api.github.com/gists/076a452faa6cbec0ab6f04a3e504f16f -#owner: https://api.github.com/users/Babatunde13 - -#!bin/bash - -# This script will create a bucket in S3 and upload a file to it -aws s3 mb s3://bkoiki950assets - -echo "Bucket created" - -echo "THis is my first file in index" >> index.txt -echo "THis is another file in index1" >> index1.txt -echo "THis is another file in index2" >> index2.txt - -aws s3 cp index.txt s3://bkoiki950assets -aws s3 cp index1.txt s3://bkoiki950assets -aws s3 cp index2.txt s3://bkoiki950assets - -aws s3 sync . s3://bkoiki950assets # This will sync all files in the current directory to the bucket - -echo "Files uploaded" - -aws s3 ls s3://bkoiki950assets diff --git a/seeker/snippet/shell b/seeker/snippet/shell deleted file mode 100644 index e01a644d..00000000 --- a/seeker/snippet/shell +++ /dev/null @@ -1,574 +0,0 @@ -#date: 2024-07-12T17:03:29Z -#url: https://api.github.com/gists/9b713966b81a8fb5f83106060ec777e4 -#owner: https://api.github.com/users/ekitagawa - -#!/usr/bin/env bash -# -# Mirantis Container Runtime installer -# -# Script build information: -# COMMIT_SHA=175bd33bb9e4c030be7d049ac5411304ef27e28d -# COMMIT_SHA_PVAL=175bd33 -# SEMVER_VERSION=1.0.24 -# PUBLISH_STRING=stable -# - -set -e - -if [ -z "$DOCKER_URL" ]; then - echo "ERROR: DOCKER_URL must be set, exiting..." 
- exit 1 -fi - -VERSION=${VERSION:-} -CHANNEL=${CHANNEL:-test} -APT_CONTAINERD_INSTALL="containerd.io" -YUM_CONTAINERD_INSTALL="containerd.io" -ZYPPER_CONTAINERD_INSTALL="containerd.io" - -MIN_ROOTLESS_VER="20.10.12" -MIN_MCR_WITH_C8D_VER="23.0.1" - -DIST_ID="" - -if [ "$CONTAINERD_VERSION" ]; then - APT_CONTAINERD_INSTALL="containerd.io=$CONTAINERD_VERSION*" - YUM_CONTAINERD_INSTALL="containerd.io-$CONTAINERD_VERSION*" - ZYPPER_CONTAINERD_INSTALL="containerd.io=$CONTAINERD_VERSION*" -fi - -function check_sem_ver() { - if [[ $1 =~ ^(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))?(-(0|[1-9][0-9]*))?$ ]]; then - echo "$1" - else - echo "" - fi -} - -#function chech_semver_test_valid() { -# local valid=(20.10.4 23.0.3 23.0.9-1) -# for i in "${valid[@]}"; do -# if [[ ! $(check_sem_ver ${i}) ]]; then -# echo "error in chsv_test_valid, '${i}' is a valid semantic version" >&2 -# exit 1 -# fi -# done -#} - -#function check_semver_test_invalid() { -# local invalid=(1 1.2 1.2.3-0123 1.2.3-0123.0123 1.1.2-prerelease+meta 1.1.2+meta 1.1.2+meta-valid 1.0.0-alpha 1.0.0-beta 1.0.0-alpha.beta 1.0.0-alpha.beta.1 1.0.0-alpha.1 1.0.0-alpha0.valid 1.0.0-alpha.0valid 1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay 1.0.0-rc.1+build.1 2.0.0-rc.1+build.123 1.2.3-beta 10.2.3-DEV-SNAPSHOT 1.2.3-SNAPSHOT-123 2.0.0+build.1848 2.0.1-alpha.1227 1.0.0-alpha+beta 1.2.3----RC-SNAPSHOT.12.9.1--.12+788 1.2.3----R-S.12.9.1--.12+meta 1.2.3----RC-SNAPSHOT.12.9.1--.12 1.0.0+0.build.1-rc.10000aaa-kk-0.1 1.0.0-0A.is.legal 1.1.2+.123 +invalid -invalid -invalid+invalid -invalid.01 alpha alpha.beta alpha.beta.1 alpha.1 alpha+beta alpha_beta alpha. alpha.. beta 1.0.0-alpha_beta -alpha. 1.0.0-alpha.. 1.0.0-alpha..1 1.0.0-alpha...1 1.0.0-alpha....1 1.0.0-alpha.....1 1.0.0-alpha......1 1.0.0-alpha.......1 01.1.1 1.01.1 1.1.01 1.2 1.2.3.DEV 1.2-SNAPSHOT 1.2.31.2.3----RC-SNAPSHOT.12.09.1--..12+788 1.2-RC-SNAPSHOT -1.0.3-gamma+b7718 +justmeta 9.8.7+meta+meta 9.8.7-whatever+meta+meta 99999999999999999999999.999999999999999999.99999999999999999----RC-SNAPSHOT.12.09.1--------------------------------..12) -# for i in "${invalid[@]}"; do -# if [[ $(check_sem_ver ${i}) ]]; then -# echo "error in chsv_test_invalid, '${i}' is a valid semantic version" >&2 -# exit 1 -# fi -# done -#} - -get_docker_c8d_dep() { - local package="docker-ee" - local package_version=${1:?"Please specify MCR version"} - - case "${DIST_ID}" in - ubuntu) - C8D_VER=$(apt-cache show "${package}=5:${package_version}*" | grep -oP 'containerd\.io \(\K[^\)]+' | awk '{print $2}') - ;; - centos | rhel | rocky | amzn | ol) - #iq | sort -r | head -n 1`` are used for testing purposes for ability to specify release version and the latest pre-release will be taken - C8D_DEP=$(yum deplist "${package}-3:${package_version}*" | awk '/dependency: containerd.io/ {print $4}' | uniq | sort -r | head -n 1) - C8D_VER=$(yum search --showduplicates containerd.io | awk -F'io-' '/containerd.io-/ {print $2}' |grep "${C8D_DEP}" | awk '{print $1}' | sort | head -n 1) - ;; - sles | opensuse-leap) - VER=$(zypper se "${package}-3:${package_version}" | awk '/\|\ package/ {print $6}' | sort -r | head -n 1) - zypper -v install -f --allow-downgrade --download-only "${package}=${VER}" &> /dev/null - C8D_VER=$(find /var/cache/zypp/packages/ -type f -print0 -name "${package}-${package_version}*.rpm" | xargs rpm -qR | awk '/containerd.io/ {print $3}') - ;; - esac - echo "${C8D_VER}" -} - -get_c8d_mapped_version() { - local MCR_VER=$VERSION - - declare -rA MCR_C8D_DEB_MAP=(["23.0.1"]="1.6.17" 
["23.0.3"]="1.6.19" ["23.0.5"]="1.6.20" ["23.0.6"]="1.6.21" ["23.0.7"]="1.6.22" ["23.0.8"]="1.6.25~rc.1-1" ["23.0.9"]="1.6.28~rc.1-1" ["23.0.9-1"]="1.6.28~rc.1-2" ['23.0.10']="1.6.30~rc.2-1") - declare -rA MCR_C8D_RPM_MAP=(["23.0.1"]="1.6.17" ["23.0.3"]="1.6.19" ["23.0.5"]="1.6.20" ["23.0.6"]="1.6.21" ["23.0.7"]="1.6.22" ["23.0.8"]="1.6.25-2.1.rc.1.1" ["23.0.9"]="1.6.28-2.1.rc.1.1" ["23.0.9-1"]="1.6.28-3.1.rc.1.1" ["23.0.10"]="1.6.30-2.2.rc.2.1") - - case "$DIST_ID" in - ubuntu) - C8D_VER=${MCR_C8D_DEB_MAP[${MCR_VER}]} - if [ -n "${C8D_VER}" ]; then - echo "${C8D_VER}" - return 0 - fi - ;; - centos | rhel | rocky | amzn | ol | sles | opensuse-leap) - C8D_VER=${MCR_C8D_RPM_MAP[${MCR_VER}]} - if [ -n "${C8D_VER}" ]; then - echo "${C8D_VER}" - return 0 - fi - ;; - esac -} - -get_docker_mapped_version() { - local MCR_VER=$VERSION - - declare -rA MCR_DOCKER_DEB_MAP=(["23.0.9"]="23.0.9~3-" ["23.0.9-1"]="23.0.9~4") - declare -rA MCR_DOCKER_RPM_MAP=(["23.0.9"]="23.0.9-3" ["23.0.9-1"]="23.0.9-4") - - case "$DIST_ID" in - ubuntu) - DOCKER_VER=${MCR_DOCKER_DEB_MAP[${MCR_VER}]} - if [ -n "${DOCKER_VER}" ]; then - echo "${DOCKER_VER}" - return 0 - fi - ;; - centos | rhel | rocky | amzn | ol | sles | opensuse-leap) - DOCKER_VER=${MCR_DOCKER_RPM_MAP[${MCR_VER}]} - if [ -n "${DOCKER_VER}" ]; then - echo "${DOCKER_VER}" - return 0 - fi - ;; - esac - echo "${MCR_VER}" -} - -get_c8d_version() { - local MCR_VER=${1:-$VERSION} - - if [ -n "${CONTAINERD_VERSION}" ]; then - echo "${CONTAINERD_VERSION}" - return 0 - fi - - C8D_VER="$(get_c8d_mapped_version)" - if [ -n "$C8D_VER" ]; then - echo "${C8D_VER}" - return 0 - fi - - C8D_VER=$(get_docker_c8d_dep "${MCR_VER}") - - echo "${C8D_VER}" - return 0 -} - -command_exists() { - command -v "$@" > /dev/null 2>&1 -} - -on_ec2() { - if [ -f /sys/hypervisor/uuid ] && [ "$(head -c 3 /sys/hypervisor/uuid)" == ec2 ]; then - return 0 - else - return 1 - fi -} - -strip_trailing_slash() { - echo "$1" | sed 's/\/$//' -} - -# version_gte checks if the version specified in $VERSION is at least -# the given CalVer (YY.MM) version. returns 0 (success) if $VERSION is either -# unset (=latest) or newer or equal than the specified version. Returns 1 (fail) -# otherwise. -# -# examples: -# -# VERSION=20.10 -# version_gte 20.10 // 0 (success) -# version_gte 19.03 // 0 (success) -# version_gte 21.10 // 1 (fail) -version_gte() { - if [ -z "$VERSION" ]; then - return 0 - fi - # Cut "-" used in case off dev/tp/rc builds - clean_version="$(echo "$VERSION" | cut -d'-' -f1)" - eval calver_compare "$clean_version" "$1" -} - -# calver_compare compares two CalVer (YY.MM.VER) version strings. returns 0 (success) -# if version A is newer or equal than version B, or 1 (fail) otherwise. Patch -# releases and pre-release (-alpha/-beta) are not taken into account -# -# examples: -# -# calver_compare 20.10.12 19.03 // 0 (success) -# calver_compare 20.10.12 20.10.12 // 0 (success) -# calver_compare 19.03.02 20.10.12 // 1 (fail) -calver_compare() ( - set +x - - yy_a="$(echo "$1" | cut -d'.' -f1)" - yy_b="$(echo "$2" | cut -d'.' -f1)" - if (( "$yy_a" < "$yy_b" )); then - return 1 - fi - if (( "$yy_a" > "$yy_b" )); then - return 0 - fi - mm_a="$(echo "$1" | cut -d'.' -f2)" - mm_b="$(echo "$2" | cut -d'.' -f2)" - if (( "${mm_a}" < "${mm_b}" )); then - return 1 - fi - ver_a="$(echo "$1" | cut -d'.' -f3)" - ver_b="$(echo "$2" | cut -d'.' 
-f3)" - if (( "$ver_a" < "$ver_b" )); then - return 1 - fi - - return 0 -) - -ubuntu_install() { - local dist_version="$1" - export DEBIAN_FRONTEND=noninteractive - local pre_reqs="apt-transport-https ca-certificates curl software-properties-common" - if ! command -v gpg > /dev/null; then - pre_reqs="$pre_reqs gnupg" - fi - local ubuntu_url - ( - set -ex - $sh_c "apt-get update -qq" - $sh_c "apt-get install -y -qq $pre_reqs >/dev/null" - ) - ubuntu_url=$(strip_trailing_slash "$DOCKER_URL") - # - # Check if we have a gpg (should be valid repo to use if it's there) before appending suffix - if ! curl -fsSL "$ubuntu_url/gpg" >/dev/null; then - # URL's may not be suffixed with ubuntu, let's make sure that they are - if [[ ! "$ubuntu_url" =~ /ubuntu$ ]]; then - ubuntu_url="$ubuntu_url/ubuntu" - fi - fi - local arch - arch="$(dpkg --print-architecture)" - local release - # Grab this outside of the command to install so it's not muddled - release="$(lsb_release -cs)" - ( - set -ex - $sh_c "curl -fsSL $ubuntu_url/gpg | apt-key add -qq - >/dev/null" - $sh_c "add-apt-repository -y 'deb [arch=$arch] $ubuntu_url $release $CHANNEL' >/dev/null" - $sh_c "apt-get update -qq >/dev/null" - ) - local package="docker-ee" - local package_version="" - # By default don't include a cli_package and rootless_package to install just let the package manager grab the topmost one - local cli_package="" - local rootless_package="" - local allow_downgrade="" - # Grab the specific version, base it off of regex patterns - if [ -n "$docker_version" ]; then - package_pattern="${docker_version}" - local search_command="apt-cache madison '$package' | grep '$package_pattern' | sort -r | head -1 | cut -d' ' -f 4" - package_version="$($sh_c "$search_command")" - local cli_search_command="apt-cache madison '$package-cli' | grep '$package_pattern' | sort -r | head -1 | cut -d' ' -f 3" - cli_package_version="$($sh_c "$cli_search_command")" - if version_gte "$MIN_ROOTLESS_VER"; then - local rootless_search_command="apt-cache madison '$package-rootless-extras' | sort -r | grep '$package_pattern' | head -1 | cut -d' ' -f 3" - rootless_package_version="$($sh_c "$rootless_search_command")" - fi - echo "INFO: Searching repository for VERSION '$docker_version'" - echo "INFO: $search_command" - if [ -z "$package_version" ]; then - echo - echo "ERROR: '$docker_version' not found amongst apt-cache madison results" - echo - exit 1 - fi - # If a cli package was found for the given version then include it in the install - if [ -n "$cli_package_version" ]; then - cli_package="$package-cli=$cli_package_version" - fi - # If a rootless package was found for the given version then include it in the install - if [ -n "$rootless_package_version" ]; then - rootless_package="$package-rootless-extras=$rootless_package_version" - fi - package_version="=$package_version" - - if [ "$dist_version" != "14.04" ]; then - allow_downgrade="--allow-downgrades" - fi - - if version_gte "$MIN_MCR_WITH_C8D_VER"; then - c8d_version="$(get_c8d_version)" - if [ -n "${c8d_version}" ]; then - APT_CONTAINERD_INSTALL="containerd.io=${c8d_version}*" - fi - fi - fi - ( - set -ex - $sh_c "apt-get install -y $allow_downgrade -qq $package$package_version $cli_package $rootless_package $APT_CONTAINERD_INSTALL" - ) -} - -yum_install() { - local DIST_ID="$1" - local dist_version="$2" - local yum_url - yum_url=$(strip_trailing_slash "$DOCKER_URL") - ( - set -ex - $sh_c "rpm -qa | grep curl || yum install -q -y curl" - ) - # Check if we have a usable repo file before appending suffix - 
if ! curl -fsSL "$yum_url/docker-ee.repo" >/dev/null; then - if [[ ! "$yum_url" =~ /centos$|/rhel$|rocky$ ]]; then - yum_url="$yum_url/$DIST_ID" - fi - fi - case $DIST_ID:$dist_version in - oraclelinux:7*) - # Enable "Oracle Linux 7 Server Add ons (x86_64)" repo for oraclelinux7 - ( - set -ex - $sh_c 'yum-config-manager --enable ol7_addons' - ) - ;; - rhel:7*) - extras_repo="rhel-7-server-extras-rpms" - if on_ec2; then - $sh_c "yum install -y rh-amazon-rhui-client" - extras_repo="rhel-7-server-rhui-extras-rpms" - fi - # We don't actually make packages for 7.1 but they can still use the 7 repository - if [ "$dist_version" = "7.1" ]; then - dist_version="7" - fi - # Enable extras repo for rhel - ( - set -ex - $sh_c "yum-config-manager --enable $extras_repo" - ) - ;; - esac - # TODO: For Docker EE 17.03 a targeted version of container-selinux needs to be - # installed. See: https://github.com/docker/release-repo/issues/62 - ( - set -ex - $sh_c "echo '$yum_url' > /etc/yum/vars/dockerurl" - $sh_c "echo '$dist_version' > /etc/yum/vars/dockerosversion" - $sh_c "yum install -q -y yum-utils device-mapper-persistent-data lvm2" - $sh_c "yum-config-manager --add-repo $yum_url/docker-ee.repo" - $sh_c "yum-config-manager --disable 'docker-ee-*'" - $sh_c "yum-config-manager --enable 'docker-ee-$CHANNEL'" - ) - local package="docker-ee" - local package_version="" - # By default don't include a cli_package and rootless_package to install just let the package manager grab the topmost one - local cli_package="" - local rootless_package="" - local install_cmd="install" - if [ -n "$VERSION" ]; then - package_pattern="${docker_version}" - local search_command="yum list --showduplicates '$package' | grep '$package_pattern' | sort -r | tail -1 | awk '{print \$2}'" - package_version="$($sh_c "$search_command")" - local cli_search_command="yum list --showduplicates '$package-cli' | grep '$package_pattern' | sort -r | tail -1 | awk '{print \$2}'" - cli_package_version="$($sh_c "$cli_search_command")" - if version_gte "$MIN_ROOTLESS_VER" && [ "$DIST_ID:$dist_version" != "oraclelinux:7" ]; then - local rootless_search_command="yum list --showduplicates '$package-rootless-extras' | sort -r | grep '$package_pattern' | tail -1 | awk '{print \$2}'" - rootless_package_version="$($sh_c "$rootless_search_command")" - fi - echo "INFO: Searching repository for VERSION '$docker_version'" - echo "INFO: $search_command" - if [ -z "$package_version" ]; then - echo - echo "ERROR: '$docker_version' not found amongst yum list results" - echo - exit 1 - fi - if [ -n "$cli_package_version" ]; then - cli_package="$package-cli-$(echo "${cli_package_version}" | cut -d':' -f 2)" - fi - if [ -n "$rootless_package_version" ]; then - rootless_package="$package-rootless-extras-$(echo "${rootless_package_version}" | cut -d':' -f 2)" - fi - # Cut out the epoch and prefix with a '-' - package_version="$(echo "$package_version" | cut -d':' -f 2)" - package_version_dash="-${package_version}" - - # Check if we're doing an upgrade / downgrade and the command accordingly - echo "INFO: Checking to determine whether this should be an upgrade or downgrade" - # If the package isn't realdy installed then don't try upgrade / downgrade - if ! $sh_c "yum list installed $package" >/dev/null; then - install_cmd="install" - # Exit codes when using --assumeno will give 0 if there would be an upgrade/downgrade, 1 if there is - elif ! $sh_c "yum upgrade --assumeno $package$package_version_dash"; then - install_cmd="upgrade" - elif ! 
$sh_c "yum downgrade --assumeno $package$package_version_dash"; then - install_cmd="downgrade" - fi - echo "INFO: will use install command $install_cmd" - if version_gte "$MIN_MCR_WITH_C8D_VER"; then - c8d_version=$(get_c8d_version "${VERSION}") - YUM_CONTAINERD_INSTALL="containerd.io-${c8d_version}*" - fi - fi - ( - set -ex - $sh_c "yum $install_cmd -q -y $package$package_version_dash $cli_package $rootless_package $YUM_CONTAINERD_INSTALL" - ) -} - -zypper_install() { - local arch - arch="$(uname -m)" - local dist_version - dist_version=$1 - local repo_version - local zypper_flags="" - case "$dist_version" in - 12*) - repo_version=12.3 - ;; - 15*) - zypper_flags=" --allow-vendor-change" - repo_version=15 - ;; - esac - ( - set -ex - $sh_c "zypper install -y curl" - ) - local zypper_url - zypper_url=$(strip_trailing_slash "$DOCKER_URL") - # No need to append sles if we already have a valid repo - if ! curl -fsL "$zypper_url/docker-ee.repo" >/dev/null; then - zypper_url="$zypper_url/sles" - fi - ( - set -ex - $sh_c "zypper removerepo docker-ee-$CHANNEL" # this will always return 0 even if repo alias not found - $sh_c "zypper addrepo $zypper_url/$repo_version/$arch/$CHANNEL docker-ee-$CHANNEL" - $sh_c "rpm --import '$zypper_url/gpg'" - $sh_c "zypper refresh" - ) - local package="docker-ee" - local package_version="" - # By default don't include a cli_package and rootless_package to install just let the package manager grab the topmost one - local cli_package="" - local rootless_package="" - if [ -n "$VERSION" ]; then - local package_pattern - package_pattern="${docker_version}" - local search_command="zypper search -s '$package' | sed $'s/\t/ /g' | grep '$package ' | grep '$package_pattern' | sort -r | tr -d '[:space:]' | cut -d'|' -f 4" - package_version="$($sh_c "$search_command")" - local cli_search_command="zypper search -s '$package-cli' | grep '$package_pattern' | sort -r | tr -d '[:space:]' | cut -d'|' -f 4" - cli_package_version="$($sh_c "$cli_search_command")" - if version_gte "$MIN_ROOTLESS_VER" && [ "$repo_version" != "12.3" ]; then - local rootless_search_command="zypper search -s '$package-rootless-extras' | grep '$package_pattern' | sort -r | tr -d '[:space:]' | cut -d'|' -f 4" - rootless_package_version="$($sh_c "$rootless_search_command")" - fi - echo "INFO: Searching repository for VERSION '$docker_version'" - echo "INFO: $search_command" - if [ -z "$package_version" ]; then - echo - echo "ERROR: '$docker_version' not found amongst zypper search results" - echo - exit 1 - fi - if [ -n "$cli_package_version" ]; then - cli_package="$package-cli-$cli_package_version" - fi - if [ -n "$rootless_package_version" ]; then - rootless_package="$package-rootless-extras-$rootless_package_version" - fi - package_version="-$package_version" - if version_gte "$MIN_MCR_WITH_C8D_VER"; then - c8d_version="$(get_c8d_version "${VERSION}")" - ZYPPER_CONTAINERD_INSTALL="containerd.io=${c8d_version}*" - fi - fi - ( - set -ex - $sh_c "zypper rm -y docker docker-engine docker-libnetwork runc containerd || true" - $sh_c "zypper install $zypper_flags --replacefiles -f -y '$package$package_version' $ZYPPER_CONTAINERD_INSTALL $cli_package $rootless_package" - ) - - # cli package is installed, and we want to pin a version - if rpm -qa | grep "$package-cli" >/dev/null 2>/dev/null; then - if [ -n "$VERSION" ]; then - # zypper treats versions differently so we'll have to search for the version again - local search_command="zypper search -s '$package-cli' | grep '$package_pattern' | tr -d '[:space:]' | cut 
-d'|' -f 4" - package_version="-$($sh_c "$search_command")" - ( - set -ex - $sh_c "zypper install -f -y '$package-cli$package_version' $ZYPPER_CONTAINERD_INSTALL" - ) - fi - fi -} - -main() { - user="$(id -un 2>/dev/null || true)" - sh_c='sh -c' - if [ "$user" != 'root' ]; then - if command_exists sudo; then - sh_c='sudo -E sh -c' - elif command_exists su; then - sh_c='su -c' - else - cat >&2 <<-'EOF' - Error: this installer needs the ability to run commands as root. - We are unable to find either "sudo" or "su" available to make this happen. - EOF - exit 1 - fi - fi - semver=$(check_sem_ver "$VERSION") - if [ -z "$semver" ]; then - echo "$VERSION doesn't match with expected pattern. Version must follow this pattern a.b, a.b.c or a.b.c-d, where a,b,c,d are numbers." - exit 1 - fi - - # shellcheck disable=SC1091 - DIST_ID="$(. /etc/os-release && echo "$ID")" - # shellcheck disable=SC1091 - dist_version="$(. /etc/os-release && echo "$VERSION_ID")" - - docker_version="$(get_docker_mapped_version)" - - case "$DIST_ID:$dist_version" in - ubuntu:14.04|ubuntu:16.04|ubuntu:18.04|ubuntu:20.04|ubuntu:22.04) - ubuntu_install "$dist_version" - exit 0 - ;; - centos:*|rhel:*|rocky:*) - # Strip point versions, they don't really matter - yum_install "$DIST_ID" "${dist_version/\.*/}" - exit 0 - ;; - amzn:2) - yum_install amazonlinux 2 - exit 0 - ;; - ol:*) - # Consider only major version for OL distros - dist_version=${dist_version%%.*} - yum_install "oraclelinux" "$dist_version" - exit 0 - ;; - sles:12*|sles:15*|opensuse-leap:15*) - zypper_install "$dist_version" - exit 0 - ;; - *) - echo - echo "ERROR: Unsupported distribution / distribution version '$DIST_ID:$dist_version'" - echo " If you feel this is a mistake file an issue @ https://github.com/docker/docker-install-ee" - echo - exit 1 - ;; - esac -} - -main diff --git a/seeker/snippet/spark-local.py b/seeker/snippet/spark-local.py deleted file mode 100644 index 7de313df..00000000 --- a/seeker/snippet/spark-local.py +++ /dev/null @@ -1,24 +0,0 @@ -#date: 2024-07-12T16:36:46Z -#url: https://api.github.com/gists/2f282e8fc34488ba150542033c9f2c82 -#owner: https://api.github.com/users/iYadavVaibhav - -from pyspark.sql import SparkSession - -# Create a SparkSession -spark = SparkSession.builder \ - .appName("LocalModeExample") \ - .master("local") \ - .getOrCreate() - -# Example DataFrame operation -data = [("Alice", 1), ("Bob", 2), ("Cathy", 3)] -columns = ["Name", "Value"] - -# Create DataFrame -df = spark.createDataFrame(data, columns) - -# Show DataFrame -df.show() - -# Stop the SparkSession -spark.stop() diff --git a/seeker/snippet/test.py b/seeker/snippet/test.py new file mode 100644 index 00000000..9b86921e --- /dev/null +++ b/seeker/snippet/test.py @@ -0,0 +1,81 @@ +#date: 2024-07-15T16:44:40Z +#url: https://api.github.com/gists/09b66e5290d0765cdb0019073fc5db08 +#owner: https://api.github.com/users/LajnaLegenden + +import requests +import time +import matplotlib.pyplot as plt +from collections import deque +import signal +import concurrent.futures +import threading + +url = "" + +results = deque(maxlen=100) # Store up to 100 data points +interval = 10 # Time interval in seconds +pause_time = 5 # Pause time between intervals +max_concurrent_requests = 10 # Maximum number of concurrent requests + +request_count = 0 +request_count_lock = threading.Lock() +stop_event = threading.Event() + +def signal_handler(sig, frame): + print("Interrupt received. 
Saving graph...") + save_graph() + exit(0) + +signal.signal(signal.SIGINT, signal_handler) + +def save_graph(): + plt.figure(figsize=(12, 6)) + plt.plot(range(1, len(results) + 1), results) + plt.title("Requests per 10-second Interval") + plt.xlabel("Interval Number") + plt.ylabel("Number of Requests") + plt.grid(True) + plt.savefig("request_performance.png") + print("Graph saved as request_performance.png") + +def make_request(url): + global request_count + if not stop_event.is_set(): + try: + print(f"Requesting URL: {url}") + requests.get(url, timeout=9.5) # Set timeout to slightly less than interval + with request_count_lock: + request_count += 1 + except requests.exceptions.RequestException: + print("Request timed out or failed") + +try: + with concurrent.futures.ThreadPoolExecutor(max_workers=max_concurrent_requests) as executor: + while True: + stop_event.clear() + request_count = 0 + start_time = time.time() + + futures = [] + while time.time() - start_time < interval: + futures.append(executor.submit(make_request, url)) + time.sleep(0.1) # Small sleep to prevent CPU overuse + + stop_event.set() # Signal threads to stop + + # Cancel any pending futures + for future in futures: + future.cancel() + + # Wait for all futures to complete or be cancelled + concurrent.futures.wait(futures, timeout=0.5) + + results.append(request_count) + print(f"Completed {request_count} requests in {interval} seconds") + + print(f"Pausing for {pause_time} seconds...") + time.sleep(pause_time) + +except KeyboardInterrupt: + print("Interrupt received. Saving graph...") + save_graph() \ No newline at end of file diff --git a/seeker/snippet/timer.py b/seeker/snippet/timer.py deleted file mode 100644 index 517df1cd..00000000 --- a/seeker/snippet/timer.py +++ /dev/null @@ -1,12 +0,0 @@ -#date: 2024-07-12T16:54:46Z -#url: https://api.github.com/gists/a89faf26652ccf9a10933383ef29c5ea -#owner: https://api.github.com/users/richard-to - -@contextmanager -def timer(name): - """Context manager for timing code blocks with custom name.""" - start_time = time.time() - yield - end_time = time.time() - elapsed_time = (end_time - start_time) * 1000 - print(f"{name} took {elapsed_time:.2f} ms") diff --git a/seeker/snippet/us_inflation.py b/seeker/snippet/us_inflation.py deleted file mode 100644 index 557d3b22..00000000 --- a/seeker/snippet/us_inflation.py +++ /dev/null @@ -1,28 +0,0 @@ -#date: 2024-07-11T17:01:03Z -#url: https://api.github.com/gists/d6c9c84f55fbc21fa8edcba7d56e0a88 -#owner: https://api.github.com/users/farzonl - -import matplotlib.pyplot as plt - -# Data for inflation rates by month for each year -months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] - -inflation_2024 = [3.1, 3.2, 3.5, 3.4, 3.3, 3.0] -inflation_2023 = [6.4, 6.0, 5.0, 4.9, 4.0, 3.0, 3.2, 3.7, 3.7, 3.2, 3.1, 3.4] -inflation_2022 = [7.5, 7.9, 8.5, 8.3, 8.6, 9.1, 8.5, 8.3, 8.2, 7.7, 7.1, 6.5] -inflation_2021 = [1.4, 1.7, 2.6, 4.2, 5.0, 5.4, 5.4, 5.3, 5.4, 6.2, 6.8, 7.0] - -# Plotting the data -plt.figure(figsize=(12, 6)) - -plt.plot(months[:len(inflation_2024)], inflation_2024, marker='o', label='2024') -plt.plot(months, inflation_2023, marker='o', label='2023') -plt.plot(months, inflation_2022, marker='o', label='2022') -plt.plot(months, inflation_2021, marker='o', label='2021') - -plt.xlabel('Month') -plt.ylabel('Inflation Rate (%)') -plt.title('Monthly Inflation Rate (2021-2024)') -plt.legend() -plt.grid(True) -plt.show() diff --git a/seeker/snippet/us_unemployment.py b/seeker/snippet/us_unemployment.py 
deleted file mode 100644 index 92c0508f..00000000 --- a/seeker/snippet/us_unemployment.py +++ /dev/null @@ -1,30 +0,0 @@ -#date: 2024-07-11T17:11:02Z -#url: https://api.github.com/gists/cbe211b67ecf6198e06b188df64867d7 -#owner: https://api.github.com/users/farzonl - -import matplotlib.pyplot as plt - -# Data for unemployment rates by month for each year -months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] - -unemployment_2020 = [3.6, 3.5, 4.4, 14.8, 13.2, 11.0, 10.2, 8.4, 7.8, 6.8, 6.7, 6.7] -unemployment_2021 = [6.4, 6.2, 6.1, 6.1, 5.8, 5.9, 5.4, 5.1, 4.7, 4.5, 4.1, 3.9] -unemployment_2022 = [4.0, 3.8, 3.6, 3.7, 3.6, 3.6, 3.5, 3.6, 3.5, 3.6, 3.6, 3.5] -unemployment_2023 = [3.4, 3.6, 3.5, 3.4, 3.7, 3.6, 3.5, 3.8, 3.8, 3.8, 3.7, 3.7] -unemployment_2024 = [3.7, 3.9, 3.8, 3.9, 4.0, 4.1] - -# Plotting the data -plt.figure(figsize=(12, 6)) - -plt.plot(months, unemployment_2020, marker='o', label='2020') -plt.plot(months, unemployment_2021, marker='o', label='2021') -plt.plot(months, unemployment_2022, marker='o', label='2022') -plt.plot(months, unemployment_2023, marker='o', label='2023') -plt.plot(months[:len(unemployment_2024)], unemployment_2024, marker='o', label='2024') - -plt.xlabel('Month') -plt.ylabel('Unemployment Rate (%)') -plt.title('Monthly Unemployment Rate (2020-2024)') -plt.legend() -plt.grid(True) -plt.show() diff --git a/seeker/snippet/vxlan_hp.py b/seeker/snippet/vxlan_hp.py deleted file mode 100644 index dc49db77..00000000 --- a/seeker/snippet/vxlan_hp.py +++ /dev/null @@ -1,24 +0,0 @@ -#date: 2024-07-12T16:45:01Z -#url: https://api.github.com/gists/2f1f066a9476f237e0a2608acd807d1d -#owner: https://api.github.com/users/kaowul - -import bcc -import time -from pyroute2 import IPRoute, NetNS, IPDB, NSPopen - -b = bcc.BPF(src_file="vxlan_hp.c", debug=0) -fin = b.load_func("handle_ingress", bcc.BPF.SCHED_CLS) -fout = b.load_func("handle_egress", bcc.BPF.SCHED_CLS) - -ipr = IPRoute() -ipdb = IPDB(nl=ipr) - -ifc = ipdb.interfaces.enp0s8 - -ipr.tc("add", "ingress", ifc.index, "ffff:") -ipr.tc("add-filter", "bpf", ifc.index, ":1", fd=fin.fd, name=fin.name, parent="ffff:", action="ok", classid=1) -ipr.tc("add", "sfq", ifc.index, "1:") -ipr.tc("add-filter", "bpf", ifc.index, ":1", fd=fout.fd, name=fout.name, parent="1:", action="ok", classid=1) - -while True: - time.sleep(5)