diff --git a/seeker/report.txt b/seeker/report.txt
index b5dcc527..e2458143 100644
--- a/seeker/report.txt
+++ b/seeker/report.txt
@@ -1,3 +1,39 @@
+--------------------------------------------------------------------------------
+ 2024-09-20 17:12:11.098626
+--------------------------------------------------------------------------------
+ On branch main
+Your branch is up to date with 'origin/main'.
+
+Changes not staged for commit:
+  (use "git add/rm <file>..." to update what will be committed)
+  (use "git restore <file>..." to discard changes in working directory)
+	deleted:    snippet/Insert Shapes in Word using Java.java
+	deleted:    snippet/angular.Dockerfile
+	deleted:    snippet/aws_enum.sh
+	deleted:    snippet/check-artifact-registry-batch-deletions.py
+	deleted:    snippet/install-awscli.sh
+	deleted:    snippet/kafka-move-leadership.sh
+	deleted:    snippet/main.py
+	deleted:    snippet/spring.Dockerfile
+
+Untracked files:
+  (use "git add <file>..." to include in what will be committed)
+	snippet/LambdaBenchmarkCase.java
+	snippet/clone.sh
+	snippet/demo.py
+	snippet/fix-json.py
+	snippet/list-delete-assets.sh
+	snippet/list-delete-index.sh
+	snippet/mario_cube_dsi_ware_dumper.py
+	snippet/md-pdf.sh
+	snippet/migrate-local-dev.sh
+	snippet/pgmm_decrypt.py
+	snippet/pyunit_test_pyppeteer_browser_session.py
+	snippet/ravaen_normalisation_manual_extract.py
+	snippet/unifi.sh
+
+no changes added to commit (use "git add" and/or "git commit -a")
+
 --------------------------------------------------------------------------------
  2024-09-19 17:12:45.215182
 --------------------------------------------------------------------------------
diff --git a/seeker/snippet/Insert Shapes in Word using Java.java b/seeker/snippet/Insert Shapes in Word using Java.java
deleted file mode 100644
index d8c50215..00000000
--- a/seeker/snippet/Insert Shapes in Word using Java.java
+++ /dev/null
@@ -1,37 +0,0 @@
-//date: 2024-09-18T16:54:15Z
-//url: https://api.github.com/gists/133d9815f8bd9b36601b1925952eeb4c
-//owner: https://api.github.com/users/aspose-com-kb
-
-import com.aspose.words.*;
-
-public class Main
-{
-    public static void main(String[] args) throws Exception // Adding shapes in Java
-    {
-        // Set the licenses
-        new License().setLicense("License.lic");
-
-        Document doc = new Document();
-        DocumentBuilder builder = new DocumentBuilder(doc);
-
-        //Inline shape
-        Shape shape = builder.insertShape(ShapeType.LINE, 200, 200);
-        shape.setRotation(35.0);
-
-        //Free-floating shape
-        shape = builder.insertShape
-            ( ShapeType.ARROW,RelativeHorizontalPosition.PAGE,250,
-            RelativeVerticalPosition.PAGE,150,150,150,WrapType.INLINE);
-        shape.setRotation(40.0);
-        builder.writeln();
-        OoxmlSaveOptions saveOptions = new OoxmlSaveOptions(SaveFormat.DOCX);
-
-        // Save shapes as DML
-        saveOptions.setCompliance(OoxmlCompliance.ISO_29500_2008_TRANSITIONAL);
-
-        // Save the document
-        doc.save("output.docx", saveOptions);
-
-        System.out.println("Shapes added successfully");
-    }
-}
\ No newline at end of file
diff --git a/seeker/snippet/LambdaBenchmarkCase.java b/seeker/snippet/LambdaBenchmarkCase.java
new file mode 100644
index 00000000..a40de9f0
--- /dev/null
+++ b/seeker/snippet/LambdaBenchmarkCase.java
@@ -0,0 +1,166 @@
+//date: 2024-09-20T16:55:22Z
+//url: https://api.github.com/gists/088b55f552353f71e71a5e34f6dfdef3
+//owner: https://api.github.com/users/dreamlike-ocean
+
+package io.github.dreamlike.stableValue.Benchmark;
+
+import org.openjdk.jmh.annotations.*;
+import org.openjdk.jmh.infra.Blackhole;
+
+import java.lang.classfile.AccessFlags;
+import java.lang.classfile.ClassFile;
+import java.lang.classfile.TypeKind;
+import java.lang.constant.ClassDesc;
+import java.lang.constant.ConstantDescs;
+import java.lang.constant.DynamicCallSiteDesc;
+import java.lang.constant.MethodTypeDesc;
+import java.lang.invoke.*;
+import java.lang.reflect.AccessFlag;
+import java.lang.reflect.Method;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Supplier;
+
+import static java.lang.constant.ConstantDescs.*;
+
+@State(Scope.Benchmark)
+@Warmup(iterations = 5, time = 200, timeUnit = TimeUnit.MILLISECONDS)
+@BenchmarkMode(Mode.Throughput)
+@Threads(value = 5)
+@Measurement(iterations = 2, time = 200, timeUnit = TimeUnit.MILLISECONDS)
+public class LambdaBenchmarkCase {
+//Benchmark Mode Cnt Score Error Units
+//stableValue.Benchmark.LambdaBenchmarkCase.testCallSite thrpt 10 24255154361.137 ± 1457667464.139 ops/s
+//stableValue.Benchmark.LambdaBenchmarkCase.testConst thrpt 10 24519036075.193 ± 928404771.651 ops/s
+//stableValue.Benchmark.LambdaBenchmarkCase.testDirect thrpt 10 24727533894.822 ± 592544888.327 ops/s
+//stableValue.Benchmark.LambdaBenchmarkCase.testLazy thrpt 10 2835881869.811 ± 213543161.668 ops/s
+//stableValue.Benchmark.LambdaBenchmarkCase.testLmf thrpt 10 13112492685.481 ± 1007502930.123 ops/s
+//stableValue.Benchmark.LambdaBenchmarkCase.testLmfInvokeDynamic thrpt 10 13102632461.980 ± 564969148.037 ops/s
+//stableValue.Benchmark.LambdaBenchmarkCase.testReflect thrpt 10 10570293119.587 ± 214468018.588 ops/s
+//stableValue.Benchmark.LambdaBenchmarkCase.testReflectLazy thrpt 10 1473938487.388 ± 22329375.514 ops/s
+    private final static MethodHandle ADD_MH;
+    private final static Method METHOD;
+    private static final MutableCallSite callSite = new MutableCallSite(
+            MethodType.methodType(int.class, int.class, int.class)
+    );
+    private static final MethodHandle callSiteInvoke = callSite.dynamicInvoker();
+
+    static {
+        try {
+            ADD_MH = MethodHandles.lookup().findStatic(Math.class, "addExact", MethodType.methodType(int.class, int.class, int.class));
+            METHOD = Math.class.getMethod("addExact", int.class, int.class);
+        } catch (NoSuchMethodException | IllegalAccessException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private final Method LazyMethod;
+
+    private final MethodHandle ADD_LAZY_MH;
+
+    private final Add add;
+
+    private final Add addInvokeDynamic;
+
+    public LambdaBenchmarkCase() {
+        callSite.setTarget(ADD_MH);
+        ADD_LAZY_MH = ADD_MH;
+        LazyMethod = METHOD;
+
+        try {
+            add = (Add) LambdaMetafactory.metafactory(
+                    MethodHandles.lookup(),
+                    "apply",
+                    MethodType.methodType(Add.class),
+                    MethodType.methodType(int.class, int.class, int.class),
+                    ADD_MH,
+                    ADD_MH.type()
+            ).getTarget().invokeExact();
+
+            addInvokeDynamic = generate(() -> ADD_MH);
+        } catch (Throwable e) {
+            throw new RuntimeException(e);
+        }
+        VarHandle.storeStoreFence();
+    }
+
+    public static Add generate(Supplier<MethodHandle> supplier) throws IllegalAccessException, InstantiationException {
+        byte[] classByteCode = ClassFile.of()
+                .build(ClassDesc.of(LambdaBenchmarkCase.class.getName() + "AddImpl"), cb -> {
+                    cb.withInterfaceSymbols(Add.class.describeConstable().get());
+                    cb.withMethodBody(ConstantDescs.INIT_NAME, ConstantDescs.MTD_void, AccessFlags.ofMethod(AccessFlag.PUBLIC).flagsMask(), it -> {
+                        it.aload(0);
+                        it.invokespecial(CD_Object, INIT_NAME, MTD_void);
+                        it.return_();
+                    });
+                    cb.withMethodBody("apply",
+                            MethodTypeDesc.of(CD_int, CD_int, CD_int),
+                            AccessFlags.ofMethod(AccessFlag.PUBLIC, AccessFlag.SYNTHETIC).flagsMask(),
+                            it -> {
+                                it.iload(1);
+                                it.iload(2);
+                                it.invokeDynamicInstruction(
+                                        DynamicCallSiteDesc.of(
+                                                ConstantDescs.ofCallsiteBootstrap(LambdaBenchmarkCase.class.describeConstable().get(), "indyLambdaFactory", ConstantDescs.CD_CallSite),
+                                                "apply",
+                                                MethodTypeDesc.of(CD_int, CD_int, CD_int)
+                                        )
+                                );
+                                it.returnInstruction(TypeKind.IntType);
+                            });
+                });
+
+        MethodHandles.Lookup lookup = MethodHandles.lookup()
+                .defineHiddenClassWithClassData(classByteCode, supplier, true);
+
+        return (Add) lookup.lookupClass().newInstance();
+    }
+
+    public static CallSite indyLambdaFactory(MethodHandles.Lookup lookup, String name, MethodType type) throws NoSuchFieldException, IllegalAccessException {
+        MethodHandle methodHandle = ((Supplier<MethodHandle>) MethodHandles.classData(lookup, ConstantDescs.DEFAULT_NAME, Supplier.class)).get();
+        return new ConstantCallSite(methodHandle);
+    }
+
+    @Benchmark
+    public void testDirect(Blackhole bh) {
+        bh.consume(Math.addExact(1, 2));
+    }
+
+    @Benchmark
+    public void testLazy(Blackhole bh) throws Throwable {
+        bh.consume((int) ADD_LAZY_MH.invokeExact(1, 2));
+    }
+
+    @Benchmark
+    public void testCallSite(Blackhole bh) throws Throwable {
+        bh.consume((int) callSiteInvoke.invokeExact(1, 2));
+    }
+
+    @Benchmark
+    public void testConst(Blackhole bh) throws Throwable {
+        bh.consume((int) ADD_MH.invokeExact(1, 2));
+    }
+
+    @Benchmark
+    public void testReflect(Blackhole bh) throws Throwable {
+        bh.consume((int) METHOD.invoke(null, 1, 2));
+    }
+
+    @Benchmark
+    public void testReflectLazy(Blackhole bh) throws Throwable {
+        bh.consume((int) LazyMethod.invoke(null, 1, 2));
+    }
+
+    @Benchmark
+    public void testLmf(Blackhole bh) throws Throwable {
+        bh.consume(add.apply(1, 2));
+    }
+
+    @Benchmark
+    public void testLmfInvokeDynamic(Blackhole bh) throws Throwable {
+        bh.consume(addInvokeDynamic.apply(1, 2));
+    }
+
+    interface Add {
+        int apply(int a, int b);
+    }
+}
diff --git a/seeker/snippet/angular.Dockerfile b/seeker/snippet/angular.Dockerfile
deleted file mode 100644
index 009c9158..00000000
--- a/seeker/snippet/angular.Dockerfile
+++ /dev/null
@@ -1,29 +0,0 @@
-#date: 2024-09-18T17:05:04Z
-#url: https://api.github.com/gists/124d04536cdeca0cc709c6b43ffd9871
-#owner: https://api.github.com/users/Riko07br
-
-# Build angular------------------
-FROM node:lts-alpine3.20 as build
-
-RUN npm install -g @angular/cli
-
-WORKDIR /app
-
-COPY package.json .
-
-RUN npm install
-
-COPY . .
-
-RUN npm run build
-
-# NGINX runner---------------
-FROM nginx:1.21-alpine
-
-COPY --from=build /app/dist/frontend/browser /usr/share/nginx/html
-
-COPY ./nginx.conf /etc/nginx/conf.d/default.conf
-
-EXPOSE 80 4200
-
-CMD ["nginx", "-g", "daemon off;"]
\ No newline at end of file
diff --git a/seeker/snippet/aws_enum.sh b/seeker/snippet/aws_enum.sh
deleted file mode 100644
index dfad1da4..00000000
--- a/seeker/snippet/aws_enum.sh
+++ /dev/null
@@ -1,61 +0,0 @@
-#date: 2024-09-18T16:53:38Z
-#url: https://api.github.com/gists/04d0e48c523b9b73b2f1d14d81e2ba3f
-#owner: https://api.github.com/users/h00die
-
-EC2_TOKEN=$(curl -X PUT "http: "**********": 21600" 2>/dev/null || wget -q -O - --method PUT "http://169.254.169.254/latest/api/token" --header "X-aws-ec2-metadata-token-ttl-seconds: 21600" 2>/dev/null)
-HEADER="X-aws-ec2-metadata-token: "**********"
-URL="http://169.254.169.254/latest/meta-data"
-
-aws_req=""
-if [ "$(command -v curl)" ]; then
-  aws_req="curl -s -f -H '$HEADER'"
-elif [ "$(command -v wget)" ]; then
-  aws_req="wget -q -O - -H '$HEADER'"
-else
-  echo "Neither curl nor wget were found, I can't enumerate the metadata service :("
-fi
-
-printf "ami-id: "; eval $aws_req "$URL/ami-id"; echo ""
-printf "instance-action: "; eval $aws_req "$URL/instance-action"; echo ""
-printf "instance-id: "; eval $aws_req "$URL/instance-id"; echo ""
-printf "instance-life-cycle: "; eval $aws_req "$URL/instance-life-cycle"; echo ""
-printf "instance-type: "; eval $aws_req "$URL/instance-type"; echo ""
-printf "region: "; eval $aws_req "$URL/placement/region"; echo ""
-
-echo ""
-echo "Account Info"
-eval $aws_req "$URL/identity-credentials/ec2/info"; echo ""
-eval $aws_req "http://169.254.169.254/latest/dynamic/instance-identity/document"; echo ""
-
-echo ""
-echo "Network Info"
-for mac in $(eval $aws_req "$URL/network/interfaces/macs/" 2>/dev/null); do
-  echo "Mac: $mac"
-  printf "Owner ID: "; eval $aws_req "$URL/network/interfaces/macs/$mac/owner-id"; echo ""
-  printf "Public Hostname: "; eval $aws_req "$URL/network/interfaces/macs/$mac/public-hostname"; echo ""
-  printf "Security Groups: "; eval $aws_req "$URL/network/interfaces/macs/$mac/security-groups"; echo ""
-  echo "Private IPv4s:"; eval $aws_req "$URL/network/interfaces/macs/$mac/ipv4-associations/"; echo ""
-  printf "Subnet IPv4: "; eval $aws_req "$URL/network/interfaces/macs/$mac/subnet-ipv4-cidr-block"; echo ""
-  echo "PrivateIPv6s:"; eval $aws_req "$URL/network/interfaces/macs/$mac/ipv6s"; echo ""
-  printf "Subnet IPv6: "; eval $aws_req "$URL/network/interfaces/macs/$mac/subnet-ipv6-cidr-blocks"; echo ""
-  echo "Public IPv4s:"; eval $aws_req "$URL/network/interfaces/macs/$mac/public-ipv4s"; echo ""
-  echo ""
-done
-
-echo ""
-echo "IAM Role"
-eval $aws_req "$URL/iam/info"
-for role in $(eval $aws_req "$URL/iam/security-credentials/" 2>/dev/null); do
-  echo "Role: $role"
-  eval $aws_req "$URL/iam/security-credentials/$role"; echo ""
-  echo ""
-done
-
-echo ""
-echo "User Data"
-# Search hardcoded credentials
-eval $aws_req "http://169.254.169.254/latest/user-data"
-
-echo ""
-echo "EC2 Security Credentials"
-eval $aws_req "$URL/identity-credentials/ec2/security-credentials/ec2-instance"; echo ""
\ No newline at end of file
diff --git a/seeker/snippet/check-artifact-registry-batch-deletions.py b/seeker/snippet/check-artifact-registry-batch-deletions.py
deleted file mode 100644
index e995d435..00000000
--- a/seeker/snippet/check-artifact-registry-batch-deletions.py
+++ /dev/null
@@ -1,101 +0,0 @@
-#date: 2024-09-18T16:58:56Z
-#url: https://api.github.com/gists/cc77d7c98cd4c0c02f40f0e777ec3c0c
-#owner: https://api.github.com/users/philwhiteuk
-
-from collections import defaultdict
-from datetime import date, datetime, timedelta, UTC
-import os
-import textwrap
-
-from dotenv import load_dotenv
-from google.cloud import artifactregistry_v1 as artifactregistry, logging
-import yaml
-
-load_dotenv()  # take environment variables from .env.
-
-project_id = os.getenv('PROJECT_ID')
-location = os.getenv('LOCATION')
-
-def get_log(log_date: date = date.today()):
-    log_file_path = os.path.join(os.path.dirname(__file__), '../var/log', f'{log_date.isoformat()}.yaml')
-
-    if not os.path.exists(log_file_path):
-        print(f'Fetching log for {log_date.isoformat()}...')
-        logging_client = logging.Client(project=project_id)
-        log_query = f'''
-            protoPayload.methodName="google.devtools.artifactregistry.v1.ArtifactRegistry.BatchDeleteVersions"
-            AND protoPayload.authenticationInfo.principalEmail:"@gcp-sa-artifactregistry.iam.gserviceaccount.com"
-            AND protoPayload.request.parent:"projects/{project_id}/locations/{location}"
-            AND protoPayload.request.validateOnly=true
-            AND protoPayload.serviceName="artifactregistry.googleapis.com"
-            AND
-            (timestamp > {log_date.isoformat()} AND timestamp <= {(log_date + timedelta(days=1)).isoformat()})
-        '''
-        with open(log_file_path, 'a') as f:
-            for entry in logging_client.list_entries(filter_=log_query):
-                f.write('---\n')  # Separate documents with '---'
-                yaml.dump(entry.to_api_repr(), f)  # Write entry to file
-
-    with open(log_file_path, 'r') as f:
-        yaml_docs = yaml.safe_load_all(f)
-        return list(yaml_docs)
-
-def get_current_inventory(repository: str, package: str):
-    inventory_path = os.path.join(os.path.dirname(__file__), '../var/inventory', f'{date.today().isoformat()}-{repository}-{package}.yaml')
-
-    if not os.path.exists(inventory_path):
-        print(f'Fetching most recent inventory for {repository}: {package}...')
-        artifactregistry_client = artifactregistry.ArtifactRegistryClient()
-        parent = f'projects/{project_id}/locations/{location}/repositories/{repository}/packages/{package}'
-        request = artifactregistry.ListVersionsRequest(parent=parent)
-        with open(inventory_path, 'a') as f:
-            for entry in artifactregistry_client.list_versions(request=request):
-                f.write('---\n')  # Separate documents with '---'
-                yaml.dump({'name': entry.name, 'create_time': entry.create_time.isoformat(), 'update_time': entry.update_time.isoformat()}, f)  # Write entry to file
-
-    with open(inventory_path, 'r') as f:
-        yaml_docs = yaml.safe_load_all(f)
-        return list(yaml_docs)
-
-def main():
-    today = date.today()
-    print(f'Checking log for {today.isoformat()}...')
-    log_entries = get_log(log_date=today)
-
-    versions_marked_for_deletion = set()
-    for log_entry in log_entries:
-        if log_entry['protoPayload']['request']['names']:
-            versions_marked_for_deletion.update(log_entry['protoPayload']['request']['names'])
-
-    versions_by_package = defaultdict(list)
-    for tag in versions_marked_for_deletion:
-        repository = tag.split('/')[5]
-        package = tag.split('/')[7]
-        versions_by_package[f'{repository}/{package}'].append(tag)
-
-    now = datetime.now(UTC)
-    for package, versions_marked_for_deletion in versions_by_package.items():
-        repository = package.split('/')[0]
-        package = package.split('/')[1]
-
-        all_versions = get_current_inventory(repository=repository, package=package)
-        assert len(versions_marked_for_deletion) <= len(all_versions) - 100, f'{repository}/{package} is keeping fewer than the minimum 100 versions!'
-        summary = f'''
-            Resource: projects/{project_id}/locations/{location}/repositories/{repository}/package/{package}
-            Total files: {len(all_versions)}
-            Marked for deletion: {len(versions_marked_for_deletion)}
-        '''
-        print(textwrap.dedent(summary))
-
-        inventory_lookup = dict()
-        for item in all_versions:
-            inventory_lookup.update([{ item['name'], datetime.fromisoformat(item['update_time']) }])
-
-        tag_counter = 0
-        for tag in versions_marked_for_deletion:
-            if tag in inventory_lookup:
-                timedelta = now - inventory_lookup[tag]
-                assert timedelta.days >= 5, f'Version {tag} is newer than 5 days!'
-                print(f'- ✅ {tag.split('/')[-1]} {timedelta.days} days old')
-                tag_counter += 1
-
diff --git a/seeker/snippet/clone.sh b/seeker/snippet/clone.sh
new file mode 100644
index 00000000..3de4fcd5
--- /dev/null
+++ b/seeker/snippet/clone.sh
@@ -0,0 +1,70 @@
+#date: 2024-09-20T17:10:46Z
+#url: https://api.github.com/gists/bb0499e1272f312b82497031d28e91f2
+#owner: https://api.github.com/users/alifeee
+
+#!/bin/bash
+# quickly clone a GitHub repository
+# 1. take user input of a GitHub repository
+# 2. attempt to pattern match to an actual repository
+# 3. attempt to clone it
+# 4. open in file explorer or code editor
+# made by alifeee
+# version 0.1.0
+
+BASE="/home/alifeee/git"
+
+echo "will clone to ${BASE}"
+read -p "repository: " SSH_LOC
+
+echo "  input: ${SSH_LOC}"
+
+# e.g., git@github.com:alifeee/blog.git
+PAT_FULL="^git@github\.com:([^\/]*)\/([^\/]*).git$"
+# e.g., git@github.com:alifeee/blog
+PAT_WITHOUT_EXTENSION="^git@github\.com:([^\/]*)\/([^\/]*)$"
+# e.g., alifeee/blog
+PAT_OWNER_REPO="^([^\/]*)\/([^\/]*)$"
+# e.g., blog (only works when specifying default github account)
+PAT_REPO="^([^\/]*)$"
+DEFAULT_GITHUB_ACCOUNT="alifeee"
+
+if [[ "${SSH_LOC}" =~ $PAT_FULL ]]; then
+  echo "  match git@github.com:name/repo.git, cloning as-is"
+  cloneme="${SSH_LOC}"
+elif [[ "${SSH_LOC}" =~ $PAT_WITHOUT_EXTENSION ]]; then
+  echo "  match git@github.com:name/repo, adding .git"
+  cloneme="${SSH_LOC}.git"
+elif [[ "${SSH_LOC}" =~ $PAT_OWNER_REPO ]]; then
+  echo "  match name/repo, adding github and .git"
+  cloneme="git@github.com:${SSH_LOC}.git"
+elif [[ "${SSH_LOC}" =~ $PAT_REPO ]]; then
+  echo "  match repo, attempting alifeee"
+  cloneme="git@github.com:${DEFAULT_GITHUB_ACCOUNT}/${SSH_LOC}.git"
+else
+  read -s -n1 -p "no match type found :("
+  exit 1
+fi
+
+echo "  attempting to clone ${cloneme}"
+
+(cd "${BASE}"; git clone "${cloneme}")
+
+folder=$(echo "${cloneme}" | pcregrep -o2 "${PAT_FULL}")
+fullpath="${BASE}/${folder}"
+
+if [ ! -d "${fullpath}" ]; then
+  read -s -n1 -p "  no cloned folder found. curious... press ENTER to exit"
+  exit 1
+else
+  openme="${BASE}/${folder}"
+fi
+
+echo "cloned to ${openme}. what now?"
+read -p "ENTER to explore, \"code\" to open in VSCodium: " ACTION
+
+if [ "${ACTION}" == "code" ]; then
+  codium "${openme}"
+else
+  # open in explorer
+  xdg-open "${openme}"
+fi
\ No newline at end of file
diff --git a/seeker/snippet/demo.py b/seeker/snippet/demo.py
new file mode 100644
index 00000000..622cc741
--- /dev/null
+++ b/seeker/snippet/demo.py
@@ -0,0 +1,91 @@
+#date: 2024-09-20T16:46:18Z
+#url: https://api.github.com/gists/a12cf70804125557a0821cca20be746e
+#owner: https://api.github.com/users/Rajchowdhury420
+
+import boto3
+from rich.table import Table
+from rich.console import Console
+
+# Initialize boto3 clients
+cloudformation = boto3.client('cloudformation')
+console = Console()
+
+def list_stacksets():
+    """
+    List all StackSets in the account.
+    """
+    paginator = cloudformation.get_paginator('list_stack_sets')
+    stacksets = []
+
+    for page in paginator.paginate():
+        stacksets.extend(page['Summaries'])
+
+    return stacksets
+
+def detect_stackset_drift(stackset_name):
+    """
+    Detect drift for a specific StackSet.
+    """
+    try:
+        response = cloudformation.detect_stack_set_drift(StackSetName=stackset_name)
+        return response['StackSetDriftDetectionId']
+    except Exception as e:
+        console.print(f"Error detecting drift for {stackset_name}: {e}")
+        return None
+
+def describe_stackset_drift_detection(drift_detection_id):
+    """
+    Describe the drift detection results for a StackSet.
+    """
+    try:
+        response = cloudformation.describe_stack_set_drift_detection_status(
+            StackSetDriftDetectionId=drift_detection_id
+        )
+        return response
+    except Exception as e:
+        console.print(f"Error describing drift detection: {e}")
+        return None
+
+def detect_drift_in_all_stacksets():
+    """
+    Detect drift for all StackSets and display in a formatted table.
+    """
+    stacksets = list_stacksets()
+
+    if not stacksets:
+        console.print("No StackSets found in the account.")
+        return
+
+    table = Table(title="StackSet Drift Status")
+
+    # Add columns to the table
+    table.add_column("StackSet Name", justify="left", style="cyan", no_wrap=True)
+    table.add_column("Drift Detection ID", justify="left", style="yellow")
+    table.add_column("Drift Status", justify="left", style="green")
+    table.add_column("Detection Status", justify="left", style="magenta")
+    table.add_column("Drifted Stacks", justify="right", style="red")
+
+    # Iterate over stacksets to detect drift
+    for stackset in stacksets:
+        stackset_name = stackset['StackSetName']
+        drift_detection_id = detect_stackset_drift(stackset_name)
+
+        if drift_detection_id:
+            drift_status = describe_stackset_drift_detection(drift_detection_id)
+
+            # Add stackset details to the table
+            table.add_row(
+                stackset_name,
+                drift_detection_id,
+                drift_status['StackSetDriftStatus'],
+                drift_status['DetectionStatus'],
+                str(drift_status['DriftedStackInstancesCount'])
+            )
+        else:
+            table.add_row(stackset_name, "N/A", "N/A", "N/A", "N/A")
+
+    # Print the table using rich
+    console.print(table)
+
+if __name__ == "__main__":
+    detect_drift_in_all_stacksets()
\ No newline at end of file
diff --git a/seeker/snippet/fix-json.py b/seeker/snippet/fix-json.py
new file mode 100644
index 00000000..557d811a
--- /dev/null
+++ b/seeker/snippet/fix-json.py
@@ -0,0 +1,40 @@
+#date: 2024-09-20T16:38:20Z
+#url: https://api.github.com/gists/246d7928b2fc26821db582be583d8b7a
+#owner: https://api.github.com/users/tkellogg
+
+import re
+
+# Remove leading and trailing text, leaving just the JSON
+_JSON_TRIM_PATTERN = re.compile(
+    r"^"  # Start of string
+    r"[^{\[]*"  # Drop everything up to { or [
+    r"([{\[].*[}\]])"  # Keep the JSON
+    # Greedy match here should force it to not consume JSON
+    r"[^}\]]*"  # Drop everything after } or ]
+    r"$",  # End of string
+    re.DOTALL,
+)
+
+# Remove invalid escape sequences
+# This is bc mixtral seems to be an idiot, and why prompt better when you can just
+# fix it.
+_JSON_INVALID_ESCAPE_PATTERN = re.compile(
+    r"\\([^bfrntu\\/\"])"
+)
+
+def make_json_safer():
+    """
+    Mock out current JSON prep/parsing functionality. Call this once at startup (e.g. after imports)
+    """
+    orig_fn = dspy.functional._unwrap_json
+    def _safer_unwrap_json(output, from_json: Callable[[str], Union[pydantic.BaseModel, str]]):
+        output = _JSON_TRIM_PATTERN.sub("\\1", output)
+        output = _JSON_INVALID_ESCAPE_PATTERN.sub("\\1", output)
+        try:
+            return orig_fn(output, from_json)
+        except:
+            logger.debug(output)
+            raise
+
+    if dspy.functional._unwrap_json != _safer_unwrap_json:
+        dspy.functional._unwrap_json = _safer_unwrap_json
\ No newline at end of file
diff --git a/seeker/snippet/install-awscli.sh b/seeker/snippet/install-awscli.sh
deleted file mode 100644
index 73241aac..00000000
--- a/seeker/snippet/install-awscli.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#date: 2024-09-18T17:00:55Z
-#url: https://api.github.com/gists/b7c4ed2118fe9954b85eda3d411150fb
-#owner: https://api.github.com/users/salutgeek
-
-#!/usr/bin/env bash
-# see: https://docs.aws.amazon.com/cli/latest/userguide/getting-started-source-install.html
-# This script will install aws-cli libraries to /usr/local/lib/aws-cli/
-# This script will install aws-cli executable to /usr/local/bin/
-
-set -e
-WORK_DIR=$(mktemp -d)
-
-# download source package and un-tar
-curl -fsSL https://awscli.amazonaws.com/awscli.tar.gz | \
-  tar -xz --strip-components=1 - -C "$WORK_DIR"
-
-# cleanup
-trap "sudo rm -rf "$WORK_DIR"" EXIT
-
-pushd "$WORK_DIR"
-
-# remove existing installed aws-cli
-sudo rm -rf /usr/local/lib/aws-cli
-
-# configure deps
-./configure --with-download-deps
-
-# install
-make
-sudo make install
-popd
-
-aws --version
\ No newline at end of file
diff --git a/seeker/snippet/kafka-move-leadership.sh b/seeker/snippet/kafka-move-leadership.sh
deleted file mode 100644
index 372d72f8..00000000
--- a/seeker/snippet/kafka-move-leadership.sh
+++ /dev/null
@@ -1,323 +0,0 @@
-#date: 2024-09-18T16:49:56Z
-#url: https://api.github.com/gists/97732ef53ee3c2f6ea421db8d22f85a2
-#owner: https://api.github.com/users/agrezende
-
-#!/usr/bin/env bash
-#
-# File: kafka-move-leadership.sh
-#
-# Description
-# ===========
-#
-# Generates a Kafka partition reassignment JSON snippet to STDOUT to move the leadership
-# of any replicas away from the provided "source" broker to different, randomly selected
-# "target"brokers.  Run this script with `-h` to show detailed usage instructions.
-#
-#
-# Requirements
-# ============
-# - Kafka 0.8.1.1 (later versions may work, too)
-#
-#
-# Usage
-# =====
-#
-# To show usage instructions run this script with `-h` or `--help`.
-#
-#
-# Full workflow
-# =============
-#
-# High-level overview
-# -------------------
-#
-# 1. Use this script to generate a partition reassignment JSON file.
-# 2. Start the actual reassignment operation via Kafka's `kafka-reassign-partitions.sh` script and this JSON file.
-# 3. Monitor the progress of the reassignment operation with Kafka's `kafka-reassign-partitions.sh` script.
-#
-# Example
-# -------
-#
-# Step 1 (generate reassignment JSON):
-#
-#    $ kafka-move-leadership.sh --broker-id 4 --first-broker-id 0 --last-broker-id 8 --zookeeper zookeeper1:2181 > partitions-to-move-4.json
-#
-# Step 2 (start reassignment process):
-#
-#    $ kafka-reassign-partitions.sh --zookeeper zookeeper1:2181 --reassignment-json-file partitions-to-move-4 --execute
-#
-# Step 3 (monitor progress of reassignment process):
-#
-#    $ kafka-reassign-partitions.sh --zookeeper zookeeper1:2181 --reassignment-json-file partitions-to-move-4 --verify
-
-
-declare -r MYSELF=`basename $0`
-
-print_usage() {
-  echo "$MYSELF - generates a Kafka partition reassignment JSON snippet to move partition leadership away from a broker (details below)"
-  echo
-  echo "Usage: $MYSELF [OPTION]..."
-  echo
-  echo "    --broker-id          Move leadership of all replicas, if any, from this broker"
-  echo "                         to different, randomly selected brokers.  Example: 4"
-  echo "    --first-broker-id    First (= lowest) Kafka broker ID in the cluster.  Used as"
-  echo "                         the start index for the range of broker IDs from which"
-  echo "                         replacement brokers will be randomly selected.  Example: 0"
-  echo "    --last-broker-id     Last (= highest) Kafka broker ID in the cluster.  Used as"
-  echo "                         the end index for the range of broker IDs from which"
-  echo "                         replacement brokers will be randomly selected.  Example: 8"
-  echo "    --zookeeper          Comma-separated list of ZK servers with which the brokers"
-  echo "                         are registered.  Example: zookeeper1:2181,zookeeper2:2181"
-  echo "    -h, --help           Print this help message and exit."
-  echo
-  echo "Example"
-  echo "-------"
-  echo
-  echo "The following example moves leadership from broker with ID 4 to brokers randomly selected from"
-  echo "the ID range 0,1,2,3,4,5,6,7,8 (though 4 itself will be excluded from the range automatically):"
-  echo
-  echo "    $ $MYSELF --broker-id 4 --first-broker-id 0 --last-broker-id 8 --zookeeper zookeeper1:2181"
-  echo
-  echo "Use cases include:"
-  echo "------------------"
-  echo "  1. Safely restarting a broker while minimizing risk of data loss."
-  echo "  2. Replacing a broker."
-  echo "  3. Preparing a broker for maintenance."
-  echo
-  echo "Detailed description"
-  echo "--------------------"
-  echo "Generates a Kafka partition reassignment JSON snippet to STDOUT"
-  echo "to move the leadership of any replicas from the provided broker ID to"
-  echo "different, randomly selected broker IDs."
-  echo
-  echo "This JSON snippet can be saved to a file and then be used as an argument for:"
-  echo
-  echo "    $ kafka-reassign-partitions.sh --reassignment-json-file my.json"
-  echo
-  echo "Further information"
-  echo "-------------------"
-  echo "- http://kafka.apache.org/documentation.html#basic_ops_cluster_expansion"
-  echo "- https://cwiki.apache.org/confluence/display/KAFKA/Replication+tools#Replicationtools-6.ReassignPartitionsTool"
-}
-
-if [[ $# -eq 0 ]]; then
-  print_usage
-  exit 97
-fi
-
-while [[ $# -gt 0 ]]; do
-  case "$1" in
-    --broker-id)
-      shift
-      declare -r BROKER="$1"
-      shift
-      ;;
-    --zookeeper)
-      shift
-      declare -r ZOOKEEPER_CONNECT="$1"
-      shift
-      ;;
-    --first-broker-id)
-      shift
-      declare -r KAFKA_FIRST_BROKER_ID="$1"
-      shift
-      ;;
-    --last-broker-id)
-      shift
-      declare -r KAFKA_LAST_BROKER_ID="$1"
-      shift
-      ;;
-    -h|--help)
-      print_usage
-      exit 98
-      ;;
-    *)
-      echo "ERROR: Unexpected option ${1}"
-      echo
-      print_usage
-      exit 99
-      ;;
-  esac
-done
-
-
-# Input validation
-if [ -z "$BROKER" ]; then
-  echo "ERROR: You must set the parameter --broker-id"
-  exit 80
-fi
-
-if [ -z "$ZOOKEEPER_CONNECT" ]; then
-  echo "ERROR: You must set the parameter --zookeeper"
-  exit 81
-fi
-
-if [ -z "$KAFKA_FIRST_BROKER_ID" ]; then
-  echo "ERROR: You must set the parameter --first-broker-id"
-  exit 82
-fi
-
-if [ -z "$KAFKA_LAST_BROKER_ID" ]; then
-  echo "ERROR: You must set the parameter --last-broker-id"
-  exit 83
-fi
-
-
-###############################################################################
-### DEPENDENCIES
-###############################################################################
-
-declare -r KAFKA_TOPICS_SCRIPT_NAME="kafka-topics.sh"
-declare -r FALLBACK_PATH="/opt/kafka/bin"
-
-which "$KAFKA_TOPICS_SCRIPT_NAME" &>/dev/null
-if [ $? -ne 0 ]; then
-  declare -r FALLBACK_BIN="$FALLBACK_PATH/$KAFKA_TOPICS_SCRIPT_NAME"
-  which "$FALLBACK_BIN" &>/dev/null
-  if [ $? -ne 0 ]; then
-    echo "ERROR: $KAFKA_TOPICS_SCRIPT_NAME (ships with Kafka) not found in PATH."
-    exit 70
-  else
-    declare -r KAFKA_TOPICS_BIN="$FALLBACK_BIN"
-  fi
-else
-  declare -r KAFKA_TOPICS_BIN="$KAFKA_TOPICS_SCRIPT_NAME"
-fi
-
-
-###############################################################################
-### MISC CONFIGURATION - DO NOT TOUCH UNLESS YOU KNOW WHAT YOU ARE DOING
-###############################################################################
-
-declare -r OLD_IFS="$IFS"
-
-
-###############################################################################
-### UTILITY FUNCTIONS
-###############################################################################
-
-# Checks whether an array (first param) contains an element (second param).
-# Returns 0 if the array contains the element, and 1 if it does not.
-#
-# Usage: array_contains myArray myElement
-function array_contains {
-  local array="$1[@]"
-  local seeking=$2
-  local in=1
-  for element in "${!array}"; do
-    if [[ $element == $seeking ]]; then
-      in=0
-      break
-    fi
-  done
-  return $in
-}
-
-# Randomly selects a broker ID in the range specified by
-# KAFKA_FIRST_BROKER_ID (including) and KAFKA_LAST_BROKER_ID (including).
-#
-# Usage: random_broker  => may return e.g. "6"
-function random_broker {
-  shuf -i ${KAFKA_FIRST_BROKER_ID}-${KAFKA_LAST_BROKER_ID} -n 1
-}
-
-# Randomly selects, from the list of available brokers (range specified by
-# KAFKA_FIRST_BROKER_ID and KAFKA_LAST_BROKER_ID), a broker ID that is not
-# already listed in the provided brokers (first param).
-#
-# Usage: other_broker "1,4,6"  => may return e.g. "2"
-#
-# Note: Do NOT put spaces in the string.  "1,2" is ok, "1, 2" is not.
-function other_broker {
-  local brokers_string=$1
-  IFS=$',' read -a brokers <<< "$brokers_string"
-  local new_broker=`random_broker`
-  while array_contains brokers $new_broker; do
-    new_broker=`random_broker`
-  done
-  echo $new_broker
-}
-
-# Returns a list of broker IDs by removing the provided broker ID (second param)
-# from the provided list of original broker IDs (first param).  If the original
-# broker list does not contain the provided broker, the list is returned as is.
-#
-# The list of broker IDs must be a comma-separated list of numbers, e.g. "1,2".
-#
-# Usage: all_but_broker "1,2,3" "3"  => returns "1,2"
-#
-# Note: Do NOT put spaces in the string.  "1,2" is ok, "1, 2" is not.
-function all_but_broker {
-  local brokers_string=$1
-  local broker=$2
-  IFS=$',' read -a brokers <<< "$brokers_string"
-  local new_brokers=""
-  for curr_broker in "${brokers[@]}"; do
-    if [ "$curr_broker" != "$broker" ]; then
-      new_brokers="$new_brokers,$curr_broker"
-    fi
-  done
-  # Remove leading comma, if any.
-  new_brokers=${new_brokers#","}
-  echo $new_brokers
-}
-
-# Returns a list of broker IDs based on a provided list of broker IDs (first
-# param), where the provided broker ID (second param) is replaced by a
-# randomly selected broker ID that is not already in the original list.
-#
-# Usage: replace_broker "1,2,3" "2"  => may return e.g. "1,3,4"
-#
-# Note: Do NOT put spaces in the string.  "1,2" is ok, "1, 2" is not.
-function replace_broker {
-  local brokers_string=$1
-  local broker=$2
-  local remaining_brokers=`all_but_broker $brokers_string $broker`
-  local replacement_broker=`other_broker $brokers_string $broker`
-  new_brokers="$remaining_brokers,$replacement_broker"
-  # Remove leading comma, if any.
-  new_brokers=${new_brokers#","}
-  # Remove trailing comma, if any.
-  new_brokers=${new_brokers%","}
-  echo $new_brokers
-}
-
-
-###############################################################################
-### MAIN
-###############################################################################
-
-# "Header" of JSON file for Kafka partition reassignment
-json="{\n"
-json="$json  \"partitions\": [\n"
-
-# Actual partition reassignments
-for topicPartitionReplicas in `$KAFKA_TOPICS_BIN --zookeeper $ZOOKEEPER_CONNECT --describe | grep "Leader: $BROKER" | awk '{ print $2"#"$4"#"$8 }'`; do
-  # Note: We use '#' as field separator in awk (see above) and here
-  # because it is not a valid character for a Kafka topic name.
-  IFS=$'#' read -a array <<< "$topicPartitionReplicas"
-  topic="${array[0]}"      # e.g. "zerg.hydra"
-  partition="${array[1]}"  # e.g. "4"
-  replicas="${array[2]}"   # e.g. "0,8" (= comma-separated list of broker IDs)
-  new_replicas=`replace_broker $replicas $BROKER`
-  json="$json    {\"topic\": \"${topic}\", \"partition\": ${partition}, \"replicas\": [${new_replicas}] },\n"
-done
-
-# Remove tailing comma, if any.
-json=${json%",\n"}
-json="${json}\n"
-
-# "Footer" of JSON file
-json="$json  ],\n"
-json="$json  \"version\": 1\n"
-json="${json}}\n"
-
-# Print JSON to STDOUT
-echo -e $json
-
-
-###############################################################################
-### CLEANUP
-###############################################################################
-
-IFS="$OLD_IFS"
\ No newline at end of file
diff --git a/seeker/snippet/list-delete-assets.sh b/seeker/snippet/list-delete-assets.sh
new file mode 100644
index 00000000..334ef355
--- /dev/null
+++ b/seeker/snippet/list-delete-assets.sh
@@ -0,0 +1,32 @@
+#date: 2024-09-20T17:10:32Z
+#url: https://api.github.com/gists/3d3ceac4b690daf2ea8391b875e496b4
+#owner: https://api.github.com/users/duboc
+
+#!/bin/bash
+
+# Get the access token
+ACCESS_TOKEN= "**********"
+
+# Define the base URL
+BASE_URL="https://warehouse-visionai.googleapis.com/v1/projects/713488125678/locations/us-central1/corpora/4299188317952260006/assets"
+
+# List all assets
+echo "Listing assets..."
+curl -X GET \
+  -H "Authorization: "**********"
+  "$BASE_URL"
+
+# Parse the JSON response to extract asset IDs
+echo "Deleting assets..."
+assets=$(curl -X GET \
+  -H "Authorization: "**********"
+  "$BASE_URL" | jq -r '.assets[].name' | sed 's/.*\/assets\///')
+
+# Delete each asset
+for asset_id in $assets; do
+  curl -X DELETE \
+    -H "Authorization: "**********"
+    "$BASE_URL/$asset_id"
+done
+
+echo "All assets deleted."
diff --git a/seeker/snippet/list-delete-index.sh b/seeker/snippet/list-delete-index.sh
new file mode 100644
index 00000000..a14a0c7d
--- /dev/null
+++ b/seeker/snippet/list-delete-index.sh
@@ -0,0 +1,32 @@
+#date: 2024-09-20T17:08:58Z
+#url: https://api.github.com/gists/ba6c3843858f022b739834dde1bf6e42
+#owner: https://api.github.com/users/duboc
+
+#!/bin/bash
+
+# Get the access token
+ACCESS_TOKEN= "**********"
+
+# Define the base URL
+BASE_URL="https://warehouse-visionai.googleapis.com/v1/projects/713488125678/locations/us-central1/corpora/4299188317952260006/indexes"
+
+# List all indexes
+echo "Listing indexes..."
+curl -X GET \
+  -H "Authorization: "**********"
+  "$BASE_URL"
+
+# Parse the JSON response to extract index IDs
+echo "Deleting indexes..."
+indexes=$(curl -X GET \
+  -H "Authorization: "**********"
+  "$BASE_URL" | jq -r '.indexes[].name' | sed 's/.*\/indexes\///')
+
+# Delete each index
+for index_id in $indexes; do
+  curl -X DELETE \
+    -H "Authorization: "**********"
+    "$BASE_URL/$index_id"
+done
+
+echo "All indexes deleted."
diff --git a/seeker/snippet/main.py b/seeker/snippet/main.py
deleted file mode 100644
index 3bf676a9..00000000
--- a/seeker/snippet/main.py
+++ /dev/null
@@ -1,6 +0,0 @@
-#date: 2024-09-18T17:06:49Z
-#url: https://api.github.com/gists/00ca864491f7851330dd40ea0b33db42
-#owner: https://api.github.com/users/mypy-play
-
-for i in range(1000):
-    print(i:int)
\ No newline at end of file
diff --git a/seeker/snippet/mario_cube_dsi_ware_dumper.py b/seeker/snippet/mario_cube_dsi_ware_dumper.py
new file mode 100644
index 00000000..050df2e2
--- /dev/null
+++ b/seeker/snippet/mario_cube_dsi_ware_dumper.py
@@ -0,0 +1,47 @@
+#date: 2024-09-20T16:38:55Z
+#url: https://api.github.com/gists/d362c8a0402344726cc92d3d4133d8e8
+#owner: https://api.github.com/users/p4p1
+
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Made by papi
+# Created on: Fri 20 Sep 2024 04:36:32 PM IST
+# mario_cube_dsi_ware_dumper.py
+# Description:
+# This python script will download all of the nds file present on the
+# dsi ware section of the mario cube website. All credit goes to them this
+# is just a little script to make downloading files there easier.
+
+import requests
+from bs4 import BeautifulSoup
+
+URL="https://repo.mariocube.com/DSiWare/NDS/"
+
+def dl_file(url, file_name):
+    response = requests.get(url, stream=True)
+    if response.status_code == 200:
+        with open(file_name, 'wb') as file:
+            for chunk in response.iter_content(chunk_size=8192):
+                file.write(chunk)
+        return True
+    return False
+
+let = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z' ]
+
+for letter in let:
+    response = requests.get(URL + letter + "/")
+
+    if response.status_code == 200:
+        soup = BeautifulSoup(response.text, 'html.parser')
+        links = soup.find_all('a', href=True)
+        href_links = [link['href'] for link in links]
+        i = 0
+
+        for l in href_links:
+            if i % 2 == 0:
+                i += 1
+                continue
+            i += 1
+            if "USA" in l:
+                if dl_file(URL + letter + "/" + l,l.replace('%20', ' ')):
+                    print("done: %s" % l.replace('%20', ' '))
diff --git a/seeker/snippet/md-pdf.sh b/seeker/snippet/md-pdf.sh
new file mode 100644
index 00000000..e0e3396d
--- /dev/null
+++ b/seeker/snippet/md-pdf.sh
@@ -0,0 +1,36 @@
+#date: 2024-09-20T17:00:22Z
+#url: https://api.github.com/gists/85be7d83ceb6750adb6bc0ab496c3f6f
+#owner: https://api.github.com/users/ichux
+
+#!/bin/bash
+
+# sudo apt install -y pandoc wkhtmltopdf texlive-xetex texlive-latex-extra
+
+# Check if pandoc is installed
+if ! command -v pandoc &> /dev/null; then
+  echo "pandoc is not installed. Please install it and try again."
+  exit 1
+fi
+
+# Directory containing .md files (current directory by default)
+input_dir="${1:-.}"
+
+# Convert all .md files in the directory to .pdf
+for md_file in "$input_dir"/*.md; do
+  if [ -f "$md_file" ]; then
+    # Output file name
+    output_file="${md_file%.md}.pdf"
+
+    pandoc "$md_file" --pdf-engine=wkhtmltopdf -o "$output_file"
+
+    # Check if the conversion was successful
+    if [ $? -eq 0 ]; then
+      echo "Converted: $md_file -> $output_file"
+    else
+      echo "Conversion failed for: $md_file"
+    fi
+  else
+    echo "No .md files found in the directory."
+    exit 1
+  fi
+done
diff --git a/seeker/snippet/migrate-local-dev.sh b/seeker/snippet/migrate-local-dev.sh
new file mode 100644
index 00000000..1b29ba1b
--- /dev/null
+++ b/seeker/snippet/migrate-local-dev.sh
@@ -0,0 +1,73 @@
+#date: 2024-09-20T16:50:06Z
+#url: https://api.github.com/gists/e89a274a3c87610cfc415e5969f63f05
+#owner: https://api.github.com/users/jeroenvervaeke
+
+#!/bin/bash
+
+# Check if required arguments are provided
+if [ "$#" -ne 2 ]; then
+    echo "Usage: $0 <container_name> <new_image>"
+    exit 1
+fi
+
+CONTAINER_NAME="$1"
+NEW_IMAGE="$2"
+OLD_CONTAINER_NAME="${CONTAINER_NAME}-old"
+
+# Function to revert changes
+revert() {
+    echo "Error occurred. Reverting changes..."
+    docker stop "$CONTAINER_NAME" 2>/dev/null
+    docker rm "$CONTAINER_NAME" 2>/dev/null
+    docker rename "$OLD_CONTAINER_NAME" "$CONTAINER_NAME" 2>/dev/null
+    docker start "$CONTAINER_NAME"
+    exit 1
+}
+
+# Step 1: Pull the new image
+echo "Pulling new image: $NEW_IMAGE"
+if ! docker pull "$NEW_IMAGE"; then
+    echo "Failed to pull new image. Exiting."
+    exit 1
+fi
+
+# Step 2: Take the settings from the running container
+# Check if the container is running
+if [ "$(docker inspect -f '{{.State.Running}}' "$CONTAINER_NAME" 2>/dev/null)" != "true" ]; then
+    echo "Container $CONTAINER_NAME is not running. Exiting."
+    exit 1
+fi
+
+PORT_MAPPINGS=$(docker inspect --format='{{range $p, $conf := .NetworkSettings.Ports}}{{if $conf}} -p {{(index $conf 0).HostIp}}:{{(index $conf 0).HostPort}}:{{$p}}{{end}}{{end}}' "$CONTAINER_NAME" | sed 's:/tcp::g')
+ENV_VARIABLES=$(docker inspect --format='{{range $index, $value := .Config.Env}} -e "{{$value}}"{{end}}' "$CONTAINER_NAME")
+
+# Step 3: Stop the old container
+echo "Stopping container: $CONTAINER_NAME"
+if ! docker stop "$CONTAINER_NAME"; then
+    echo "Failed to stop container. Exiting."
+    exit 1
+fi
+
+# Step 4: Rename the old container
+echo "Renaming container to: $OLD_CONTAINER_NAME"
+if ! docker rename "$CONTAINER_NAME" "$OLD_CONTAINER_NAME"; then
+    echo "Failed to rename container. Reverting."
+    docker start "$CONTAINER_NAME"
+    exit 1
+fi
+
+# Step 5: Start the new container with volumes, port mappings and environment variables from the old container
+echo "Starting new container"
+if ! docker run -d --volumes-from "$OLD_CONTAINER_NAME" $PORT_MAPPINGS $ENV_VARIABLES --name "$CONTAINER_NAME" "$NEW_IMAGE"; then
+    echo "Failed to start new container. Reverting."
+    revert
+fi
+
+# Step 6: Remove the old container
+echo "Removing old container"
+if ! docker rm "$OLD_CONTAINER_NAME"; then
+    echo "Failed to remove old container. Reverting."
+    revert
+fi
+
+echo "Update completed successfully!"
\ No newline at end of file
diff --git a/seeker/snippet/pgmm_decrypt.py b/seeker/snippet/pgmm_decrypt.py
new file mode 100644
index 00000000..093a68c0
--- /dev/null
+++ b/seeker/snippet/pgmm_decrypt.py
@@ -0,0 +1,85 @@
+#date: 2024-09-20T17:01:51Z
+#url: https://api.github.com/gists/ead16838a0474577d77f17a0e7843cdf
+#owner: https://api.github.com/users/Zolyn
+
+import os
+import click
+import json
+from base64 import b64decode
+# https://github.com/wqhanginge/pgmm_decrypt/tree/bugfix/incorrect-decryption
+from pgmm_decrypt import decrypt_pgmm_key, decrypt_pgmm_resource
+
+@click.group()
+def main():
+    """\b
+    ╔═╗╔═╗╔╦╗╔╦╗  ╔═╗┌─┐┌┬┐┌─┐┌─┐
+    ╠═╝║ ╦║║║║║║  ║  │ │ ││├┤ │
+    ╩  ╚═╝╩ ╩╩ ╩  ╚═╝└─┘─┴┘└─┘└─┘
+    Pixel Game Maker MV Codec @syrinka
+
+    pgmm-codec COMMAND --help shows the help for that command
+
+    \b
+    Thanks to:
+    https://github.com/blluv/pgmm_decrypt
+    """
+    pass
+
+@main.command('decrypt', short_help='decrypt files using the key')
+@click.argument('info', type=click.Path(exists=True, file_okay=True, readable=True))
+@click.option('-i', '--input',
+    type=click.Path(exists=True, file_okay=False, readable=True),
+    required=True,
+    help='''\b
+    input directory; decryption will be attempted on *all files* under it
+    beware of unencrypted files inside - they will likely be unreadable after decryption''')
+@click.option('-o', '--output',
+    type=click.Path(file_okay=False),
+    help='output directory; defaults to <input dir>-dec')
+@click.option('-w', '--weak',
+    is_flag=True,
+    help='use weak decryption mode')
+@click.option('-q', '--quiet',
+    is_flag=True)
+def func(info, input, output, weak, quiet):
+    """
+    Decrypt resources with the key and write them to the given path
+
+    INFO: the info.json file
+
+    \b
+    Example:
+    pgmm-codec decrypt INFO -i Resources/img -o img-dec
+    """
+    with open(info, "r", encoding="utf-8") as f:
+        encrypted_key = b64decode(json.load(f)["key"])
+    decrypted_key = decrypt_pgmm_key(encrypted_key)
+
+    if output is None:
+        output = input + '-dec'
+
+    ilen = len(input)
+    for root, dirs, files in os.walk(input):
+        if not quiet:
+            print(f'# current directory: {root}')
+
+        for file in files:
+            ipath = os.path.join(root, file)
+            opath = os.path.join(output, root[ilen+1:], file)
+
+            with open(ipath, "rb") as f:
+                file_bytes = f.read()
+
+            decrypted_bytes = decrypt_pgmm_resource(file_bytes, decrypted_key, weak=weak)
+
+            os.makedirs(os.path.dirname(opath), exist_ok=True)
+
+            with open(opath, "wb") as f:
+                f.write(decrypted_bytes)
+
+            if not quiet:
+                print(file)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/seeker/snippet/pyunit_test_pyppeteer_browser_session.py b/seeker/snippet/pyunit_test_pyppeteer_browser_session.py
new file mode 100644
index 00000000..61d4b97d
--- /dev/null
+++ b/seeker/snippet/pyunit_test_pyppeteer_browser_session.py
@@ -0,0 +1,105 @@
+#date: 2024-09-20T16:57:52Z
+#url: https://api.github.com/gists/1e46c343d1cf5910e9faa031171767ed
+#owner: https://api.github.com/users/pije76
+
+import asyncio
+import unittest
+from pyppeteer import connect, launch
+from pyppeteer.errors import PageError
+from urllib.parse import quote
+import json
+import os
+from os import environ
+
+
+exec_platform = os.getenv('EXEC_PLATFORM')
+
+
+# Get username and access key of the LambdaTest Platform
+username = environ.get('LT_USERNAME', None)
+access_key = "**********"
+
+
+# Capabilities array with the respective configuration for parallel tests
+cloud_capabilities = {
+    'browserName': 'Chrome',
+    'browserVersion': 'latest',
+    'LT:Options': {
+        'platform': 'Windows 11',
+        'build': '[Build] Launching browser session with Pyppeteer (with unittest)',
+        'name': 'Launching browser session with Pyppeteer (with unittest)',
+        'user': username,
+        'accessKey': "**********"
+        'resolution': '1920x1080',
+        'network': True,
+        'video': True,
+        'console': True,
+        'headless': False
+    }
+}
+
+
+local_capabilities = {
+    'browserName': 'Chrome'
+}
+
+
+class LambdaTestAsyncTest(unittest.IsolatedAsyncioTestCase):
+    async def asyncSetUp(self):
+        if exec_platform == 'cloud':
+            capability = quote(json.dumps(cloud_capabilities))
+            print('Initializing test:: ', cloud_capabilities['LT:Options']['name'])
+
+
+            self.browser = await connect(
+                browserWSEndpoint=f'wss://cdp.lambdatest.com/puppeteer?capabilities={capability}'
+            )
+        elif exec_platform == 'local':
+            print('Initializing test:: ', local_capabilities['browserName'])
+            self.browser = await launch(headless = False, args=['--start-maximized'])
+
+
+        await asyncio.sleep(1)
+        self.page = await self.browser.newPage()
+
+
+    async def asyncTearDown(self):
+        await self.page.close()
+        await asyncio.sleep(1)
+        await self.browser.close()
+
+
+    async def test_page_title(self):
+        await self.page.goto('https://search.brave.com/')
+        title = await self.page.title()
+        print('Scenario 1: Page Title ' + title)
+
+
+        try:
+            assert title == 'Private Search Engine - Brave Search', 'Expected page title is incorrect!'
+            await self.page.evaluate('_ => {}', f'lambdatest_action: {json.dumps({ "action": "setTestStatus", "arguments": { "status": "passed", "remark": "Title matched" } })}')
+        except PageError as e:
+            await self.page.evaluate('_ => {}', f'lambdatest_action: {json.dumps({ "action": "setTestStatus", "arguments": { "status": "failed", "remark": str(e) } })}')
+
+
+    async def test_page_content(self):
+        # Navigate to a website to see the effect
+        await self.page.goto('https://www.duckduckgo.com')
+        element = await self.page.querySelector('[name="q"]')
+
+
+        await element.click()
+        await element.type('LambdaTest')
+        await asyncio.gather(
+            self.page.keyboard.press('Enter'),
+            self.page.waitForNavigation()
+        )
+
+
+        page_title = await self.page.title()
+        print('Scenario 2: Page Title ' + page_title)
+        return page_title
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/seeker/snippet/ravaen_normalisation_manual_extract.py b/seeker/snippet/ravaen_normalisation_manual_extract.py
new file mode 100644
index 00000000..caa6feae
--- /dev/null
+++ b/seeker/snippet/ravaen_normalisation_manual_extract.py
@@ -0,0 +1,134 @@
+#date: 2024-09-20T17:11:24Z
+#url: https://api.github.com/gists/c312c2e39bcceb88d41f88ae3dd2cb2a
+#owner: https://api.github.com/users/previtus
+
+
+class DataNormalizerLogManual():
+    def __init__(self):
+        self.setup()
+    def setup(self):
+        # These were edited to work with the 10 bands we had in Wildfires project (FireCLR)
+        # only use 10m resolution bands (10): Blue (B2), Green (B3), Red (B4), VNIR (B5),
+        # VNIR (B6), VNIR (B7), NIR (B8), VNIR (B8a), SWIR (B11), SWIR (B12) combining
+        self.BANDS_S2_BRIEF = ["B2", "B3", "B4", "B5", "B6", "B7", "B8", "B8A", "B11", "B12"]
+
+        self.RESCALE_PARAMS = {
+            "B1": {"x0": 7.3,
+                   "x1": 7.6,
+                   "y0": -1,
+                   "y1": 1,
+                   },
+            "B2": {"x0": 6.9,
+                   "x1": 7.5,
+                   "y0": -1,
+                   "y1": 1,
+                   },
+            "B3": {"x0": 6.5,
+                   "x1": 7.4,
+                   "y0": -1,
+                   "y1": 1,
+                   },
+            "B4": {"x0": 6.2,
+                   "x1": 7.5,
+                   "y0": -1,
+                   "y1": 1,
+                   },
+            "B5": {"x0": 6.1,
+                   "x1": 7.5,
+                   "y0": -1,
+                   "y1": 1,
+                   },
+            "B6": {"x0": 6.5,
+                   "x1": 8,
+                   "y0": -1,
+                   "y1": 1,
+                   },
+            "B7": {"x0": 6.5,
+                   "x1": 8,
+                   "y0": -1,
+                   "y1": 1,
+                   },
+            "B8": {"x0": 6.5,
+                   "x1": 8,
+                   "y0": -1,
+                   "y1": 1,
+                   },
+            "B8A": {"x0": 6.5,
+                    "x1": 8,
+                    "y0": -1,
+                    "y1": 1,
+                    },
+            "B9": {"x0": 6,
+                   "x1": 7,
+                   "y0": -1,
+                   "y1": 1,
+                   },
+            "B10": {"x0": 2.5,
+                    "x1": 4.5,
+                    "y0": -1,
+                    "y1": 1,
+                    },
+            "B11": {"x0": 6,
+                    "x1": 8,
+                    "y0": -1,
+                    "y1": 1,
+                    },
+            "B12": {"x0": 6,
+                    "x1": 8,
+                    "y0": -1,
+                    "y1": 1,
+                    }
+        }
+        print("normalization params are manually found")
+
+    def normalize_x(self, data):
+        bands = data.shape[0]  # for example 15
+        for band_i in range(bands):
+            data_one_band = data[band_i, :, :]
+            if band_i < len(self.BANDS_S2_BRIEF):
+                # log
+                data_one_band = np.log(data_one_band)
+                data_one_band[np.isinf(data_one_band)] = np.nan
+
+                # rescale
+                r = self.RESCALE_PARAMS[self.BANDS_S2_BRIEF[band_i]]
+                x0, x1, y0, y1 = r["x0"], r["x1"], r["y0"], r["y1"]
+                data_one_band = ((data_one_band - x0) / (x1 - x0)) * (y1 - y0) + y0
+            data[band_i, :, :] = data_one_band
+        return data
+
+    def denormalize_x(self, data):
+        bands = data.shape[0]  # for example 15
+        for band_i in range(bands):
+            data_one_band = data[band_i, :, :]
+            if band_i < len(self.BANDS_S2_BRIEF):
+                # rescale
+                r = self.RESCALE_PARAMS[self.BANDS_S2_BRIEF[band_i]]
+                x0, x1, y0, y1 = r["x0"], r["x1"], r["y0"], r["y1"]
+                data_one_band = (((data_one_band - y0) / (y1 - y0)) * (x1 - x0)) + x0
+
+                # undo log
+                data_one_band = np.exp(data_one_band)
+                # data_one_band = np.log(data_one_band)
+                # data_one_band[np.isinf(data_one_band)] = np.nan
+
+            data[band_i, :, :] = data_one_band
+        return data
+
+
+
+normaliser = DataNormalizerLogManual()
+# pseudocode of usage:
+# imagine you have loaded data here
+before = read_image(before_path, channels)  # rasterio load for example
+before_tiles = image2tiles(before)  # tiling script
+after = read_image(after_path, channels)
+after_tiles = image2tiles(after)
+
+for tile_i in range(len(before_tiles)):
+    before_tiles[tile_i] = normaliser.normalize_x(before_tiles[tile_i])
+for tile_i in range(len(after_tiles)):
+    after_tiles[tile_i] = normaliser.normalize_x(after_tiles[tile_i])
+
+# ... etc
+# for example check the stats of your normalised data - is it between the expected -1 to +1 ?
\ No newline at end of file
diff --git a/seeker/snippet/spring.Dockerfile b/seeker/snippet/spring.Dockerfile
deleted file mode 100644
index bfae59a6..00000000
--- a/seeker/snippet/spring.Dockerfile
+++ /dev/null
@@ -1,25 +0,0 @@
-#date: 2024-09-18T17:05:04Z
-#url: https://api.github.com/gists/124d04536cdeca0cc709c6b43ffd9871
-#owner: https://api.github.com/users/Riko07br
-
-# Build maven-----------------------
-FROM maven:3.8.4-openjdk-17 AS build
-
-WORKDIR /app
-
-COPY src/main ./src/main
-
-COPY pom.xml ./
-
-RUN mvn clean "-Dmaven.test.skip" package
-
-# openJDK runner--------------------
-FROM openjdk:17-jdk-alpine
-
-WORKDIR /app
-
-COPY --from=build /app/target/backend-0.0.1-SNAPSHOT.jar ./app.jar
-
-EXPOSE 8080
-
-ENTRYPOINT ["java","-jar","/app/app.jar"]
\ No newline at end of file
diff --git a/seeker/snippet/unifi.sh b/seeker/snippet/unifi.sh
new file mode 100644
index 00000000..ffb7cfd7
--- /dev/null
+++ b/seeker/snippet/unifi.sh
@@ -0,0 +1,19 @@
+#date: 2024-09-20T16:43:07Z
+#url: https://api.github.com/gists/c42e8104d2bcc5d2a801df7439f58433
+#owner: https://api.github.com/users/mbc3k
+
+#!/bin/sh
+# works on SmartOS LX instances, maybe elsewhere?
+
+# set up repos
+echo 'deb [ arch=amd64,arm64 ] https://www.ui.com/downloads/unifi/debian stable ubiquiti' > /etc/apt/sources.list.d/100-ubnt-unifi.list
+echo 'deb [trusted=yes] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/3.6 multiverse' > /etc/apt/sources.list.d/mongodb-org-3.6.list
+
+# for libssl1
+echo 'deb http://security.ubuntu.com/ubuntu focal-security main' > /etc/apt/sources.list.d/old-ubuntu.list
+
+curl -sO https://dl.ui.com/unifi/unifi-repo.gpg --output-dir /etc/apt/trusted.gpg.d/
+curl -sO https://www.mongodb.org/static/pgp/server-3.6.asc --output-dir /etc/apt/trusted.gpg.d/
+
+apt update
+apt install -y unifi