diff --git a/seeker/report.txt b/seeker/report.txt
index e73e11c6..ce3258a7 100644
--- a/seeker/report.txt
+++ b/seeker/report.txt
@@ -1,3 +1,33 @@
+--------------------------------------------------------------------------------
+ 2024-09-16 17:12:34.795202
+--------------------------------------------------------------------------------
+ On branch main
+Your branch is up to date with 'origin/main'.
+
+Changes not staged for commit:
+ (use "git add/rm <file>..." to update what will be committed)
+ (use "git restore <file>..." to discard changes in working directory)
+ deleted: snippet/Wallet.java
+ deleted: snippet/brute_force_affine.py
+ deleted: snippet/impress.py
+ deleted: snippet/main.py
+ deleted: snippet/remove-taint.sh
+ deleted: snippet/script.py
+ deleted: snippet/test_logging.py
+ deleted: snippet/test_namaster.py
+ deleted: snippet/testing_logging_levels.py
+
+Untracked files:
+ (use "git add <file>..." to include in what will be committed)
+ snippet/base_conversions.py
+ snippet/generate_signed_urls.py
+ snippet/ipynb_importParentDir_pyCodeTemplate.py
+ snippet/main.java
+ snippet/module_5_2.py
+ snippet/program.java
+
+no changes added to commit (use "git add" and/or "git commit -a")
+
 --------------------------------------------------------------------------------
 2024-09-13 17:12:30.412983
 --------------------------------------------------------------------------------
diff --git a/seeker/snippet/Wallet.java b/seeker/snippet/Wallet.java
deleted file mode 100644
index a4f579ff..00000000
--- a/seeker/snippet/Wallet.java
+++ /dev/null
@@ -1,30 +0,0 @@
-//date: 2024-09-13T16:50:04Z
-//url: https://api.github.com/gists/945ee148356f3d8d5c7529670c678342
-//owner: https://api.github.com/users/dmzDAWG
-
-class LedgerItem {
-    InternalAccount account;
-
-    static LedgerItem repayment(Repayment repayment) {
-        return // LedgerItem().builder()...
-    }
-}
-
-class InternalAccount {
-    List<LedgerItem> ledgerItems;
-    // account doesn't directly reference repayments
-    // it just manages ledger items and represents a repayment as a ledger item
-
-    void record(Repayment repayment) {
-        ledgerItems.add(LedgerItem.repayment(repayment));
-    }
-}
-
-class Repayment {
-    InternalAccount internalAccount;
-
-    public Repayment(InternalAccount internalAccount, Money amount) {
-        this.internalAccount = internalAccount;
-        internalAccount.record(this);
-    }
-}
\ No newline at end of file
diff --git a/seeker/snippet/base_conversions.py b/seeker/snippet/base_conversions.py
new file mode 100644
index 00000000..34485fc7
--- /dev/null
+++ b/seeker/snippet/base_conversions.py
@@ -0,0 +1,110 @@
+#date: 2024-09-16T17:00:30Z
+#url: https://api.github.com/gists/dd83ecd985ad1817d72ae92764b4921c
+#owner: https://api.github.com/users/Marcus5408
+
+# conversion.py
+# -------------
+# Description:
+# A simple program that converts a number from one base to another using either
+# the successive division method or the weighted multiplication method.
+# -------------
+# Usage:
+# In a terminal, run the following command:
+# python3 conversion.py <method> <number> <base>
+
+# <method> selects the conversion method using one of the following:
+# - divide: successive division method
+# - multiply: weighted multiplication method
+# <number> is the number to convert.
+# <base> is the target base (for successive division)
+# or the base of the number (for weighted multiplication).
+# -------------
+# (c) Issac Liu, 2024
+
+from typing import Union, Literal
+import sys
+
+
+def success_div(n, base):
+    remainder = 0
+    result = 0
+    charset = "0123456789"
+    if base > 10:
+        if base == 16:
+            charset = "0123456789ABCDEF"
+        else:
+            print(
+                "You have entered a base greater than 10. Please enter every digit of your base from least to greatest."
+            )
+            values = input("")
+            charset = values if len(values) == base else "0123456789ABCDEF"
+    if base < 10:
+        while n != 0 or n > base:
+            remainder = n % base
+            quotient = n // base
+            print(f"{n}/{base} = {quotient}r{remainder}")
+            result = result * 10 + remainder
+            n = quotient
+        # reverse the result
+        result = int(str(result)[::-1])
+        print(f"\n{result}")
+    else:
+        result = ""
+        while n != 0:
+            remainder = n % base
+            quotient = n // base
+            if base > 10 and remainder > 9:
+                hex_value = f" ({remainder} -> {charset[remainder]})"
+                print(f"{n}/{base} = {quotient}r{remainder}{hex_value}")
+            else:
+                print(f"{n}/{base} = {quotient}r{remainder}")
+            result = charset[remainder] + result
+            n = quotient
+        print(f"\n{result}")
+
+    return result
+
+
+def weighted_multiply(n: Union[int, str], base: int) -> int:
+    if isinstance(n, str):
+        n = n.upper()
+        charset = "0123456789ABCDEF"
+        list = [charset.index(x) for x in n]
+    else:
+        list = [int(x) for x in str(n)]
+
+    weights = [base**i for i in range(len(list) - 1, -1, -1)]
+    result = [a * b for a, b in zip(list, weights)]
+
+    for i in range(len(result)):
+        if base > 10 and list[i] > 9:
+            hex_value = f" ({charset[list[i]]} -> {list[i]})"
+            print(
+                f"{list[i]}{hex_value} * {base}^{len(list) - i - 1} = {list[i]} * {weights[i]} = {result[i]}"
+            )
+        else:
+            print(
+                f"{list[i]} * {base}^{len(list) - i - 1} = {list[i]} * {weights[i]} = {result[i]}"
+            )
+
+    print(f"\n{' + '.join([str(x) for x in result])} = {sum(result)}")
+    return sum(result)
+
+
+if __name__ == "__main__":
+    if len(sys.argv) != 4:
+        print("Usage: python conversion.py <method> <number> <base>")
+        sys.exit(1)
+
+    method = sys.argv[1]
+    n = int(sys.argv[2]) if sys.argv[2].isdigit() else sys.argv[2]
+    base = int(sys.argv[3])
+
+    if method == "divide":
+        success_div(n, base)
+    elif method == "multiply":
+        weighted_multiply(n, base)
+    else:
+        print(
+            "Invalid method. Use 'divide' for the successive division method or 'multiply' for the weighted multiplication method."
+ ) diff --git a/seeker/snippet/brute_force_affine.py b/seeker/snippet/brute_force_affine.py deleted file mode 100644 index aa7c52c1..00000000 --- a/seeker/snippet/brute_force_affine.py +++ /dev/null @@ -1,27 +0,0 @@ -#date: 2024-09-13T16:44:57Z -#url: https://api.github.com/gists/4ab1e134faf18daa87f0d1451b236c8d -#owner: https://api.github.com/users/xHacka - -def affine_decrypt(ciphertext, a, b): - m = 256 # Byte range (0-255) - a_inv = pow(a, -1, m) - return bytes([(a_inv * (byte - b)) % m for byte in ciphertext]) - -def brute_force_affine(ciphertext): - m = 256 # Max value - for a in range(1, m): - try: - for b in range(m): - decrypted = affine_decrypt(ciphertext, a, b).decode("utf-8", errors="ignore") - yield f'a={a}, b={b}: {decrypted}' - except ValueError: - continue - - return results - - -ciphertext = bytes.fromhex('9094939a8b8c8b8d868b97968c919088c0') -results = brute_force_affine(ciphertext) -for result in results: - if result.isprintable() and result.isascii(): - print(result) diff --git a/seeker/snippet/generate_signed_urls.py b/seeker/snippet/generate_signed_urls.py new file mode 100644 index 00000000..0ad90d08 --- /dev/null +++ b/seeker/snippet/generate_signed_urls.py @@ -0,0 +1,52 @@ +#date: 2024-09-16T16:57:20Z +#url: https://api.github.com/gists/05da91b9b34799ff6fd4254cffba7d3e +#owner: https://api.github.com/users/rlank + +from google.cloud import storage +from datetime import timedelta +import os + +# Set the path to your service account key file +os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = '/path/to/key/json' + +def generate_signed_urls(bucket_name, prefix, expiration_time): + """ + Generates signed URLs for files in the given bucket and prefix. + + :param bucket_name: Name of the GCS bucket. + :param prefix: Prefix of the files in the GCS bucket. + :param expiration_time: Time in minutes for which the signed URL should be valid. + :return: List of tuples containing the file name and signed URL. + """ + # Initialize the client + # This uses the default credentials. Make sure that the GOOGLE_APPLICATION_CREDENTIALS environment variable is set. + storage_client = storage.Client() + + # Get the bucket + bucket = storage_client.bucket(bucket_name) + + # Get blobs (files) with the given prefix + blobs = bucket.list_blobs(prefix=prefix) + + signed_urls = [] + for blob in blobs: + # Generate a signed URL for each blob + url = blob.generate_signed_url( + expiration=expiration_time, + version='v4' # Use V4 signing + ) + signed_urls.append((blob.name, url)) + + return signed_urls +# Usage +bucket_name = 'fuelcast-data' +prefix = 'fuel/rapid-2024-conus/' + +# Longest allowable time is one week +exp_time = timedelta(days=7) + +signed_urls = generate_signed_urls(bucket_name, prefix, expiration_time=exp_time) + +# Print signed URLs +for file_name, url in signed_urls: + print(f"File: {file_name} - Signed URL: {url}") \ No newline at end of file diff --git a/seeker/snippet/impress.py b/seeker/snippet/impress.py deleted file mode 100644 index b4a036fd..00000000 --- a/seeker/snippet/impress.py +++ /dev/null @@ -1,59 +0,0 @@ -#date: 2024-09-12T17:09:29Z -#url: https://api.github.com/gists/cea1753cdb2140862f5f4982dfac312a -#owner: https://api.github.com/users/rk9777 - -li = range(100) -# multiply each item in a list by 2 -[x*2 for x in li] - -# sum a list -sum(li) - -# verify if exists in a string. 
-word_list = ["iterate", "comprehension", "for", "yield", "Guido"] - -tweet_yes = "Guido said to iterate over the comprehension" - -any(w in tweet_yes for w in word_list) - -# read in a file -open(__file__).read() -# lines -open(__file__).readlines() or list(open(__file__)) -# iterator -open(__file__) - -# happy birthday -print "\n".join("happy birthday %s" % ("dear NAME" if i == 3 else "to you") \ - for i in range(1, 4)) - -# filter a list of number -over_60 = [x for x in range(10, 100, 10) if x > 60] - -# find max / min of a list -max(li) -min(li) - -# xml -import urllib -import xml.etree.ElementTree as et -xml = urllib.urlopen('http://search.twitter.com/search.atom?&q=python').read() -print et.parse(xml) - - - -# parallel processing -import multiprocessing as mp -def x2(v): return v * 2 -mp.Pool(3).map(x2, li) - -# prime number generation. (http://wiki.python.org/moin/SimplePrograms) -import itertools -def pgen(): - numbers = itertools.count(2) - while True: - prime = numbers.next() - yield prime - numbers = itertools.ifilter(prime.__rmod__, numbers) - -itertools.islice(pgen(), 40) \ No newline at end of file diff --git a/seeker/snippet/ipynb_importParentDir_pyCodeTemplate.py b/seeker/snippet/ipynb_importParentDir_pyCodeTemplate.py new file mode 100644 index 00000000..fe87e3f7 --- /dev/null +++ b/seeker/snippet/ipynb_importParentDir_pyCodeTemplate.py @@ -0,0 +1,26 @@ +#date: 2024-09-16T16:51:30Z +#url: https://api.github.com/gists/c4028cf4e3de861d0dda7c7edf552b57 +#owner: https://api.github.com/users/birdflyi + +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Python 3.7 # Your python version + +# @Time : ${DATE} ${TIME} +# @Author : 'Lou Zehua' # Your name +# @File : ${NAME}.py + +import os +import sys + +if '__file__' not in globals(): + # !pip install ipynbname # Remove comment symbols to solve the ModuleNotFoundError + import ipynbname + + nb_path = ipynbname.path() + __file__ = str(nb_path) +cur_dir = os.path.dirname(__file__) +pkg_rootdir = os.path.dirname(cur_dir) # Should be the root directory of your project. +if pkg_rootdir not in sys.path: # To resolve the ModuleNotFoundError + sys.path.append(pkg_rootdir) + print('-- Add root directory "{}" to system path.'.format(pkg_rootdir)) diff --git a/seeker/snippet/main.java b/seeker/snippet/main.java new file mode 100644 index 00000000..dd3473c3 --- /dev/null +++ b/seeker/snippet/main.java @@ -0,0 +1,36 @@ +//date: 2024-09-16T17:10:28Z +//url: https://api.github.com/gists/12ac859a0f6d500e52d8ae7999e2b395 +//owner: https://api.github.com/users/qren0neu + +class Solution { + public long maxScore(int[] nums1, int[] nums2, int k) { + // if use priority queue, we can have: + // 1. 
when we poll in the queue, we remove the min
+        // so the sum of nums1 should be larger
+        // but, we have to calculate the minimum dynamically in nums2
+        // if we can combine nums1 and nums2 somehow together, we can solve the problem
+        int[][] arr = new int[nums1.length][2];
+        for (int i = 0; i < nums1.length; i++) {
+            arr[i][0] = nums1[i];
+            arr[i][1] = nums2[i];
+        }
+        Arrays.sort(arr, (int[] arr1, int[] arr2) -> arr2[1] - arr1[1]);
+        PriorityQueue<Integer> pq = new PriorityQueue<Integer>(k, (a,b) -> a - b);
+        long score = 0;
+        long sum = 0;
+        for (int[] pair : arr) {
+            // pair: nums1, nums2
+            int min = pair[1];
+            pq.offer(pair[0]);
+            sum += pair[0];
+            if (pq.size() > k) {
+                int removed = pq.poll();
+                sum -= removed;
+            }
+            if (pq.size() == k) {
+                score = Math.max(score, sum * min);
+            }
+        }
+        return score;
+    }
+}
\ No newline at end of file
diff --git a/seeker/snippet/main.py b/seeker/snippet/main.py
deleted file mode 100644
index 507a1af8..00000000
--- a/seeker/snippet/main.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#date: 2024-09-12T17:06:45Z
-#url: https://api.github.com/gists/90f6017582cecd5f00eb132d45a73172
-#owner: https://api.github.com/users/mypy-play
-
-from dataclasses import asdict
-
-import dataclasses
-
-@dataclasses.dataclass
-class Foo:
-    a_string: int = 1
-    a_float: str = "Hey"
-    invalid_param: None = None
-
-
-def bar(a_string: str, a_float: float) -> None:
-    ...
-
-foo = Foo()
-bar(**asdict(foo))
\ No newline at end of file
diff --git a/seeker/snippet/module_5_2.py b/seeker/snippet/module_5_2.py
new file mode 100644
index 00000000..2b538ee9
--- /dev/null
+++ b/seeker/snippet/module_5_2.py
@@ -0,0 +1,39 @@
+#date: 2024-09-16T17:08:45Z
+#url: https://api.github.com/gists/f4b2d14e1c2552e58d2373ba74014c2b
+#owner: https://api.github.com/users/zabelloalexandr
+
+from pygments.formatters import other
+
+
+class House:
+    def __init__(self, name, number_of_floors):
+        self.name = name
+        self.number_of_floors = number_of_floors
+        self.current_floor = 1
+
+    def __eq__(self, other):
+        return self.number_of_floors == other.number_of_floors
+
+    def __str__(self):
+        return f'{self.name} {self.number_of_floors}'
+
+    def __lt__(self, other):
+        return self.number_of_floors < other.number_of_floors
+    def __len__(self):
+        return self.number_of_floors
+
+
+
+
+h1 = House('ЖК Эльбрус', 10)
+h2 = House('ЖК Акация', 20)
+
+# __str__
+print(h1)
+print(h2)
+
+# __len__
+print(len(h1))
+print(len(h2))
+
+
diff --git a/seeker/snippet/program.java b/seeker/snippet/program.java
new file mode 100644
index 00000000..dc77c45a
--- /dev/null
+++ b/seeker/snippet/program.java
@@ -0,0 +1,115 @@
+//date: 2024-09-16T16:48:13Z
+//url: https://api.github.com/gists/a8b1ca4dfd2a3170ad6e9193a5f39de5
+//owner: https://api.github.com/users/Nivasnvz
+
+import java.util.*;
+
+public class program
+{
+    public static void main(String[] ar)
+    {
+
+        Scanner s=new Scanner(System.in);
+
+        //define array for getting values
+        List<String> a=new ArrayList<>();
+
+        // define two seperate array for greater and smaller string
+
+        List<String> big=new ArrayList<>();
+        List<String> small=new ArrayList<>();
+
+        int i=0,l=0,k=0;
+
+        //to getting the String arrays until user stops
+
+        for(;i<100;i++)
+        {
+            String a1=s.nextLine();
+            a.add(a1);
+            if(a1.isEmpty())break;
+        }
+
+
+
+        for(;k')
+        {
+            small.add(a.get(k+1));
+            big.add(a.get(k));
+
+        }
+        }
+
+        //combine both big and small string arrays
+
+        List<String> co=new ArrayList<>(big);
+        co.addAll(small);
+
+
+
+        for(;l un=new LinkedHashSet<>(co);
+
+        for(String so:un)
+        {
+            System.out.print(so+" ");
+        }
+
+
+    }
+
+    static boolean isequal(String e,String e1,List a,
List co) + { + int i; + for(i=0;i a) + { + String temp=a.get(s); + a.set(s,a.get(s1)); + a.set(s1,temp); + } +} \ No newline at end of file diff --git a/seeker/snippet/remove-taint.sh b/seeker/snippet/remove-taint.sh deleted file mode 100644 index 0ae5b350..00000000 --- a/seeker/snippet/remove-taint.sh +++ /dev/null @@ -1,30 +0,0 @@ -#date: 2024-09-13T17:09:42Z -#url: https://api.github.com/gists/f44e38e813332db3cd041fe241642ff7 -#owner: https://api.github.com/users/creachadair - -#!/usr/bin/env bash -# -# Usage: remove-taint.sh -# -# Remove the annoying mark-of-the-web taint xattrs from a file. -# For some reason macOS ignores a removexattr for these attributes, -# but the taint does not survive a pipe copy and rename. -# -set -euo pipefail - -for p in "$@" ; do - perm="$(stat -f '%Lp' "$p")" # low-order permission bits, e.g., 644 - t="$(mktemp "$p".XXXXXXXXXXXX)" # in the same directory as the source - cat "$p" > "$t" - - # Preserve the access bits and modification time from the original. - chmod "$perm" "$t" - touch -m -r "$p" "$t" - - # Note: It's not sufficent to just rename, because macOS appears to track - # the quarantine through the filename even if it's replaced. By removing - # the original file first, we keep the metadata for the untainted copy. - # Seriously, Apple, WTAF. - rm -f -- "$p" - mv -f -- "$t" "$p" -done diff --git a/seeker/snippet/script.py b/seeker/snippet/script.py deleted file mode 100644 index cb244789..00000000 --- a/seeker/snippet/script.py +++ /dev/null @@ -1,30 +0,0 @@ -#date: 2024-09-13T16:59:36Z -#url: https://api.github.com/gists/912a4c0e33d22e8ed5cba35a2f170296 -#owner: https://api.github.com/users/kevinjqliu - -import pyarrow.fs as fs - -# List of URIs to test -file_uris = [ - "file:some/thing.csv", - "file://some/thing.csv", - "file:/some/thing.csv", - "file:///some/thing.csv" -] - -def test_file_uris(file_uris): - for uri in file_uris: - try: - # Attempt to parse the URI - filesystem, path = fs.LocalFileSystem.from_uri(uri) - print(f"URI: {uri} -> Success: Filesystem: {filesystem}, Path: {path}") - except Exception as e: - print(f"URI: {uri} -> Error: {e}") - -# Run the test -test_file_uris(file_uris) - -# URI: file:some/thing.csv -> Error: File URI cannot be relative: 'file:some/thing.csv' -# URI: file://some/thing.csv -> Error: Unsupported hostname in non-Windows local URI: 'file://some/thing.csv' -# URI: file:/some/thing.csv -> Success: Filesystem: , Path: /some/thing.csv -# URI: file:///some/thing.csv -> Success: Filesystem: , Path: /some/thing.csv diff --git a/seeker/snippet/test_logging.py b/seeker/snippet/test_logging.py deleted file mode 100644 index 7704d4ca..00000000 --- a/seeker/snippet/test_logging.py +++ /dev/null @@ -1,80 +0,0 @@ -#date: 2024-09-13T16:55:37Z -#url: https://api.github.com/gists/47a1ab9c2cc6fb1e0bb321bd70a77e86 -#owner: https://api.github.com/users/tabedzki - -# test_logging.py - -import time -from pathlib import Path -import pprint -import logging -import warnings -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) # Set the bug level of the package to be DEBUG by default. Allow user to override - - -def run_kilosort(settings, results_dir=None, filename=None): - - results_dir = Path(results_dir) - # setup_logger_current(results_dir) - setup_logger_proposed(results_dir) - try: - logger.debug("Debug message") - logger.info(f"Kilosort version 4") - logger.info(f"Sorting {filename}") - logger.info('-'*40) - will_fail() - except Exception as e: - # This makes sure the full traceback is written to log file. 
- logger.exception('Encountered error in `run_kilosort`:') - e.add_note(f'NOTE: See {results_dir}/kilosort4.log for detailed info.') - raise e - -def will_fail(): - 0/0 - -def setup_logger_current(results_dir): - # Adapted from - # https://docs.python.org/2/howto/logging-cookbook.html#logging-to-multiple-destinations - # In summary: only send logging.debug statements to log file, not console. - - # set up logging to file for root logger - logging.basicConfig(level=logging.DEBUG, - format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s', - datefmt='%m-%d %H:%M', - filename=results_dir/'kilosort4.log', - filemode='w') - - # define a Handler which writes INFO messages or higher to the sys.stderr - console = logging.StreamHandler() - console.setLevel(logging.INFO) - # set a format which is simpler for console use - console_formatter = logging.Formatter('%(name)-12s: %(message)s') - console.setFormatter(console_formatter) - # add the console handler to the root logger - logging.getLogger('').addHandler(console) - - # Set 3rd party loggers to INFO or above only, - # so that it doesn't spam the log file - numba_log = logging.getLogger('numba') - numba_log.setLevel(logging.INFO) - - mpl_log = logging.getLogger('matplotlib') - mpl_log.setLevel(logging.INFO) - -def setup_logger_proposed(results_dir): - # Adapted from - # https://docs.python.org/3/howto/logging.html#configuring-logging-for-a-library - - # Create handlers - file_handler = logging.FileHandler(results_dir / 'kilosort4.log', mode='w') - file_handler.setLevel(logging.DEBUG) - - # Create formatters and add them to the handlers - file_formatter = logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(message)s', datefmt='%m-%d %H:%M') - file_handler.setFormatter(file_formatter) - - # Add handlers to the logger - logger.addHandler(file_handler) - - # User should define log level for 3rd party applications in their own code. 
\ No newline at end of file diff --git a/seeker/snippet/test_namaster.py b/seeker/snippet/test_namaster.py deleted file mode 100644 index 0c9ed758..00000000 --- a/seeker/snippet/test_namaster.py +++ /dev/null @@ -1,114 +0,0 @@ -#date: 2024-09-13T16:55:11Z -#url: https://api.github.com/gists/7fc4d1fec9b2752851f2e498e620e1a1 -#owner: https://api.github.com/users/ajouellette - -import os -import numpy as np -import healpy as hp -import pymaster as nmt -import joblib - - -def get_workspace(nmt_field1, nmt_field2, nmt_bins, wksp_cache=None): - if wksp_cache is None: - print("Computing workspace") - wksp = nmt.NmtWorkspace.from_fields(nmt_field1, nmt_field2, nmt_bins) - return wksp - - hash_key = joblib.hash([nmt_field1.get_mask(), nmt_field1.spin, nmt_field2.get_mask(), nmt_field2.spin]) - wksp_file = f"{wksp_cache}/cl/{hash_key}.fits" - - try: - # load from existing file - wksp = nmt.NmtWorkspace.from_file(wksp_file) - wksp.check_unbinned() - print("Using cached workspace") - # update bins and beams after loading - wksp.update_beams(nmt_field1.beam, nmt_field2.beam) - wksp.update_bins(nmt_bins) - except RuntimeError: - # compute and save to file - print("Computing workspace and saving") - wksp = nmt.NmtWorkspace.from_fields(nmt_field1, nmt_field2, nmt_bins) - os.makedirs(f"{wksp_cache}/cl", exist_ok=True) - wksp.write_to(wksp_file) - - return wksp - - -def get_cov_workspace(nmt_field1a, nmt_field2a, nmt_field1b, nmt_field2b, wksp_cache=None): - if wksp_cache is None: - print("Computing workspace") - wksp = nmt.NmtCovarianceWorkspace.from_fields(nmt_field1a, nmt_field2a, nmt_field1b, nmt_field2b) - return wksp - - hash_key = joblib.hash([nmt_field1a.get_mask(), nmt_field1a.spin, nmt_field2a.get_mask(), nmt_field2a.spin, - nmt_field1b.get_mask(), nmt_field1b.spin, nmt_field2b.get_mask(), nmt_field2b.spin]) - wksp_file = f"{wksp_cache}/cov/{hash_key}.fits" - - try: - wksp = nmt.NmtCovarianceWorkspace.from_file(wksp_file) - print("Using cached workspace") - except RuntimeError: - print("Computing workspace and saving") - wksp = nmt.NmtCovarianceWorkspace.from_fields(nmt_field1a, nmt_field2a, nmt_field1b, nmt_field2b) - os.makedirs(f"{wksp_cache}/cov", exist_ok=True) - wksp.write_to(wksp_file) - - return wksp - - -nside = 1024 -ell = np.arange(3*nside) - -bins = nmt.NmtBin.from_nside_linear(nside, 100) -ell_eff = bins.get_effective_ells() - -# load maps -mask = hp.read_map("test_mask.fits") -shear_maps = hp.read_map("test_shear.fits", field=None) -kappa_map = hp.read_map("test_kappa.fits") - -print("creating fields") -shear_field = nmt.NmtField(mask, shear_maps, spin=2) -kappa_field = nmt.NmtField(mask, [kappa_map], spin=0) - -def run_analysis(field1, field2, bins, wksp_cache=None): - print("computing cross-Cl") - wksp = get_workspace(field1, field2, bins, wksp_cache=wksp_cache) - pcl = nmt.compute_coupled_cell(field1, field2) - cl = wksp.decouple_cell(pcl) - - print("computing covariance") - cov_wksp = get_cov_workspace(field1, field2, field1, field2, wksp_cache=wksp_cache) - pcl1 = nmt.compute_coupled_cell(field1, field1) / np.mean(field1.get_mask()**2) - pcl2 = nmt.compute_coupled_cell(field2, field2) / np.mean(field2.get_mask()**2) - cov = nmt.gaussian_covariance(cov_wksp, 0, 2, 0, 2, pcl1, *2*[pcl / np.mean(field1.get_mask() * field2.get_mask())], - pcl2, wksp) - return cl, cov - -# run without caching -print("Running without cache") -cl, cov = run_analysis(kappa_field, shear_field, bins) -print(cl[0]) -print("NaNs:", np.isnan(cov).any()) -cov = cov.reshape((len(ell_eff), 2, len(ell_eff), 2)) 
-print(np.sqrt(np.diag(cov[:,0,:,0]))) -print() - -# run with caching -print("Running with cache (1)") -cl, cov = run_analysis(kappa_field, shear_field, bins, wksp_cache="/home/aaronjo2/scratch/test") -print(cl[0]) -print("NaNs:", np.isnan(cov).any()) -cov = cov.reshape((len(ell_eff), 2, len(ell_eff), 2)) -print(np.sqrt(np.diag(cov[:,0,:,0]))) -print() - -# re-run with cached workspaces -print("Running with cache (2)") -cl, cov = run_analysis(kappa_field, shear_field, bins, wksp_cache="/home/aaronjo2/scratch/test") -print(cl[0]) -print("NaNs:", np.isnan(cov).any()) -cov = cov.reshape((len(ell_eff), 2, len(ell_eff), 2)) -print(np.sqrt(np.diag(cov[:,0,:,0]))) \ No newline at end of file diff --git a/seeker/snippet/testing_logging_levels.py b/seeker/snippet/testing_logging_levels.py deleted file mode 100644 index dfbb923f..00000000 --- a/seeker/snippet/testing_logging_levels.py +++ /dev/null @@ -1,45 +0,0 @@ -#date: 2024-09-13T16:55:37Z -#url: https://api.github.com/gists/47a1ab9c2cc6fb1e0bb321bd70a77e86 -#owner: https://api.github.com/users/tabedzki - -# testing_logging_levels.py - -import logging -from test_logging import run_kilosort - -# Create handlers -console_handler = logging.StreamHandler() -console_handler.setLevel(logging.DEBUG) - -file_handler = logging.FileHandler('my_custom.log', mode='w') -file_handler.setLevel(logging.INFO) - -# Create formatter and add it to the handlers -formatter = logging.Formatter('%(name)s - %(levelname)s - %(message)s') -console_handler.setFormatter(formatter) -file_handler.setFormatter(formatter) - -# Get the outer logger -outer_logger = logging.getLogger('outer_logger') -outer_logger.setLevel(logging.DEBUG) -outer_logger.addHandler(console_handler) -outer_logger.addHandler(file_handler) - -# Log a message before calling run_kilosort -outer_logger.debug('This is a debug message from the outer logger before calling run_kilosort.') -outer_logger.info('This is an info message from the outer logger before calling run_kilosort.') - -# Set logigng levels -test_logging_logger = logging.getLogger('test_logging') -# test_logging_logger.setLevel(logging.DEBUG) # Should a user decide to override the logging level -test_logging_logger.propagate = False - -# Call the function from test_logging.py -try: - run_kilosort(settings={}, results_dir='.') -except: - outer_logger.exception("Kilosort4 failed") - -# Log a message after calling run_kilosort -outer_logger.debug('This is a debug message from the outer logger after calling run_kilosort.') -outer_logger.info('This is an info message from the outer logger after calling run_kilosort.') \ No newline at end of file
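The deleted test_logging.py / testing_logging_levels.py pair above exercises a library-vs-application logging split: the library module attaches only a FileHandler to its own logger, and the application decides whether those records also propagate to its console handler. Below is a minimal self-contained sketch of that interaction; the logger name and file paths are illustrative, not taken from the gists.

import logging

# "Library" side: the module logger writes DEBUG and above to its own file.
lib_logger = logging.getLogger("mylib")
lib_logger.setLevel(logging.DEBUG)
file_handler = logging.FileHandler("mylib.log", mode="w")
file_handler.setFormatter(logging.Formatter("%(asctime)s %(name)-12s %(levelname)-8s %(message)s"))
lib_logger.addHandler(file_handler)

# "Application" side: a console handler on the root logger shows INFO and above.
console = logging.StreamHandler()
console.setLevel(logging.INFO)
console.setFormatter(logging.Formatter("%(name)-12s: %(message)s"))
logging.getLogger().addHandler(console)

lib_logger.debug("file only: the console handler's INFO threshold filters this out")
lib_logger.info("file and console: the record propagates to the root logger's handler")

# Setting propagate = False (as testing_logging_levels.py does for 'test_logging')
# keeps library records out of the application's handlers entirely.
lib_logger.propagate = False
lib_logger.info("file only again: propagation to the root logger is disabled")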