-
Notifications
You must be signed in to change notification settings - Fork 6
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
2021-09-01 13:35:57.810385 new snippets
- Loading branch information
1 parent
808fb22
commit 4d3395b
Showing
15 changed files
with
459 additions
and
6 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,6 @@ | ||
#date: 2021-09-01T13:22:00Z | ||
#url: https://api.github.com/gists/fa2e7c6fa5eecbc3b00fcc2f16a4335c | ||
#owner: https://api.github.com/users/rgdacosta | ||
|
||
# Make vim the default editor for CLI tools (git, oc edit, etc.)
# and load the OCP4 classroom configuration variables into this shell.
export EDITOR=/usr/bin/vim
source /usr/local/etc/ocp4.config
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,30 @@ | ||
#date: 2021-09-01T13:22:11Z | ||
#url: https://api.github.com/gists/56ba6f92de0951179867f5289585dfba | ||
#owner: https://api.github.com/users/thiagoferreiraw | ||
|
||
class EmbeddedReportResource(PaginatorMixin, APIResource):
    """Paginated REST resource exposing active embedded reports (list + detail)."""

    preparer = EMBEDDED_REPORT_LIST_PREPARER
    paginate = True
    page_size = 40

    @property
    def base_query(self):
        # Only active reports; the engine FK is joined eagerly so serialization
        # does not trigger one extra query per row.
        active_reports = EmbeddedReport.objects.filter(active=True)
        return active_reports.select_related("engine").order_by("name")

    def prepare(self, data):
        prepared = super().prepare(data)
        # Detail responses additionally carry the business-specific report URL.
        if self.endpoint == "detail":
            prepared["url"] = data.get_report_url_for_business(self.business)
        return prepared

    @permissions(needs=("embedded-report-list",))
    def list(self):
        return self.base_query

    @permissions(needs=("embedded-report-list",))
    def detail(self, pk):
        return self.get_or_error(self.base_query, EMBEDDED_REPORT_NOT_FOUND, pk=pk)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,8 @@ | ||
#date: 2021-09-01T13:35:08Z | ||
#url: https://api.github.com/gists/e39002204be78f411421d3b2dfd1b603 | ||
#owner: https://api.github.com/users/seesharprun | ||
|
||
# Scaffold a TypeScript React app in the current directory, pinned to npm.
npx create-react-app . --template typescript --use-npm
# Microsoft Graph Toolkit React components, core library, and MSAL2 auth provider.
npm i @microsoft/mgt-react
npm i @microsoft/mgt-element @microsoft/mgt-msal2-provider
npm start
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,130 @@ | ||
#date: 2021-09-01T13:32:37Z | ||
#url: https://api.github.com/gists/be9e5616774cce51ea0c15fee27bdd5a | ||
#owner: https://api.github.com/users/orenmatar | ||
|
||
import numpy as np | ||
import pandas as pd | ||
|
||
from fbprophet import Prophet | ||
|
||
|
||
def _make_historical_mat_time(deltas, changepoints_t, t_time, n_row=1): | ||
""" | ||
Creates a matrix of slope-deltas where these changes occured in training data according to the trained prophet obj | ||
""" | ||
diff = np.diff(t_time).mean() | ||
prev_time = np.arange(0, 1 + diff, diff) | ||
idxs = [] | ||
for changepoint in changepoints_t: | ||
idxs.append(np.where(prev_time > changepoint)[0][0]) | ||
prev_deltas = np.zeros(len(prev_time)) | ||
prev_deltas[idxs] = deltas | ||
prev_deltas = np.repeat(prev_deltas.reshape(1, -1), n_row, axis=0) | ||
return prev_deltas, prev_time | ||
|
||
|
||
def prophet_logistic_uncertainty(
    mat: np.ndarray,
    deltas: np.ndarray,
    prophet_obj: Prophet,
    cap_scaled: np.ndarray,
    t_time: np.ndarray,
):
    """
    Vectorized logistic-growth uncertainty: turns a matrix of sampled future
    slope changes into a matrix of possible trends, centered around zero.
    """

    def _forward_fill(arr):
        # Per row, replace each zero with the closest non-zero value to its left.
        nonzero = arr != 0
        col_idx = np.where(nonzero, np.arange(arr.shape[1]), 0)
        np.maximum.accumulate(col_idx, axis=1, out=col_idx)
        return arr[np.arange(col_idx.shape[0])[:, None], col_idx]

    base_k = prophet_obj.params["k"][0]
    base_m = prophet_obj.params["m"][0]
    n_length = len(t_time)
    # Logistic growth has to be evaluated from the start of training, so the
    # historical slope-change matrix and time grid are prepended first.
    hist_mat, hist_time = _make_historical_mat_time(deltas, prophet_obj.changepoints_t, t_time, len(mat))
    mat = np.concatenate([hist_mat, mat], axis=1)
    full_t_time = np.concatenate([hist_time, t_time])

    # Cumulative slope at every step (0 where no change occurred, then forward-filled).
    k_cum = np.concatenate(
        (np.ones((mat.shape[0], 1)) * base_k, np.where(mat, np.cumsum(mat, axis=1) + base_k, 0)),
        axis=1,
    )
    k_cum_filled = _forward_fill(k_cum)
    gammas = np.zeros_like(mat)
    for i in range(mat.shape[1]):
        x = full_t_time[i] - base_m - np.sum(gammas[:, :i], axis=1)
        ks = 1 - k_cum_filled[:, i] / k_cum_filled[:, i + 1]
        gammas[:, i] = x * ks
    # Drop the historical prefix: only the last n_length (future) steps matter.
    k_t = (mat.cumsum(axis=1) + base_k)[:, -n_length:]
    m_t = (gammas.cumsum(axis=1) + base_m)[:, -n_length:]
    sample_trends = cap_scaled / (1 + np.exp(-k_t * (t_time - m_t)))
    # Center around zero: only the width of the uncertainty is needed here;
    # the caller adds it onto the mean forecast (yhat).
    sample_trends = sample_trends - sample_trends.mean(axis=0)
    return sample_trends
|
||
|
||
def _make_trend_shift_matrix(mean_delta: float, likelihood: float, future_length: float, k: int = 10000) -> np.ndarray: | ||
""" | ||
Creates a matrix of random trend shifts based on historical likelihood and size of shifts. | ||
Can be used for either linear or logistic trend shifts. | ||
Each row represents a different sample of a possible future, and each column is a time step into the future. | ||
""" | ||
# create a bool matrix of where these trend shifts should go | ||
bool_slope_change = np.random.uniform(size=(k, future_length)) < likelihood | ||
shift_values = np.random.laplace(0, mean_delta, size=bool_slope_change.shape) | ||
mat = shift_values * bool_slope_change | ||
n_mat = np.hstack([np.zeros((len(mat), 1)), mat])[:, :-1] | ||
mat = (n_mat + mat) / 2 | ||
return mat | ||
|
||
|
||
def add_prophet_uncertainty(
    prophet_obj: Prophet,
    forecast_df: pd.DataFrame,
    using_train_df: bool = False,
):
    """
    Adds yhat_upper and yhat_lower to the forecast_df used by fbprophet, based on the params of a trained prophet_obj
    and the interval_width.
    Use using_train_df=True if the forecast_df is not for a future time but for the training data.

    Mutates forecast_df in place (adds the two columns); returns None.
    """
    assert prophet_obj.history is not None, "Model has not been fit"
    assert "yhat" in forecast_df.columns, "Must have the mean yhat forecast to build uncertainty on"
    interval_width = prophet_obj.interval_width

    if using_train_df:  # there is no trend-based uncertainty if we're only looking on the past where trend is known
        # BUG FIX: np.zeros takes the shape as one tuple argument; the previous
        # np.zeros(10000, len(forecast_df)) passed the length as dtype and raised TypeError.
        sample_trends = np.zeros((10000, len(forecast_df)))
    else:  # create samples of possible future trends
        future_time_series = ((forecast_df["ds"] - prophet_obj.start) / prophet_obj.t_scale).values
        single_diff = np.diff(future_time_series).mean()
        change_likelihood = len(prophet_obj.changepoints_t) * single_diff
        deltas = prophet_obj.params["delta"][0]
        n_length = len(forecast_df)
        mean_delta = np.mean(np.abs(deltas)) + 1e-8
        if prophet_obj.growth == "linear":
            mat = _make_trend_shift_matrix(mean_delta, change_likelihood, n_length, k=10000)
            sample_trends = mat.cumsum(axis=1).cumsum(axis=1)  # from slope changes to actual values
            sample_trends = sample_trends * single_diff  # scaled by the actual meaning of the slope
        elif prophet_obj.growth == "logistic":
            mat = _make_trend_shift_matrix(mean_delta, change_likelihood, n_length, k=1000)
            cap_scaled = (forecast_df["cap"] / prophet_obj.y_scale).values
            sample_trends = prophet_logistic_uncertainty(mat, deltas, prophet_obj, cap_scaled, future_time_series)
        else:
            raise NotImplementedError(f"Unsupported growth mode: {prophet_obj.growth!r}")

    # add gaussian noise based on historical levels
    sigma = prophet_obj.params["sigma_obs"][0]
    historical_variance = np.random.normal(scale=sigma, size=sample_trends.shape)
    full_samples = sample_trends + historical_variance
    # get quantiles and scale back (prophet scales the data before fitting, so sigma and deltas are scaled)
    width_split = (1 - interval_width) / 2
    quantiles = np.array([width_split, 1 - width_split]) * 100  # get quantiles from width
    quantiles = np.percentile(full_samples, quantiles, axis=0)
    # Prophet scales all the data before fitting and predicting, y_scale re-scales it to original values
    quantiles = quantiles * prophet_obj.y_scale

    # quantiles are centered around 0, so the lower one is (typically) negative
    forecast_df["yhat_lower"] = forecast_df.yhat + quantiles[0]
    forecast_df["yhat_upper"] = forecast_df.yhat + quantiles[1]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,7 @@ | ||
#date: 2021-09-01T13:24:25Z | ||
#url: https://api.github.com/gists/38930b040f206e59845fbc30abf8d3df | ||
#owner: https://api.github.com/users/orenmatar | ||
|
||
# Excerpt: add gaussian observation noise at the historically fitted level.
# sigma_obs is the model's fitted observation noise on Prophet's scaled data
# (same snippet appears inside add_prophet_uncertainty above).
sigma = prophet_obj.params["sigma_obs"][0]
historical_variance = np.random.normal(scale=sigma, size=sample_trends.shape)
full_samples = sample_trends + historical_variance
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
#date: 2021-09-01T13:26:54Z | ||
#url: https://api.github.com/gists/0e40479755cd580df01b3be552ea20b2 | ||
#owner: https://api.github.com/users/rodrigogarces | ||
|
||
# Show live per-core CPU frequency, refreshing every 0.1 s.
# The [c] bracket trick keeps grep from matching its own command line.
watch -n.1 "grep \"^[c]pu MHz\" /proc/cpuinfo"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,105 @@ | ||
#date: 2021-09-01T13:22:19Z | ||
#url: https://api.github.com/gists/22f7dffeeee7db816ef4236852e02cd8 | ||
#owner: https://api.github.com/users/xitedemon | ||
|
||
#!/bin/bash

# The following steps, which were tested on Ubuntu 18.04 LTS and on the Ubuntu-powered Linux for Windows Subsystem on Windows,
# will:
#
# * Compile a recent version of OpenSSL (you can skip this step and use your package maintainer's version if you prefer, but you
# might have to tweak a few bits)
# * Create a separate set of configuration files suitable for configuring a basic CA capable of signing EV certificates
# * Create such a CA (hackerca.local / HackerCA EV Root CA)
# * Create a certificate request for a site, hackersite.local, belonging to company "Barclays PLC [GB]"
# * Create that certificate, signing it as the CA and attaching the additional data required of an EV certificate belonging
# to that company
#
# To replicate my experiment, all that remains for you to do is:
# * Install ca.crt (the CA's root certificate) into your operating system or browser's certificate store (and mark it as trusted,
# if necessary). Also: add the CA's OID (I'm using 2.16.840.1.114028.10.1.2) to the expected OIDs (in Windows, this can be found
# in Certificate Manager by right-clicking the certificate, clicking Properties, then the Extended Validation tab; compare to
# a known EV-capable CA's record if you need a clue)
# This represents a step that can be automated by a network administrator on a corporate network
# * Update your hosts file with e.g. "127.0.0.1 hackersite.local" so that requests come to your site
# * Either set up a webserver using SSL key website.key and certificate website.crt or else just use OpenSSL's "s_server" and
# "-www" switches (as described at the very bottom) to set up a very basic server
# * Visit https://hackersite.local/ in your web browser
#
# My results -
# * Internet Explorer 11 and Edge 17 show the full company name - they fall for the spoofing
# * Firefox, Chrome, Opera, and Safari (both MacOS and iOS) all refrain from showing the company name in this instance (although
# they still allow the connection - we REALLY need some kind of 'require-ev' flag; more ideas on that in a future post!); note
# that this is true even where the browser shares Windows' certificate store!

# Install OpenSSL (I'm using 1.1.1pre9)
wget https://www.openssl.org/source/openssl-1.1.1-pre9.tar.gz
tar xzf openssl-1.1.1-pre9.tar.gz
rm openssl-1.1.1-pre9.tar.gz
cd openssl-1.1.1-pre9
./config --prefix=/usr/local --openssldir=/usr/local -Wl,--enable-new-dtags,-rpath,'$(LIBRPATH)'
make
make test
sudo make install
openssl version # should report e.g. "OpenSSL 1.1.1-pre9 (beta) 21 Aug 2018"
rm -rf openssl-1.1.1-pre9

# Generate CA private key (will ask for password)
openssl genrsa -aes256 -out ca.key 4096

# Make a copy of openssl configuration and add our own optional sections with CA extensions
# Note - 2.16.840.1.114028.10.1.2 is Entrust EV CPS, we're "borrowing" their OID (https://en.wikipedia.org/wiki/Extended_Validation_Certificate#Extended_Validation_certificate_identification)
# 2.23.140.1.1 is the Extended Validation Guidelines (https://cabforum.org/object-registry/#Object-Registry-of-the-CA-Browser-Forum)
cp /usr/local/openssl.cnf openssl.cnf
printf "
[danq_ca_ext]
subjectKeyIdentifier=hash
authorityKeyIdentifier=keyid:always,issuer
basicConstraints=critical,CA:true
keyUsage=critical,digitalSignature,cRLSign,keyCertSign
[new_oids]
trustList=2.16.840.1.113730.1.900
# these four are already defined in my OpenSSL, but they're here for if you're using an older version:
#businessCategory=2.5.4.15
#jurisdictionOfIncorporationLocalityName=1.3.6.1.4.1.311.60.2.1.1
#jurisdictionOfIncorporationStateOrProvinceName=1.3.6.1.4.1.311.60.2.1.2
#jurisdictionOfIncorporationCountryName=1.3.6.1.4.1.311.60.2.1.3
" >> openssl.cnf

# Create a configuration file with EV certificate extensions
printf "
[danq_website_ext]
#trustList=ASN1:UTF8String:https://mytestdomain.local/EVTrustList.etl
subjectKeyIdentifier=hash
authorityKeyIdentifier=keyid:always,issuer
keyUsage=critical,digitalSignature,keyEncipherment
extendedKeyUsage=serverAuth,clientAuth
authorityInfoAccess=OCSP;URI:http://ocsp.hackerca.local/
authorityInfoAccess=caIssuers;URI:http://hackerca.local/ca.html
crlDistributionPoints=URI:http://ocsp.hackerca.local/ca.crl
basicConstraints=critical,CA:false
certificatePolicies=@entrust,2.23.140.1.1
subjectAltName=DNS:hackersite.local
[entrust]
policyIdentifier=2.16.840.1.114028.10.1.2
CPS.1=http://hackerca.local/rpa
" > extensions.cnf

# Make serial number incrementer file, must have even number of digits
printf "012345" > ca.srl

# Generate CA root certificate signed with the CA key
OPENSSL_CONF=openssl.cnf openssl req -new -x509 -key ca.key -out ca.crt -days 3650 -set_serial 0 -subj "/C=GB/O=HackerCA/OU=hackerca.local/CN=HackerCA EV Root CA" -extensions danq_ca_ext

# Generate website key (as this is only an experimental key, a 30-day duration is plenty sufficient)
# We'll be generating a certificate that spoofs Barclays PLC, a major UK bank - there's nothing special about them; just a random pick
openssl req -new -keyout website.key -out website.csr -days 30 -subj "/C=GB/ST=London/L=London/jurisdictionC=GB/O=Barclays PLC/businessCategory=Private Organization/OU=Web and Infrastructure Services/CN=hackersite.local"
openssl rsa -in website.key -out website.key

# Sign the website's CSR and provide a certificate with all the relevant EV extensions
OPENSSL_CONF=openssl.cnf openssl x509 -req -in website.csr -out website.crt -CAkey ca.key -CA ca.crt -days 30 -trustout -addtrust clientAuth -addtrust serverAuth -extfile extensions.cnf -extensions danq_website_ext

# Launch openssl webserver (on port 443, hence sudo)
sudo openssl s_server -accept 443 -cert website.crt -key website.key -www
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,8 @@ | ||
#date: 2021-09-01T13:25:02Z | ||
#url: https://api.github.com/gists/42cf00a0eca2bb0f20713601039e1028 | ||
#owner: https://api.github.com/users/orenmatar | ||
|
||
# Excerpt: hard-coded 10th/90th percentiles (an 80% interval) of the simulated
# samples; the widths are added onto the mean forecast yhat.
quantiles = np.array([10, 90])
quantiles = np.percentile(full_samples, quantiles, axis=0)
forecast_df["yhat_lower"] = forecast_df.yhat + quantiles[0]
forecast_df["yhat_upper"] = forecast_df.yhat + quantiles[1]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,6 @@ | ||
#date: 2021-09-01T13:22:26Z | ||
#url: https://api.github.com/gists/6d40dea794aab1bcb912de794a4021f1 | ||
#owner: https://api.github.com/users/orenmatar | ||
|
||
# Excerpt: the double cumulative sum turns per-step slope *changes* into trend
# values; multiplying by the time-step size gives the slopes their real scale.
sample_trends = matrix.cumsum(axis=1).cumsum(axis=1)
sample_trends = sample_trends * single_diff
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,6 @@ | ||
#date: 2021-09-01T13:27:36Z | ||
#url: https://api.github.com/gists/4cc12709f36b061219bc4790ea663389 | ||
#owner: https://api.github.com/users/orenmatar | ||
|
||
# Excerpt: average each value with its one-step-lagged copy, spreading every
# trend shift over two consecutive time steps.
n_mat = np.hstack([np.zeros((len(matrix), 1)), matrix])[:, :-1] # elements moved by one, and zeros at the start
matrix = (n_mat + matrix) / 2
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.