Skip to content

Commit

Permalink
Merge pull request #84 from slacgismo/hotfix/pvlib
Browse files Browse the repository at this point in the history
Hotfix/pvlib
  • Loading branch information
Thistleman committed Jul 12, 2022
2 parents 6059a24 + cb68683 commit 53acd19
Show file tree
Hide file tree
Showing 9 changed files with 34 additions and 28 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ jobs:
conda config --set anaconda_upload no
echo yes | anaconda login --username ${{ secrets.ANACONDA_CLOUD_USERNAME }} --password ${{ secrets.ANACONDA_CLOUD_PASSWORD }}
git fetch --prune --unshallow --tags
VERSION_FROM_GIT_TAG=$(git tag --list "v*[0-9]" --sort=version:refname | tail -1 | cut -c 2-) conda build . -c conda-forge -c slacgismo --numpy 1.22.0
VERSION_FROM_GIT_TAG=$(git tag --list "v*[0-9]" --sort=version:refname | tail -1 | cut -c 2-) conda build . -c anaconda -c pvlib -c slacgismo -c conda-forge --numpy 1.22.0
echo '::set-output name=gitversion::$(git tag --list "v*[0-9]" --sort=version:refname | tail -1 | cut -c 2-)'
- name: Upload the Anaconda Package
Expand Down
12 changes: 7 additions & 5 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,9 @@ jobs:

- name: Install Dependencies
run: |
sudo pip install -r requirements.txt
sudo pip install awscli coverage
sudo $pythonLocation/bin/python3 -m pip install -r requirements.txt
sudo $pythonLocation/bin/python3 -m pip install awscli coverage
$pythonLocation/bin/python3 -m pip list
- name: Setup Mosek License File
run: |
Expand All @@ -33,7 +34,7 @@ jobs:
# Current unit test is not consistent. Occasionally fails despite usually passing. Needs to be fixed.
- name: Run Unit Tests
run: sudo coverage run -m unittest
run: sudo $pythonLocation/bin/python3 -m coverage run -m unittest

build-pypi:
runs-on: ubuntu-latest
Expand Down Expand Up @@ -89,5 +90,6 @@ jobs:
conda install anaconda-client
conda config --set anaconda_upload no
git fetch --prune --unshallow --tags
VERSION_FROM_GIT_TAG=$(git tag --list "v*[0-9]" --sort=version:refname | tail -1 | cut -c 2-)test conda build . -c conda-forge -c slacgismo --numpy 1.22.0
echo '::set-output name=gitversion::$(git tag --list "v*[0-9]" --sort=version:refname | tail -1 | cut -c 2-)'
VERSION_FROM_GIT_TAG=$(git tag --list "v*[0-9]" --sort=version:refname | tail -1 | cut -c 2-)test conda build . -c anaconda -c pvlib -c slacgismo -c conda-forge --numpy 1.22.0
echo '::set-output name=gitversion::$(git tag --list "v*[0-9]" --sort=version:refname | tail -1 | cut -c 2-)'
2 changes: 1 addition & 1 deletion conda_recipe/meta.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ requirements:
- seaborn
- requests
- scikit-learn
- pvlib-python
- pvlib
- cvxpy
- statistical-clear-sky
- pv-system-profiler
Expand Down
11 changes: 6 additions & 5 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,12 +26,13 @@
["git", "tag", "--list", "v*[0-9]", "--sort=version:refname"],
stdout=subprocess.PIPE,
)
tags = git_tags.stdout.read()
git_tags.stdout.close()
tags = tags.decode("utf-8").split("\n")
tags.sort()

gt_decoded = git_tags.communicate()[0].decode()
tag_list = gt_decoded.split("\n")
tag_list.sort()

VERSION_FROM_GIT_TAG = tag_list[-1][1:]
# PEP 440 won't accept the v in front, so here we remove it, strip the new line and decode the byte stream
VERSION_FROM_GIT_TAG = tags[-1][1:]

with open((here / "requirements.txt"), encoding="utf-8") as f:
install_requires = f.read().splitlines()
Expand Down
4 changes: 2 additions & 2 deletions solardatatools/algorithms/shade.py
Original file line number Diff line number Diff line change
Expand Up @@ -315,7 +315,7 @@ def transform_data(self, power=8):
agg_by_azimuth = pd.DataFrame(
data=normalized.T,
index=np.arange(normalized.shape[1]),
columns=np.linspace(0, 1, 2 ** power),
columns=np.linspace(0, 1, 2**power),
)
agg_by_azimuth["delta"] = my_round(
delta_cooper(self.dh.day_index.dayofyear.values), 1
Expand Down Expand Up @@ -399,7 +399,7 @@ def batch_process(data, mask, power=8, scale=None):
"""
if scale is None:
scale = 1
N = 2 ** power
N = 2**power
output = np.zeros((N, data.shape[1]))
xs_new = np.linspace(0, 1, N)
for col_ix in range(data.shape[1]):
Expand Down
2 changes: 1 addition & 1 deletion solardatatools/circular_statistics.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ def rayleightest(data, axis=None, weights=None):
tmp = (
1.0
+ (2.0 * z - z * z) / (4.0 * n)
- (24.0 * z - 132.0 * z ** 2.0 + 76.0 * z ** 3.0 - 9.0 * z ** 4.0)
- (24.0 * z - 132.0 * z**2.0 + 76.0 * z**3.0 - 9.0 * z**4.0)
/ (288.0 * n * n)
)

Expand Down
22 changes: 13 additions & 9 deletions solardatatools/data_quality.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,20 +9,22 @@
from scipy.stats import mode
from solardatatools.signal_decompositions import tl1_l2d2p365


def make_quality_flags(
    density_scores,
    linearity_scores,
    density_lower_threshold=0.6,
    density_upper_threshold=1.05,
    linearity_threshold=0.1,
):
    """Convert per-day density and linearity scores into boolean quality flags.

    :param density_scores: array of data-density scores, one entry per day
    :param linearity_scores: array of linearity scores, one entry per day
    :param density_lower_threshold: minimum acceptable density score
    :param density_upper_threshold: maximum acceptable density score
    :param linearity_threshold: maximum acceptable linearity score
    :return: tuple ``(density_flags, linearity_flags)`` of boolean arrays;
        ``True`` marks a day that passes the corresponding check
    """
    # A day passes the density check only when its score falls strictly
    # inside the open interval (lower, upper).
    density_flags = np.logical_and(
        density_scores > density_lower_threshold,
        density_scores < density_upper_threshold,
    )
    # Low linearity score == good (not dominated by interpolated data).
    linearity_flags = linearity_scores < linearity_threshold
    return density_flags, linearity_flags


def make_density_scores(
data_matrix,
threshold=0.2,
Expand Down Expand Up @@ -50,35 +52,37 @@ def make_density_scores(
out = tuple(out)
return out


def make_linearity_scores(data_matrix, capacity, density_baseline):
    """Score each day (column) of a power matrix for linear, infilled data.

    Consecutive row differences are rounded and the most common difference in
    each column is found with :func:`scipy.stats.mode`. A long run of one
    repeated difference is the signature of linearly interpolated (infilled)
    data, so the modal count, normalized by the column length and the density
    baseline, serves as the linearity score.

    :param data_matrix: 2D array; rows are presumably intra-day samples and
        columns are days (inferred from the row-diff construction — confirm
        against callers)
    :param capacity: system capacity; values below 0.5% of it are treated as
        missing
    :param density_baseline: density score used to normalize the mode counts
    :return: tuple ``(linearity_scores, infill_mask)``: a 1D score array (one
        entry per column) and a boolean array shaped like ``data_matrix``
        marking detected infill points
    """
    temp_mat = np.copy(data_matrix)
    # Treat near-zero production (< 0.5% of capacity) as missing data.
    temp_mat[temp_mat < 0.005 * capacity] = np.nan
    difference_mat = np.round(temp_mat[1:] - temp_mat[:-1], 4)
    modes, counts = mode(difference_mat, axis=0, nan_policy="omit")
    # Normalize across SciPy versions: releases before 1.9 return (1, n)
    # masked arrays (hence the old ``counts.data`` access), while newer
    # releases return plain 1-D ndarrays, where ``.data`` is a memoryview.
    modes = np.atleast_2d(np.asarray(modes))
    counts = np.atleast_2d(np.asarray(counts))
    n = data_matrix.shape[0] - 1
    linearity_scores = counts.squeeze() / (n * density_baseline)
    # Label detected infill points with a boolean mask
    infill = np.zeros_like(data_matrix, dtype=bool)
    slct = linearity_scores >= 0.1
    reference_diffs = np.tile(modes[0][slct], (data_matrix.shape[0], 1))
    # Pad the difference matrix with a zero row on top (resp. bottom) so a
    # point is flagged when the difference on either side matches the mode.
    zero_row = np.zeros(data_matrix.shape[1]).reshape((1, -1))
    found_infill = np.logical_or(
        np.isclose(np.r_[zero_row, difference_mat][:, slct], reference_diffs),
        np.isclose(np.r_[difference_mat, zero_row][:, slct], reference_diffs),
    )
    infill[:, slct] = found_infill
    infill_mask = infill
    return linearity_scores, infill_mask


def daily_missing_data_simple(data_matrix, threshold=0.2, return_density_signal=False):
"""
This function takes a PV power data matrix and returns a boolean array,
Expand Down
5 changes: 2 additions & 3 deletions solardatatools/dataio.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,9 +53,8 @@ def get_pvdaq_data(sysid=2, api_key="DEMO_KEY", year=2011, delim=",", standardiz
# concatenate the list of yearly data frames
df = pd.concat(df_list, axis=0, sort=True)
if standardize:
print('\n')
df, _ = standardize_time_axis(df, datetimekey='Date-Time',
timeindex=False)
print("\n")
df, _ = standardize_time_axis(df, datetimekey="Date-Time", timeindex=False)
return df


Expand Down
2 changes: 1 addition & 1 deletion solardatatools/signal_decompositions.py
Original file line number Diff line number Diff line change
Expand Up @@ -233,7 +233,7 @@ def tl1_l1d1_l2d2p365(
)
+ c1 * cvx.norm1(cvx.multiply(tv_weights, cvx.diff(s_hat, k=1)))
+ c2 * cvx.norm(cvx.diff(s_seas, k=2))
+ c3 * beta ** 2
+ c3 * beta**2
)
constraints = [
signal[use_ixs] == s_hat[use_ixs] + s_seas[:n][use_ixs] + s_error[use_ixs],
Expand Down

0 comments on commit 53acd19

Please sign in to comment.