Skip to content

Commit

Permalink
add new files
Browse files Browse the repository at this point in the history
  • Loading branch information
chenxinye committed Jul 9, 2024
1 parent e3fe74f commit 00d0f59
Show file tree
Hide file tree
Showing 46 changed files with 167 additions and 69 deletions.
2 changes: 1 addition & 1 deletion build/lib.linux-x86_64-3.11/fABBA/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from .separate.aggregation import aggregate


__version__ = '1.2.6'
__version__ = '1.2.7'
from .load_datasets import load_images, loadData
from .fabba import (image_compress, image_decompress, ABBAbase, ABBA,
get_patches, patched_reconstruction, fABBA,
Expand Down
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
9 changes: 6 additions & 3 deletions build/lib.linux-x86_64-3.11/fABBA/fabba.py
Original file line number Diff line number Diff line change
Expand Up @@ -289,7 +289,7 @@ def symbolsAssign(clusters, alphabet_set=0):
----------
Return:
string (list of string), alphabets(numpy.ndarray): for the
corresponding symbolic sequence and for mapping from symbols to labels or
labels to symbols, respectively.
Expand All @@ -310,8 +310,11 @@ def symbolsAssign(clusters, alphabet_set=0):
'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v',
'w', 'x', 'y', 'z']

elif isinstance(alphabet_set, list) and len(alphabets):
alphabets = alphabet_set
elif isinstance(alphabet_set, list):
if len(clusters) <= len(alphabet_set):
alphabets = alphabet_set
else:
raise ValueError("Please ensure ``alphabet_set`` is at least as long as ``clusters``.")

else:
alphabets = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l',
Expand Down
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
17 changes: 8 additions & 9 deletions build/lib.linux-x86_64-3.11/fABBA/jabba/jabba.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,8 +67,11 @@ def symbolsAssign(clusters, alphabet_set=0):
'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v',
'w', 'x', 'y', 'z']

elif isinstance(alphabet_set, list) and len(alphabets):
alphabets = alphabet_set
elif isinstance(alphabet_set, list):
if len(clusters) <= len(alphabet_set):
alphabets = alphabet_set
else:
raise ValueError("Please ensure ``alphabet_set`` is at least as long as ``clusters``.")

else:
alphabets = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l',
Expand Down Expand Up @@ -679,13 +682,13 @@ def piece_to_symbol(self, piece):
def recast_shape(self, reconstruct_list):
"""Reshape the multiarray to the same shape of the input, the shape might be expanded or squeezed."""
size_list = [len(i) for i in reconstruct_list]
fixed_len = self.recap_shape[1] * self.recap_shape[2]
fixed_len = np.prod(self.recap_shape[1:])

if fixed_len > np.max(size_list):
warnings.warn('The reconstructed shape has been expanded.', ShapeWarning)
warnings.warn('The reconstructed shape has been expanded.')

elif fixed_len < np.max(size_list):
warnings.warn('The reconstructed shape has been squeezed.', ShapeWarning)
warnings.warn('The reconstructed shape has been squeezed.')

org_size = len(reconstruct_list)

Expand Down Expand Up @@ -1166,10 +1169,6 @@ def fillna(series, method='ffill'):



class ShapeWarning(EncodingWarning):
pass


def zip_longest(*iterables, fillvalue=None):
# zip_longest('ABCD', 'xy', fillvalue='-') → Ax By C- D-

Expand Down
Binary file not shown.
Binary file not shown.
Binary file modified build/temp.linux-x86_64-3.11/fABBA/extmod/chainApproximation_c.o
Binary file not shown.
Binary file modified build/temp.linux-x86_64-3.11/fABBA/extmod/chainApproximation_cm.o
Binary file not shown.
Binary file modified build/temp.linux-x86_64-3.11/fABBA/extmod/fabba_agg_c.o
Binary file not shown.
Binary file modified build/temp.linux-x86_64-3.11/fABBA/extmod/fabba_agg_cm.o
Binary file not shown.
Binary file modified build/temp.linux-x86_64-3.11/fABBA/extmod/fabba_agg_cm_win.o
Binary file not shown.
Binary file modified build/temp.linux-x86_64-3.11/fABBA/extmod/inverse_tc.o
Binary file not shown.
Binary file modified build/temp.linux-x86_64-3.11/fABBA/jabba/aggmem.o
Binary file not shown.
Binary file modified build/temp.linux-x86_64-3.11/fABBA/jabba/aggwin.o
Binary file not shown.
Binary file modified build/temp.linux-x86_64-3.11/fABBA/jabba/compmem.o
Binary file not shown.
Binary file modified build/temp.linux-x86_64-3.11/fABBA/jabba/inversetc.o
Binary file not shown.
Binary file modified build/temp.linux-x86_64-3.11/fABBA/separate/aggregation_c.o
Binary file not shown.
Binary file modified build/temp.linux-x86_64-3.11/fABBA/separate/aggregation_cm.o
Binary file not shown.
Binary file not shown.
Binary file renamed dist/fABBA-1.2.6.tar.gz → dist/fABBA-1.2.7.tar.gz
Binary file not shown.
2 changes: 1 addition & 1 deletion fABBA.egg-info/PKG-INFO
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: fABBA
Version: 1.2.6
Version: 1.2.7
Summary: An efficient aggregation method for the symbolic representation of temporal data
Home-page: https://github.com/nla-group/fABBA
Author: Xinye Chen, Stefan Güttel
Expand Down
Binary file modified fABBA/__pycache__/__init__.cpython-311.pyc
Binary file not shown.
Binary file modified fABBA/__pycache__/digitization.cpython-311.pyc
Binary file not shown.
Binary file modified fABBA/__pycache__/fabba.cpython-311.pyc
Binary file not shown.
Binary file modified fABBA/__pycache__/load_datasets.cpython-311.pyc
Binary file not shown.
Binary file modified fABBA/extmod/__pycache__/__init__.cpython-311.pyc
Binary file not shown.
Binary file modified fABBA/extmod/fabba_agg_c.cpython-311-x86_64-linux-gnu.so
Binary file not shown.
58 changes: 52 additions & 6 deletions fABBA/jabba/.ipynb_checkpoints/jabba-checkpoint.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,8 +67,11 @@ def symbolsAssign(clusters, alphabet_set=0):
'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v',
'w', 'x', 'y', 'z']

elif isinstance(alphabet_set, list) and len(alphabets):
alphabets = alphabet_set
elif isinstance(alphabet_set, list):
if len(clusters) <= len(alphabet_set):
alphabets = alphabet_set
else:
raise ValueError("Please ensure ``alphabet_set`` is at least as long as ``clusters``.")

else:
alphabets = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l',
Expand Down Expand Up @@ -201,7 +204,7 @@ class JABBA(object):
Scale the length of compression pieces. The larger the value is, the more important the length information is.
Therefore, it can mitigate problems resulting from peak shift.
auto_digitize - boolean, default=True
auto_digitize - boolean, default=False
Enable auto digitization without prior knowledge of alpha.
Expand Down Expand Up @@ -676,15 +679,33 @@ def piece_to_symbol(self, piece):



def recast_shape(self, reconstruct_list, pad_token=-1):
def recast_shape(self, reconstruct_list):
"""Reshape the multiarray to the same shape of the input, the shape might be expanded or squeezed."""
size_list = [len(i) for i in reconstruct_list]
fixed_len = np.prod(self.recap_shape[1:])

if fixed_len > np.max(size_list):
warnings.warn('The reconstructed shape has been expanded.')

elif fixed_len < np.max(size_list):
warnings.warn('The reconstructed shape has been squeezed.')

org_size = len(reconstruct_list)

if self.recap_shape is not None:
padded = zip(*itertools.zip_longest(*reconstruct_list, fillvalue=pad_token))
reconstruct_list.append(fixed_len * [-1])
pad_token = [np.mean(i) for i in reconstruct_list]
padded = zip(*zip_longest(*reconstruct_list, fillvalue=pad_token))

padded = list(padded)
padded = np.asarray(padded).reshape(self.recap_shape)
padded = np.asarray(padded)
padded = padded[:org_size, :fixed_len].reshape(-1, *self.recap_shape[1:])

else:
print(f"""Please ensure your fitted series (not this function input) is numpy.ndarray type with dimensions > 2.""")

return padded



def string_separation(self, symbols, num_pieces):
Expand Down Expand Up @@ -1144,3 +1165,28 @@ def fillna(series, method='ffill'):
series[np.isnan(series)] = 0

return series




def zip_longest(*iterables, fillvalue=None):
    """Aggregate elements from each iterable into tuples, padding shorter
    iterables once they are exhausted.

    Unlike ``itertools.zip_longest``, ``fillvalue`` may be an indexable
    sequence giving a *distinct* pad value per iterable position:
    ``fillvalue[i]`` pads the ``i``-th iterable (``recast_shape`` relies on
    this, passing one mean value per reconstructed series).  If ``fillvalue``
    is not indexable at position ``i`` (e.g. a scalar, ``None``, or a string
    shorter than the number of iterables), ``fillvalue`` itself is used as
    the pad value, matching ``itertools.zip_longest`` semantics:

    zip_longest('ABCD', 'xy', fillvalue='-') -> Ax By C- D-
    """
    iterators = list(map(iter, iterables))
    num_active = len(iterators)
    if not num_active:
        # No iterables at all: empty generator.
        return

    while True:
        values = []
        for i, iterator in enumerate(iterators):
            try:
                value = next(iterator)
            except StopIteration:
                num_active -= 1
                if not num_active:
                    # Every iterable is exhausted: stop yielding.
                    return
                # Prefer a per-iterable pad value; fall back to the scalar
                # fillvalue when it cannot be indexed (fixes the previous
                # crash for the default fillvalue=None and scalar strings).
                try:
                    pad = fillvalue[i]
                except (TypeError, IndexError):
                    pad = fillvalue
                iterators[i] = itertools.repeat(pad)
                value = pad
            values.append(value)
        yield tuple(values)
Binary file modified fABBA/jabba/__pycache__/__init__.cpython-311.pyc
Binary file not shown.
Binary file modified fABBA/jabba/__pycache__/fkmns.cpython-311.pyc
Binary file not shown.
Binary file modified fABBA/jabba/__pycache__/jabba.cpython-311.pyc
Binary file not shown.
Binary file modified fABBA/jabba/__pycache__/preprocessing.cpython-311.pyc
Binary file not shown.
2 changes: 1 addition & 1 deletion fABBA/jabba/jabba.py
Original file line number Diff line number Diff line change
Expand Up @@ -682,7 +682,7 @@ def piece_to_symbol(self, piece):
def recast_shape(self, reconstruct_list):
"""Reshape the multiarray to the same shape of the input, the shape might be expanded or squeezed."""
size_list = [len(i) for i in reconstruct_list]
fixed_len = self.recap_shape[1] * self.recap_shape[2]
fixed_len = np.prod(self.recap_shape[1:])

if fixed_len > np.max(size_list):
warnings.warn('The reconstructed shape has been expanded.')
Expand Down
Binary file modified fABBA/separate/__pycache__/__init__.cpython-311.pyc
Binary file not shown.
Binary file modified img.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
146 changes: 98 additions & 48 deletions instruct.ipynb

Large diffs are not rendered by default.

Binary file modified inverse_img.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.

0 comments on commit 00d0f59

Please sign in to comment.