Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion macrobond_data_api/com/_metadata_directory.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
try:
from pywintypes import TimeType
except ImportError as ex_:
...
pass

if TYPE_CHECKING: # pragma: no cover
from .com_types.connection import Connection
Expand Down
2 changes: 1 addition & 1 deletion macrobond_data_api/com/com_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
# winreg is not available on linux so mypy will fail on build server as it is running on linux
from winreg import OpenKey, QueryValueEx, HKEY_CLASSES_ROOT, HKEY_CURRENT_USER # type: ignore
except ImportError:
...
pass


def _test_regedit_assembly() -> Optional[str]:
Expand Down
4 changes: 2 additions & 2 deletions macrobond_data_api/common/types/_repr_html_sequence.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,11 @@ def __init__(self, items: Sequence[_TypeVar]) -> None:

@overload
def __getitem__(self, i: int) -> _TypeVar:
...
pass

@overload
def __getitem__(self, s: slice) -> Sequence[_TypeVar]:
...
pass

def __getitem__(self, key): # type: ignore
return _ReprHtmlSequence(self.items[key]) if isinstance(key, slice) else self.items[key]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,11 +64,11 @@ def _repr_html_(self) -> str:

@overload
def __getitem__(self, i: int) -> VintageSeries:
...
pass

@overload
def __getitem__(self, s: slice) -> Sequence[VintageSeries]:
...
pass

def __getitem__(self, key): # type: ignore
return self.series[key]
Expand Down
4 changes: 2 additions & 2 deletions macrobond_data_api/common/types/metadata_value_information.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,11 +111,11 @@ def to_dict(self) -> List[TypedDictMetadataValueInformationItem]:

@overload
def __getitem__(self, i: int) -> MetadataValueInformationItem:
...
pass

@overload
def __getitem__(self, s: slice) -> List[MetadataValueInformationItem]:
...
pass

def __getitem__(self, key): # type: ignore
return self.entities[key]
Expand Down
4 changes: 2 additions & 2 deletions macrobond_data_api/common/types/search_result.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,11 +51,11 @@ def _repr_html_(self) -> str:

@overload
def __getitem__(self, i: int) -> "Metadata":
...
pass

@overload
def __getitem__(self, s: slice) -> Sequence["Metadata"]:
...
pass

def __getitem__(self, key): # type: ignore
return self.entities[key]
Expand Down
4 changes: 2 additions & 2 deletions macrobond_data_api/common/types/search_result_long.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,11 +48,11 @@ def _repr_html_(self) -> str:

@overload
def __getitem__(self, i: int) -> str:
...
pass

@overload
def __getitem__(self, s: slice) -> List[str]:
...
pass

def __getitem__(self, key): # type: ignore
return self.entities[key]
Expand Down
4 changes: 2 additions & 2 deletions macrobond_data_api/common/types/start_or_end_point.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,12 +86,12 @@ def point_in_time(
mm: int = None, # pylint: disable = invalid-name
dd: int = None, # pylint: disable = invalid-name
) -> "StartOrEndPoint":
...
pass

@overload
@staticmethod
def point_in_time(yyyy_or_datetime: datetime) -> "StartOrEndPoint":
...
pass

@staticmethod
def point_in_time(
Expand Down
4 changes: 2 additions & 2 deletions macrobond_data_api/common/types/unified_series.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,11 +146,11 @@ def _repr_html_(self) -> str:

@overload
def __getitem__(self, i: int) -> UnifiedSeries:
...
pass

@overload
def __getitem__(self, s: slice) -> List[UnifiedSeries]:
...
pass

def __getitem__(self, key): # type: ignore
return self.series[key]
Expand Down
2 changes: 1 addition & 1 deletion macrobond_data_api/util/transfer_performance_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ def run_integrity_tests(self, indicator: bool, times: int) -> None:
for i in range(0, times):
result = _Result.run_integrity_test(self.size_kB, i)
if result.error is not None:
...
pass
# print(f" Error: {str(result.error)} ", end="", flush=True)
elif indicator:
print(".", end="", flush=True)
Expand Down
7 changes: 3 additions & 4 deletions macrobond_data_api/web/_web_api_revision.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,7 @@
from macrobond_data_api.common.types._repr_html_sequence import _ReprHtmlSequence
from ._split_in_to_chunks import split_in_to_chunks

from .session import ProblemDetailsException, Session, _raise_on_error

from .session import ProblemDetailsException, Session

if TYPE_CHECKING: # pragma: no cover
from .web_api import WebApi
Expand Down Expand Up @@ -275,8 +274,8 @@ def get_many_series_with_revisions(
with self.session.series.post_fetch_all_vintage_series(
_create_web_revision_h_request(requests_chunkd), stream=True
) as response:
_raise_on_error(response)
ijson_items = ijson.items(response.raw, "item")
self.session.raise_on_error(response)
ijson_items = ijson.items(self.session.response_to_file_object(response), "item")
item: "SeriesWithVintagesResponse"
for item in ijson_items:
error_code = item.get("errorCode")
Expand Down
56 changes: 46 additions & 10 deletions macrobond_data_api/web/_web_only_api.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,18 @@
from datetime import datetime
from typing import TYPE_CHECKING, Any, List, Optional, Callable, Tuple
import warnings

import ijson

from macrobond_data_api.common.types import SearchResultLong
from macrobond_data_api.common.types._parse_iso8601 import _parse_iso8601

from .web_types.data_package_list_context import DataPackageListContextManager
from .web_types.data_package_list_state import DataPackageListState
from .web_types.data_pacakge_list_item import DataPackageListItem
from .web_types.data_package_list import DataPackageList
from .web_types.data_package_body import DataPackageBody

from .session import _raise_on_error
from .subscription_list import SubscriptionList

if TYPE_CHECKING: # pragma: no cover
Expand Down Expand Up @@ -91,7 +92,7 @@ def _get_data_package_list_iterative_pars_items(
return True


def get_data_package_list(self: "WebApi", if_modified_since: datetime = None) -> DataPackageList:
def get_data_package_list(self: "WebApi", if_modified_since: Optional[datetime] = None) -> DataPackageList:
# pylint: disable=line-too-long
"""
Get the items in the data package.
Expand Down Expand Up @@ -123,11 +124,12 @@ def get_data_package_list_iterative(
self: "WebApi",
body_callback: Callable[[DataPackageBody], Optional[bool]],
items_callback: Callable[[DataPackageBody, List[DataPackageListItem]], Optional[bool]],
if_modified_since: datetime = None,
if_modified_since: Optional[datetime] = None,
buffer_size: int = 200,
) -> Optional[DataPackageBody]:
# pylint: disable=line-too-long
"""
.. Important:: This method is deprecated. Use `macrobond_data_api.web.web_api.WebApi.get_data_package_list_chunked` instead.
Process the data package list in batches.
This is more efficient since the complete list does not have to be in memory.

Expand All @@ -142,26 +144,31 @@ def get_data_package_list_iterative(
items_callback : Callable[[macrobond_data_api.web.web_types.data_package_body.DataPackageBody, List[macrobond_data_api.web.web_types.data_pacakge_list_item.DataPackageListItem]], Optional[bool]]
The callback for each batch of items. Return True to continue processing.

if_modified_since : datetime
if_modified_since : datetime, optional
The timestamp of the property time_stamp_for_if_modified_since from the response of the previous call.
If not specified, all items will be returned.

buffer_size : int
buffer_size : int, optional
The maximum number of items to include in each callback
Returns
-------
`macrobond_data_api.web.web_types.data_package_body.DataPackageBody`
"""
# pylint: enable=line-too-long
warnings.warn(
"get_data_package_list_iterative is deprecated. Use get_data_package_list_chunked instead.",
DeprecationWarning,
2,
)

params = {}
body: Optional[DataPackageBody] = None

if if_modified_since:
params["ifModifiedSince"] = if_modified_since.isoformat()

with self._session.get("v1/series/getdatapackagelist", params=params, stream=True) as response:
_raise_on_error(response)
ijson_parse = ijson.parse(response.raw)
with self._session.get_or_raise("v1/series/getdatapackagelist", params=params, stream=True) as response:
ijson_parse = ijson.parse(self.session.response_to_file_object(response))

(
time_stamp_for_if_modified_since,
Expand All @@ -186,12 +193,41 @@ def get_data_package_list_iterative(
return body


def get_data_package_list_chunked(
self: "WebApi", if_modified_since: Optional[datetime] = None, chunk_size: int = 200
) -> DataPackageListContextManager:
# pylint: disable=line-too-long
"""
Process the data package list in chunks.
This is more efficient since the complete list does not have to be in memory and it can be processed while
downloading.

    Typically you want to pass the date of time_stamp_for_if_modified_since from the response of the previous call
to get incremental updates.

Parameters
----------
if_modified_since : datetime, optional
The timestamp of the property time_stamp_for_if_modified_since from the response of the previous call.
If not specified, all items will be returned.

chunk_size : int, optional
The maximum number of items to include in each List in DataPackageListContext.items
Returns
-------
`macrobond_data_api.web.web_types.data_package_list_context.DataPackageListContextManager`
"""
# pylint: enable=line-too-long
return DataPackageListContextManager(if_modified_since, chunk_size, self)


# Search


def entity_search_multi_filter_long(
self: "WebApi", *filters: "SearchFilter", include_discontinued: bool = False
) -> SearchResultLong:
# pylint: disable=line-too-long
"""
    Search for time series and other entities.
This call can return more results than `macrobond_data_api.common.api.Api.entity_search_multi_filter`,
Expand All @@ -204,12 +240,12 @@ def entity_search_multi_filter_long(
----------
*filters : `macrobond_data_api.common.types.search_filter.SearchFilter`
One or more search filters.
include_discontinued : bool
include_discontinued : bool, optional
Set this value to True in order to include discontinued entities in the search.

Returns
-------
`macrobond_data_api.common.types.search_result_long.SearchResultLong`
A `macrobond_data_api.common.types.search_result_long.SearchResultLong` object containing the names of the entities that match the search filters.
"""

def convert_filter_to_web_filter(_filter: "SearchFilter") -> "WebSearchFilter":
Expand Down
Loading