feat!: update code to work with updated pi backend
Asgmel03 committed Sep 11, 2024
1 parent 2a6a794 commit 0b0316d
Showing 2 changed files with 55 additions and 33 deletions.
65 changes: 44 additions & 21 deletions tagreader/web_handlers.py
@@ -19,7 +19,7 @@
 
 from tagreader.cache import BucketCache, SmartCache
 from tagreader.logger import logger
-from tagreader.utils import ReaderType, is_mac, is_windows, urljoin
+from tagreader.utils import IMSType, ReaderType, is_mac, is_windows, urljoin
 
 
 class MD4:
@@ -60,7 +60,7 @@ def get_auth_pi() -> HTTPKerberosAuth:
 
 
 def get_url_pi() -> str:
-    return r"https://piwebapi.equinor.com/piwebapi"
+    return r"https://piwebapi-qa.equinor.com/piwebapi"
 
 
 def get_auth_aspen(use_internal: bool = True):
@@ -144,6 +144,33 @@ def list_piwebapi_sources(
         logger.error(f"Could not decode JSON response: {e}")
 
 
+def get_piwebapi_source_to_webid_dict(
+    url: Optional[str] = None,
+    auth: Optional[Any] = None,
+    verify_ssl: Optional[bool] = True,
+) -> Dict[str, str]:
+    if url is None:
+        url = get_url_pi()
+
+    if auth is None:
+        auth = get_auth_pi()
+
+    if verify_ssl is None:
+        verify_ssl = get_verify_ssl()
+
+    if verify_ssl is False:
+        urllib3.disable_warnings(InsecureRequestWarning)
+
+    url_ = urljoin(url, "dataservers")
+    res = requests.get(url_, auth=auth, verify=verify_ssl)
+
+    res.raise_for_status()
+    try:
+        return {item["Name"]: item["WebId"] for item in res.json()["Items"]}
+    except JSONDecodeError as e:
+        logger.error(f"Could not decode JSON response: {e}")
+
+
 class BaseHandlerWeb(ABC):
     def __init__(
         self,
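
For orientation (not part of the diff): a minimal sketch of how the new helper can be used on its own, assuming the default Kerberos auth and base URL configured above. The source name "PINO" is a hypothetical placeholder.

    from tagreader.web_handlers import get_piwebapi_source_to_webid_dict

    # Build a {data server name: WebId} mapping from the /dataservers endpoint.
    source_to_webid = get_piwebapi_source_to_webid_dict()

    # "PINO" is a made-up source name used only for illustration.
    web_id = source_to_webid.get("PINO")
    print(web_id)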
@@ -741,22 +768,23 @@ def escape(s: str) -> str:
         )
 
     @staticmethod
-    def generate_search_query(
+    def generate_search_params(
         tag: Optional[str],
         desc: Optional[str],
         datasource: Optional[str],
         max: Optional[int] = 100,
     ) -> Dict[str, str]:
         q = []
         if tag is not None:
             q.extend([f"name:{PIHandlerWeb.escape(tag)}"])
         if desc is not None:
             q.extend([f"description:{PIHandlerWeb.escape(desc)}"])
         query = " AND ".join(q)
-        params = {"q": f"{query}"}
+        params = {"query": f"{query}"}
 
         if datasource is not None:
-            params["scope"] = f"pi:{datasource}"
+            params["dataserverwebid"] = (
+                f"{get_piwebapi_source_to_webid_dict()[datasource]}"
+            )
 
         return params
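
Not part of the commit: the dictionaries the renamed method now returns, illustrated with values taken from the behaviour above. Note that supplying a datasource now triggers a live lookup against the /dataservers endpoint; the WebId below is a made-up placeholder.

    # Query-only search, no datasource given:
    PIHandlerWeb.generate_search_params(tag="SINUSOID", desc=None, datasource=None)
    # -> {"query": "name:SINUSOID"}

    # With a datasource, the old scope string ("pi:<name>") is replaced by a WebId:
    # -> {"query": "name:SINUSOID", "dataserverwebid": "F1DS-hypothetical-webid"}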

@@ -866,23 +894,23 @@ def search(
         timeout: Optional[int] = None,
         return_desc: bool = True,
     ) -> Union[List[Tuple[str, str]], List[str]]:
-        params = self.generate_search_query(
+        params = self.generate_search_params(
             tag=tag, desc=desc, datasource=self.datasource
         )
-        url = urljoin(self.base_url, "search", "query")
+        url = urljoin(self.base_url, "points", "search")
         done = False
         ret = []
         while not done:
             data = self.fetch(url, params=params, timeout=timeout)
 
             for item in data["Items"]:
-                description = item["Description"] if "Description" in item else ""
+                description = item["Descriptor"] if "Descriptor" in item else ""
                 ret.append((item["Name"], description))
-            next_start = int(data["Links"]["Next"].split("=")[-1])
-            if int(data["Links"]["Last"].split("=")[-1]) >= next_start:
-                params["start"] = next_start # noqa
-            else:
-                done = True
+            # next_start = int(data["Links"]["Next"].split("=")[-1])
+            # if int(data["Links"]["Last"].split("=")[-1]) >= next_start:
+            #     params["start"] = next_start # noqa
+            # else:
+            done = True
 
         if not return_desc:
             ret = [x[0] for x in ret]
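
An aside (not part of the commit): with the pagination logic commented out, a single fetch is made and only the first page of Items is collected. The loop above implies that each item in the new points/search payload carries a Name and, optionally, a Descriptor; a minimal sketch of consuming such a payload, with hypothetical values:

    # Hypothetical response shape inferred from the loop above.
    data = {
        "Items": [
            {"Name": "SINUSOID", "Descriptor": "12 Hour Sine Wave"},
            {"Name": "CDT158"},  # Descriptor may be absent
        ]
    }
    ret = [
        (item["Name"], item["Descriptor"] if "Descriptor" in item else "")
        for item in data["Items"]
    ]
    # -> [("SINUSOID", "12 Hour Sine Wave"), ("CDT158", "")]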
Expand Down Expand Up @@ -922,17 +950,12 @@ def tag_to_web_id(self, tag: str) -> Optional[str]:
if self.web_id_cache and tag in self.web_id_cache:
return self.web_id_cache[tag]

params = self.generate_search_query(
params = self.generate_search_params(
tag=tag, datasource=self.datasource, desc=None
)
params["fields"] = "name;webid"
url = urljoin(self.base_url, "search", "query")
url = urljoin(self.base_url, "points", "search")
data = self.fetch(url, params=params)

if len(data["Errors"]) > 0:
msg = f"Received error from server when searching for WebId for {tag}: {data['Errors']}"
raise ValueError(msg)

if len(data["Items"]) > 1:
# Compare elements and if same, return the first
first = data["Items"][0]
Expand Down
23 changes: 11 additions & 12 deletions tests/test_PIHandlerREST.py
@@ -35,28 +35,27 @@ def test_escape_chars() -> None:
 
 
 def test_generate_search_query() -> None:
-    assert PIHandlerWeb.generate_search_query(
+    assert PIHandlerWeb.generate_search_params(
         tag="SINUSOID", desc=None, datasource=None
-    ) == {"q": "name:SINUSOID"}
-    assert PIHandlerWeb.generate_search_query(
-        tag=r"BA:*.1", desc=None, datasource="sourcename"
+    ) == {"query": "name:SINUSOID"}
+    assert PIHandlerWeb.generate_search_params(
+        tag=r"BA:*.1", desc=None, datasource=None
     ) == {
-        "q": r"name:BA\:*.1",
-        "scope": "pi:sourcename",
+        "query": r"name:BA\:*.1",
     }
-    assert PIHandlerWeb.generate_search_query(
+    assert PIHandlerWeb.generate_search_params(
         tag="BA:*.1", datasource=None, desc=None
     ) == {
-        "q": r"name:BA\:*.1",
+        "query": r"name:BA\:*.1",
     }
-    assert PIHandlerWeb.generate_search_query(
+    assert PIHandlerWeb.generate_search_params(
         desc="Concentration Reactor 1", datasource=None, tag=None
     ) == {
-        "q": r"description:Concentration\ Reactor\ 1",
+        "query": r"description:Concentration\ Reactor\ 1",
     }
-    assert PIHandlerWeb.generate_search_query(
+    assert PIHandlerWeb.generate_search_params(
         tag="BA:*.1", desc="Concentration Reactor 1", datasource=None
-    ) == {"q": r"name:BA\:*.1 AND description:Concentration\ Reactor\ 1"}
+    ) == {"query": r"name:BA\:*.1 AND description:Concentration\ Reactor\ 1"}
 
 
 def test_is_summary(pi_handler: PIHandlerWeb) -> None:
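
Not part of the diff: the escaping behaviour these assertions rely on, restated as standalone checks grounded in the expected values above:

    # Colons and spaces in tag/description text are backslash-escaped
    # before being embedded in the query string; "*" wildcards pass through.
    assert PIHandlerWeb.escape(r"BA:*.1") == r"BA\:*.1"
    assert PIHandlerWeb.escape("Concentration Reactor 1") == r"Concentration\ Reactor\ 1"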
