
Li/fix examples #907

Open
wants to merge 4 commits into mainline

9,825 changes: 9,825 additions & 0 deletions examples/ImageSearchLocalization/files.csv

Large diffs are not rendered by default.

82 changes: 82 additions & 0 deletions examples/ImageSearchLocalization/utils.py
@@ -0,0 +1,82 @@
import multiprocessing as mp
from urllib.request import urlretrieve
from pathlib import Path
import os
from functools import partial
from typing import List
import numpy as np
import zipfile
import glob
import subprocess


def download_data(data, download_dir, use_remote=True, in_docker=True, docker_path='http://host.docker.internal:8222/'):
    local_dir = download_dir
    if not local_dir.endswith('/'):
        local_dir += '/'
    if not use_remote:
        zip_file_url = 'https://marqo-public-datasets.s3.us-east-2.amazonaws.com/demos/ImageSearchLocalisation/images.zip'
        zip_file = download_file(zip_file_url, local_dir=local_dir)
        extract_zip(zip_file, local_dir=local_dir)
        # alternatively you can download the files individually
        # downloaded = download_parallel(urls=data['s3_uri'].tolist(), download_to_dir=local_dir)
        locators = glob.glob(local_dir + '*.jpg')

        # start a simple HTTP server so the local images are reachable from within docker
        server = subprocess.Popen(['python3', '-m', 'http.server', '8222', '--directory', local_dir],
                                  stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT)
        if in_docker:
            locators = [docker_path + os.path.basename(f) for f in locators]
    else:
        # use the remote S3 URIs directly as the image locators for indexing
        locators = data['s3_uri']

    return locators


def download_files(urls: List[str], local_dir: str) -> List[str]:
    results = []
    N = len(urls)
    for ii, url in enumerate(urls):
        result = download_file(url, local_dir)
        results.append(result)
        if ii % 10 == 0:
            print(f"{round(100 * (ii + 1) / N, 3)}%")
    return results


def download_file(url: str, local_dir: str) -> str:
    """Download a single file into local_dir, skipping the download if the file already exists.

    Args:
        url (str): the URL to download
        local_dir (str): local directory to download to

    Returns:
        str: the path of the downloaded (or pre-existing) local file
    """

    if not local_dir.endswith('/'):
        local_dir += '/'

    Path(local_dir).mkdir(exist_ok=True, parents=True)

    full_local_path = local_dir + os.path.basename(url)

    if not os.path.isfile(full_local_path):
        full_local_path, _ = urlretrieve(url, full_local_path)

    return full_local_path


def download_parallel(urls: List[str], download_to_dir: str, n_processes=8) -> List[str]:
    N = len(urls)
    print(f"downloading {N} urls to {download_to_dir} using {n_processes} processes")

    func = partial(download_files, local_dir=download_to_dir)

    urls_split = np.array_split(urls, n_processes)
    urls_split = [split.tolist() for split in urls_split]
    with mp.Pool(n_processes) as pool:
        results = pool.map(func, urls_split)

    # pool.map returns one list of paths per process; flatten to match the declared return type
    return [path for chunk in results for path in chunk]


def extract_zip(zip_file, local_dir):
    with zipfile.ZipFile(zip_file, 'r') as zip_ref:
        zip_ref.extractall(local_dir)
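
For context, a minimal usage sketch of these helpers (not part of the diff): it assumes files.csv exposes an `s3_uri` column, which is what `download_data` reads, that the script runs next to utils.py, and that the `image_location` and `_id` field names are purely illustrative.

# Hypothetical usage sketch; column and field names are assumptions, not part of this PR.
import os

import pandas as pd

from utils import download_data

data = pd.read_csv('examples/ImageSearchLocalization/files.csv')  # assumes an 's3_uri' column
locators = download_data(
    data,
    download_dir='./images/',
    use_remote=False,   # download the zip and serve the files locally on port 8222
    in_docker=True,     # rewrite paths so a dockerised search engine can fetch them
)
# build simple documents for indexing: one dict per image locator
documents = [{'image_location': str(loc), '_id': os.path.basename(str(loc))} for loc in locators]
print(f"prepared {len(documents)} documents")
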
3 changes: 3 additions & 0 deletions examples/podcast-search/data/podcast_data.csv
@@ -0,0 +1,3 @@
name,description
"The water crisis in Jackson, Mississippi","What’s happening in Jackson is hardly unique: Cities and states across the US are setting themselves up for failure by postponing expensive but critical work on aging water infrastructure. Climate change is making things worse, faster."
"Your long Covid questions, answered","Millions of people have long Covid; countless more could get it. Dr. Monica Verduzco-Gutierrez answers question from Today, Explained listeners about the condition that has even doctors bewildered."