feat: hash function param to add file method class #304

Open · wants to merge 5 commits into master
84 changes: 42 additions & 42 deletions ipfshttpclient/client/__init__.py
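This PR threads a new hash_function keyword through client.add() so callers can choose the multihash used when adding content. A minimal usage sketch, assuming a locally running daemon; the file name and hash names below are illustrative and not taken from the diff:

import ipfshttpclient

with ipfshttpclient.connect() as client:
    # Default behaviour: sha2-256 multihash, CIDv0
    res_default = client.add("example.txt")

    # With this PR: request another multihash; non-default hashes are
    # normally paired with CIDv1, so it is passed explicitly here
    res_blake = client.add("example.txt", hash_function="blake2b-256", cid_version=1)

    print(res_default["Hash"], res_blake["Hash"])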
@@ -19,9 +19,9 @@
# This range inclusive-exclusive, so the daemon version must match
# `VERSION_MINIMUM <= version < VERSION_MAXIMUM`
# for it to be considered compatible.
VERSION_MINIMUM = "0.5.0"
VERSION_MINIMUM = "0.5.0"
VERSION_BLACKLIST = []
VERSION_MAXIMUM = "0.9.0"
VERSION_MAXIMUM = "0.11.0"

from . import base
from . import bitswap
@@ -38,16 +38,16 @@
from . import pin
from . import pubsub
from . import repo
#TODO: `from . import stats`
# TODO: `from . import stats`
from . import swarm
from . import unstable

from .. import encoding, exceptions, http, multipart, utils


def assert_version(version: str, minimum: str = VERSION_MINIMUM,
maximum: str = VERSION_MAXIMUM,
blacklist: ty.Iterable[str] = VERSION_BLACKLIST) -> None:
maximum: str = VERSION_MAXIMUM,
blacklist: ty.Iterable[str] = VERSION_BLACKLIST) -> None:
"""Make sure that the given daemon version is supported by this client
version.
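For illustration, with VERSION_MAXIMUM bumped to 0.11.0 in this hunk, the check behaves as follows (version strings are examples only; the upper bound is exclusive):

from ipfshttpclient.client import assert_version
from ipfshttpclient.exceptions import VersionMismatch

assert_version("0.10.0")        # accepted: 0.5.0 <= 0.10.0 < 0.11.0
try:
    assert_version("0.11.0")    # rejected: the maximum is exclusive
except VersionMismatch as error:
    print(error)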

@@ -83,16 +83,16 @@ def assert_version(version: str, minimum: str = VERSION_MINIMUM,
def connect(
addr: http.addr_t = DEFAULT_ADDR,
base: str = DEFAULT_BASE, *,

chunk_size: int = multipart.default_chunk_size,
offline: bool = False,
session: bool = False,

auth: http.auth_t = None,
cookies: http.cookies_t = None,
headers: http.headers_t = {},
timeout: http.timeout_t = 120,

username: ty.Optional[str] = DEFAULT_USERNAME,
password: ty.Optional[str] = DEFAULT_PASSWORD
):
@@ -119,10 +119,10 @@ def connect(
auth=auth, cookies=cookies, headers=headers, timeout=timeout,
username=username, password=password,
)

# Query version number from daemon and validate it
assert_version(client.apply_workarounds()["Version"])

return client
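The keyword-only options shown in the connect() signature above can also be used with an explicit close() instead of a with-block; a short sketch, with the address and timeout values chosen purely for illustration:

import ipfshttpclient

client = ipfshttpclient.connect(
    "/ip4/127.0.0.1/tcp/5001/http",
    session=True,   # reuse one HTTP session until close() is called
    timeout=60,     # seconds; overrides the 120 s default above
)
try:
    print(client.version()["Version"])
finally:
    client.close()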


@@ -172,37 +172,39 @@ def __init__(self):
def close(self): # Call this when you're done
self._client.close()
"""

# Fix up docstring so that Sphinx doesn't ignore the constructors parameter list
__doc__ += "\n\n" + "\n".join(l[1:] for l in base.ClientBase.__init__.__doc__.split("\n"))
bitswap = base.SectionProperty(bitswap.Section)
block = base.SectionProperty(block.Section)

bitswap = base.SectionProperty(bitswap.Section)
block = base.SectionProperty(block.Section)
bootstrap = base.SectionProperty(bootstrap.Section)
config = base.SectionProperty(config.Section)
dag = base.SectionProperty(dag.Section)
dht = base.SectionProperty(dht.Section)
key = base.SectionProperty(key.Section)
name = base.SectionProperty(name.Section)
object = base.SectionProperty(object.Section)
pin = base.SectionProperty(pin.Section)
pubsub = base.SectionProperty(pubsub.Section)
repo = base.SectionProperty(repo.Section)
swarm = base.SectionProperty(swarm.Section)
unstable = base.SectionProperty(unstable.Section)


config = base.SectionProperty(config.Section)
dag = base.SectionProperty(dag.Section)
dht = base.SectionProperty(dht.Section)
key = base.SectionProperty(key.Section)
name = base.SectionProperty(name.Section)
object = base.SectionProperty(object.Section)
pin = base.SectionProperty(pin.Section)
pubsub = base.SectionProperty(pubsub.Section)
repo = base.SectionProperty(repo.Section)
swarm = base.SectionProperty(swarm.Section)
unstable = base.SectionProperty(unstable.Section)

######################
# SESSION MANAGEMENT #
######################

def __enter__(self):
self._client.open_session()
return self

def __exit__(self, exc_type, exc_value, traceback):
self.close()


def get_client(self):
return self._client

def close(self):
"""Close any currently open client session and free any associated
resources.
@@ -215,12 +217,11 @@ def close(self):
in the future. See the class's description for details.
"""
self._client.close_session()



###########
# HELPERS #
###########

def apply_workarounds(self):
"""Query version information of the referenced daemon and enable any
workarounds known for the corresponding version
@@ -231,13 +232,13 @@ def apply_workarounds(self):
The version information returned by the daemon
"""
version_info = self.version()

version = tuple(map(int, version_info["Version"].split('-', 1)[0].split('.')))

self._workarounds.clear()

return version_info

@utils.return_field('Hash')
@base.returns_single_item(dict)
def add_bytes(self, data: bytes, **kwargs):
@@ -262,7 +263,7 @@ def add_bytes(self, data: bytes, **kwargs):
"""
body, headers = multipart.stream_bytes(data, chunk_size=self.chunk_size)
return self._client.request('/add', decoder='json',
data=body, headers=headers, **kwargs)
data=body, headers=headers, **kwargs)
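Because of the return_field('Hash') decorator, add_bytes() above and add_str() below both return just the hash string; for example (payloads illustrative, client as in the connect() sketch):

cid_bytes = client.add_bytes(b"raw byte payload")
cid_text = client.add_str("plain text payload")
assert isinstance(cid_bytes, str) and isinstance(cid_text, str)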

@utils.return_field('Hash')
@base.returns_single_item(dict)
@@ -288,7 +289,7 @@ def add_str(self, string, **kwargs):
"""
body, headers = multipart.stream_text(string, chunk_size=self.chunk_size)
return self._client.request('/add', decoder='json',
data=body, headers=headers, **kwargs)
data=body, headers=headers, **kwargs)

def add_json(self, json_obj, **kwargs):
"""Adds a json-serializable Python dict as a json file to IPFS.
@@ -309,8 +310,7 @@ def add_json(self, json_obj, **kwargs):
Hash of the added IPFS object
"""
return self.add_bytes(encoding.Json().encode(json_obj), **kwargs)



@base.returns_single_item()
def get_json(self, cid, **kwargs):
"""Loads a json object from IPFS.
@@ -330,4 +330,4 @@ def get_json(self, cid, **kwargs):
object
Deserialized IPFS JSON object value
"""
return self.cat(cid, decoder='json', **kwargs)
return self.cat(cid, decoder='json', **kwargs)
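The two JSON helpers compose into a simple round trip; a sketch with illustrative data, again assuming an open client:

data = {"name": "example", "tags": ["a", "b"]}
cid = client.add_json(data)
assert client.get_json(cid) == data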
58 changes: 30 additions & 28 deletions ipfshttpclient/client/files.py
@@ -8,7 +8,7 @@

class Section(base.SectionBase):
"""Manage files in IPFS's virtual “Mutable File System” (MFS) file storage space"""

@base.returns_no_item
def cp(self, source: str, dest: str, **kwargs: base.CommonArgs):
"""Creates a copy of a file within the MFS
@@ -74,8 +74,8 @@ def ls(self, path: str, **kwargs: base.CommonArgs):
"""
args = (path,)
return self._client.request('/files/ls', args, decoder='json', **kwargs)
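For reference, the two MFS calls above can be exercised like this (the CID and destination path are placeholders, not real values):

client.files.cp("/ipfs/<some-cid>", "/copied-file.txt")  # "<some-cid>" is a placeholder
print(client.files.ls("/"))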


@base.returns_no_item
def mkdir(self, path: str, parents: bool = False, **kwargs: base.CommonArgs):
"""Creates a directory within the MFS
@@ -93,11 +93,11 @@ def mkdir(self, path: str, parents: bool = False, **kwargs: base.CommonArgs):
if the requested directory already exists
"""
kwargs.setdefault("opts", {})["parents"] = parents

args = (path,)
return self._client.request('/files/mkdir', args, **kwargs)


@base.returns_no_item
def mv(self, source: str, dest: str, **kwargs: base.CommonArgs):
"""Moves files and directories within the MFS
@@ -115,8 +115,8 @@ def mv(self, source: str, dest: str, **kwargs: base.CommonArgs):
"""
args = (source, dest)
return self._client.request('/files/mv', args, **kwargs)


def read(self, path: str, offset: int = 0, count: ty.Optional[int] = None,
**kwargs: base.CommonArgs):
"""Reads a file stored in the MFS
@@ -143,11 +143,11 @@ def read(self, path: str, offset: int = 0, count: ty.Optional[int] = None,
if count is not None:
opts["count"] = count
kwargs.setdefault("opts", {}).update(opts)

args = (path,)
return self._client.request('/files/read', args, **kwargs)


@base.returns_no_item
def rm(self, path: str, recursive: bool = False, **kwargs: base.CommonArgs):
"""Removes a file from the MFS
@@ -169,11 +169,11 @@ def rm(self, path: str, recursive: bool = False, **kwargs: base.CommonArgs):
Recursively remove directories?
"""
kwargs.setdefault("opts", {})["recursive"] = recursive

args = (path,)
return self._client.request('/files/rm', args, **kwargs)


@base.returns_single_item(base.ResponseBase)
def stat(self, path: str, **kwargs: base.CommonArgs):
"""Returns basic ``stat`` information for an MFS file (including its hash)
@@ -195,8 +195,8 @@ def stat(self, path: str, **kwargs: base.CommonArgs):
"""
args = (path,)
return self._client.request('/files/stat', args, decoder='json', **kwargs)


@base.returns_no_item
def write(self, path: str, file: utils.clean_file_t, offset: int = 0,
create: bool = False, truncate: bool = False,
@@ -226,16 +226,16 @@ def write(self, path: str, file: utils.clean_file_t, offset: int = 0,
if count is not None:
opts["count"] = count
kwargs.setdefault("opts", {}).update(opts)

args = (path,)
body, headers = multipart.stream_files(file, chunk_size=self.chunk_size)
return self._client.request('/files/write', args, data=body, headers=headers, **kwargs)
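Together with mkdir() above, write(), read() and stat() cover a small MFS workflow; a sketch with illustrative paths and contents:

import io

client.files.mkdir("/notes", parents=True)
client.files.write("/notes/todo.txt", io.BytesIO(b"example contents"), create=True)
print(client.files.read("/notes/todo.txt"))          # b"example contents"
print(client.files.stat("/notes/todo.txt")["Hash"])  # CID of the MFS entry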


class Base(base.ClientBase):
files = base.SectionProperty(Section)


def add(self, file: utils.clean_file_t, *files: utils.clean_file_t,
recursive: bool = False,
pattern: multipart.match_spec_t[ty.AnyStr] = None,
@@ -244,6 +244,7 @@ def add(self, file: utils.clean_file_t, *files: utils.clean_file_t,
wrap_with_directory: bool = False, chunker: ty.Optional[str] = None,
pin: bool = True, raw_leaves: bool = None, nocopy: bool = False,
cid_version: ty.Optional[int] = None,
hash_function: str = "sha2-256",
**kwargs: base.CommonArgs):
"""Adds a file, several files or directory of files to IPFS

@@ -350,17 +351,18 @@ def add(self, file: utils.clean_file_t, *files: utils.clean_file_t,
} # type: ty.Dict[str, ty.Union[str, bool]]
for option_name, option_value in [
("chunker", chunker),
("hash", hash_function),
("cid-version", cid_version),
]:
if option_value is not None:
opts[option_name] = option_value
kwargs.setdefault("opts", {}).update(opts)

# There may be other cases where nocopy will silently fail to work, but
# this is by far the most obvious one
if isinstance(file, int) and nocopy:
raise ValueError("Passing file descriptors is incompatible with *nocopy*")

assert not isinstance(file, (tuple, list)), \
"Use `client.add(name1, name2, …)` to add several items"
multiple = (len(files) > 0)
@@ -369,16 +371,16 @@ def add(self, file: utils.clean_file_t, *files: utils.clean_file_t,
to_send, chunk_size=self.chunk_size, follow_symlinks=follow_symlinks,
period_special=period_special, patterns=pattern, recursive=recursive
)

resp = self._client.request('/add', decoder='json', data=body, headers=headers, **kwargs)
if not multiple and not is_dir and not wrap_with_directory:
assert len(resp) == 1
return base.ResponseBase(resp[0])
elif kwargs.get("stream", False):
return base.ResponseWrapIterator(resp, base.ResponseBase)
return [base.ResponseBase(v) for v in resp]
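The return-shape logic above means a single plain file yields one response object while multiple files or a directory yield a list; roughly (paths illustrative):

one = client.add("example.txt")             # single ResponseBase with 'Name', 'Hash', 'Size'
many = client.add("a.txt", "b.txt")         # list of ResponseBase entries
tree = client.add("docs/", recursive=True)  # list; the wrapping directory entry is typically last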


@base.returns_no_item
def get(self, cid: base.cid_t, target: utils.path_t = ".",
**kwargs: base.CommonArgs) -> None:
@@ -395,8 +397,8 @@ def get(self, cid: base.cid_t, target: utils.path_t = ".",
"""
args = (str(cid),)
return self._client.download('/get', target, args, **kwargs)


def cat(self, cid: base.cid_t, offset: int = 0,
length: ty.Optional[int] = None, **kwargs: base.CommonArgs):
r"""Retrieves the contents of a file identified by hash
@@ -432,8 +434,8 @@ def cat(self, cid: base.cid_t, offset: int = 0,
opts['length'] = length
kwargs.setdefault('opts', opts)
return self._client.request('/cat', args, **kwargs)
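The offset and length options of cat() map directly onto partial reads; for example (the cid variable is illustrative):

header = client.cat(cid, offset=0, length=16)  # first 16 bytes of the object's data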


@base.returns_single_item(base.ResponseBase)
def ls(self, cid: base.cid_t, **kwargs: base.CommonArgs):
"""Returns a list of objects linked to by the given hash