Skip to content

Commit

Permalink
Merge branch 'release/0.2.7'
Browse files Browse the repository at this point in the history
  • Loading branch information
emfomy committed Aug 8, 2021
2 parents 910b3df + 0fb9451 commit 1e8031a
Show file tree
Hide file tree
Showing 7 changed files with 54 additions and 29 deletions.
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
venv
activate

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
Expand Down
1 change: 1 addition & 0 deletions DEVELOP.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
- make sure requirements.txt and test/requirements.txt match setup.py.

- >> make clean
- >> make format
- >> make lint
- >> make doc
- >> make tox
Expand Down
2 changes: 1 addition & 1 deletion ckip_transformers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
__copyright__ = "2020 CKIP Lab"

__title__ = "CKIP Transformers"
__version__ = "0.2.6"
__version__ = "0.2.7"
__description__ = "CKIP Transformers"
__license__ = "GPL-3.0"

Expand Down
13 changes: 11 additions & 2 deletions ckip_transformers/nlp/driver.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,10 +75,13 @@ def __call__(
must not be longer than the maximum sequence length for this model (i.e. ``tokenizer.model_max_length``).
show_progress : ``bool``, *optional*, defaults to True
Show progress bar.
pin_memory : ``bool``, *optional*, defaults to True
Pin memory in order to accelerate the speed of data transfer to the GPU. This option is
incompatible with multiprocessing.
Returns
-------
``List[List[NerToken]]``
``List[List[str]]``
A list of list of words (``str``).
"""

Expand Down Expand Up @@ -171,10 +174,13 @@ def __call__(
must not be longer than the maximum sequence length for this model (i.e. ``tokenizer.model_max_length``).
show_progress : ``bool``, *optional*, defaults to True
Show progress bar.
pin_memory : ``bool``, *optional*, defaults to True
Pin memory in order to accelerate the speed of data transfer to the GPU. This option is
incompatible with multiprocessing.
Returns
-------
``List[List[NerToken]]``
``List[List[str]]``
A list of list of POS tags (``str``).
"""

Expand Down Expand Up @@ -257,6 +263,9 @@ def __call__(
must not be longer than the maximum sequence length for this model (i.e. ``tokenizer.model_max_length``).
show_progress : ``bool``, *optional*, defaults to True
Show progress bar.
pin_memory : ``bool``, *optional*, defaults to True
Pin memory in order to accelerate the speed of data transfer to the GPU. This option is
incompatible with multiprocessing.
Returns
-------
Expand Down
8 changes: 6 additions & 2 deletions ckip_transformers/nlp/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,7 @@ def __call__(
batch_size: int = 256,
max_length: Optional[int] = None,
show_progress: bool = True,
pin_memory: bool = True,
):
"""Call the driver.
Expand All @@ -114,8 +115,11 @@ def __call__(
max_length : ``int``, *optional*
The maximum length of the sentence,
must not be longer than the maximum sequence length for this model (i.e. ``tokenizer.model_max_length``).
show_progress : ``int``, *optional*, defaults to True
show_progress : ``bool``, *optional*, defaults to True
Show progress bar.
pin_memory : ``bool``, *optional*, defaults to True
Pin memory in order to accelerate the speed of data transfer to the GPU. This option is
incompatible with multiprocessing.
"""

model_max_length = self.tokenizer.model_max_length - 2 # Add [CLS] and [SEP]
Expand Down Expand Up @@ -170,7 +174,7 @@ def __call__(
batch_size=batch_size,
shuffle=False,
drop_last=False,
pin_memory=True,
pin_memory=pin_memory,
)
if show_progress:
dataloader = tqdm(dataloader, desc="Inference")
Expand Down
1 change: 1 addition & 0 deletions docs/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
_api
55 changes: 31 additions & 24 deletions setup.py
Original file line number Diff line number Diff line change
@@ -1,56 +1,63 @@
#!/usr/bin/env python3
# -*- coding:utf-8 -*-

__author__ = 'Mu Yang <http://muyang.pro>'
__copyright__ = '2020 CKIP Lab'
__license__ = 'GPL-3.0'
__author__ = "Mu Yang <http://muyang.pro>"
__copyright__ = "2020 CKIP Lab"
__license__ = "GPL-3.0"

from setuptools import setup, find_namespace_packages
import ckip_transformers as about

################################################################################


def main():

with open('README.rst', encoding='utf-8') as fin:
with open("README.rst", encoding="utf-8") as fin:
readme = fin.read()

setup(
name='ckip-transformers',
name="ckip-transformers",
version=about.__version__,
author=about.__author_name__,
author_email=about.__author_email__,
description=about.__description__,
long_description=readme,
long_description_content_type='text/x-rst',
long_description_content_type="text/x-rst",
url=about.__url__,
download_url=about.__download_url__,
platforms=['linux_x86_64'],
platforms=["linux_x86_64"],
license=about.__license__,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3 :: Only',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: POSIX :: Linux',
'Natural Language :: Chinese (Traditional)',
"Development Status :: 4 - Beta",
"Environment :: Console",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3 :: Only",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Operating System :: POSIX :: Linux",
"Natural Language :: Chinese (Traditional)",
],
python_requires='>=3.6',
packages=find_namespace_packages(include=['ckip_transformers', 'ckip_transformers.*',]),
python_requires=">=3.6",
packages=find_namespace_packages(
include=[
"ckip_transformers",
"ckip_transformers.*",
]
),
install_requires=[
'torch>=1.5.0',
'tqdm>=4.27',
'transformers>=3.5.0',
"torch>=1.5.0",
"tqdm>=4.27",
"transformers>=3.5.0",
],
data_files=[],
)


################################################################################

if __name__ == '__main__':
if __name__ == "__main__":
main()

0 comments on commit 1e8031a

Please sign in to comment.