Merge pull request #70 from modelscope/merge_master_internal_0113

Merge master internal 0113
Authored by Yingda Chen; committed via GitHub on 2023-01-13 18:40:36 -08:00.
882 changed files with 77969 additions and 9484 deletions

.gitattributes (1 change)

@@ -7,3 +7,4 @@
*.pickle filter=lfs diff=lfs merge=lfs -text
*.avi filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text


@@ -3,17 +3,31 @@ repos:
rev: 4.0.0
hooks:
- id: flake8
exclude: thirdparty/|examples/
exclude: |
(?x)^(
thirdparty/|
examples/|
modelscope/utils/ast_index_file.py
)$
- repo: https://github.com/PyCQA/isort.git
rev: 4.3.21
hooks:
- id: isort
exclude: examples
exclude: |
(?x)^(
examples/|
modelscope/utils/ast_index_file.py
)$
- repo: https://github.com/pre-commit/mirrors-yapf.git
rev: v0.30.0
hooks:
- id: yapf
exclude: thirdparty/|examples/
exclude: |
(?x)^(
thirdparty/|
examples/|
modelscope/utils/ast_index_file.py
)$
- repo: https://github.com/pre-commit/pre-commit-hooks.git
rev: v3.1.0
hooks:


@@ -3,17 +3,31 @@ repos:
rev: 4.0.0
hooks:
- id: flake8
exclude: thirdparty/|examples/
exclude: |
(?x)^(
thirdparty/|
examples/|
modelscope/utils/ast_index_file.py
)$
- repo: /home/admin/pre-commit/isort
rev: 4.3.21
hooks:
- id: isort
exclude: examples
exclude: |
(?x)^(
examples/|
modelscope/utils/ast_index_file.py
)$
- repo: /home/admin/pre-commit/mirrors-yapf
rev: v0.30.0
hooks:
- id: yapf
exclude: thirdparty/|examples/
exclude: |
(?x)^(
thirdparty/|
examples/|
modelscope/utils/ast_index_file.py
)$
- repo: /home/admin/pre-commit/pre-commit-hooks
rev: v3.1.0
hooks:
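For readers unfamiliar with pre-commit's `exclude` field: each block scalar above is a single Python regular expression, and the leading `(?x)` flag turns on verbose mode, so the newlines and indentation inside the block are ignored when the pattern is compiled. A minimal sketch of how such a pattern behaves (assuming pre-commit's usual `re.search`-style matching; the pattern is copied from the hooks above):

```python
import re

# Verbose-mode pattern, exactly as written in the hook configs above.
pattern = r"""(?x)^(
    thirdparty/|
    examples/|
    modelscope/utils/ast_index_file.py
)$"""

# The generated AST index file is now skipped by flake8/isort/yapf.
print(bool(re.search(pattern, 'modelscope/utils/ast_index_file.py')))  # True
print(bool(re.search(pattern, 'modelscope/pipelines/base.py')))        # False
```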


@@ -1 +1 @@
recursive-include modelscope/configs *.py
recursive-include modelscope/configs *.py *.cu *.h *.cpp


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:34c2f1867f7882614b7087f2fd2acb722d0f520a2ec50b2d116d5b3f0c05f84b
size 141134
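The three-line files added above and below are Git LFS pointer stubs: because of `.gitattributes` rules like the `*.npy` line added earlier, git commits a small text pointer (spec version, SHA-256 of the real content, byte size) instead of the binary test asset itself. A minimal sketch, not the actual git-lfs implementation, of the check LFS effectively performs when resolving such a pointer:

```python
import hashlib

def matches_pointer(data: bytes, oid_hex: str, size: int) -> bool:
    # A pointer resolves to `data` only if both the recorded byte size
    # and the sha256 digest agree with the actual content.
    return len(data) == size and hashlib.sha256(data).hexdigest() == oid_hex
```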


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:437b1064a0e38219a9043e25e4761c9f1161c0431636dcea159b44524e0f34eb
size 141134


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b1eb51be6751b35aa521866ef0cd1caa64e39451cd7f4b22dee5c1cb7e3e43d5
size 141134


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:da5cf2f3318e61cd38193af374b21a2dec0e90f2aa0e25b3b1825488eadbdc9d
size 97191


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:63cafb24e856c58dd01797333e1e2b895815bc48836cb2dfce937ad10222600b
size 31191


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:71cf8e7d24ab067920473a4ce0b5e440c12bb0dd9c2ef6373e2474c796678e2e
size 48850


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9cbd55923de4bbe90f5d098f607f2cd966db45892be016198609cafe268fde49
size 46551


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:72e729ff6b0c0cd95091d9b9df2a50536ea3175ea26427a996090a3f7cc188a2
size 22792


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5e11e3558040246fc6d84bf87afdb016228172893f475f843dedbdcda5092a3d
size 181713


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e82e688d2eb2755ceb0b0051d7129f6e94e6e5fe57f68727e41cd0c1e909b89c
size 11143

File diff suppressed because one or more lines are too long


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:feadc69a8190787088fda0ac12971d91badc93dbe06057645050fdbec1ce6911
size 204232


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:05ad1e66d7fee2f9e11766160522ad823f1fcc0ab8a5740a6c89b1765228ea32
size 334048


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8ed3a68939b922bc2362b1d8051c24d2ca03be6a431fcc7c423e157012debd5a
size 424584


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3dca477e8a0e25bccb4966ddaebad75d7c770deb1c5e55b9b5e9f39078ea84c2
size 168454


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0dbbcaa0bb6b2c64b1c360f03913b7ab5386a846cc81c34825c115c41c4d672a
size 23345


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3f0bdad67d01aa452929683b74a124a2926b6bce534c85f3ee0f00e20eeacab0
size 78771


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4cd49527b050b704355ea422f3cb927cf77f9537f2e1e2eae533becb06a7dc45
size 358204


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3f24570355f178d2a8226112d1443d735837e59573545cfff12458dd791ae341
size 308158


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0df5f2de59df6b55d8ee5d414cc2f98d714e14b518c159d4085ad2ac65d36627
size 137606


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:eb0fa302f94560ac8b04057c85a632036e4dc6c6a201ead4c59eb439831b55e9
size 109305


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9fa9f5c8a49d457a7b6f4239e438699e60541e7602e8b3b66da9f7b6d55096ab
size 1735856


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:86618feded6ae9fbcc772b9a7da17bad7d8b9c68ae0d505a239d110a3a0a7bf4
size 1735856


@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:dc16ad72e753f751360dab82878ec0a31190fb5125632d8f4698f6537fae79cb
size 40819
oid sha256:e168377ec5ca88452ae9a782674bb0c90f666597a9f198fadbc8ec4ce55776a0
size 40633


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:753728b02574958ac9018b235609b87fc99ee23d2dbbe579b98a9b12d7443cc4
size 118048


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ba58d77303a90ca0b971c9312928182c5f779465a0b12661be8b7c88bf2ff015
size 44817

data/test/videos/000.mp4 (new file)

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f90f00a210e3a0b24df80439bf6b956c83b2b841eba83f534a7c58d38a49d72c
size 1009531

data/test/videos/047.mp4 (new file)

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:11504fef9f9bf4ed281ed30ee3aa24f3c155231c235eb61d57bb9fb8287b5699
size 3448945


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ca26615762e3f4ccca53a020efe73c3cf3598edc68bb68b5555c24e815718336
size 3151767


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7c822c66fcf04de28016b224ef372cb1c93b7f13f2cba4e11f53a37fec8e769e
size 828272


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:31a504f9527622dc91322dc66acafc5b673afdddf59afd513dc435d93e4e6ca2
size 7202711


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e97ff88d0af12f7dd3ef04ce50b87b51ffbb9a57dce81d2d518df4abd2fdb826
size 3231793


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:bfd5ada9f0dae56c826623cb73295a04358a5538effeb7f54134bfd0a4322f00
size 3700682


@@ -9,9 +9,10 @@ SHELL ["/bin/bash", "-c"]
COPY docker/rcfiles /tmp/resources
COPY docker/jupyter_plugins /tmp/resources/jupyter_plugins
RUN apt-get update && apt-get install -y --reinstall ca-certificates && \
apt-get clean && \
cp /tmp/resources/ubuntu20.04_sources.tuna /etc/apt/sources.list && \
apt-get update && \
apt-get install -y locales wget git vim ffmpeg libsm6 tzdata language-pack-zh-hans ttf-wqy-microhei ttf-wqy-zenhei xfonts-wqy libxext6 build-essential ninja-build && \
apt-get install -y locales wget git strace gdb vim ffmpeg libsm6 tzdata language-pack-zh-hans ttf-wqy-microhei ttf-wqy-zenhei xfonts-wqy libxext6 build-essential ninja-build && \
wget https://packagecloud.io/github/git-lfs/packages/debian/bullseye/git-lfs_3.2.0_amd64.deb/download -O ./git-lfs_3.2.0_amd64.deb && \
dpkg -i ./git-lfs_3.2.0_amd64.deb && \
rm -f ./git-lfs_3.2.0_amd64.deb && \
@@ -72,6 +73,7 @@ RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir -r /var/modelscope/multi-modal.txt -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html && \
pip install --no-cache-dir -r /var/modelscope/nlp.txt -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html && \
pip install --no-cache-dir -r /var/modelscope/science.txt -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html && \
pip install --no-cache-dir -r /var/modelscope/tests.txt -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html && \
pip cache purge
# default shell bash
@@ -99,10 +101,17 @@ RUN if [ "$USE_GPU" = "True" ] ; then \
echo 'cpu unsupport uniford'; \
fi
RUN pip install --no-cache-dir mmcls>=0.21.0 mmdet>=2.25.0 decord>=0.6.0 datasets==2.1.0 numpy==1.18.5 ipykernel fairseq fasttext deepspeed
RUN pip install --no-cache-dir mmcls>=0.21.0 mmdet>=2.25.0 decord>=0.6.0 numpy==1.18.5 https://pypi.tuna.tsinghua.edu.cn/packages/70/ad/06f8a06cef819606cb1a521bcc144288daee5c7e73c5d722492866cb1b92/wenetruntime-1.11.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl ipykernel fairseq fasttext deepspeed
COPY docker/scripts/install_apex.sh /tmp/install_apex.sh
RUN if [ "$USE_GPU" = "True" ] ; then \
bash /tmp/install_apex.sh; \
else \
echo 'cpu unsupport uniford'; \
echo 'cpu unsupport apex'; \
fi
RUN apt-get update && apt-get install -y sox && \
apt-get clean
RUN if [ "$USE_GPU" = "True" ] ; then \
pip install --no-cache-dir git+https://github.com/gxd1994/Pointnet2.PyTorch.git@master#subdirectory=pointnet2; \
else \
echo 'cpu unsupport Pointnet2'; \
fi


@@ -1,3 +1,4 @@
export MAX_JOBS=16
git clone https://github.com/NVIDIA/apex
cd apex
TORCH_CUDA_ARCH_LIST="6.0;6.1;6.2;7.0;7.5;8.0;8.6" pip install -v --disable-pip-version-check --no-cache-dir --global-option="--cpp_ext" --global-option="--cuda_ext" ./


@@ -7,7 +7,7 @@ REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set SOURCEDIR=source
set BUILDDIR=build
if "%1" == "" goto help


@@ -0,0 +1,10 @@
.. currentmodule:: {{ module }}
{{ name | underline}}
.. autoclass:: {{ name }}
:inherited-members:
:members:
.. autogenerated from source/_templates/autosummary/class.rst


@@ -0,0 +1,12 @@
.. currentmodule:: {{ module }}
{{ name | underline}}
.. autoclass:: {{ name }}
:members:
..
autogenerated from source/_templates/classtemplate.rst
note it does not have :inherited-members:


@@ -0,0 +1,14 @@
.. currentmodule:: {{ module }}
{{ name | underline}}
.. autoclass:: {{ name }}
:members:
:exclude-members: MAXBIT, MAXDIM
:undoc-members:
..
autogenerated from source/_templates/sobolengine.rst
note it has specific options


@@ -1,34 +0,0 @@
modelscope.fileio.format package
================================
.. automodule:: modelscope.fileio.format
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
modelscope.fileio.format.base module
------------------------------------
.. automodule:: modelscope.fileio.format.base
:members:
:undoc-members:
:show-inheritance:
modelscope.fileio.format.json module
------------------------------------
.. automodule:: modelscope.fileio.format.json
:members:
:undoc-members:
:show-inheritance:
modelscope.fileio.format.yaml module
------------------------------------
.. automodule:: modelscope.fileio.format.yaml
:members:
:undoc-members:
:show-inheritance:


@@ -1,34 +0,0 @@
modelscope.fileio package
=========================
.. automodule:: modelscope.fileio
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. toctree::
:maxdepth: 4
modelscope.fileio.format
Submodules
----------
modelscope.fileio.file module
-----------------------------
.. automodule:: modelscope.fileio.file
:members:
:undoc-members:
:show-inheritance:
modelscope.fileio.io module
---------------------------
.. automodule:: modelscope.fileio.io
:members:
:undoc-members:
:show-inheritance:


@@ -1,50 +1,17 @@
modelscope.hub package
=========================
modelscope.hub
==============
.. automodule:: modelscope.hub
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. currentmodule:: modelscope.hub
.. toctree::
:maxdepth: 4
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
modelscope.hub.utils
Submodules
----------
modelscope.hub.api module
-----------------------------
.. automodule:: modelscope.hub.api
:members:
:undoc-members:
:show-inheritance:
modelscope.hub.git module
---------------------------
.. automodule:: modelscope.hub.git
:members:
:undoc-members:
:show-inheritance:
modelscope.hub.file_download module
---------------------------
.. automodule:: modelscope.hub.file_download
:members:
:undoc-members:
:show-inheritance:
modelscope.hub.snapshot_download module
---------------------------
.. automodule:: modelscope.hub.snapshot_download
:members:
:undoc-members:
:show-inheritance:
api.HubApi
repository.Repository
deploy.ServiceDeployer
snapshot_download.snapshot_download
file_download.model_file_download


@@ -1,26 +0,0 @@
modelscope.hub.utils package
===============================
.. automodule:: modelscope.hub.utils
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
modelscope.hub.utils.caching module
-------------------------------------------------------
.. automodule:: modelscope.hub.utils.caching
:members:
:undoc-members:
:show-inheritance:
modelscope.hub.utils.utils module
---------------------------------
.. automodule:: modelscope.hub.utils.utils
:members:
:undoc-members:
:show-inheritance:


@@ -0,0 +1,17 @@
modelscope.models.base
======================
.. automodule:: modelscope.models.base
.. currentmodule:: modelscope.models.base
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
Model
TorchModel
Head
TorchHead


@@ -0,0 +1,16 @@
modelscope.models.builder
=========================
.. automodule:: modelscope.models.builder
.. currentmodule:: modelscope.models.builder
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
build_model
build_backbone
build_head


@@ -1,18 +0,0 @@
modelscope.models.cv.cartoon.facelib.LK package
===============================================
.. automodule:: modelscope.models.cv.cartoon.facelib.LK
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
modelscope.models.cv.cartoon.facelib.LK.lk module
-------------------------------------------------
.. automodule:: modelscope.models.cv.cartoon.facelib.LK.lk
:members:
:undoc-members:
:show-inheritance:


@@ -1,50 +0,0 @@
modelscope.models.cv.cartoon.facelib package
============================================
.. automodule:: modelscope.models.cv.cartoon.facelib
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. toctree::
:maxdepth: 4
modelscope.models.cv.cartoon.facelib.LK
Submodules
----------
modelscope.models.cv.cartoon.facelib.config module
--------------------------------------------------
.. automodule:: modelscope.models.cv.cartoon.facelib.config
:members:
:undoc-members:
:show-inheritance:
modelscope.models.cv.cartoon.facelib.face\_detector module
----------------------------------------------------------
.. automodule:: modelscope.models.cv.cartoon.facelib.face_detector
:members:
:undoc-members:
:show-inheritance:
modelscope.models.cv.cartoon.facelib.face\_landmark module
----------------------------------------------------------
.. automodule:: modelscope.models.cv.cartoon.facelib.face_landmark
:members:
:undoc-members:
:show-inheritance:
modelscope.models.cv.cartoon.facelib.facer module
-------------------------------------------------
.. automodule:: modelscope.models.cv.cartoon.facelib.facer
:members:
:undoc-members:
:show-inheritance:


@@ -1,15 +0,0 @@
modelscope.models.cv.cartoon.mtcnn\_pytorch package
===================================================
.. automodule:: modelscope.models.cv.cartoon.mtcnn_pytorch
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. toctree::
:maxdepth: 4
modelscope.models.cv.cartoon.mtcnn_pytorch.src


@@ -1,26 +0,0 @@
modelscope.models.cv.cartoon.mtcnn\_pytorch.src package
=======================================================
.. automodule:: modelscope.models.cv.cartoon.mtcnn_pytorch.src
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
modelscope.models.cv.cartoon.mtcnn\_pytorch.src.align\_trans module
-------------------------------------------------------------------
.. automodule:: modelscope.models.cv.cartoon.mtcnn_pytorch.src.align_trans
:members:
:undoc-members:
:show-inheritance:
modelscope.models.cv.cartoon.mtcnn\_pytorch.src.matlab\_cp2tform module
-----------------------------------------------------------------------
.. automodule:: modelscope.models.cv.cartoon.mtcnn_pytorch.src.matlab_cp2tform
:members:
:undoc-members:
:show-inheritance:


@@ -1,27 +0,0 @@
modelscope.models.cv.cartoon package
====================================
.. automodule:: modelscope.models.cv.cartoon
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. toctree::
:maxdepth: 4
modelscope.models.cv.cartoon.facelib
modelscope.models.cv.cartoon.mtcnn_pytorch
Submodules
----------
modelscope.models.cv.cartoon.utils module
-----------------------------------------
.. automodule:: modelscope.models.cv.cartoon.utils
:members:
:undoc-members:
:show-inheritance:


@@ -1,15 +1,14 @@
modelscope.models.cv package
============================
modelscope.models.cv
====================
.. automodule:: modelscope.models.cv
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. currentmodule:: modelscope.models.cv
.. toctree::
:maxdepth: 4
modelscope.models.cv.cartoon
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
easycv_base.EasyCVBaseModel


@@ -1,90 +0,0 @@
modelscope.models.nlp package
=============================
.. automodule:: modelscope.models.nlp
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
modelscope.models.nlp.bert\_for\_sequence\_classification module
------------------------------------------------------------
.. automodule:: modelscope.models.nlp.bert_for_sequence_classification
:members:
:undoc-members:
:show-inheritance:
modelscope.models.nlp.palm\_for\_text\_generation module
----------------------------------------------------
.. automodule:: modelscope.models.nlp.palm_for_text_generation
:members:
:undoc-members:
:show-inheritance:
modelscope.models.nlp.csanmt\_for\_translation module
----------------------------------------------------
.. automodule:: modelscope.models.nlp.csanmt_for_translation
:members:
:undoc-members:
:show-inheritance:
modelscope.models.nlp.masked\_language module
----------------------------------------------------
.. automodule:: modelscope.models.nlp.masked_language
:members:
:undoc-members:
:show-inheritance:
modelscope.models.nlp.sbert\_for\_nil module
----------------------------------------------------
.. automodule:: modelscope.models.nlp.sbert_for_nil
:members:
:undoc-members:
:show-inheritance:
modelscope.models.nlp.sbert\_for\_sentence\_similarity module
----------------------------------------------------
.. automodule:: modelscope.models.nlp.sbert_for_sentence_similarity
:members:
:undoc-members:
:show-inheritance:
modelscope.models.nlp.sbert\_for\_sentiment\_classification module
----------------------------------------------------
.. automodule:: modelscope.models.nlp.sbert_for_sentiment_classification
:members:
:undoc-members:
:show-inheritance:
modelscope.models.nlp.sbert\_for\_sequence\_classification module
----------------------------------------------------
.. automodule:: modelscope.models.nlp.sbert_for_sequence_classification
:members:
:undoc-members:
:show-inheritance:
modelscope.models.nlp.sbert\_for\_token\_classification module
----------------------------------------------------
.. automodule:: modelscope.models.nlp.sbert_for_token_classification
:members:
:undoc-members:
:show-inheritance:
modelscope.models.nlp.sbert\_for\_zero\_shot\_classification module
----------------------------------------------------
.. automodule:: modelscope.models.nlp.sbert_for_zero_shot_classification
:members:
:undoc-members:
:show-inheritance:


@@ -1,37 +1,14 @@
modelscope.models package
=========================
modelscope.models
=================
.. automodule:: modelscope.models
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. currentmodule:: modelscope.models
.. toctree::
:maxdepth: 4
:maxdepth: 2
:caption: Model Api
modelscope.models.cv
modelscope.models.nlp
modelscope.models.multi_modal
modelscope.models.audio
Submodules
----------
modelscope.models.base module
-----------------------------
.. automodule:: modelscope.models.base
:members:
:undoc-members:
:show-inheritance:
modelscope.models.builder module
--------------------------------
.. automodule:: modelscope.models.builder
:members:
:undoc-members:
:show-inheritance:
bases <modelscope.models.base>
builders <modelscope.models.builder>
cv <modelscope.models.cv>


@@ -0,0 +1,14 @@
modelscope.msdatasets.cv
================================
.. automodule:: modelscope.msdatasets.cv
.. currentmodule:: modelscope.msdatasets.cv
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
easycv_base.EasyCVBaseDataset
image_classification.ClsDataset


@@ -0,0 +1,14 @@
modelscope.msdatasets.ms_dataset
================================
.. automodule:: modelscope.msdatasets.ms_dataset
.. currentmodule:: modelscope.msdatasets.ms_dataset
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
MsMapDataset
MsDataset


@@ -1,18 +1,13 @@
modelscope.msdatasets package
=============================
modelscope.msdatasets
=====================
.. automodule:: modelscope.msdatasets
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
.. currentmodule:: modelscope.msdatasets
modelscope.msdatasets.ms\_dataset module
----------------------------------------
.. toctree::
:maxdepth: 2
:caption: Dataset Api
.. automodule:: modelscope.msdatasets.ms_dataset
:members:
:undoc-members:
:show-inheritance:
dataset <modelscope.msdatasets.ms_dataset>
cv <modelscope.msdatasets.cv>


@@ -1,7 +0,0 @@
modelscope.pipelines.audio package
==================================
.. automodule:: modelscope.pipelines.audio
:members:
:undoc-members:
:show-inheritance:


@@ -0,0 +1,14 @@
modelscope.pipelines.base
=========================
.. automodule:: modelscope.pipelines.base
.. currentmodule:: modelscope.pipelines.base
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
Pipeline
DistributedPipeline


@@ -0,0 +1,15 @@
modelscope.pipelines.builder
============================
.. automodule:: modelscope.pipelines.builder
.. currentmodule:: modelscope.pipelines.builder
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
build_pipeline
pipeline


@@ -1,26 +1,14 @@
modelscope.pipelines.cv package
===============================
modelscope.pipelines.cv
=======================
.. automodule:: modelscope.pipelines.cv
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
.. currentmodule:: modelscope.pipelines.cv
modelscope.pipelines.cv.image\_cartoon\_pipeline module
-------------------------------------------------------
.. automodule:: modelscope.pipelines.cv.image_cartoon_pipeline
:members:
:undoc-members:
:show-inheritance:
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
modelscope.pipelines.cv.image\_matting\_pipeline module
-------------------------------------------------------
.. automodule:: modelscope.pipelines.cv.image_matting_pipeline
:members:
:undoc-members:
:show-inheritance:
ActionRecognitionPipeline


@@ -1,42 +0,0 @@
modelscope.pipelines.multi\_modal package
=========================================
.. automodule:: modelscope.pipelines.multi_modal
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
modelscope.pipelines.multi\_modal.image\_captioning\_pipeline module
----------------------------------------------------------
.. automodule:: modelscope.pipelines.multi_modal.image_captioning_pipeline
:members:
:undoc-members:
:show-inheritance:
modelscope.pipelines.multi\_modal.multi\_modal\_embedding\_pipeline module
----------------------------------------------------------
.. automodule:: modelscope.pipelines.multi_modal.multi_modal_embedding_pipeline
:members:
:undoc-members:
:show-inheritance:
modelscope.pipelines.multi\_modal.text\_to\_image\_synthesis\_pipeline module
----------------------------------------------------------
.. automodule:: modelscope.pipelines.multi_modal.text_to_image_synthesis_pipeline
:members:
:undoc-members:
:show-inheritance:
modelscope.pipelines.multi\_modal.visual\_question\_answering\_pipeline module
----------------------------------------------------------
.. automodule:: modelscope.pipelines.multi_modal.visual_question_answering_pipeline
:members:
:undoc-members:
:show-inheritance:


@@ -1,26 +0,0 @@
modelscope.pipelines.nlp package
================================
.. automodule:: modelscope.pipelines.nlp
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
modelscope.pipelines.nlp.sequence\_classification\_pipeline module
------------------------------------------------------------------
.. automodule:: modelscope.pipelines.nlp.sequence_classification_pipeline
:members:
:undoc-members:
:show-inheritance:
modelscope.pipelines.nlp.text\_generation\_pipeline module
----------------------------------------------------------
.. automodule:: modelscope.pipelines.nlp.text_generation_pipeline
:members:
:undoc-members:
:show-inheritance:


@@ -1,53 +1,14 @@
modelscope.pipelines package
============================
modelscope.pipelines
====================
.. automodule:: modelscope.pipelines
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. currentmodule:: modelscope.pipelines
.. toctree::
:maxdepth: 4
:maxdepth: 2
:caption: Pipeline Api
modelscope.pipelines.cv
modelscope.pipelines.nlp
modelscope.pipelines.multi_modal
modelscope.pipelines.audio
Submodules
----------
modelscope.pipelines.builder module
-----------------------------------
.. automodule:: modelscope.pipelines.builder
:members:
:undoc-members:
:show-inheritance:
modelscope.pipelines.base module
-----------------------------------
.. automodule:: modelscope.pipelines.base
:members:
:undoc-members:
:show-inheritance:
modelscope.outputs module
-----------------------------------
.. automodule:: modelscope.outputs
:members:
:undoc-members:
:show-inheritance:
modelscope.pipelines.util module
--------------------------------
.. automodule:: modelscope.pipelines.util
:members:
:undoc-members:
:show-inheritance:
base <modelscope.pipelines.base>
builder <modelscope.pipelines.builder>
cv <modelscope.pipelines.cv>


@@ -0,0 +1,14 @@
modelscope.preprocessors.base
=============================
.. automodule:: modelscope.preprocessors.base
.. currentmodule:: modelscope.preprocessors.base
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
Preprocessor


@@ -0,0 +1,14 @@
modelscope.preprocessors.builder
================================
.. automodule:: modelscope.preprocessors.builder
.. currentmodule:: modelscope.preprocessors.builder
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
build_preprocessor


@@ -1,50 +1,14 @@
modelscope.preprocessors package
================================
modelscope.preprocessors
========================
.. automodule:: modelscope.preprocessors
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
.. currentmodule:: modelscope.preprocessors
modelscope.preprocessors.base module
------------------------------------
.. toctree::
:maxdepth: 2
:caption: Preprocessor Api
.. automodule:: modelscope.preprocessors.base
:members:
:undoc-members:
:show-inheritance:
modelscope.preprocessors.builder module
---------------------------------------
.. automodule:: modelscope.preprocessors.builder
:members:
:undoc-members:
:show-inheritance:
modelscope.preprocessors.common module
--------------------------------------
.. automodule:: modelscope.preprocessors.common
:members:
:undoc-members:
:show-inheritance:
modelscope.preprocessors.image module
-------------------------------------
.. automodule:: modelscope.preprocessors.image
:members:
:undoc-members:
:show-inheritance:
modelscope.preprocessors.nlp module
-----------------------------------
.. automodule:: modelscope.preprocessors.nlp
:members:
:undoc-members:
:show-inheritance:
base <modelscope.preprocessors.base>
builders <modelscope.preprocessors.builder>
video <modelscope.preprocessors.video>


@@ -0,0 +1,20 @@
modelscope.preprocessors.video
==============================
.. automodule:: modelscope.preprocessors.video
.. currentmodule:: modelscope.preprocessors.video
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
ReadVideoData
kinetics400_tranform
_interval_based_sampling
_decode_video_frames_list
_decode_video
KineticsResizedCrop
MovieSceneSegmentationPreprocessor


@@ -1,33 +0,0 @@
modelscope package
==================
.. automodule:: modelscope
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. toctree::
:maxdepth: 4
modelscope.fileio
modelscope.models
modelscope.pipelines
modelscope.preprocessors
modelscope.msdatasets
modelscope.trainers
modelscope.utils
modelscope.hub
Submodules
----------
modelscope.version module
-------------------------
.. automodule:: modelscope.version
:members:
:undoc-members:
:show-inheritance:


@@ -0,0 +1,14 @@
modelscope.trainers.base
========================
.. automodule:: modelscope.trainers.base
.. currentmodule:: modelscope.trainers.base
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
BaseTrainer
DummyTrainer


@@ -0,0 +1,14 @@
modelscope.trainers.builder
===========================
.. automodule:: modelscope.trainers.builder
.. currentmodule:: modelscope.trainers.builder
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
build_trainer


@@ -0,0 +1,14 @@
modelscope.trainers.cv
=======================
.. automodule:: modelscope.trainers.cv
.. currentmodule:: modelscope.trainers.cv
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
ImagePortraitEnhancementTrainer


@@ -1,18 +0,0 @@
modelscope.trainers.nlp package
===============================
.. automodule:: modelscope.trainers.nlp
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
modelscope.trainers.nlp.sequence\_classification\_trainer module
----------------------------------------------------------------
.. automodule:: modelscope.trainers.nlp.sequence_classification_trainer
:members:
:undoc-members:
:show-inheritance:


@@ -1,34 +1,15 @@
modelscope.trainers package
===========================
modelscope.trainers
===================
.. automodule:: modelscope.trainers
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. currentmodule:: modelscope.trainers
.. toctree::
:maxdepth: 4
:maxdepth: 2
:caption: Trainer Api
modelscope.trainers.nlp
Submodules
----------
modelscope.trainers.base module
-------------------------------
.. automodule:: modelscope.trainers.base
:members:
:undoc-members:
:show-inheritance:
modelscope.trainers.builder module
----------------------------------
.. automodule:: modelscope.trainers.builder
:members:
:undoc-members:
:show-inheritance:
base <modelscope.trainers.base>
builder <modelscope.trainers.builder>
EpochBasedTrainer <modelscope.trainers.trainer>
cv <modelscope.trainers.cv>


@@ -0,0 +1,13 @@
modelscope.trainers.trainer
===========================
.. automodule:: modelscope.trainers.trainer
.. currentmodule:: modelscope.trainers.trainer
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
EpochBasedTrainer


@@ -1,58 +0,0 @@
modelscope.utils package
========================
.. automodule:: modelscope.utils
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
modelscope.utils.config module
------------------------------
.. automodule:: modelscope.utils.config
:members:
:undoc-members:
:show-inheritance:
modelscope.utils.constant module
--------------------------------
.. automodule:: modelscope.utils.constant
:members:
:undoc-members:
:show-inheritance:
modelscope.utils.hub module
---------------------------
.. automodule:: modelscope.utils.hub
:members:
:undoc-members:
:show-inheritance:
modelscope.utils.logger module
------------------------------
.. automodule:: modelscope.utils.logger
:members:
:undoc-members:
:show-inheritance:
modelscope.utils.registry module
--------------------------------
.. automodule:: modelscope.utils.registry
:members:
:undoc-members:
:show-inheritance:
modelscope.utils.type\_assert module
------------------------------------
.. automodule:: modelscope.utils.type_assert
:members:
:undoc-members:
:show-inheritance:


@@ -40,17 +40,37 @@ release = version
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.autosummary',
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'myst_parser',
'sphinx_markdown_tables',
'sphinx_copybutton',
'myst_parser',
]
autodoc_mock_imports = [
'matplotlib', 'pycocotools', 'terminaltables', 'mmcv.ops'
]
# build the templated autosummary files
autosummary_generate = True
numpydoc_show_class_members = False
# Enable overriding of function signatures in the first line of the docstring.
autodoc_docstring_signature = True
# Disable docstring inheritance
autodoc_inherit_docstrings = False
# Show type hints in the description
autodoc_typehints = 'description'
# Add parameter types if the parameter is documented in the docstring
autodoc_typehints_description_target = 'documented_params'
autodoc_default_options = {
'member-order': 'bysource',
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -58,27 +78,46 @@ templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = {
'.rst': 'restructuredtext',
'.md': 'markdown',
}
source_suffix = ['.rst', '.md']
# The master toctree document.
master_doc = 'index'
root_doc = 'index'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['build', 'Thumbs.db', '.DS_Store']
exclude_patterns = [
'build', 'source/.ipynb_checkpoints', 'source/api/generated', 'Thumbs.db',
'.DS_Store'
]
# A list of glob-style patterns [1] that are used to find source files.
# They are matched against the source file names relative to the source directory,
# using slashes as directory separators on all platforms.
# The default is **, meaning that all files are recursively included from the source directory.
# include_patterns = [
# 'index.rst',
# 'quick_start.md',
# 'develop.md',
# 'faq.md',
# 'change_log.md',
# 'api/modelscope.hub*',
# 'api/modelscope.models.base*',
# 'api/modelscope.models.builder*',
# 'api/modelscope.pipelines.base*',
# 'api/modelscope.pipelines.builder*',
# 'api/modelscope.preprocessors.base*',
# 'api/modelscope.preprocessors.builder*',
# 'api/modelscope.trainers.base*',
# 'api/modelscope.trainers.builder*',
# ]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_book_theme'
html_theme_path = [sphinx_book_theme.get_html_theme_path()]
html_theme_options = {}
# html_theme = 'sphinx_book_theme'
# html_theme_path = [sphinx_book_theme.get_html_theme_path()]
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
@@ -88,7 +127,7 @@ html_static_path = ['_static']
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'modelscope_doc'
# htmlhelp_basename = 'modelscope_doc'
# -- Extension configuration -------------------------------------------------
# Ignore >>> when copying code
@@ -97,8 +136,3 @@ copybutton_prompt_is_regexp = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
autodoc_default_options = {
'member-order': 'bysource',
'special-members': '__init__',
}


@@ -1,48 +0,0 @@
# FAQ
<a name="macos-pip-tokenizer-error"></a>
### 1. pip install of tokenizers fails on macOS
PyPI does not ship a prebuilt tokenizers wheel for `macOS`, so a source-build environment has to be set up before the package installs correctly:
1. Install rust
```shell
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
pip install setuptools_rust
```
2. Update the rust environment variables
```shell
source $HOME/.cargo/env
```
3. Install tokenizers
```shell
pip install tokenizers
```
reference: [https://huggingface.co/docs/tokenizers/installation#installation-from-sources](https://huggingface.co/docs/tokenizers/installation#installation-from-sources)
### 2. pip package conflicts
> ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.
Version conflicts can occur when dependencies are mutually incompatible; in most cases they do not affect normal operation.
### 3. Version errors when installing pytorch
> ERROR: Ignored the following versions that require a different python version: 1.1.0 Requires-Python >=3.8; 1.1.0rc1 Requires-Python >=3.8; 1.1.1 Requires-Python >=3.8
> ERROR: Could not find a version that satisfies the requirement torch==1.8.1+cu111 (from versions: 1.0.0, 1.0.1, 1.0.1.post2, 1.1.0, 1.2.0, 1.3.0, 1.3.1, 1.4.0, 1.5.0, 1.5.1, 1.6.0, 1.7.0, 1.7.1, 1.8.0, 1.8.1, 1.9.0, 1.9.1, 1.10.0, 1.10.1, 1.10.2, 1.11.0)
> ERROR: No matching distribution found for torch==1.8.1+cu111
Install with the following command instead:
```shell
pip install -f https://download.pytorch.org/whl/torch_stable.html -i https://pypi.tuna.tsinghua.edu.cn/simple -r requirements.txt
```
### 4. zsh: no matches found: modelscope-0.2.2-py3-none-any.whl[all]
zsh on mac treats `[]` as a glob pattern, so the brackets must be escaped:
```shell
pip install modelscope\[all\] -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```


@@ -3,25 +3,24 @@
contain the root `toctree` directive.
ModelScope DOCUMENTATION
=======================================
ModelScope doc
========================
.. toctree::
:maxdepth: 2
:caption: USER GUIDE
:caption: DEVELOPER GUIDE
quick_start.md
develop.md
faq.md
.. toctree::
:maxdepth: 2
:caption: Tutorials
tutorials/index
:caption: API Doc
Hub <api/modelscope.hub>
Model <api/modelscope.models>
Preprocessor <api/modelscope.preprocessors>
Pipeline <api/modelscope.pipelines>
Trainer <api/modelscope.trainers>
MsDataset <api/modelscope.msdatasets>
.. toctree::
:maxdepth: 2
@@ -29,21 +28,6 @@ ModelScope doc
change_log.md
.. toctree::
.. :maxdepth: 10
.. :caption: API Doc
.. api/modelscope.preprocessors
.. api/modelscope.models
.. api/modelscope.pipelines
.. api/modelscope.fileio
.. api/modelscope.utils
.. api/modelscope.hub
.. api/modelscope.msdatasets
.. api/modelscope.tools
.. api/modelscope.trainers
Indices and tables
==================
* :ref:`genindex`


@@ -1,118 +0,0 @@
ModelScope Library currently supports model training and inference with the TensorFlow and PyTorch deep-learning frameworks, and is tested on Python 3.7+, PyTorch 1.8+, TensorFlow 1.15 and TensorFlow 2.x.
**Note: the `speech` features are only usable on `linux` with python3.7 and tensorflow1.15. All other features can be installed and used on Windows and Mac.**
## Python environment setup
First, install and configure an Anaconda environment following the [documentation](https://docs.anaconda.com/anaconda/install/).
Once installed, run the following to create a python environment for the modelscope library:
```shell
conda create -n modelscope python=3.7
conda activate modelscope
```
## Installing a deep-learning framework
- Install PyTorch ([reference](https://pytorch.org/get-started/locally/)).
```shell
pip3 install torch torchvision torchaudio
```
- Install TensorFlow ([reference](https://www.tensorflow.org/install/pip)).
```shell
pip install --upgrade tensorflow
```
## Installing the ModelScope library
Note: if you run into an error during installation, check the [FAQ](faq.md) for a solution.
### Installing with pip
To install the dependencies for all domains:
```shell
pip install "modelscope[cv,nlp,audio,multi-modal]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
To try only the `speech` features:
```shell
pip install "modelscope[audio]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
To try only the CV features:
```shell
pip install "modelscope[cv]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
To try only the NLP features:
```shell
pip install "modelscope[nlp]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
To try only the multi-modal features:
```shell
pip install "modelscope[multi-modal]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
**Notes**
1. **The `speech` features are only usable on `linux` with python3.7 and tensorflow1.15. All other features can be installed and used on Windows and Mac.**
2. Some speech models use the third-party library SoundFile for wav file processing. **On Linux, users have to manually install libsndfile, the native library SoundFile depends on**; on Windows and MacOS it is installed automatically. See the [SoundFile homepage](https://github.com/bastibe/python-soundfile#installation) for details. On Ubuntu, for example, run:
```shell
sudo apt-get update
sudo apt-get install libsndfile1
```
3. **The CV features require mmcv-full; please install it following the mmcv [installation guide](https://github.com/open-mmlab/mmcv#installation).**
### Installing from source
Suitable for local development and debugging: changes to the source take effect immediately.
Clone the ModelScope source directly:
```shell
git clone git@github.com:modelscope/modelscope.git
cd modelscope
git fetch origin master
git checkout master
```
Install the dependencies.
To install all dependencies, run:
```shell
pip install -e ".[cv,nlp,audio,multi-modal]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
To try the `speech` features, run:
```shell
pip install -e ".[audio]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
To try only the CV features:
```shell
pip install -e ".[cv]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
To try only the NLP features:
```shell
pip install -e ".[nlp]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
To try only the multi-modal features:
```shell
pip install -e ".[multi-modal]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
### Verifying the installation
After a successful install, run the following to verify that the installation works:
```shell
python -c "from modelscope.pipelines import pipeline;print(pipeline('word-segmentation')('今天天气不错,适合出去游玩'))"
```


@@ -1,6 +0,0 @@
.. toctree::
:maxdepth: 2
:caption: Tutorials
pipeline.md
trainer.md


@@ -1,61 +0,0 @@
# Pipeline tutorial
This article briefly explains how to load a model for inference with the `pipeline` function. `pipeline` can pull a model from the model hub by task type or model name and run inference. It covers:
* running inference with the pipeline() function
* specifying a particular preprocessor and model for inference
* inference examples for different task scenarios
## Environment setup
See the [quick start](../quick_start.md) for detailed steps.
## Basic pipeline usage
The basic usage of the pipeline function is illustrated below with a Chinese word segmentation task.
1. pipeline() can create a pipeline object from a task name alone, loading the task's default model.
Run the following python code:
```python
from modelscope.pipelines import pipeline
word_segmentation = pipeline('word-segmentation')
```
2. Pass in the input text
``` python
input = '今天天气不错,适合出去游玩'
print(word_segmentation(input))
{'output': '今天 天气 不错 适合 出去 游玩'}
```
3. Pass in multiple samples
A pipeline object also accepts a list of samples and returns a list of outputs, one element per input sample.
```python
inputs = ['今天天气不错,适合出去游玩','这本书很好,建议你看看']
print(word_segmentation(inputs))
[{'output': '今天 天气 不错 适合 出去 游玩'}, {'output': '这 本 书 很 好 建议 你 看看'}]
```
## Specifying a preprocessor and model for inference
The pipeline function also accepts instantiated preprocessor and model objects, so users can customize both during inference.
1. First, create the preprocessor and the model
```python
from modelscope.models import Model
from modelscope.preprocessors import TokenClassificationPreprocessor
model = Model.from_pretrained('damo/nlp_structbert_word-segmentation_chinese-base')
tokenizer = TokenClassificationPreprocessor(model.model_dir)
```
2. Create the pipeline from the tokenizer and model objects
```python
from modelscope.pipelines import pipeline
word_seg = pipeline('word-segmentation', model=model, preprocessor=tokenizer)
input = '今天天气不错,适合出去游玩'
print(word_seg(input))
{'output': '今天 天气 不错 适合 出去 游玩'}
```
## Inference examples for other task scenarios
The following takes an image task, portrait matting ('image-matting'), as a further example of pipeline usage:
```python
import cv2
from modelscope.pipelines import pipeline
img_matting = pipeline('image-matting')
result = img_matting('https://modelscope.oss-cn-beijing.aliyuncs.com/test/images/image_matting.png')
cv2.imwrite('result.png', result['output_png'])
```


@@ -1,54 +0,0 @@
# Trainer tutorial
ModelScope provides a large number of pretrained models; you can take any of them and train it for a specific task on a public or a private dataset. This article shows how to finetune and evaluate a model with ModelScope's `Trainer` module.
## Environment setup
See the [quick start](../quick_start.md) for detailed steps.
### Preparing the dataset
Before finetuning, prepare a dataset for training and evaluation; see the dataset tutorial for details.
```python
from modelscope.msdatasets import MsDataset
train_dataset = MsDataset.load('afqmc_small', namespace='modelscope', split='train')
eval_dataset = MsDataset.load('afqmc_small', namespace='modelscope', split='validation')
```
### Training
ModelScope keeps all training-related configuration in the `configuration.json` file of the model repository, so training only takes creating a Trainer, loading the configuration file, and passing in the datasets.
First create the Trainer through the factory method, passing in the model repository path, the training dataset, the evaluation dataset and the working directory:
```python
from modelscope.trainers import build_trainer

kwargs = dict(
    model='damo/nlp_structbert_sentiment-classification_chinese-base',
    train_dataset=train_dataset,
    eval_dataset=eval_dataset,
    work_dir='work_dir')
trainer = build_trainer(default_args=kwargs)
```
Start training:
```python
trainer.train()
```
To adjust the training parameters, download `configuration.json` from the model repository page, modify it locally, and create the trainer with the path of the modified configuration file:
```python
kwargs = dict(
    model='damo/nlp_structbert_sentiment-classification_chinese-base',
    train_dataset=train_dataset,
    eval_dataset=eval_dataset,
    cfg_file='path/to/your/configuration.json',
    work_dir='work_dir')
trainer = build_trainer(default_args=kwargs)
trainer.train()
```
### Evaluation
The validation set is evaluated periodically during training; the Trainer module also supports a one-off evaluation against the checkpoint saved at a given epoch:
```python
eval_results = trainer.evaluate('work_dir/epoch_10.pth')
print(eval_results)
```


@@ -0,0 +1,86 @@
import os
from modelscope.metainfo import Trainers
from modelscope.msdatasets.ms_dataset import MsDataset
from modelscope.trainers.builder import build_trainer
from modelscope.trainers.training_args import (ArgAttr, CliArgumentParser,
training_args)
def define_parser():
training_args.num_classes = ArgAttr(
cfg_node_name=[
'model.mm_model.head.num_classes',
'model.mm_model.train_cfg.augments.0.num_classes',
'model.mm_model.train_cfg.augments.1.num_classes'
],
type=int,
help='number of classes')
training_args.train_batch_size.default = 16
training_args.train_data_worker.default = 1
training_args.max_epochs.default = 1
training_args.optimizer.default = 'AdamW'
training_args.lr.default = 1e-4
training_args.warmup_iters = ArgAttr(
'train.lr_config.warmup_iters',
type=int,
default=1,
help='number of warmup epochs')
training_args.topk = ArgAttr(
cfg_node_name=[
'train.evaluation.metric_options.topk',
'evaluation.metric_options.topk'
],
default=(1, ),
help='evaluation using topk, tuple format, eg (1,), (1,5)')
training_args.train_data = ArgAttr(
type=str, default='tany0699/cats_and_dogs', help='train dataset')
training_args.validation_data = ArgAttr(
type=str, default='tany0699/cats_and_dogs', help='validation dataset')
training_args.model_id = ArgAttr(
type=str,
default='damo/cv_vit-base_image-classification_ImageNet-labels',
help='model name')
parser = CliArgumentParser(training_args)
return parser
def create_dataset(name, split):
namespace, dataset_name = name.split('/')
return MsDataset.load(
dataset_name, namespace=namespace, subset_name='default', split=split)
def train(parser):
cfg_dict = parser.get_cfg_dict()
args = parser.args
train_dataset = create_dataset(args.train_data, split='train')
val_dataset = create_dataset(args.validation_data, split='validation')
def cfg_modify_fn(cfg):
cfg.merge_from_dict(cfg_dict)
return cfg
kwargs = dict(
model=args.model_id, # model id
train_dataset=train_dataset, # training dataset
eval_dataset=val_dataset, # validation dataset
cfg_modify_fn=cfg_modify_fn # callback to modify configuration
)
# in distributed training, specify pytorch launcher
if 'MASTER_ADDR' in os.environ:
kwargs['launcher'] = 'pytorch'
trainer = build_trainer(
name=Trainers.image_classification, default_args=kwargs)
# start to train
trainer.train()
if __name__ == '__main__':
parser = define_parser()
train(parser)


@@ -0,0 +1,5 @@
PYTHONPATH=. python -m torch.distributed.launch --nproc_per_node=2 \
examples/pytorch/finetune_image_classification.py \
--num_classes 2 \
--train_data 'tany0699/cats_and_dogs' \
--validation_data 'tany0699/cats_and_dogs'


@@ -0,0 +1,8 @@
2023-01-11 09:05:29,113 - modelscope - WARNING - Authentication has expired, please re-login if you need to access private models or datasets.
2023-01-11 09:05:29,285 - modelscope - WARNING - Authentication has expired, please re-login if you need to access private models or datasets.
2023-01-11 09:05:29,436 - modelscope - INFO - Model revision not specified, use default: master in development mode
2023-01-11 09:05:29,436 - modelscope - INFO - Development mode use revision: master
2023-01-11 09:05:29,590 - modelscope - INFO - File configuration.json already in cache, skip downloading!
2023-01-11 09:05:29,590 - modelscope - INFO - File model.onnx already in cache, skip downloading!
2023-01-11 09:05:29,590 - modelscope - INFO - File README.md already in cache, skip downloading!
2023-01-11 09:05:29,590 - modelscope - INFO - File result.png already in cache, skip downloading!


@@ -1,4 +1,5 @@
from .base import Exporter
from .builder import build_exporter
from .nlp import SbertForSequenceClassificationExporter
from .tf_model_exporter import TfModelExporter
from .torch_model_exporter import TorchModelExporter


@@ -1,10 +1,12 @@
# Copyright (c) Alibaba, Inc. and its affiliates.
import os
from abc import ABC, abstractmethod
from typing import Dict, Union
from modelscope.models import Model
from modelscope.utils.config import Config, ConfigDict
from modelscope.utils.constant import ModelFile
from modelscope.utils.hub import snapshot_download
from .builder import build_exporter
@@ -12,36 +14,43 @@ class Exporter(ABC):
"""Exporter base class to output model to onnx, torch_script, graphdef, etc.
"""
def __init__(self):
self.model = None
def __init__(self, model=None):
self.model = model
@classmethod
def from_model(cls, model: Model, **kwargs):
def from_model(cls, model: Union[Model, str], **kwargs):
"""Build the Exporter instance.
Args:
model: A Model instance. it will be used to generate the intermediate format file,
and the configuration.json in its model_dir field will be used to create the exporter instance.
model: A Model instance, a model id, or a model dir; the configuration.json file next to it
will be used to create the exporter instance.
kwargs: Extra kwargs used to create the Exporter instance.
Returns:
The Exporter instance
"""
if isinstance(model, str):
model = Model.from_pretrained(model)
assert hasattr(model, 'model_dir')
model_dir = model.model_dir
cfg = Config.from_file(
os.path.join(model.model_dir, ModelFile.CONFIGURATION))
os.path.join(model_dir, ModelFile.CONFIGURATION))
task_name = cfg.task
if hasattr(model, 'group_key'):
task_name = model.group_key
model_cfg = cfg.model
if hasattr(model_cfg, 'model_type') and not hasattr(model_cfg, 'type'):
model_cfg.type = model_cfg.model_type
export_cfg = ConfigDict({'type': model_cfg.type})
if hasattr(cfg, 'export'):
export_cfg.update(cfg.export)
export_cfg['model'] = model
exporter = build_exporter(export_cfg, task_name, kwargs)
exporter.model = model
return exporter
@abstractmethod
def export_onnx(self, outputs: str, opset=11, **kwargs):
def export_onnx(self, output_dir: str, opset=13, **kwargs):
"""Export the model as onnx format files.
In some cases, several files may be generated,
@@ -49,7 +58,7 @@ class Exporter(ABC):
Args:
opset: The version of the ONNX operator set to use.
outputs: The output dir.
output_dir: The output dir.
kwargs: In this default implementation,
kwargs will be carried to generate_dummy_inputs as extra arguments (like input shape).
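With the `from_model` change above, an exporter can now be built straight from a model id or a local model directory instead of a pre-built `Model` instance. A hedged usage sketch; the model id and output directory are illustrative, not taken from this diff:

```python
from modelscope.exporters import Exporter

# Illustrative model id; any model whose configuration.json carries an
# export section should work the same way.
exporter = Exporter.from_model(
    'damo/nlp_structbert_sentence-similarity_chinese-base')
output_files = exporter.export_onnx(output_dir='/tmp/onnx_export', opset=13)
print(output_files)  # e.g. {'model': '/tmp/onnx_export/model.onnx'}
```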


@@ -8,11 +8,20 @@ from modelscope.exporters.builder import EXPORTERS
from modelscope.exporters.torch_model_exporter import TorchModelExporter
from modelscope.metainfo import Models
from modelscope.preprocessors import (
TextClassificationTransformersPreprocessor, build_preprocessor)
from modelscope.utils.config import Config
Preprocessor, TextClassificationTransformersPreprocessor,
build_preprocessor)
from modelscope.utils.constant import ModeKeys, Tasks
@EXPORTERS.register_module(Tasks.text_classification, module_name=Models.bert)
@EXPORTERS.register_module(
Tasks.text_classification, module_name=Models.structbert)
@EXPORTERS.register_module(Tasks.sentence_similarity, module_name=Models.bert)
@EXPORTERS.register_module(
Tasks.zero_shot_classification, module_name=Models.bert)
@EXPORTERS.register_module(
Tasks.sentiment_classification, module_name=Models.bert)
@EXPORTERS.register_module(Tasks.nli, module_name=Models.bert)
@EXPORTERS.register_module(
Tasks.sentence_similarity, module_name=Models.structbert)
@EXPORTERS.register_module(
@@ -38,14 +47,9 @@ class SbertForSequenceClassificationExporter(TorchModelExporter):
Dummy inputs.
"""
cfg = Config.from_file(
os.path.join(self.model.model_dir, 'configuration.json'))
field_name = Tasks.find_field_by_task(cfg.task)
if 'type' not in cfg.preprocessor and 'val' in cfg.preprocessor:
cfg = cfg.preprocessor.val
else:
cfg = cfg.preprocessor
assert hasattr(
self.model, 'model_dir'
), 'model_dir attribute is required to build the preprocessor'
batch_size = 1
sequence_length = {}
if shape is not None:
@@ -55,13 +59,11 @@ class SbertForSequenceClassificationExporter(TorchModelExporter):
batch_size, max_length = shape
sequence_length = {'sequence_length': max_length}
cfg.update({
'model_dir': self.model.model_dir,
'mode': ModeKeys.TRAIN,
**sequence_length
})
preprocessor: TextClassificationTransformersPreprocessor = build_preprocessor(
cfg, field_name)
preprocessor = Preprocessor.from_pretrained(
self.model.model_dir,
preprocessor_mode=ModeKeys.TRAIN,
task=Tasks.text_classification,
**sequence_length)
if pair:
first_sequence = preprocessor.nlp_tokenizer.tokenizer.unk_token
second_sequence = preprocessor.nlp_tokenizer.tokenizer.unk_token
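The rewrite above drops the hand-rolled config parsing in favor of `Preprocessor.from_pretrained`, which reads the preprocessor section of `configuration.json` itself. A hedged sketch of the same call outside the exporter (the model id is illustrative):

```python
from modelscope.preprocessors import Preprocessor
from modelscope.utils.constant import ModeKeys, Tasks

# Build the training-mode preprocessor for a text classification model
# directly from its repository, mirroring the exporter code above.
preprocessor = Preprocessor.from_pretrained(
    'damo/nlp_structbert_sentence-similarity_chinese-base',
    preprocessor_mode=ModeKeys.TRAIN,
    task=Tasks.text_classification)
```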


@@ -0,0 +1,114 @@
# Copyright (c) Alibaba, Inc. and its affiliates.
import os
from typing import Any, Callable, Dict, Mapping
import tensorflow as tf
from modelscope.outputs import ModelOutputBase
from modelscope.utils.constant import ModelFile
from modelscope.utils.logger import get_logger
from modelscope.utils.regress_test_utils import compare_arguments_nested
from .base import Exporter
logger = get_logger()
class TfModelExporter(Exporter):
def generate_dummy_inputs(self, **kwargs) -> Dict[str, Any]:
"""Generate dummy inputs for model exportation to onnx or other formats by tracing.
Returns:
Dummy inputs that matches the specific model input, the matched preprocessor can be used here.
"""
return None
def export_onnx(self, output_dir: str, opset=13, **kwargs):
model = self.model if 'model' not in kwargs else kwargs.pop('model')
onnx_file = os.path.join(output_dir, ModelFile.ONNX_MODEL_FILE)
self._tf2_export_onnx(model, onnx_file, opset=opset, **kwargs)
return {'model': onnx_file}
def _tf2_export_onnx(self,
model,
output: str,
opset: int = 13,
validation: bool = True,
rtol: float = None,
atol: float = None,
call_func: Callable = None,
**kwargs):
logger.info(
'Important: This exporting function only supports models of tf2.0 or above.'
)
import onnx
import tf2onnx
dummy_inputs = self.generate_dummy_inputs(
**kwargs) if 'dummy_inputs' not in kwargs else kwargs.pop(
'dummy_inputs')
if dummy_inputs is None:
raise NotImplementedError(
'Model property dummy_inputs,inputs,outputs must be set.')
input_signature = [
tf.TensorSpec.from_tensor(tensor, name=key)
for key, tensor in dummy_inputs.items()
]
onnx_model, _ = tf2onnx.convert.from_keras(
model, input_signature, opset=opset)
onnx.save(onnx_model, output)
if validation:
try:
import onnx
import onnxruntime as ort
except ImportError:
logger.warn(
'Cannot validate the exported onnx file, because '
'the installation of onnx or onnxruntime cannot be found')
return
def tensor_nested_numpify(tensors):
if isinstance(tensors, (list, tuple)):
return type(tensors)(
tensor_nested_numpify(t) for t in tensors)
if isinstance(tensors, Mapping):
# return dict
return {
k: tensor_nested_numpify(t)
for k, t in tensors.items()
}
if isinstance(tensors, tf.Tensor):
t = tensors.cpu()
return t.numpy()
return tensors
onnx_model = onnx.load(output)
onnx.checker.check_model(onnx_model)
ort_session = ort.InferenceSession(output)
outputs_origin = call_func(
dummy_inputs) if call_func is not None else model(dummy_inputs)
if isinstance(outputs_origin, (Mapping, ModelOutputBase)):
outputs_origin = list(
tensor_nested_numpify(outputs_origin).values())
elif isinstance(outputs_origin, (tuple, list)):
outputs_origin = list(tensor_nested_numpify(outputs_origin))
outputs = ort_session.run(
None,
tensor_nested_numpify(dummy_inputs),
)
outputs = tensor_nested_numpify(outputs)
if isinstance(outputs, dict):
outputs = list(outputs.values())
elif isinstance(outputs, tuple):
outputs = list(outputs)
tols = {}
if rtol is not None:
tols['rtol'] = rtol
if atol is not None:
tols['atol'] = atol
if not compare_arguments_nested('Onnx model output match failed',
outputs, outputs_origin, **tols):
raise RuntimeError(
'export onnx failed because of validation error.')
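For context, a minimal usage sketch of the exporter above (not part of the diff; the model, the input name 'x', and '/tmp' are placeholders, and no-arg construction of the Exporter base is an assumption):
import tensorflow as tf

class ToyExporter(TfModelExporter):
    def generate_dummy_inputs(self, **kwargs):
        # One dummy batch, keyed by the model's input name.
        return {'x': tf.ones((1, 4), dtype=tf.float32)}

inp = tf.keras.Input(shape=(4,), name='x')
model = tf.keras.Model(inp, tf.keras.layers.Dense(2)(inp))
exporter = ToyExporter()  # assumption: the Exporter base allows no-arg construction
# Passing model= uses the kwargs path shown in export_onnx above.
print(exporter.export_onnx(output_dir='/tmp', model=model))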

View File

@@ -84,7 +84,8 @@ class LocalStorage(Storage):
"""
dirname = os.path.dirname(filepath)
if dirname and not os.path.exists(dirname):
os.makedirs(dirname)
os.makedirs(dirname, exist_ok=True)
with open(filepath, 'wb') as f:
f.write(obj)
@@ -106,7 +107,8 @@ class LocalStorage(Storage):
"""
dirname = os.path.dirname(filepath)
if dirname and not os.path.exists(dirname):
os.makedirs(dirname)
os.makedirs(dirname, exist_ok=True)
with open(filepath, 'w', encoding=encoding) as f:
f.write(obj)
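For context: the exists()-then-makedirs() pattern replaced above is racy; another process can create the directory between the check and the call. exist_ok=True makes the call idempotent, as in this standalone sketch (illustrative, not part of the diff):
import os

def ensure_parent_dir(filepath: str) -> None:
    dirname = os.path.dirname(filepath)
    if dirname:
        # Safe even if a concurrent writer creates dirname first.
        os.makedirs(dirname, exist_ok=True)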

View File

@@ -1,6 +1,6 @@
# Copyright (c) Alibaba, Inc. and its affiliates.
# yapf: disable
import datetime
import functools
import os
@@ -39,13 +39,13 @@ from modelscope.hub.errors import (InvalidParameter, NotExistError,
raise_for_http_status, raise_on_error)
from modelscope.hub.git import GitCommandWrapper
from modelscope.hub.repository import Repository
from modelscope.utils.config_ds import DOWNLOADED_DATASETS_PATH
from modelscope.utils.constant import (DEFAULT_DATASET_REVISION,
DEFAULT_MODEL_REVISION,
DEFAULT_REPOSITORY_REVISION,
MASTER_MODEL_BRANCH, DatasetFormations,
DatasetMetaFormats, DownloadChannel,
DownloadMode, ModelFile)
DatasetMetaFormats,
DatasetVisibilityMap, DownloadChannel,
ModelFile)
from modelscope.utils.logger import get_logger
from .utils.utils import (get_endpoint, get_release_datetime,
model_id_to_group_owner_name)
@@ -54,38 +54,51 @@ logger = get_logger()
class HubApi:
"""Model hub api interface.
"""
def __init__(self, endpoint: Optional[str] = None):
"""The ModelScope HubApi。
def __init__(self, endpoint=None):
Args:
endpoint (str, optional): The modelscope server http|https address. Defaults to None.
"""
self.endpoint = endpoint if endpoint is not None else get_endpoint()
self.headers = {'user-agent': ModelScopeConfig.get_user_agent()}
self.session = Session()
retry = Retry(total=2, read=2, connect=2, backoff_factor=1,
status_forcelist=(500, 502, 503, 504),)
retry = Retry(
total=2,
read=2,
connect=2,
backoff_factor=1,
status_forcelist=(500, 502, 503, 504),
)
adapter = HTTPAdapter(max_retries=retry)
self.session.mount('http://', adapter)
self.session.mount('https://', adapter)
# set http timeout
for method in REQUESTS_API_HTTP_METHOD:
setattr(self.session,
method,
functools.partial(getattr(self.session, method), timeout=API_HTTP_CLIENT_TIMEOUT))
setattr(
self.session, method,
functools.partial(
getattr(self.session, method),
timeout=API_HTTP_CLIENT_TIMEOUT))
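A standalone sketch of the session pattern set up above (illustrative; the 60-second timeout and the method tuple stand in for API_HTTP_CLIENT_TIMEOUT and REQUESTS_API_HTTP_METHOD, which are defined elsewhere in the library):
import functools
from requests import Session
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

session = Session()
retry = Retry(total=2, read=2, connect=2, backoff_factor=1,
              status_forcelist=(500, 502, 503, 504))
session.mount('http://', HTTPAdapter(max_retries=retry))
session.mount('https://', HTTPAdapter(max_retries=retry))
# Bind a default timeout to every verb without touching call sites.
for method in ('get', 'post', 'put', 'patch', 'delete', 'head'):
    setattr(session, method,
            functools.partial(getattr(session, method), timeout=60))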
def login(
self,
access_token: str,
) -> tuple:
"""
Login with username and password
"""Login with your SDK access token, which can be obtained from
https://www.modelscope.cn user center.
Args:
access_token(`str`): user access token on modelscope.
access_token (str): user access token on modelscope.
Returns:
cookies: to authenticate yourself to ModelScope open-api
gitlab token: to access private repos
git_token: token to access your git repository.
<Tip>
Note:
You only have to login once within 30 days.
</Tip>
"""
path = f'{self.endpoint}/api/v1/login'
r = self.session.post(
@@ -107,27 +120,28 @@ class HubApi:
return d[API_RESPONSE_FIELD_DATA][
API_RESPONSE_FIELD_GIT_ACCESS_TOKEN], cookies
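An illustrative call (the token is a placeholder), matching the return shown above:
from modelscope.hub.api import HubApi

api = HubApi()
# Returns the git access token and the authentication cookies.
git_token, cookies = api.login('your-sdk-access-token')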
def create_model(
self,
model_id: str,
visibility: str,
license: str,
chinese_name: Optional[str] = None,
) -> str:
"""
Create model repo at ModelScopeHub
def create_model(self,
model_id: str,
visibility: Optional[int] = ModelVisibility.PUBLIC,
license: Optional[str] = Licenses.APACHE_V2,
chinese_name: Optional[str] = None) -> str:
"""Create model repo at ModelScopeHub.
Args:
model_id:(`str`): The model id
visibility(`int`): visibility of the model(1-private, 5-public), default public.
license(`str`): license of the model, default none.
chinese_name(`str`, *optional*): chinese name of the model
Returns:
name of the model created
model_id (str): The model id
visibility (int, optional): visibility of the model(1-private, 5-public), default 5.
license (str, optional): license of the model, default none.
chinese_name (str, optional): chinese name of the model.
<Tip>
Returns:
Name of the model created
Raises:
InvalidParameter: If model_id is invalid.
ValueError: If not logged in.
Note:
model_id = {owner}/{name}
</Tip>
"""
if model_id is None:
raise InvalidParameter('model_id is required!')
@@ -151,14 +165,17 @@ class HubApi:
model_repo_url = f'{get_endpoint()}/{model_id}'
return model_repo_url
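An illustrative call under the new defaults (the token and model id are placeholders; ModelVisibility and Licenses are assumed importable from modelscope.hub.constants):
from modelscope.hub.api import HubApi
from modelscope.hub.constants import Licenses, ModelVisibility

api = HubApi()
api.login('your-sdk-access-token')      # placeholder token
repo_url = api.create_model(
    model_id='my-group/my-model',       # placeholder id: {owner}/{name}
    visibility=ModelVisibility.PUBLIC,  # default
    license=Licenses.APACHE_V2,         # default
    chinese_name='my-model-zh')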
def delete_model(self, model_id):
"""_summary_
def delete_model(self, model_id: str):
"""Delete model_id from ModelScope.
Args:
model_id (str): The model id.
<Tip>
Raises:
ValueError: If not logged in.
Note:
model_id = {owner}/{name}
</Tip>
"""
cookies = ModelScopeConfig.get_cookies()
if cookies is None:
@@ -169,27 +186,28 @@ class HubApi:
raise_for_http_status(r)
raise_on_error(r.json())
def get_model_url(self, model_id):
def get_model_url(self, model_id: str):
return f'{self.endpoint}/api/v1/models/{model_id}.git'
def get_model(
self,
model_id: str,
revision: str = DEFAULT_MODEL_REVISION,
revision: Optional[str] = DEFAULT_MODEL_REVISION,
) -> str:
"""
Get model information at modelscope_hub
"""Get model information at ModelScope
Args:
model_id(`str`): The model id.
revision(`str`): revision of model
model_id (str): The model id.
revision (str, optional): revision of the model.
Returns:
The model detail information.
Raises:
NotExistError: If the model does not exist, a NotExistError will be thrown.
<Tip>
Note:
model_id = {owner}/{name}
</Tip>
"""
cookies = ModelScopeConfig.get_cookies()
owner_or_group, name = model_id_to_group_owner_name(model_id)
@@ -211,13 +229,12 @@ class HubApi:
def push_model(self,
model_id: str,
model_dir: str,
visibility: int = ModelVisibility.PUBLIC,
license: str = Licenses.APACHE_V2,
visibility: Optional[int] = ModelVisibility.PUBLIC,
license: Optional[str] = Licenses.APACHE_V2,
chinese_name: Optional[str] = None,
commit_message: Optional[str] = 'upload model',
revision: Optional[str] = DEFAULT_REPOSITORY_REVISION):
"""
Upload model from a given directory to given repository. A valid model directory
"""Upload model from a given directory to given repository. A valid model directory
must contain a configuration.json file.
This function uploads the files in the given directory to the given repository. If the
@@ -229,11 +246,11 @@ class HubApi:
which can be obtained from ModelScope's website.
Args:
model_id (`str`):
model_id (str):
The model id to be uploaded, caller must have write permission for it.
model_dir(`str`):
model_dir(str):
The absolute path of the finetune result.
visibility(`int`, defaults to `0`):
visibility(int, optional):
Visibility of the newly created model (1-private, 5-public). If the model does
not exist on ModelScope, this function will create a new model with this
visibility, and this parameter is required. You can ignore this parameter
@@ -250,6 +267,12 @@ class HubApi:
revision (`str`, *optional*, default to DEFAULT_MODEL_REVISION):
which branch to push to. If the branch does not exist, it will create a new
branch and push to it.
Raises:
InvalidParameter: Parameter invalid.
NotLoginException: Not logged in.
ValueError: No configuration.json file found.
Exception: Create failed.
"""
if model_id is None:
raise InvalidParameter('model_id cannot be empty!')
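An illustrative push (the token, id, and path are placeholders; per the docstring above, the directory must contain a configuration.json file):
from modelscope.hub.api import HubApi

api = HubApi()
api.login('your-sdk-access-token')          # placeholder token
api.push_model(
    model_id='my-group/my-model',           # caller must have write permission
    model_dir='/path/to/finetune_output',   # must contain configuration.json
    commit_message='upload finetuned weights')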
@@ -305,7 +328,10 @@ class HubApi:
date = datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S')
commit_message = '[automsg] push model %s to hub at %s' % (
model_id, date)
repo.push(commit_message=commit_message, local_branch=revision, remote_branch=revision)
repo.push(
commit_message=commit_message,
local_branch=revision,
remote_branch=revision)
except Exception:
raise
finally:
@@ -313,14 +339,18 @@ class HubApi:
def list_models(self,
owner_or_group: str,
page_number=1,
page_size=10) -> dict:
page_number: Optional[int] = 1,
page_size: Optional[int] = 10) -> dict:
"""List models in owner or group.
Args:
owner_or_group(`str`): owner or group.
page_number(`int`): The page number, default: 1
page_size(`int`): The page size, default: 10
owner_or_group(str): owner or group.
page_number(int, optional): The page number, default: 1
page_size(int, optional): The page size, default: 10
Raises:
RequestError: The request error.
Returns:
dict: {"models": "list of models", "TotalCount": total_number_of_models_in_owner_or_group}
"""
@@ -358,7 +388,7 @@ class HubApi:
def list_model_revisions(
self,
model_id: str,
cutoff_timestamp: int = None,
cutoff_timestamp: Optional[int] = None,
use_cookies: Union[bool, CookieJar] = False) -> List[str]:
"""Get model branch and tags.
@@ -368,6 +398,7 @@ class HubApi:
The timestamp is represented by the seconds elapsed since the epoch time.
use_cookies (Union[bool, CookieJar], optional): If a CookieJar is provided, it will be used;
if True, the cookie will be loaded locally. Defaults to False.
Returns:
List[str]: Return the list of model revisions (tags).
"""
@@ -385,7 +416,10 @@ class HubApi:
] if info['RevisionMap']['Tags'] else []
return tags
def get_valid_revision(self, model_id: str, revision=None, cookies: Optional[CookieJar] = None):
def get_valid_revision(self,
model_id: str,
revision=None,
cookies: Optional[CookieJar] = None):
release_timestamp = get_release_datetime()
current_timestamp = int(round(datetime.datetime.now().timestamp()))
# for active development in library codes (non-release-branches), release_timestamp
@@ -396,27 +430,36 @@ class HubApi:
model_id, use_cookies=False if cookies is None else cookies)
if revision is None:
revision = MASTER_MODEL_BRANCH
logger.info('Model revision not specified, use default: %s in development mode' % revision)
logger.info(
'Model revision not specified, use default: %s in development mode'
% revision)
if revision not in branches and revision not in tags:
raise NotExistError('The model: %s has no branch or tag : %s .' % revision)
raise NotExistError('The model: %s has no revision : %s .' % (model_id, revision))
logger.info('Development mode use revision: %s' % revision)
else:
if revision is None: # user not specified revision, use latest revision before release time
revisions = self.list_model_revisions(
model_id, cutoff_timestamp=release_timestamp, use_cookies=False if cookies is None else cookies)
model_id,
cutoff_timestamp=release_timestamp,
use_cookies=False if cookies is None else cookies)
if len(revisions) == 0:
raise NoValidRevisionError('The model: %s has no valid revision!' % model_id)
raise NoValidRevisionError(
'The model: %s has no valid revision!' % model_id)
# tags (revisions) returned from backend are guaranteed to be ordered by create-time
# we shall obtain the latest revision created earlier than release version of this branch
revision = revisions[0]
logger.info('Model revision not specified, use the latest revision: %s' % revision)
logger.info(
'Model revision not specified, use the latest revision: %s'
% revision)
else:
# use user-specified revision
revisions = self.list_model_revisions(
model_id, cutoff_timestamp=current_timestamp, use_cookies=False if cookies is None else cookies)
model_id,
cutoff_timestamp=current_timestamp,
use_cookies=False if cookies is None else cookies)
if revision not in revisions:
raise NotExistError(
'The model: %s has no revision: %s !' % (model_id, revision))
raise NotExistError('The model: %s has no revision: %s !' %
(model_id, revision))
logger.info('Use user-specified model revision: %s' % revision)
return revision
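An illustrative call (the model id is a placeholder), summarizing the resolution logic above: in development mode an unspecified revision falls back to the master branch; in release mode it resolves to the latest revision created before the library's release time, and a user-specified revision must exist among the revisions up to now:
from modelscope.hub.api import HubApi

api = HubApi()
revision = api.get_valid_revision('my-group/my-model')  # placeholder id
print(revision)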
@@ -431,6 +474,7 @@ class HubApi:
model_id (str): The model id
use_cookies (Union[bool, CookieJar], optional): If a CookieJar is provided, it will be used;
if True, the cookie will be loaded locally. Defaults to False.
Returns:
Tuple[List[str], List[str]]: Return lists of branch names and tags.
"""
@@ -466,9 +510,6 @@ class HubApi:
will load cookie from local. Defaults to False.
headers: request headers
Raises:
ValueError: If user_cookies is True, but no local cookie.
Returns:
List[dict]: Model file list.
"""
@@ -505,25 +546,8 @@ class HubApi:
dataset_list = r.json()[API_RESPONSE_FIELD_DATA]
return [x['Name'] for x in dataset_list]
def fetch_dataset_scripts(
self,
dataset_name: str,
namespace: str,
download_mode: Optional[DownloadMode],
revision: Optional[str] = DEFAULT_DATASET_REVISION):
if namespace is None:
raise ValueError(
f'Dataset from Hubs.modelscope should have a valid "namespace", but get {namespace}'
)
revision = revision or DEFAULT_DATASET_REVISION
cache_dir = os.path.join(DOWNLOADED_DATASETS_PATH, namespace,
dataset_name, revision)
download_mode = DownloadMode(download_mode
or DownloadMode.REUSE_DATASET_IF_EXISTS)
if download_mode == DownloadMode.FORCE_REDOWNLOAD and os.path.exists(
cache_dir):
shutil.rmtree(cache_dir)
os.makedirs(cache_dir, exist_ok=True)
def get_dataset_id_and_type(self, dataset_name: str, namespace: str):
""" Get the dataset id and type. """
datahub_url = f'{self.endpoint}/api/v1/datasets/{namespace}/{dataset_name}'
cookies = ModelScopeConfig.get_cookies()
r = self.session.get(datahub_url, cookies=cookies)
@@ -531,8 +555,15 @@ class HubApi:
datahub_raise_on_error(datahub_url, resp)
dataset_id = resp['Data']['Id']
dataset_type = resp['Data']['Type']
return dataset_id, dataset_type
def get_dataset_meta_file_list(self, dataset_name: str, namespace: str, dataset_id: str, revision: str):
""" Get the meta file-list of the dataset. """
datahub_url = f'{self.endpoint}/api/v1/datasets/{dataset_id}/repo/tree?Revision={revision}'
cookies = ModelScopeConfig.get_cookies()
r = self.session.get(datahub_url, cookies=cookies, headers=self.headers)
r = self.session.get(
datahub_url, cookies=cookies, headers=self.headers)
resp = r.json()
datahub_raise_on_error(datahub_url, resp)
file_list = resp['Data']
@@ -542,9 +573,23 @@ class HubApi:
f'version = {revision}] does not exist')
file_list = file_list['Files']
return file_list
def get_dataset_meta_files_local_paths(self, dataset_name: str,
namespace: str,
revision: str,
meta_cache_dir: str, dataset_type: int, file_list: list):
local_paths = defaultdict(list)
dataset_formation = DatasetFormations(dataset_type)
dataset_meta_format = DatasetMetaFormats[dataset_formation]
cookies = ModelScopeConfig.get_cookies()
# Dump the data_type as a local file
dataset_type_file_path = os.path.join(meta_cache_dir,
f'{str(dataset_type)}{DatasetFormations.formation_mark_ext.value}')
with open(dataset_type_file_path, 'w') as fp:
fp.write('*** Automatically-generated file, do not modify ***')
for file_info in file_list:
file_path = file_info['Path']
extension = os.path.splitext(file_path)[-1]
@@ -553,7 +598,7 @@ class HubApi:
f'Revision={revision}&FilePath={file_path}'
r = self.session.get(datahub_url, cookies=cookies)
raise_for_http_status(r)
local_path = os.path.join(cache_dir, file_path)
local_path = os.path.join(meta_cache_dir, file_path)
if os.path.exists(local_path):
logger.warning(
f"Reusing dataset {dataset_name}'s python file ({local_path})"
@@ -564,14 +609,14 @@ class HubApi:
f.write(r.content)
local_paths[extension].append(local_path)
return local_paths, dataset_formation, cache_dir
return local_paths, dataset_formation
def fetch_single_csv_script(self, script_url: str):
cookies = ModelScopeConfig.get_cookies()
resp = self.session.get(script_url, cookies=cookies, headers=self.headers)
if not resp or not resp.text:
raise ValueError('The meta-csv file cannot be empty when the meta-args `big_data` is true.')
text_list = resp.text.split('\n')
text_list = resp.text.strip().split('\n')
text_headers = text_list[0]
text_content = text_list[1:]
@@ -599,19 +644,48 @@ class HubApi:
def get_dataset_access_config_session(
self,
cookies: CookieJar,
dataset_name: str,
namespace: str,
check_cookie: bool,
revision: Optional[str] = DEFAULT_DATASET_REVISION):
datahub_url = f'{self.endpoint}/api/v1/datasets/{namespace}/{dataset_name}/' \
f'ststoken?Revision={revision}'
if check_cookie:
cookies = self._check_cookie(use_cookies=True)
else:
cookies = ModelScopeConfig.get_cookies()
r = self.session.get(url=datahub_url, cookies=cookies, headers=self.headers)
r = self.session.get(
url=datahub_url, cookies=cookies, headers=self.headers)
resp = r.json()
raise_on_error(resp)
return resp['Data']
def get_dataset_access_config_for_unzipped(self,
dataset_name: str,
namespace: str,
revision: str,
zip_file_name: str):
datahub_url = f'{self.endpoint}/api/v1/datasets/{namespace}/{dataset_name}'
cookies = ModelScopeConfig.get_cookies()
r = self.session.get(url=datahub_url, cookies=cookies, headers=self.headers)
resp = r.json()
# get visibility of the dataset
raise_on_error(resp)
data = resp['Data']
visibility = DatasetVisibilityMap.get(data['Visibility'])
datahub_sts_url = f'{datahub_url}/ststoken?Revision={revision}'
r_sts = self.session.get(url=datahub_sts_url, cookies=cookies, headers=self.headers)
resp_sts = r_sts.json()
raise_on_error(resp_sts)
data_sts = resp_sts['Data']
file_dir = visibility + '-unzipped' + '/' + namespace + '_' + dataset_name + '_' + zip_file_name
data_sts['Dir'] = file_dir
return data_sts
def list_oss_dataset_objects(self, dataset_name, namespace, max_limit,
is_recursive, is_filter_dir, revision):
url = f'{self.endpoint}/api/v1/datasets/{namespace}/{dataset_name}/oss/tree/?' \
@@ -624,12 +698,6 @@ class HubApi:
resp = resp['Data']
return resp
def on_dataset_download(self, dataset_name: str, namespace: str) -> None:
url = f'{self.endpoint}/api/v1/datasets/{namespace}/{dataset_name}/download/increase'
cookies = ModelScopeConfig.get_cookies()
r = self.session.post(url, cookies=cookies, headers=self.headers)
raise_for_http_status(r)
def delete_oss_dataset_object(self, object_name: str, dataset_name: str,
namespace: str, revision: str) -> str:
if not object_name or not dataset_name or not namespace or not revision:
@@ -637,7 +705,7 @@ class HubApi:
url = f'{self.endpoint}/api/v1/datasets/{namespace}/{dataset_name}/oss?Path={object_name}&Revision={revision}'
cookies = self.check_local_cookies(use_cookies=True)
cookies = ModelScopeConfig.get_cookies()
resp = self.session.delete(url=url, cookies=cookies)
resp = resp.json()
raise_on_error(resp)
@@ -652,7 +720,7 @@ class HubApi:
url = f'{self.endpoint}/api/v1/datasets/{namespace}/{dataset_name}/oss/prefix?Prefix={object_name}/' \
f'&Revision={revision}'
cookies = self.check_local_cookies(use_cookies=True)
cookies = ModelScopeConfig.get_cookies()
resp = self.session.delete(url=url, cookies=cookies)
resp = resp.json()
raise_on_error(resp)
@@ -661,32 +729,40 @@ class HubApi:
def datahub_remote_call(self, url):
cookies = ModelScopeConfig.get_cookies()
r = self.session.get(url, cookies=cookies, headers={'user-agent': ModelScopeConfig.get_user_agent()})
r = self.session.get(
url,
cookies=cookies,
headers={'user-agent': ModelScopeConfig.get_user_agent()})
resp = r.json()
datahub_raise_on_error(url, resp)
return resp['Data']
def check_local_cookies(self, use_cookies) -> CookieJar:
return self._check_cookie(use_cookies=use_cookies)
def dataset_download_statistics(self, dataset_name: str, namespace: str, use_streaming: bool) -> None:
is_ci_test = os.getenv('CI_TEST') == 'True'
if dataset_name and namespace and not is_ci_test and not use_streaming:
try:
cookies = ModelScopeConfig.get_cookies()
def dataset_download_uv(self, dataset_name: str, namespace: str):
if not dataset_name or not namespace:
raise ValueError('dataset_name or namespace cannot be empty!')
# Download count
download_count_url = f'{self.endpoint}/api/v1/datasets/{namespace}/{dataset_name}/download/increase'
download_count_resp = self.session.post(download_count_url, cookies=cookies, headers=self.headers)
raise_for_http_status(download_count_resp)
# get channel and user_name
channel = DownloadChannel.LOCAL.value
user_name = ''
if MODELSCOPE_CLOUD_ENVIRONMENT in os.environ:
channel = os.environ[MODELSCOPE_CLOUD_ENVIRONMENT]
if MODELSCOPE_CLOUD_USERNAME in os.environ:
user_name = os.environ[MODELSCOPE_CLOUD_USERNAME]
# Download uv
channel = DownloadChannel.LOCAL.value
user_name = ''
if MODELSCOPE_CLOUD_ENVIRONMENT in os.environ:
channel = os.environ[MODELSCOPE_CLOUD_ENVIRONMENT]
if MODELSCOPE_CLOUD_USERNAME in os.environ:
user_name = os.environ[MODELSCOPE_CLOUD_USERNAME]
download_uv_url = f'{self.endpoint}/api/v1/datasets/{namespace}/{dataset_name}/download/uv/' \
f'{channel}?user={user_name}'
download_uv_resp = self.session.post(download_uv_url, cookies=cookies, headers=self.headers)
download_uv_resp = download_uv_resp.json()
raise_on_error(download_uv_resp)
url = f'{self.endpoint}/api/v1/datasets/{namespace}/{dataset_name}/download/uv/{channel}?user={user_name}'
cookies = ModelScopeConfig.get_cookies()
r = self.session.post(url, cookies=cookies, headers=self.headers)
resp = r.json()
raise_on_error(resp)
return resp['Message']
except Exception as e:
logger.error(e)
class ModelScopeConfig:
@@ -763,7 +839,8 @@ class ModelScopeConfig:
with open(
os.path.join(ModelScopeConfig.path_credential,
ModelScopeConfig.USER_INFO_FILE_NAME),
'r', encoding='utf-8') as f:
'r',
encoding='utf-8') as f:
info = f.read()
return info.split(':')[0], info.split(':')[1]
except FileNotFoundError:
@@ -784,7 +861,8 @@ class ModelScopeConfig:
with open(
os.path.join(ModelScopeConfig.path_credential,
ModelScopeConfig.GIT_TOKEN_FILE_NAME),
'r', encoding='utf-8') as f:
'r',
encoding='utf-8') as f:
token = f.read()
except FileNotFoundError:
pass

View File

@@ -0,0 +1,95 @@
# Copyright (c) Alibaba, Inc. and its affiliates.
import os
import pickle
from typing import Dict, Optional, Union
from urllib.parse import urlparse
from modelscope.hub.api import HubApi, ModelScopeConfig
from modelscope.hub.constants import (FILE_HASH, MODEL_META_FILE_NAME,
MODEL_META_MODEL_ID)
from modelscope.hub.git import GitCommandWrapper
from modelscope.hub.utils.caching import FileSystemCache, ModelFileSystemCache
from modelscope.hub.utils.utils import compute_hash
from modelscope.utils.logger import get_logger
logger = get_logger()
def check_local_model_is_latest(
model_root_path: str,
user_agent: Optional[Union[Dict, str]] = None,
):
"""Check local model repo is latest.
Check local model repo is same as hub latest version.
"""
model_cache = None
# download with git
if os.path.exists(os.path.join(model_root_path, '.git')):
git_cmd_wrapper = GitCommandWrapper()
git_url = git_cmd_wrapper.get_repo_remote_url(model_root_path)
if git_url.endswith('.git'):
git_url = git_url[:-4]
u_parse = urlparse(git_url)
model_id = u_parse.path[1:]
else: # snapshot_download
model_cache = ModelFileSystemCache(model_root_path)
model_id = model_cache.get_model_id()
try:
# make headers
headers = {
'user-agent':
ModelScopeConfig.get_user_agent(user_agent=user_agent, )
}
cookies = ModelScopeConfig.get_cookies()
snapshot_header = headers if 'CI_TEST' in os.environ else {
**headers,
**{
'Snapshot': 'True'
}
}
_api = HubApi()
try:
_, revisions = _api.get_model_branches_and_tags(
model_id=model_id, use_cookies=cookies)
if len(revisions) > 0:
latest_revision = revisions[0]
else:
latest_revision = 'master'
except: # noqa: E722
latest_revision = 'master'
model_files = _api.get_model_files(
model_id=model_id,
revision=latest_revision,
recursive=True,
headers=snapshot_header,
use_cookies=cookies,
)
for model_file in model_files:
if model_file['Type'] == 'tree':
continue
# check model_file updated
if model_cache is not None:
if model_cache.exists(model_file):
continue
else:
logger.info(
'The model has been updated on the ModelScope hub; you can verify at http://www.modelscope.cn.'
)
break
else:
if FILE_HASH in model_file:
local_file_hash = compute_hash(
os.path.join(model_root_path, model_file['Path']))
if local_file_hash == model_file[FILE_HASH]:
continue
else:
logger.info(
'The model has been updated on the ModelScope hub; you can verify at http://www.modelscope.cn.'
)
break
except: # noqa: E722
pass # ignore
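An illustrative call (the path is a placeholder, and the import path is an assumption based on this new file's apparent location at modelscope/hub/check_model.py):
from modelscope.hub.check_model import check_local_model_is_latest

# Logs a notice if the hub has newer files; failures are swallowed, so it never raises.
check_local_model_is_latest('/path/to/local/model_repo')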

View File

@@ -25,6 +25,8 @@ MODELSCOPE_CLOUD_ENVIRONMENT = 'MODELSCOPE_ENVIRONMENT'
MODELSCOPE_CLOUD_USERNAME = 'MODELSCOPE_USERNAME'
MODELSCOPE_SDK_DEBUG = 'MODELSCOPE_SDK_DEBUG'
ONE_YEAR_SECONDS = 24 * 365 * 60 * 60
MODEL_META_FILE_NAME = '.mdl'
MODEL_META_MODEL_ID = 'id'
class Licenses(object):

View File

@@ -185,6 +185,8 @@ class DeleteServiceParameters(AttrsToQueryString):
class ServiceDeployer(object):
"""Faciliate model deployment on to supported service provider(s).
"""
def __init__(self, endpoint=None):
self.endpoint = endpoint if endpoint is not None else get_endpoint()
@@ -210,7 +212,6 @@ class ServiceDeployer(object):
provider (ServiceProviderParameters): The service provider parameter
Raises:
NotLoginException: To use this api, you need to log in first.
NotSupportError: Not supported platform.
RequestError: The server return error.
@@ -248,10 +249,9 @@ class ServiceDeployer(object):
Args:
instance_name (str): The deployed instance name.
provider (ServiceProviderParameters): The cloud provider information, for eas
need region(eg: ch-hangzhou), access_key_id and access_key_secret.
need region (e.g. cn-hangzhou), access_key_id and access_key_secret.
Raises:
NotLoginException: To use this api, you need to log in first.
RequestError: The request failed on the server side.
Returns:
@@ -279,10 +279,9 @@ class ServiceDeployer(object):
Args:
instance_name (str): The instance name you want to delete.
provider (ServiceProviderParameters): The cloud provider information, for eas
need region(eg: ch-hangzhou), access_key_id and access_key_secret.
need region (e.g. cn-hangzhou), access_key_id and access_key_secret.
Raises:
NotLoginException: To call this api, you need to log in first.
RequestError: The request failed.
Returns:
@@ -305,17 +304,17 @@ class ServiceDeployer(object):
def list(self,
provider: ServiceProviderParameters,
skip: int = 0,
limit: int = 100):
skip: Optional[int] = 0,
limit: Optional[int] = 100):
"""List deployed model instances.
Args:
provider (ServiceProviderParameters): The cloud service provider parameter,
for eas, need access_key_id and access_key_secret.
skip: start of the list, current not support.
limit: maximum number of instances return, current not support
for eas, need access_key_id and access_key_secret.
skip (int, optional): start of the list; currently not supported.
limit (int, optional): maximum number of instances to return; currently not supported.
Raises:
NotLoginException: To use this api, you need to log in first.
RequestError: The request failed on the server side.
Returns:

View File

@@ -49,10 +49,10 @@ def is_ok(rsp):
""" Check the request is ok
Args:
rsp (_type_): The request response body
Failed: {'Code': 10010101004, 'Message': 'get model info failed, err: unauthorized permission',
'RequestId': '', 'Success': False}
Success: {'Code': 200, 'Data': {}, 'Message': 'success', 'RequestId': '', 'Success': True}
rsp (Response): The request response body
Returns:
bool: `True` on success, otherwise `False`.
"""
return rsp['Code'] == HTTPStatus.OK and rsp['Success']
@@ -84,6 +84,12 @@ def raise_on_error(rsp):
Args:
rsp (_type_): The server response
Raises:
RequestError: the response error message.
Returns:
bool: True if request is OK, otherwise raise `RequestError` exception.
"""
if rsp['Code'] == HTTPStatus.OK:
return True
@@ -91,26 +97,37 @@ def raise_on_error(rsp):
raise RequestError(rsp['Message'])
# TODO use raise_on_error instead if modelhub and datahub response have uniform structures,
def datahub_raise_on_error(url, rsp):
"""If response error, raise exception
Args:
rsp (_type_): The server response
url (str): The request url
rsp (HTTPResponse): The server response.
Raises:
RequestError: the http request error.
Returns:
bool: `True` if request is OK, otherwise raise `RequestError` exception.
"""
if rsp.get('Code') == HTTPStatus.OK:
return True
else:
raise RequestError(
f"Url = {url}, Status = {rsp.get('status')}, error = {rsp.get('error')}, message = {rsp.get('message')}"
f"Url = {url}, Message = {rsp.get('Message')}, Please specify correct dataset_name and namespace."
)
def raise_for_http_status(rsp):
"""
Attempt to decode utf-8 first since some servers
"""Attempt to decode utf-8 first since some servers
localize reason strings, for invalid utf-8, fall back
to decoding with iso-8859-1.
Args:
rsp: The http response.
Raises:
HTTPError: The http error info.
"""
http_error_msg = ''
if isinstance(rsp.reason, bytes):

View File

@@ -36,47 +36,40 @@ def model_file_download(
local_files_only: Optional[bool] = False,
cookies: Optional[CookieJar] = None,
) -> Optional[str]: # pragma: no cover
"""
Download from a given URL and cache it if it's not already present in the
local cache.
"""Download from a given URL and cache it if it's not already present in the local cache.
Given a URL, this function looks for the corresponding file in the local
cache. If it's not there, download it. Then return the path to the cached
file.
Args:
model_id (`str`):
The model to whom the file to be downloaded belongs.
file_path(`str`):
Path of the file to be downloaded, relative to the root of model repo
revision(`str`, *optional*):
revision of the model file to be downloaded.
Can be any of a branch, tag or commit hash
cache_dir (`str`, `Path`, *optional*):
Path to the folder where cached files are stored.
user_agent (`dict`, `str`, *optional*):
The user-agent info in the form of a dictionary or a string.
local_files_only (`bool`, *optional*, defaults to `False`):
If `True`, avoid downloading the file and return the path to the
local cached file if it exists.
if `False`, download the file anyway even it exists
model_id (str): The model to whom the file to be downloaded belongs.
file_path(str): Path of the file to be downloaded, relative to the root of model repo.
revision(str, optional): revision of the model file to be downloaded.
Can be any of a branch, tag or commit hash.
cache_dir (str, Path, optional): Path to the folder where cached files are stored.
user_agent (dict, str, optional): The user-agent info in the form of a dictionary or a string.
local_files_only (bool, optional): If `True`, avoid downloading the file and return the path to the
local cached file if it exists; if `False`, download the file anyway even if it exists.
cookies (CookieJar, optional): The cookie of download request.
Returns:
Local path (string) of file or if networking is off, last version of
string: Local path of the file or, if networking is off, last version of
file cached on disk.
<Tip>
Raises:
NotExistError: The file does not exist.
ValueError: A request parameter is invalid.
Raises the following errors:
Note:
Raises the following errors:
- [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
if `use_auth_token=True` and the token cannot be found.
- [`OSError`](https://docs.python.org/3/library/exceptions.html#OSError)
if ETag cannot be determined.
- [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
if some parameter value is invalid
</Tip>
- [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
if `use_auth_token=True` and the token cannot be found.
- [`OSError`](https://docs.python.org/3/library/exceptions.html#OSError)
if ETag cannot be determined.
- [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
if some parameter value is invalid
"""
if cache_dir is None:
cache_dir = get_cache_dir()
@@ -165,10 +158,17 @@ def model_file_download(
def get_file_download_url(model_id: str, file_path: str, revision: str):
"""
Format file download url according to `model_id`, `revision` and `file_path`.
"""Format file download url according to `model_id`, `revision` and `file_path`.
e.g., Given `model_id=john/bert`, `revision=master`, `file_path=README.md`,
the resulted download url is: https://modelscope.cn/api/v1/models/john/bert/repo?Revision=master&FilePath=README.md
the resulting download url is: https://modelscope.cn/api/v1/models/john/bert/repo?Revision=master&FilePath=README.md
Args:
model_id (str): The model_id.
file_path (str): File path
revision (str): File revision.
Returns:
str: The file url.
"""
download_url_template = '{endpoint}/api/v1/models/{model_id}/repo?Revision={revision}&FilePath={file_path}'
return download_url_template.format(
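A worked instance of the template above, matching the docstring's example (assuming the default modelscope.cn endpoint):
url = get_file_download_url('john/bert', 'README.md', 'master')
# -> https://modelscope.cn/api/v1/models/john/bert/repo?Revision=master&FilePath=README.md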
@@ -186,20 +186,23 @@ def http_get_file(
cookies: CookieJar,
headers: Optional[Dict[str, str]] = None,
):
"""
Download remote file, will retry 5 times before giving up on errors.
"""Download remote file, will retry 5 times before giving up on errors.
Args:
url(`str`):
url(str):
actual download url of the file
local_dir(`str`):
local_dir(str):
local directory where the downloaded file stores
file_name(`str`):
file_name(str):
name of the file stored in `local_dir`
cookies(`CookieJar`):
cookies(CookieJar):
cookies used to authenticate the user, which is needed for downloading private repos
headers(`Optional[Dict[str, str]] = None`):
headers(Dict[str, str], optional):
http headers to carry necessary info when requesting the remote file
Raises:
FileDownloadError: File download failed.
"""
total = -1
temp_file_manager = partial(

Some files were not shown because too many files have changed in this diff.