google style docs and selected file generator

ref: https://yuque.alibaba-inc.com/pai/rwqgvl/go8sc8tqzeqqfmsz
        Link: https://code.alibaba-inc.com/Ali-MaaS/MaaS-lib/codereview/11150212

    * google style docs and selected file generator
This commit is contained in:
mulin.lyh
2023-01-03 16:27:29 +08:00
parent 0675bd5c88
commit ab07dc5b5a
65 changed files with 734 additions and 1378 deletions

View File

@@ -7,7 +7,7 @@ REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set SOURCEDIR=source
set BUILDDIR=build
if "%1" == "" goto help

View File

@@ -0,0 +1,10 @@
.. currentmodule:: {{ module }}
{{ name | underline}}
.. autoclass:: {{ name }}
:inherited-members:
:members:
.. autogenerated from source/_templates/autosummary/class.rst

View File

@@ -0,0 +1,12 @@
.. currentmodule:: {{ module }}
{{ name | underline}}
.. autoclass:: {{ name }}
:members:
..
autogenerated from source/_templates/classtemplate.rst
note it does not have :inherited-members:

View File

@@ -0,0 +1,14 @@
.. currentmodule:: {{ module }}
{{ name | underline}}
.. autoclass:: {{ name }}
:members:
:exclude-members: MAXBIT, MAXDIM
:undoc-members:
..
autogenerated from source/_templates/sobolengine.rst
note it has specific options

View File

@@ -1,34 +0,0 @@
modelscope.fileio.format package
================================
.. automodule:: modelscope.fileio.format
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
modelscope.fileio.format.base module
------------------------------------
.. automodule:: modelscope.fileio.format.base
:members:
:undoc-members:
:show-inheritance:
modelscope.fileio.format.json module
------------------------------------
.. automodule:: modelscope.fileio.format.json
:members:
:undoc-members:
:show-inheritance:
modelscope.fileio.format.yaml module
------------------------------------
.. automodule:: modelscope.fileio.format.yaml
:members:
:undoc-members:
:show-inheritance:

View File

@@ -1,34 +0,0 @@
modelscope.fileio package
=========================
.. automodule:: modelscope.fileio
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. toctree::
:maxdepth: 4
modelscope.fileio.format
Submodules
----------
modelscope.fileio.file module
-----------------------------
.. automodule:: modelscope.fileio.file
:members:
:undoc-members:
:show-inheritance:
modelscope.fileio.io module
---------------------------
.. automodule:: modelscope.fileio.io
:members:
:undoc-members:
:show-inheritance:

View File

@@ -1,50 +1,17 @@
modelscope.hub package
=========================
modelscope.hub
==============
.. automodule:: modelscope.hub
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. currentmodule:: modelscope.hub
.. toctree::
:maxdepth: 4
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
modelscope.hub.utils
Submodules
----------
modelscope.hub.api module
-----------------------------
.. automodule:: modelscope.hub.api
:members:
:undoc-members:
:show-inheritance:
modelscope.hub.git module
---------------------------
.. automodule:: modelscope.hub.git
:members:
:undoc-members:
:show-inheritance:
modelscope.hub.file_download module
---------------------------
.. automodule:: modelscope.hub.file_download
:members:
:undoc-members:
:show-inheritance:
modelscope.hub.snapshot_download module
---------------------------
.. automodule:: modelscope.hub.snapshot_download
:members:
:undoc-members:
:show-inheritance:
api.HubApi
repository.Repository
deploy.ServiceDeployer
snapshot_download.snapshot_download
file_download.model_file_download

View File

@@ -1,26 +0,0 @@
modelscope.hub.utils package
===============================
.. automodule:: modelscope.hub.utils
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
modelscope.hub.utils.caching module
-------------------------------------------------------
.. automodule:: modelscope.hub.utils.caching
:members:
:undoc-members:
:show-inheritance:
modelscope.hub.utils.utils module
-------------------------------------------------------
.. automodule:: modelscope.hub.utils.utils
:members:
:undoc-members:
:show-inheritance:

View File

@@ -0,0 +1,17 @@
modelscope.models.base
======================
.. automodule:: modelscope.models.base
.. currentmodule:: modelscope.models.base
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
Model
TorchModel
Head
TorchHead

View File

@@ -0,0 +1,16 @@
modelscope.models.builder
=========================
.. automodule:: modelscope.models.builder
.. currentmodule:: modelscope.models.builder
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
build_model
build_backbone
build_head

View File

@@ -1,18 +0,0 @@
modelscope.models.cv.cartoon.facelib.LK package
===============================================
.. automodule:: modelscope.models.cv.cartoon.facelib.LK
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
modelscope.models.cv.cartoon.facelib.LK.lk module
-------------------------------------------------
.. automodule:: modelscope.models.cv.cartoon.facelib.LK.lk
:members:
:undoc-members:
:show-inheritance:

View File

@@ -1,50 +0,0 @@
modelscope.models.cv.cartoon.facelib package
============================================
.. automodule:: modelscope.models.cv.cartoon.facelib
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. toctree::
:maxdepth: 4
modelscope.models.cv.cartoon.facelib.LK
Submodules
----------
modelscope.models.cv.cartoon.facelib.config module
--------------------------------------------------
.. automodule:: modelscope.models.cv.cartoon.facelib.config
:members:
:undoc-members:
:show-inheritance:
modelscope.models.cv.cartoon.facelib.face\_detector module
----------------------------------------------------------
.. automodule:: modelscope.models.cv.cartoon.facelib.face_detector
:members:
:undoc-members:
:show-inheritance:
modelscope.models.cv.cartoon.facelib.face\_landmark module
----------------------------------------------------------
.. automodule:: modelscope.models.cv.cartoon.facelib.face_landmark
:members:
:undoc-members:
:show-inheritance:
modelscope.models.cv.cartoon.facelib.facer module
-------------------------------------------------
.. automodule:: modelscope.models.cv.cartoon.facelib.facer
:members:
:undoc-members:
:show-inheritance:

View File

@@ -1,15 +0,0 @@
modelscope.models.cv.cartoon.mtcnn\_pytorch package
===================================================
.. automodule:: modelscope.models.cv.cartoon.mtcnn_pytorch
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. toctree::
:maxdepth: 4
modelscope.models.cv.cartoon.mtcnn_pytorch.src

View File

@@ -1,26 +0,0 @@
modelscope.models.cv.cartoon.mtcnn\_pytorch.src package
=======================================================
.. automodule:: modelscope.models.cv.cartoon.mtcnn_pytorch.src
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
modelscope.models.cv.cartoon.mtcnn\_pytorch.src.align\_trans module
-------------------------------------------------------------------
.. automodule:: modelscope.models.cv.cartoon.mtcnn_pytorch.src.align_trans
:members:
:undoc-members:
:show-inheritance:
modelscope.models.cv.cartoon.mtcnn\_pytorch.src.matlab\_cp2tform module
-----------------------------------------------------------------------
.. automodule:: modelscope.models.cv.cartoon.mtcnn_pytorch.src.matlab_cp2tform
:members:
:undoc-members:
:show-inheritance:

View File

@@ -1,27 +0,0 @@
modelscope.models.cv.cartoon package
====================================
.. automodule:: modelscope.models.cv.cartoon
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. toctree::
:maxdepth: 4
modelscope.models.cv.cartoon.facelib
modelscope.models.cv.cartoon.mtcnn_pytorch
Submodules
----------
modelscope.models.cv.cartoon.utils module
-----------------------------------------
.. automodule:: modelscope.models.cv.cartoon.utils
:members:
:undoc-members:
:show-inheritance:

View File

@@ -1,15 +1,14 @@
modelscope.models.cv package
============================
modelscope.models.cv
====================
.. automodule:: modelscope.models.cv
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. currentmodule:: modelscope.models.cv
.. toctree::
:maxdepth: 4
modelscope.models.cv.cartoon
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
easycv_base.EasyCVBaseModel

View File

@@ -1,90 +0,0 @@
modelscope.models.nlp package
=============================
.. automodule:: modelscope.models.nlp
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
modelscope.models.nlp.bert\_for\_sequence\_classification module
------------------------------------------------------------
.. automodule:: modelscope.models.nlp.bert_for_sequence_classification
:members:
:undoc-members:
:show-inheritance:
modelscope.models.nlp.palm\_for\_text\_generation module
----------------------------------------------------
.. automodule:: modelscope.models.nlp.palm_for_text_generation
:members:
:undoc-members:
:show-inheritance:
modelscope.models.nlp.csanmt\_for\_translation module
----------------------------------------------------
.. automodule:: modelscope.models.nlp.csanmt_for_translation
:members:
:undoc-members:
:show-inheritance:
modelscope.models.nlp.masked\_language module
----------------------------------------------------
.. automodule:: modelscope.models.nlp.masked_language
:members:
:undoc-members:
:show-inheritance:
modelscope.models.nlp.sbert\_for\_nil module
----------------------------------------------------
.. automodule:: modelscope.models.nlp.sbert_for_nil
:members:
:undoc-members:
:show-inheritance:
modelscope.models.nlp.sbert\_for\_sentence\_similarity module
----------------------------------------------------
.. automodule:: modelscope.models.nlp.sbert_for_sentence_similarity
:members:
:undoc-members:
:show-inheritance:
modelscope.models.nlp.sbert\_for\_sentiment\_classification module
----------------------------------------------------
.. automodule:: modelscope.models.nlp.sbert_for_sentiment_classification
:members:
:undoc-members:
:show-inheritance:
modelscope.models.nlp.sbert\_for\_sequence\_classification module
----------------------------------------------------
.. automodule:: modelscope.models.nlp.sbert_for_sequence_classification
:members:
:undoc-members:
:show-inheritance:
modelscope.models.nlp.sbert\_for\_token\_classification module
----------------------------------------------------
.. automodule:: modelscope.models.nlp.sbert_for_token_classification
:members:
:undoc-members:
:show-inheritance:
modelscope.models.nlp.sbert\_for\_zero\_shot\_classification module
----------------------------------------------------
.. automodule:: modelscope.models.nlp.sbert_for_zero_shot_classification
:members:
:undoc-members:
:show-inheritance:

View File

@@ -1,37 +1,14 @@
modelscope.models package
=========================
modelscope.models
=================
.. automodule:: modelscope.models
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. currentmodule:: modelscope.models
.. toctree::
:maxdepth: 4
:maxdepth: 2
:caption: Model Api
modelscope.models.cv
modelscope.models.nlp
modelscope.models.multi_modal
modelscope.models.audio
Submodules
----------
modelscope.models.base module
-----------------------------
.. automodule:: modelscope.models.base
:members:
:undoc-members:
:show-inheritance:
modelscope.models.builder module
--------------------------------
.. automodule:: modelscope.models.builder
:members:
:undoc-members:
:show-inheritance:
bases <modelscope.models.base>
builders <modelscope.models.builder>
cv <modelscope.models.cv>

View File

@@ -0,0 +1,14 @@
modelscope.msdatasets.cv
================================
.. automodule:: modelscope.msdatasets.cv
.. currentmodule:: modelscope.msdatasets.cv
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
easycv_base.EasyCVBaseDataset
image_classification.ClsDataset

View File

@@ -0,0 +1,14 @@
modelscope.msdatasets.ms_dataset
================================
.. automodule:: modelscope.msdatasets.ms_dataset
.. currentmodule:: modelscope.msdatasets.ms_dataset
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
MsMapDataset
MsDataset

View File

@@ -1,18 +1,13 @@
modelscope.msdatasets package
=============================
modelscope.msdatasets
=====================
.. automodule:: modelscope.msdatasets
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
.. currentmodule:: modelscope.msdatasets
modelscope.msdatasets.ms\_dataset module
----------------------------------------
.. toctree::
:maxdepth: 2
:caption: Dataset Api
.. automodule:: modelscope.msdatasets.ms_dataset
:members:
:undoc-members:
:show-inheritance:
dataset <modelscope.msdatasets.ms_dataset>
cv <modelscope.msdatasets.cv>

View File

@@ -1,7 +0,0 @@
modelscope.pipelines.audio package
==================================
.. automodule:: modelscope.pipelines.audio
:members:
:undoc-members:
:show-inheritance:

View File

@@ -0,0 +1,14 @@
modelscope.pipelines.base
=========================
.. automodule:: modelscope.pipelines.base
.. currentmodule:: modelscope.pipelines.base
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
Pipeline
DistributedPipeline

View File

@@ -0,0 +1,15 @@
modelscope.pipelines.builder
============================
.. automodule:: modelscope.pipelines.builder
.. currentmodule:: modelscope.pipelines.builder
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
build_pipeline
pipeline

View File

@@ -1,26 +1,14 @@
modelscope.pipelines.cv package
===============================
modelscope.pipelines.cv
=======================
.. automodule:: modelscope.pipelines.cv
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
.. currentmodule:: modelscope.pipelines.cv
modelscope.pipelines.cv.image\_cartoon\_pipeline module
-------------------------------------------------------
.. automodule:: modelscope.pipelines.cv.image_cartoon_pipeline
:members:
:undoc-members:
:show-inheritance:
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
modelscope.pipelines.cv.image\_matting\_pipeline module
-------------------------------------------------------
.. automodule:: modelscope.pipelines.cv.image_matting_pipeline
:members:
:undoc-members:
:show-inheritance:
ActionRecognitionPipeline

View File

@@ -1,42 +0,0 @@
modelscope.pipelines.multi\_modal package
=========================================
.. automodule:: modelscope.pipelines.multi_modal
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
modelscope.pipelines.multi\_modal.image\_captioning\_pipeline module
----------------------------------------------------------
.. automodule:: modelscope.pipelines.multi_modal.image_captioning_pipeline
:members:
:undoc-members:
:show-inheritance:
modelscope.pipelines.multi\_modal.multi\_modal\_embedding\_pipeline module
----------------------------------------------------------
.. automodule:: modelscope.pipelines.multi_modal.multi_modal_embedding_pipeline
:members:
:undoc-members:
:show-inheritance:
modelscope.pipelines.multi\_modal.text\_to\_image\_synthesis\_pipeline module
----------------------------------------------------------
.. automodule:: modelscope.pipelines.multi_modal.text_to_image_synthesis_pipeline
:members:
:undoc-members:
:show-inheritance:
modelscope.pipelines.multi\_modal.visual\_question\_answering\_pipeline module
----------------------------------------------------------
.. automodule:: modelscope.pipelines.multi_modal.visual_question_answering_pipeline
:members:
:undoc-members:
:show-inheritance:

View File

@@ -1,26 +0,0 @@
modelscope.pipelines.nlp package
================================
.. automodule:: modelscope.pipelines.nlp
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
modelscope.pipelines.nlp.sequence\_classification\_pipeline module
------------------------------------------------------------------
.. automodule:: modelscope.pipelines.nlp.sequence_classification_pipeline
:members:
:undoc-members:
:show-inheritance:
modelscope.pipelines.nlp.text\_generation\_pipeline module
----------------------------------------------------------
.. automodule:: modelscope.pipelines.nlp.text_generation_pipeline
:members:
:undoc-members:
:show-inheritance:

View File

@@ -1,53 +1,14 @@
modelscope.pipelines package
============================
modelscope.pipelines
====================
.. automodule:: modelscope.pipelines
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. currentmodule:: modelscope.pipelines
.. toctree::
:maxdepth: 4
:maxdepth: 2
:caption: Pipeline Api
modelscope.pipelines.cv
modelscope.pipelines.nlp
modelscope.pipelines.multi_modal
modelscope.pipelines.audio
Submodules
----------
modelscope.pipelines.builder module
-----------------------------------
.. automodule:: modelscope.pipelines.builder
:members:
:undoc-members:
:show-inheritance:
modelscope.pipelines.base module
-----------------------------------
.. automodule:: modelscope.pipelines.base
:members:
:undoc-members:
:show-inheritance:
modelscope.outputs module
-----------------------------------
.. automodule:: modelscope.outputs
:members:
:undoc-members:
:show-inheritance:
modelscope.pipelines.util module
--------------------------------
.. automodule:: modelscope.pipelines.util
:members:
:undoc-members:
:show-inheritance:
base <modelscope.pipelines.base>
builder <modelscope.pipelines.builder>
cv <modelscope.pipelines.cv>

View File

@@ -0,0 +1,14 @@
modelscope.preprocessors.base
=============================
.. automodule:: modelscope.preprocessors.base
.. currentmodule:: modelscope.preprocessors.base
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
Preprocessor

View File

@@ -0,0 +1,14 @@
modelscope.preprocessors.builder
================================
.. automodule:: modelscope.preprocessors.builder
.. currentmodule:: modelscope.preprocessors.builder
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
build_preprocessor

View File

@@ -1,50 +1,14 @@
modelscope.preprocessors package
================================
modelscope.preprocessors
========================
.. automodule:: modelscope.preprocessors
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
.. currentmodule:: modelscope.preprocessors
modelscope.preprocessors.base module
------------------------------------
.. toctree::
:maxdepth: 2
:caption: Preprocessor Api
.. automodule:: modelscope.preprocessors.base
:members:
:undoc-members:
:show-inheritance:
modelscope.preprocessors.builder module
---------------------------------------
.. automodule:: modelscope.preprocessors.builder
:members:
:undoc-members:
:show-inheritance:
modelscope.preprocessors.common module
--------------------------------------
.. automodule:: modelscope.preprocessors.common
:members:
:undoc-members:
:show-inheritance:
modelscope.preprocessors.image module
-------------------------------------
.. automodule:: modelscope.preprocessors.image
:members:
:undoc-members:
:show-inheritance:
modelscope.preprocessors.nlp module
-----------------------------------
.. automodule:: modelscope.preprocessors.nlp
:members:
:undoc-members:
:show-inheritance:
base <modelscope.preprocessors.base>
builders <modelscope.preprocessors.builder>
video <modelscope.preprocessors.video>

View File

@@ -0,0 +1,20 @@
modelscope.preprocessors.video
==============================
.. automodule:: modelscope.preprocessors.video
.. currentmodule:: modelscope.preprocessors.video
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
ReadVideoData
kinetics400_tranform
_interval_based_sampling
_decode_video_frames_list
_decode_video
KineticsResizedCrop
MovieSceneSegmentationPreprocessor

View File

@@ -1,33 +0,0 @@
modelscope package
==================
.. automodule:: modelscope
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. toctree::
:maxdepth: 4
modelscope.fileio
modelscope.models
modelscope.pipelines
modelscope.preprocessors
modelscope.msdatasets
modelscope.trainers
modelscope.utils
modelscope.hub
Submodules
----------
modelscope.version module
-------------------------
.. automodule:: modelscope.version
:members:
:undoc-members:
:show-inheritance:

View File

@@ -0,0 +1,14 @@
modelscope.trainers.base
========================
.. automodule:: modelscope.trainers.base
.. currentmodule:: modelscope.trainers.base
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
BaseTrainer
DummyTrainer

View File

@@ -0,0 +1,14 @@
modelscope.trainers.builder
===========================
.. automodule:: modelscope.trainers.builder
.. currentmodule:: modelscope.trainers.builder
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
build_trainer

View File

@@ -0,0 +1,14 @@
modelscope.trainers.cv
=======================
.. automodule:: modelscope.trainers.cv
.. currentmodule:: modelscope.trainers.cv
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
ImagePortraitEnhancementTrainer

View File

@@ -1,18 +0,0 @@
modelscope.trainers.nlp package
===============================
.. automodule:: modelscope.trainers.nlp
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
modelscope.trainers.nlp.sequence\_classification\_trainer module
----------------------------------------------------------------
.. automodule:: modelscope.trainers.nlp.sequence_classification_trainer
:members:
:undoc-members:
:show-inheritance:

View File

@@ -1,34 +1,15 @@
modelscope.trainers package
===========================
modelscope.trainers
===================
.. automodule:: modelscope.trainers
:members:
:undoc-members:
:show-inheritance:
Subpackages
-----------
.. currentmodule:: modelscope.trainers
.. toctree::
:maxdepth: 4
:maxdepth: 2
:caption: Trainer Api
modelscope.trainers.nlp
Submodules
----------
modelscope.trainers.base module
-------------------------------
.. automodule:: modelscope.trainers.base
:members:
:undoc-members:
:show-inheritance:
modelscope.trainers.builder module
----------------------------------
.. automodule:: modelscope.trainers.builder
:members:
:undoc-members:
:show-inheritance:
base <modelscope.trainers.base>
builder <modelscope.trainers.builder>
EpochBasedTrainer <modelscope.trainers.trainer>
cv <modelscope.trainers.cv>

View File

@@ -0,0 +1,13 @@
modelscope.trainers.trainer
===========================
.. automodule:: modelscope.trainers.trainer
.. currentmodule:: modelscope.trainers.trainer
.. autosummary::
:toctree: generated
:nosignatures:
:template: classtemplate.rst
EpochBasedTrainer

View File

@@ -1,58 +0,0 @@
modelscope.utils package
========================
.. automodule:: modelscope.utils
:members:
:undoc-members:
:show-inheritance:
Submodules
----------
modelscope.utils.config module
------------------------------
.. automodule:: modelscope.utils.config
:members:
:undoc-members:
:show-inheritance:
modelscope.utils.constant module
--------------------------------
.. automodule:: modelscope.utils.constant
:members:
:undoc-members:
:show-inheritance:
modelscope.utils.hub module
---------------------------
.. automodule:: modelscope.utils.hub
:members:
:undoc-members:
:show-inheritance:
modelscope.utils.logger module
------------------------------
.. automodule:: modelscope.utils.logger
:members:
:undoc-members:
:show-inheritance:
modelscope.utils.registry module
--------------------------------
.. automodule:: modelscope.utils.registry
:members:
:undoc-members:
:show-inheritance:
modelscope.utils.type\_assert module
------------------------------------
.. automodule:: modelscope.utils.type_assert
:members:
:undoc-members:
:show-inheritance:

View File

@@ -40,17 +40,37 @@ release = version
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.autosummary',
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'myst_parser',
'sphinx_markdown_tables',
'sphinx_copybutton',
'myst_parser',
]
autodoc_mock_imports = [
'matplotlib', 'pycocotools', 'terminaltables', 'mmcv.ops'
]
# build the templated autosummary files
autosummary_generate = True
numpydoc_show_class_members = False
# Enable overriding of function signatures in the first line of the docstring.
autodoc_docstring_signature = True
# Disable docstring inheritance
autodoc_inherit_docstrings = False
# Show type hints in the description
autodoc_typehints = 'description'
# Add parameter types if the parameter is documented in the docstring
autodoc_typehints_description_target = 'documented_params'
autodoc_default_options = {
'member-order': 'bysource',
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -58,27 +78,46 @@ templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = {
'.rst': 'restructuredtext',
'.md': 'markdown',
}
source_suffix = ['.rst', '.md']
# The master toctree document.
master_doc = 'index'
root_doc = 'index'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['build', 'Thumbs.db', '.DS_Store']
exclude_patterns = [
'build', 'source/.ipynb_checkpoints', 'source/api/generated', 'Thumbs.db',
'.DS_Store'
]
# A list of glob-style patterns [1] that are used to find source files.
# They are matched against the source file names relative to the source directory,
# using slashes as directory separators on all platforms.
# The default is **, meaning that all files are recursively included from the source directory.
# include_patterns = [
# 'index.rst',
# 'quick_start.md',
# 'develop.md',
# 'faq.md',
# 'change_log.md',
# 'api/modelscope.hub*',
# 'api/modelscope.models.base*',
# 'api/modelscope.models.builder*',
# 'api/modelscope.pipelines.base*',
# 'api/modelscope.pipelines.builder*',
# 'api/modelscope.preprocessors.base*',
# 'api/modelscope.preprocessors.builder*',
# 'api/modelscope.trainers.base*',
# 'api/modelscope.trainers.builder*',
# ]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_book_theme'
html_theme_path = [sphinx_book_theme.get_html_theme_path()]
html_theme_options = {}
# html_theme = 'sphinx_book_theme'
# html_theme_path = [sphinx_book_theme.get_html_theme_path()]
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
@@ -88,7 +127,7 @@ html_static_path = ['_static']
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'modelscope_doc'
# htmlhelp_basename = 'modelscope_doc'
# -- Extension configuration -------------------------------------------------
# Ignore >>> when copying code
@@ -97,8 +136,3 @@ copybutton_prompt_is_regexp = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
autodoc_default_options = {
'member-order': 'bysource',
'special-members': '__init__',
}

View File

@@ -1,48 +0,0 @@
# 常见问题
<a name="macos-pip-tokenizer-error"></a>
### 1. macOS环境pip方式安装tokenizers报错
对于tokenizers库 pypi上缺乏针对`macOS`环境预编译包,需要搭建源码编译环境后才能正确安装,步骤如下:
1. 安装rust
```shell
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
pip install setuptools_rust
```
2. 更新rust环境变量
```shell
source $HOME/.cargo/env
```
3. 安装tokenizers
```shell
pip install tokenizers
```
reference: [https://huggingface.co/docs/tokenizers/installation#installation-from-sources](https://huggingface.co/docs/tokenizers/installation#installation-from-sources)
### 2. pip 安装包冲突
> ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.
由于依赖库之间的版本不兼容,可能会存在版本冲突的情况,大部分情况下不影响正常运行。
### 3. 安装pytorch出现版本错误
> ERROR: Ignored the following versions that require a different python version: 1.1.0 Requires-Python >=3.8; 1.1.0rc1 Requires-Python >=3.8; 1.1.1 Requires-Python >=3.8
> ERROR: Could not find a version that satisfies the requirement torch==1.8.1+cu111 (from versions: 1.0.0, 1.0.1, 1.0.1.post2, 1.1.0, 1.2.0, 1.3.0, 1.3.1, 1.4.0, 1.5.0, 1.5.1, 1.6.0, 1.7.0, 1.7.1, 1.8.0, 1.8.1, 1.9.0, 1.9.1, 1.10.0, 1.10.1, 1.10.2, 1.11.0)
> ERROR: No matching distribution found for torch==1.8.1+cu111
安装时使用如下命令:
```shell
pip install -f https://download.pytorch.org/whl/torch_stable.html -i https://pypi.tuna.tsinghua.edu.cn/simple -r requirements.txt
```
### 4. zsh: no matches found: modelscope-0.2.2-py3-none-any.whl[all]
mac终端的zsh 对于[]需要做转义,执行如下命令
```shell
pip install modelscope\[all\] -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```

View File

@@ -3,25 +3,24 @@
contain the root `toctree` directive.
ModelScope DOCUMENTATION
=======================================
ModelScope doc
========================
.. toctree::
:maxdepth: 2
:caption: USER GUIDE
:caption: DEVELOPER GUIDE
quick_start.md
develop.md
faq.md
.. toctree::
:maxdepth: 2
:caption: Tutorials
tutorials/index
:caption: API Doc
Hub <api/modelscope.hub>
Model <api/modelscope.models>
Preprocessor <api/modelscope.preprocessors>
Pipeline <api/modelscope.pipelines>
Trainer <api/modelscope.trainers>
MsDataset <api/modelscope.msdatasets>
.. toctree::
:maxdepth: 2
@@ -29,21 +28,6 @@ ModelScope doc
change_log.md
.. toctree::
.. :maxdepth: 10
.. :caption: API Doc
.. api/modelscope.preprocessors
.. api/modelscope.models
.. api/modelscope.pipelines
.. api/modelscope.fileio
.. api/modelscope.utils
.. api/modelscope.hub
.. api/modelscope.msdatasets
.. api/modelscope.tools
.. api/modelscope.trainers
Indices and tables
==================
* :ref:`genindex`

View File

@@ -1,118 +0,0 @@
ModelScope Library目前支持tensorflow、pytorch深度学习框架进行模型训练、推理,在Python 3.7+, Pytorch 1.8+, Tensorflow1.15和Tensorflow 2.x上测试可运行。
**注: **`**语音相关**`**的功能仅支持 python3.7,tensorflow1.15的**`**linux**`**环境使用。 其他功能可以在windows、mac上安装使用。**
## python环境配置
首先,参考[文档](https://docs.anaconda.com/anaconda/install/) 安装配置Anaconda环境。
安装完成后执行如下命令为modelscope library创建对应的python环境。
```shell
conda create -n modelscope python=3.7
conda activate modelscope
```
## 安装深度学习框架
- 安装pytorch[参考链接](https://pytorch.org/get-started/locally/)。
```shell
pip3 install torch torchvision torchaudio
```
- 安装Tensorflow[参考链接](https://www.tensorflow.org/install/pip)。
```shell
pip install --upgrade tensorflow
```
## ModelScope library 安装
注: 如果在安装过程中遇到错误,请前往[常见问题](faq.md)查找解决方案。
### pip安装
执行如下命令可以安装所有领域依赖:
```shell
pip install "modelscope[cv,nlp,audio,multi-modal]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
如仅需体验`语音功能`,请执行如下命令:
```shell
pip install "modelscope[audio]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
如仅需体验CV功能可执行如下命令安装依赖
```shell
pip install "modelscope[cv]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
如仅需体验NLP功能可执行如下命令安装依赖
```shell
pip install "modelscope[nlp]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
如仅需体验多模态功能,可执行如下命令安装依赖:
```shell
pip install "modelscope[multi-modal]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
**注**
1. `**语音相关**`**的功能仅支持 python3.7,tensorflow1.15的**`**linux**`**环境使用。 其他功能可以在windows、mac上安装使用。**
2. 语音领域中一部分模型使用了三方库SoundFile进行wav文件处理**在Linux系统上用户需要手动安装SoundFile的底层依赖库libsndfile**在Windows和MacOS上会自动安装不需要用户操作。详细信息可参考[SoundFile官网](https://github.com/bastibe/python-soundfile#installation)。以Ubuntu系统为例,用户需要执行如下命令:
```shell
sudo apt-get update
sudo apt-get install libsndfile1
```
3. **CV功能使用需要安装mmcv-full 请参考mmcv**[**安装手册**](https://github.com/open-mmlab/mmcv#installation)**进行安装**
### 使用源码安装
适合本地开发调试使用,修改源码后可以直接执行。
ModelScope的源码可以直接clone到本地
```shell
git clone git@github.com:modelscope/modelscope.git
cd modelscope
git fetch origin master
git checkout master
```
安装依赖
如需安装所有依赖,请执行如下命令
```shell
pip install -e ".[cv,nlp,audio,multi-modal]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
如需体验`语音功能`,请单独执行如下命令:
```shell
pip install -e ".[audio]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
如仅需体验CV功能可执行如下命令安装依赖
```shell
pip install -e ".[cv]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
如仅需体验NLP功能可执行如下命令安装依赖
```shell
pip install -e ".[nlp]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
如仅需体验多模态功能,可执行如下命令安装依赖:
```shell
pip install -e ".[multi-modal]" -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html
```
### 安装验证
安装成功后,可以执行如下命令进行验证安装是否正确:
```shell
python -c "from modelscope.pipelines import pipeline;print(pipeline('word-segmentation')('今天天气不错,适合 出去游玩'))"
```

View File

@@ -1,6 +0,0 @@
.. toctree::
:maxdepth: 2
:caption: Tutorials
pipeline.md
trainer.md

View File

@@ -1,61 +0,0 @@
# Pipeline使用教程
本文简单介绍如何使用`pipeline`函数加载模型进行推理。`pipeline`函数支持按照任务类型、模型名称从模型仓库拉取模型进行进行推理,包含以下几个方面:
* 使用pipeline()函数进行推理
* 指定特定预处理、特定模型进行推理
* 不同场景推理任务示例
## 环境准备
详细步骤可以参考 [快速开始](../quick_start.md)
## Pipeline基本用法
下面以中文分词任务为例说明pipeline函数的基本用法
1. pipeline函数支持指定特定任务名称加载任务默认模型创建对应pipeline对象
执行如下python代码
```python
from modelscope.pipelines import pipeline
word_segmentation = pipeline('word-segmentation')
```
2. 输入文本
``` python
input = '今天天气不错,适合出去游玩'
print(word_segmentation(input))
{'output': '今天 天气 不错 适合 出去 游玩'}
```
3. 输入多条样本
pipeline对象也支持传入多个样本的列表输入，返回对应的输出列表，每个元素对应输入样本的返回结果
```python
inputs = ['今天天气不错,适合出去游玩','这本书很好,建议你看看']
print(word_segmentation(inputs))
[{'output': '今天 天气 不错 适合 出去 游玩'}, {'output': '这 本 书 很 好 建议 你 看看'}]
```
## 指定预处理、模型进行推理
pipeline函数支持传入实例化的预处理对象、模型对象从而支持用户在推理过程中定制化预处理、模型。
1. 首先,创建预处理方法和模型
```python
from modelscope.models import Model
from modelscope.preprocessors import TokenClassificationPreprocessor
model = Model.from_pretrained('damo/nlp_structbert_word-segmentation_chinese-base')
tokenizer = TokenClassificationPreprocessor(model.model_dir)
```
2. 使用tokenizer和模型对象创建pipeline
```python
from modelscope.pipelines import pipeline
word_seg = pipeline('word-segmentation', model=model, preprocessor=tokenizer)
input = '今天天气不错,适合出去游玩'
print(word_seg(input))
{'output': '今天 天气 不错 适合 出去 游玩'}
```
## 不同场景任务推理示例
下面以一个图像任务：人像抠图('image-matting')为例，进一步说明pipeline的用法
```python
import cv2
from modelscope.pipelines import pipeline
img_matting = pipeline('image-matting')
result = img_matting('https://modelscope.oss-cn-beijing.aliyuncs.com/test/images/image_matting.png')
cv2.imwrite('result.png', result['output_png'])
```

View File

@@ -1,54 +0,0 @@
# Trainer使用教程
Modelscope提供了众多预训练模型你可以使用其中任意一个利用公开数据集或者私有数据集针对特定任务进行模型训练在本篇文章中将介绍如何使用Modelscope的`Trainer`模块进行Finetuning和评估。
## 环境准备
详细步骤可以参考 [快速开始](../quick_start.md)
### 准备数据集
在开始Finetuning前需要准备一个数据集用以训练和评估详细可以参考数据集使用教程。
```python
from modelscope.msdatasets import MsDataset
train_dataset = MsDataset.load('afqmc_small', namespace='modelscope', split='train')
eval_dataset = MsDataset.load('afqmc_small', namespace='modelscope', split='validation')
```
### 训练
ModelScope把所有训练相关的配置信息全部放到了模型仓库下的`configuration.json`因此我们只需要创建Trainer加载配置文件传入数据集即可完成训练。
首先通过工厂方法创建Trainer 需要传入模型仓库路径, 训练数据集对象,评估数据集对象,训练目录
```python
kwargs = dict(
model='damo/nlp_structbert_sentiment-classification_chinese-base',
train_dataset=train_dataset,
eval_dataset=eval_dataset,
work_dir='work_dir')
trainer = build_trainer(default_args=kwargs)
```
启动训练。
```python
trainer.train()
```
如果需要调整训练参数,可以在模型仓库页面下载`configuration.json`文件到本地修改参数后指定配置文件路径创建trainer
```python
kwargs = dict(
model='damo/nlp_structbert_sentiment-classification_chinese-base',
train_dataset=train_dataset,
eval_dataset=eval_dataset,
    cfg_file='你的配置文件路径',
work_dir='work_dir')
trainer = build_trainer(default_args=kwargs)
trainer.train()
```
### 评估
训练过程中会定期使用验证集进行评估测试, Trainer模块也支持指定特定轮次保存的checkpoint路径进行单次评估。
```python
eval_results = trainer.evaluate('work_dir/epoch_10.pth')
print(eval_results)
```

View File

@@ -1,6 +1,6 @@
# Copyright (c) Alibaba, Inc. and its affiliates.
# yapf: disable
import datetime
import functools
import os
@@ -54,38 +54,51 @@ logger = get_logger()
class HubApi:
"""Model hub api interface.
"""
def __init__(self, endpoint: Optional[str] = None):
"""The ModelScope HubApi。
def __init__(self, endpoint=None):
Args:
endpoint (str, optional): The modelscope server http|https address. Defaults to None.
"""
self.endpoint = endpoint if endpoint is not None else get_endpoint()
self.headers = {'user-agent': ModelScopeConfig.get_user_agent()}
self.session = Session()
retry = Retry(total=2, read=2, connect=2, backoff_factor=1,
status_forcelist=(500, 502, 503, 504),)
retry = Retry(
total=2,
read=2,
connect=2,
backoff_factor=1,
status_forcelist=(500, 502, 503, 504),
)
adapter = HTTPAdapter(max_retries=retry)
self.session.mount('http://', adapter)
self.session.mount('https://', adapter)
# set http timeout
for method in REQUESTS_API_HTTP_METHOD:
setattr(self.session,
method,
functools.partial(getattr(self.session, method), timeout=API_HTTP_CLIENT_TIMEOUT))
setattr(
self.session, method,
functools.partial(
getattr(self.session, method),
timeout=API_HTTP_CLIENT_TIMEOUT))
def login(
self,
access_token: str,
) -> tuple():
"""
Login with username and password
"""Login with your SDK access token, which can be obtained from
https://www.modelscope.cn user center.
Args:
access_token(`str`): user access token on modelscope.
access_token (str): user access token on modelscope.
Returns:
cookies: to authenticate yourself to ModelScope open-api
gitlab token: to access private repos
git_token: token to access your git repository.
<Tip>
Note:
You only have to login once within 30 days.
</Tip>
"""
path = f'{self.endpoint}/api/v1/login'
r = self.session.post(
@@ -107,27 +120,28 @@ class HubApi:
return d[API_RESPONSE_FIELD_DATA][
API_RESPONSE_FIELD_GIT_ACCESS_TOKEN], cookies
def create_model(
self,
model_id: str,
visibility: str,
license: str,
chinese_name: Optional[str] = None,
) -> str:
"""
Create model repo at ModelScopeHub
def create_model(self,
model_id: str,
visibility: Optional[int] = ModelVisibility.PUBLIC,
license: Optional[str] = Licenses.APACHE_V2,
chinese_name: Optional[str] = None) -> str:
"""Create model repo at ModelScopeHub.
Args:
model_id:(`str`): The model id
visibility(`int`): visibility of the model(1-private, 5-public), default public.
license(`str`): license of the model, default none.
chinese_name(`str`, *optional*): chinese name of the model
Returns:
name of the model created
model_id (str): The model id
visibility (int, optional): visibility of the model(1-private, 5-public), default 5.
license (str, optional): license of the model, default none.
chinese_name (str, optional): chinese name of the model.
<Tip>
Returns:
Name of the model created
Raises:
InvalidParameter: If model_id is invalid.
ValueError: If not login.
Note:
model_id = {owner}/{name}
</Tip>
"""
if model_id is None:
raise InvalidParameter('model_id is required!')
@@ -151,14 +165,17 @@ class HubApi:
model_repo_url = f'{get_endpoint()}/{model_id}'
return model_repo_url
def delete_model(self, model_id):
"""_summary_
def delete_model(self, model_id: str):
"""Delete model_id from ModelScope.
Args:
model_id (str): The model id.
<Tip>
Raises:
ValueError: If not login.
Note:
model_id = {owner}/{name}
</Tip>
"""
cookies = ModelScopeConfig.get_cookies()
if cookies is None:
@@ -169,27 +186,28 @@ class HubApi:
raise_for_http_status(r)
raise_on_error(r.json())
def get_model_url(self, model_id):
def get_model_url(self, model_id: str):
return f'{self.endpoint}/api/v1/models/{model_id}.git'
def get_model(
self,
model_id: str,
revision: str = DEFAULT_MODEL_REVISION,
revision: Optional[str] = DEFAULT_MODEL_REVISION,
) -> str:
"""
Get model information at modelscope_hub
"""Get model information at ModelScope
Args:
model_id(`str`): The model id.
revision(`str`): revision of model
model_id (str): The model id.
revision (str, optional): revision of model.
Returns:
The model detail information.
Raises:
NotExistError: If the model is not exist, will throw NotExistError
<Tip>
Note:
model_id = {owner}/{name}
</Tip>
"""
cookies = ModelScopeConfig.get_cookies()
owner_or_group, name = model_id_to_group_owner_name(model_id)
@@ -211,13 +229,12 @@ class HubApi:
def push_model(self,
model_id: str,
model_dir: str,
visibility: int = ModelVisibility.PUBLIC,
license: str = Licenses.APACHE_V2,
visibility: Optional[int] = ModelVisibility.PUBLIC,
license: Optional[str] = Licenses.APACHE_V2,
chinese_name: Optional[str] = None,
commit_message: Optional[str] = 'upload model',
revision: Optional[str] = DEFAULT_REPOSITORY_REVISION):
"""
Upload model from a given directory to given repository. A valid model directory
"""Upload model from a given directory to given repository. A valid model directory
must contain a configuration.json file.
This function upload the files in given directory to given repository. If the
@@ -229,11 +246,11 @@ class HubApi:
which can be obtained from ModelScope's website.
Args:
model_id (`str`):
model_id (str):
The model id to be uploaded, caller must have write permission for it.
model_dir(`str`):
model_dir(str):
The Absolute Path of the finetune result.
visibility(`int`, defaults to `0`):
visibility(int, optional):
Visibility of the new created model(1-private, 5-public). If the model is
not exists in ModelScope, this function will create a new model with this
visibility and this parameter is required. You can ignore this parameter
@@ -250,6 +267,12 @@ class HubApi:
revision (`str`, *optional*, default to DEFAULT_MODEL_REVISION):
which branch to push. If the branch is not exists, It will create a new
branch and push to it.
Raises:
InvalidParameter: Parameter invalid.
NotLoginException: Not login
ValueError: No configuration.json
Exception: Create failed.
"""
if model_id is None:
raise InvalidParameter('model_id cannot be empty!')
@@ -305,7 +328,10 @@ class HubApi:
date = datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S')
commit_message = '[automsg] push model %s to hub at %s' % (
model_id, date)
repo.push(commit_message=commit_message, local_branch=revision, remote_branch=revision)
repo.push(
commit_message=commit_message,
local_branch=revision,
remote_branch=revision)
except Exception:
raise
finally:
@@ -313,14 +339,18 @@ class HubApi:
def list_models(self,
owner_or_group: str,
page_number=1,
page_size=10) -> dict:
page_number: Optional[int] = 1,
page_size: Optional[int] = 10) -> dict:
"""List models in owner or group.
Args:
owner_or_group(`str`): owner or group.
page_number(`int`): The page number, default: 1
page_size(`int`): The page size, default: 10
owner_or_group(str): owner or group.
page_number(int, optional): The page number, default: 1
page_size(int, optional): The page size, default: 10
Raises:
RequestError: The request error.
Returns:
dict: {"models": "list of models", "TotalCount": total_number_of_models_in_owner_or_group}
"""
@@ -358,7 +388,7 @@ class HubApi:
def list_model_revisions(
self,
model_id: str,
cutoff_timestamp: int = None,
cutoff_timestamp: Optional[int] = None,
use_cookies: Union[bool, CookieJar] = False) -> List[str]:
"""Get model branch and tags.
@@ -368,6 +398,7 @@ class HubApi:
The timestamp is represented by the seconds elasped from the epoch time.
use_cookies (Union[bool, CookieJar], optional): If is cookieJar, we will use this cookie, if True, will
will load cookie from local. Defaults to False.
Returns:
Tuple[List[str], List[str]]: Return list of branch name and tags
"""
@@ -385,7 +416,10 @@ class HubApi:
] if info['RevisionMap']['Tags'] else []
return tags
def get_valid_revision(self, model_id: str, revision=None, cookies: Optional[CookieJar] = None):
def get_valid_revision(self,
model_id: str,
revision=None,
cookies: Optional[CookieJar] = None):
release_timestamp = get_release_datetime()
current_timestamp = int(round(datetime.datetime.now().timestamp()))
# for active development in library codes (non-release-branches), release_timestamp
@@ -396,27 +430,37 @@ class HubApi:
model_id, use_cookies=False if cookies is None else cookies)
if revision is None:
revision = MASTER_MODEL_BRANCH
logger.info('Model revision not specified, use default: %s in development mode' % revision)
logger.info(
'Model revision not specified, use default: %s in development mode'
% revision)
if revision not in branches and revision not in tags:
raise NotExistError('The model: %s has no branch or tag : %s .' % revision)
raise NotExistError('The model: %s has no branch or tag : %s .'
% revision)
logger.info('Development mode use revision: %s' % revision)
else:
if revision is None: # user not specified revision, use latest revision before release time
revisions = self.list_model_revisions(
model_id, cutoff_timestamp=release_timestamp, use_cookies=False if cookies is None else cookies)
model_id,
cutoff_timestamp=release_timestamp,
use_cookies=False if cookies is None else cookies)
if len(revisions) == 0:
raise NoValidRevisionError('The model: %s has no valid revision!' % model_id)
raise NoValidRevisionError(
'The model: %s has no valid revision!' % model_id)
# tags (revisions) returned from backend are guaranteed to be ordered by create-time
# we shall obtain the latest revision created earlier than release version of this branch
revision = revisions[0]
logger.info('Model revision not specified, use the latest revision: %s' % revision)
logger.info(
'Model revision not specified, use the latest revision: %s'
% revision)
else:
# use user-specified revision
revisions = self.list_model_revisions(
model_id, cutoff_timestamp=current_timestamp, use_cookies=False if cookies is None else cookies)
model_id,
cutoff_timestamp=current_timestamp,
use_cookies=False if cookies is None else cookies)
if revision not in revisions:
raise NotExistError(
'The model: %s has no revision: %s !' % (model_id, revision))
raise NotExistError('The model: %s has no revision: %s !' %
(model_id, revision))
logger.info('Use user-specified model revision: %s' % revision)
return revision
@@ -431,6 +475,7 @@ class HubApi:
model_id (str): The model id
use_cookies (Union[bool, CookieJar], optional): If is cookieJar, we will use this cookie, if True, will
will load cookie from local. Defaults to False.
Returns:
Tuple[List[str], List[str]]: Return list of branch name and tags
"""
@@ -466,9 +511,6 @@ class HubApi:
will load cookie from local. Defaults to False.
headers: request headers
Raises:
ValueError: If user_cookies is True, but no local cookie.
Returns:
List[dict]: Model file list.
"""
@@ -532,7 +574,8 @@ class HubApi:
dataset_id = resp['Data']['Id']
dataset_type = resp['Data']['Type']
datahub_url = f'{self.endpoint}/api/v1/datasets/{dataset_id}/repo/tree?Revision={revision}'
r = self.session.get(datahub_url, cookies=cookies, headers=self.headers)
r = self.session.get(
datahub_url, cookies=cookies, headers=self.headers)
resp = r.json()
datahub_raise_on_error(datahub_url, resp)
file_list = resp['Data']
@@ -607,7 +650,8 @@ class HubApi:
datahub_url = f'{self.endpoint}/api/v1/datasets/{namespace}/{dataset_name}/' \
f'ststoken?Revision={revision}'
r = self.session.get(url=datahub_url, cookies=cookies, headers=self.headers)
r = self.session.get(
url=datahub_url, cookies=cookies, headers=self.headers)
resp = r.json()
raise_on_error(resp)
return resp['Data']
@@ -661,7 +705,10 @@ class HubApi:
def datahub_remote_call(self, url):
cookies = ModelScopeConfig.get_cookies()
r = self.session.get(url, cookies=cookies, headers={'user-agent': ModelScopeConfig.get_user_agent()})
r = self.session.get(
url,
cookies=cookies,
headers={'user-agent': ModelScopeConfig.get_user_agent()})
resp = r.json()
datahub_raise_on_error(url, resp)
return resp['Data']
@@ -763,7 +810,8 @@ class ModelScopeConfig:
with open(
os.path.join(ModelScopeConfig.path_credential,
ModelScopeConfig.USER_INFO_FILE_NAME),
'r', encoding='utf-8') as f:
'r',
encoding='utf-8') as f:
info = f.read()
return info.split(':')[0], info.split(':')[1]
except FileNotFoundError:
@@ -784,7 +832,8 @@ class ModelScopeConfig:
with open(
os.path.join(ModelScopeConfig.path_credential,
ModelScopeConfig.GIT_TOKEN_FILE_NAME),
'r', encoding='utf-8') as f:
'r',
encoding='utf-8') as f:
token = f.read()
except FileNotFoundError:
pass

View File

@@ -185,6 +185,8 @@ class DeleteServiceParameters(AttrsToQueryString):
class ServiceDeployer(object):
"""Faciliate model deployment on to supported service provider(s).
"""
def __init__(self, endpoint=None):
self.endpoint = endpoint if endpoint is not None else get_endpoint()
@@ -210,7 +212,6 @@ class ServiceDeployer(object):
provider (ServiceProviderParameters): The service provider parameter
Raises:
NotLoginException: To use this api, you need login first.
NotSupportError: Not supported platform.
RequestError: The server return error.
@@ -248,10 +249,9 @@ class ServiceDeployer(object):
Args:
instance_name (str): The deployed instance name.
provider (ServiceProviderParameters): The cloud provider information, for eas
need region(eg: ch-hangzhou), access_key_id and access_key_secret.
need region(eg: ch-hangzhou), access_key_id and access_key_secret.
Raises:
NotLoginException: To use this api, you need login first.
RequestError: The request is failed from server.
Returns:
@@ -279,10 +279,9 @@ class ServiceDeployer(object):
Args:
instance_name (str): The instance name you want to delete.
provider (ServiceProviderParameters): The cloud provider information, for eas
need region(eg: ch-hangzhou), access_key_id and access_key_secret.
need region(eg: ch-hangzhou), access_key_id and access_key_secret.
Raises:
NotLoginException: To call this api, you need login first.
RequestError: The request is failed.
Returns:
@@ -305,17 +304,17 @@ class ServiceDeployer(object):
def list(self,
provider: ServiceProviderParameters,
skip: int = 0,
limit: int = 100):
skip: Optional[int] = 0,
limit: Optional[int] = 100):
"""List deployed model instances.
Args:
provider (ServiceProviderParameters): The cloud service provider parameter,
for eas, need access_key_id and access_key_secret.
skip: start of the list, current not support.
limit: maximum number of instances return, current not support
for eas, need access_key_id and access_key_secret.
skip (int, optional): start of the list, current not support.
limit (int, optional): maximum number of instances return, current not support
Raises:
NotLoginException: To use this api, you need login first.
RequestError: The request is failed from server.
Returns:

View File

@@ -49,10 +49,10 @@ def is_ok(rsp):
""" Check the request is ok
Args:
rsp (_type_): The request response body
Failed: {'Code': 10010101004, 'Message': 'get model info failed, err: unauthorized permission',
'RequestId': '', 'Success': False}
Success: {'Code': 200, 'Data': {}, 'Message': 'success', 'RequestId': '', 'Success': True}
rsp (Response): The request response body
Returns:
bool: `True` if success otherwise `False`.
"""
return rsp['Code'] == HTTPStatus.OK and rsp['Success']
@@ -84,6 +84,12 @@ def raise_on_error(rsp):
Args:
rsp (_type_): The server response
Raises:
RequestError: the response error message.
Returns:
bool: True if request is OK, otherwise raise `RequestError` exception.
"""
if rsp['Code'] == HTTPStatus.OK:
return True
@@ -96,7 +102,14 @@ def datahub_raise_on_error(url, rsp):
"""If response error, raise exception
Args:
rsp (_type_): The server response
url (str): The request url
rsp (HTTPResponse): The server response.
Raises:
RequestError: the http request error.
Returns:
bool: `True` if request is OK, otherwise raise `RequestError` exception.
"""
if rsp.get('Code') == HTTPStatus.OK:
return True
@@ -107,10 +120,15 @@ def datahub_raise_on_error(url, rsp):
def raise_for_http_status(rsp):
"""
Attempt to decode utf-8 first since some servers
"""Attempt to decode utf-8 first since some servers
localize reason strings, for invalid utf-8, fall back
to decoding with iso-8859-1.
Args:
rsp: The http response.
Raises:
HTTPError: The http error info.
"""
http_error_msg = ''
if isinstance(rsp.reason, bytes):

View File

@@ -36,47 +36,40 @@ def model_file_download(
local_files_only: Optional[bool] = False,
cookies: Optional[CookieJar] = None,
) -> Optional[str]: # pragma: no cover
"""
Download from a given URL and cache it if it's not already present in the
local cache.
"""Download from a given URL and cache it if it's not already present in the local cache.
Given a URL, this function looks for the corresponding file in the local
cache. If it's not there, download it. Then return the path to the cached
file.
Args:
model_id (`str`):
The model to whom the file to be downloaded belongs.
file_path(`str`):
Path of the file to be downloaded, relative to the root of model repo
revision(`str`, *optional*):
revision of the model file to be downloaded.
Can be any of a branch, tag or commit hash
cache_dir (`str`, `Path`, *optional*):
Path to the folder where cached files are stored.
user_agent (`dict`, `str`, *optional*):
The user-agent info in the form of a dictionary or a string.
local_files_only (`bool`, *optional*, defaults to `False`):
If `True`, avoid downloading the file and return the path to the
local cached file if it exists.
if `False`, download the file anyway even it exists
model_id (str): The model to whom the file to be downloaded belongs.
file_path(str): Path of the file to be downloaded, relative to the root of model repo.
revision(str, optional): revision of the model file to be downloaded.
Can be any of a branch, tag or commit hash.
cache_dir (str, Path, optional): Path to the folder where cached files are stored.
user_agent (dict, str, optional): The user-agent info in the form of a dictionary or a string.
local_files_only (bool, optional): If `True`, avoid downloading the file and return the path to the
local cached file if it exists. if `False`, download the file anyway even it exists.
cookies (CookieJar, optional): The cookie of download request.
Returns:
Local path (string) of file or if networking is off, last version of
string: string of local file or if networking is off, last version of
file cached on disk.
<Tip>
Raises:
NotExistError: The file is not exist.
ValueError: The request parameter error.
Raises the following errors:
Note:
Raises the following errors:
- [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
if `use_auth_token=True` and the token cannot be found.
- [`OSError`](https://docs.python.org/3/library/exceptions.html#OSError)
if ETag cannot be determined.
- [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
if some parameter value is invalid
</Tip>
- [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
if `use_auth_token=True` and the token cannot be found.
- [`OSError`](https://docs.python.org/3/library/exceptions.html#OSError)
if ETag cannot be determined.
- [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
if some parameter value is invalid
"""
if cache_dir is None:
cache_dir = get_cache_dir()
@@ -165,10 +158,17 @@ def model_file_download(
def get_file_download_url(model_id: str, file_path: str, revision: str):
"""
Format file download url according to `model_id`, `revision` and `file_path`.
"""Format file download url according to `model_id`, `revision` and `file_path`.
e.g., Given `model_id=john/bert`, `revision=master`, `file_path=README.md`,
the resulted download url is: https://modelscope.co/api/v1/models/john/bert/repo?Revision=master&FilePath=README.md
Args:
model_id (str): The model_id.
file_path (str): File path
revision (str): File revision.
Returns:
str: The file url.
"""
download_url_template = '{endpoint}/api/v1/models/{model_id}/repo?Revision={revision}&FilePath={file_path}'
return download_url_template.format(
@@ -186,20 +186,23 @@ def http_get_file(
cookies: CookieJar,
headers: Optional[Dict[str, str]] = None,
):
"""
Download remote file, will retry 5 times before giving up on errors.
"""Download remote file, will retry 5 times before giving up on errors.
Args:
url(`str`):
url(str):
actual download url of the file
local_dir(`str`):
local_dir(str):
local directory where the downloaded file stores
file_name(`str`):
file_name(str):
name of the file stored in `local_dir`
cookies(`CookieJar`):
cookies(CookieJar):
cookies used to authentication the user, which is used for downloading private repos
headers(`Optional[Dict[str, str]] = None`):
headers(Dict[str, str], optional):
http headers to carry necessary info when requesting the remote file
Raises:
FileDownloadError: Failed download failed.
"""
total = -1
temp_file_manager = partial(

View File

@@ -2,7 +2,7 @@
import os
import subprocess
from typing import List
from typing import List, Optional
from modelscope.utils.logger import get_logger
from ..utils.constant import MASTER_MODEL_BRANCH
@@ -33,6 +33,9 @@ class GitCommandWrapper(metaclass=Singleton):
"""Run git command, if command return 0, return subprocess.response
otherwise raise GitError, message is stdout and stderr.
Args:
args: List of command args.
Raises:
GitError: Exception with stdout and stderr.
@@ -106,7 +109,7 @@ class GitCommandWrapper(metaclass=Singleton):
token: str,
url: str,
repo_name: str,
branch: str = None):
branch: Optional[str] = None):
""" git clone command wrapper.
For public project, token can None, private repo, there must token.
@@ -116,6 +119,9 @@ class GitCommandWrapper(metaclass=Singleton):
url (str): The remote url
repo_name (str): The local repository path name.
branch (str, optional): _description_. Defaults to None.
Returns:
The popen response.
"""
url = self._add_token(token, url)
if branch:
@@ -162,7 +168,11 @@ class GitCommandWrapper(metaclass=Singleton):
"""Run git commit command
Args:
repo_dir (str): the repository directory.
message (str): commit message.
Returns:
The command popen response.
"""
commit_args = ['-C', '%s' % repo_dir, 'commit', '-m', "'%s'" % message]
rsp = self._run_git_command(*commit_args)

View File

@@ -24,20 +24,20 @@ class Repository:
revision: Optional[str] = DEFAULT_REPOSITORY_REVISION,
auth_token: Optional[str] = None,
git_path: Optional[str] = None):
"""
Instantiate a Repository object by cloning the remote ModelScopeHub repo
"""Instantiate a Repository object by cloning the remote ModelScopeHub repo
Args:
model_dir(`str`):
The model root directory.
clone_from:
model id in ModelScope-hub from which git clone
revision(`Optional[str]`):
revision of the model you want to clone from. Can be any of a branch, tag or commit hash
auth_token(`Optional[str]`):
token obtained when calling `HubApi.login()`. Usually you can safely ignore the parameter
as the token is already saved when you login the first time, if None, we will use saved token.
git_path:(`Optional[str]`):
The git command line path, if None, we use 'git'
model_dir (str): The model root directory.
clone_from (str): model id in ModelScope-hub from which git clone
revision (str, optional): revision of the model you want to clone from.
Can be any of a branch, tag or commit hash
auth_token (str, optional): token obtained when calling `HubApi.login()`.
Usually you can safely ignore the parameter as the token is already
saved when you login the first time, if None, we will use saved token.
git_path (str, optional): The git command line path, if None, we use 'git'
Raises:
InvalidParameter: revision is None.
"""
self.model_dir = model_dir
self.model_base_dir = os.path.dirname(model_dir)
@@ -92,16 +92,19 @@ class Repository:
commit_message: str,
local_branch: Optional[str] = DEFAULT_REPOSITORY_REVISION,
remote_branch: Optional[str] = DEFAULT_REPOSITORY_REVISION,
force: bool = False):
force: Optional[bool] = False):
"""Push local files to remote, this method will do.
git pull
git add
git commit
git push
Execute git pull, git add, git commit, git push in order.
Args:
commit_message (str): commit message
branch (Optional[str], optional): which branch to push.
force (Optional[bool]): whether to use forced-push.
local_branch(str, optional): The local branch, default master.
remote_branch (str, optional): The remote branch to push, default master.
force (bool, optional): whether to use forced-push.
Raises:
InvalidParameter: no commit message.
NotLoginException: no auth token.
"""
if commit_message is None or not isinstance(commit_message, str):
msg = 'commit_message must be provided!'
@@ -128,12 +131,19 @@ class Repository:
local_branch=local_branch,
remote_branch=remote_branch)
def tag(self, tag_name: str, message: str, ref: str = MASTER_MODEL_BRANCH):
def tag(self,
tag_name: str,
message: str,
ref: Optional[str] = MASTER_MODEL_BRANCH):
"""Create a new tag.
Args:
tag_name (str): The name of the tag
message (str): The tag message.
ref (str): The tag reference, can be commit id or branch.
ref (str, optional): The tag reference, can be commit id or branch.
Raises:
InvalidParameter: no commit message.
"""
if tag_name is None or tag_name == '':
msg = 'We use tag-based revision, therefore tag_name cannot be None or empty.'
@@ -149,7 +159,7 @@ class Repository:
def tag_and_push(self,
tag_name: str,
message: str,
ref: str = MASTER_MODEL_BRANCH):
ref: Optional[str] = MASTER_MODEL_BRANCH):
"""Create tag and push to remote
Args:
@@ -174,18 +184,19 @@ class DatasetRepository:
git_path: Optional[str] = None):
"""
Instantiate a Dataset Repository object by cloning the remote ModelScope dataset repo
Args:
repo_work_dir(`str`):
The dataset repo root directory.
dataset_id:
dataset id in ModelScope from which git clone
revision(`Optional[str]`):
revision of the dataset you want to clone from. Can be any of a branch, tag or commit hash
auth_token(`Optional[str]`):
token obtained when calling `HubApi.login()`. Usually you can safely ignore the parameter
as the token is already saved when you login the first time, if None, we will use saved token.
git_path:(`Optional[str]`):
The git command line path, if None, we use 'git'
repo_work_dir (str): The dataset repo root directory.
dataset_id (str): dataset id in ModelScope from which git clone
revision (str, optional): revision of the dataset you want to clone from.
Can be any of a branch, tag or commit hash
auth_token (str, optional): token obtained when calling `HubApi.login()`.
Usually you can safely ignore the parameter as the token is
already saved when you login the first time, if None, we will use saved token.
git_path (str, optional): The git command line path, if None, we use 'git'
Raises:
InvalidParameter: parameter invalid.
"""
self.dataset_id = dataset_id
if not repo_work_dir or not isinstance(repo_work_dir, str):
@@ -229,16 +240,21 @@ class DatasetRepository:
def push(self,
commit_message: str,
branch: Optional[str] = DEFAULT_DATASET_REVISION,
force: bool = False):
force: Optional[bool] = False):
"""Push local files to remote, this method will do.
git pull
git add
git commit
git push
Args:
commit_message (str): commit message
branch (Optional[str], optional): which branch to push.
force (Optional[bool]): whether to use forced-push.
branch (str, optional): which branch to push.
force (bool, optional): whether to use forced-push.
Raises:
InvalidParameter: no commit message.
NotLoginException: no access token.
"""
if commit_message is None or not isinstance(commit_message, str):
msg = 'commit_message must be provided!'

View File

@@ -32,31 +32,31 @@ def snapshot_download(model_id: str,
An alternative would be to just clone a repo but this would require that the
user always has git and git-lfs installed, and properly configured.
Args:
model_id (`str`):
A user or an organization name and a repo name separated by a `/`.
revision (`str`, *optional*):
An optional Git revision id which can be a branch name, a tag, or a
commit hash. NOTE: currently only branch and tag name is supported
cache_dir (`str`, `Path`, *optional*):
Path to the folder where cached files are stored.
user_agent (`str`, `dict`, *optional*):
The user-agent info in the form of a dictionary or a string.
local_files_only (`bool`, *optional*, defaults to `False`):
If `True`, avoid downloading the file and return the path to the
local cached file if it exists.
Returns:
Local folder path (string) of repo snapshot
<Tip>
Raises the following errors:
- [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
if `use_auth_token=True` and the token cannot be found.
- [`OSError`](https://docs.python.org/3/library/exceptions.html#OSError) if
ETag cannot be determined.
- [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
if some parameter value is invalid
</Tip>
Args:
model_id (str): A user or an organization name and a repo name separated by a `/`.
revision (str, optional): An optional Git revision id which can be a branch name, a tag, or a
commit hash. NOTE: currently only branch and tag name is supported
cache_dir (str, Path, optional): Path to the folder where cached files are stored.
user_agent (str, dict, optional): The user-agent info in the form of a dictionary or a string.
local_files_only (bool, optional): If `True`, avoid downloading the file and return the path to the
local cached file if it exists.
cookies (CookieJar, optional): The cookie of the request, default None.
Raises:
ValueError: the value details.
Returns:
str: Local folder path (string) of repo snapshot
Note:
Raises the following errors:
- [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
if `use_auth_token=True` and the token cannot be found.
- [`OSError`](https://docs.python.org/3/library/exceptions.html#OSError) if
ETag cannot be determined.
- [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
if some parameter value is invalid
"""
if cache_dir is None:

View File

@@ -9,6 +9,8 @@ from shutil import move, rmtree
from modelscope.utils.logger import get_logger
logger = get_logger()
"""Implements caching functionality, used internally only
"""
class FileSystemCache(object):
@@ -21,11 +23,11 @@ class FileSystemCache(object):
cache_root_location: str,
**kwargs,
):
"""
Parameters
----------
cache_location: str
The root location to store files.
"""Base file system cache interface.
Args:
cache_root_location (str): The root location to store files.
kwargs(dict): The keyword arguments.
"""
os.makedirs(cache_root_location, exist_ok=True)
self.cache_root_location = cache_root_location
@@ -35,19 +37,6 @@ class FileSystemCache(object):
return self.cache_root_location
def load_cache(self):
"""Read set of stored blocks from file
Args:
owner(`str`): individual or group username at modelscope, can be empty for official models
name(`str`): name of the model
Returns:
The model details information.
Raises:
NotExistError: If the model is not exist, will throw NotExistError
TODO: Error based error code.
<Tip>
model_id = {owner}/{name}
</Tip>
"""
self.cached_files = []
cache_keys_file_path = os.path.join(self.cache_root_location,
FileSystemCache.KEY_FILE_NAME)
@@ -68,30 +57,24 @@ class FileSystemCache(object):
def get_file(self, key):
"""Check the key is in the cache, if exist, return the file, otherwise return None.
Args:
key(`str`): The cache key.
Returns:
If file exist, return the cached file location, otherwise None.
key(str): The cache key.
Raises:
None
<Tip>
model_id = {owner}/{name}
</Tip>
"""
pass
def put_file(self, key, location):
"""Put file to the cache,
"""Put file to the cache.
Args:
key(`str`): The cache key
location(`str`): Location of the file, we will move the file to cache.
Returns:
The cached file path of the file.
key (str): The cache key
location (str): Location of the file, we will move the file to cache.
Raises:
None
<Tip>
model_id = {owner}/{name}
</Tip>
"""
pass
@@ -113,8 +96,7 @@ class FileSystemCache(object):
return False
def clear_cache(self):
"""Remove all files and metadat from the cache
"""Remove all files and metadata from the cache
In the case of multiple cache locations, this clears only the last one,
which is assumed to be the read/write one.
"""
@@ -127,32 +109,26 @@ class FileSystemCache(object):
class ModelFileSystemCache(FileSystemCache):
"""Local cache file layout
cache_root/owner/model_name/|individual cached files
|.mk: file, The cache index file
cache_root/owner/model_name/individual cached files and cache index file '.mcs'
Save only one version for each file.
"""
def __init__(self, cache_root, owner, name):
"""Put file to the cache
Args:
cache_root(`str`): The modelscope local cache root(default: ~/.modelscope/cache/models/)
owner(`str`): The model owner.
name('str'): The name of the model
branch('str'): The branch of model
tag('str'): The tag of model
Returns:
Raises:
None
<Tip>
model_id = {owner}/{name}
</Tip>
cache_root(str): The modelscope local cache root(default: ~/.modelscope/cache/models/)
owner(str): The model owner.
name(str): The name of the model
"""
super().__init__(os.path.join(cache_root, owner, name))
def get_file_by_path(self, file_path):
"""Retrieve the cache if there is file match the path.
Args:
file_path (str): The file path in the model.
Returns:
path: the full path of the file.
"""
@@ -169,9 +145,11 @@ class ModelFileSystemCache(FileSystemCache):
def get_file_by_path_and_commit_id(self, file_path, commit_id):
"""Retrieve the cache if there is file match the path.
Args:
file_path (str): The file path in the model.
commit_id (str): The commit id of the file
Returns:
path: the full path of the file.
"""
@@ -194,7 +172,7 @@ class ModelFileSystemCache(FileSystemCache):
model_file_info (ModelFileInfo): The file information of the file.
Returns:
_type_: _description_
str: The file path.
"""
cache_key = self.__get_cache_key(model_file_info)
for cached_file in self.cached_files:
@@ -262,23 +240,8 @@ class ModelFileSystemCache(FileSystemCache):
"""Put model on model_file_location to cache, the model first download to /tmp, and move to cache.
Args:
model_file_info (str): The file description returned by get_model_files
sample:
{
"CommitMessage": "add model\n",
"CommittedDate": 1654857567,
"CommitterName": "mulin.lyh",
"IsLFS": false,
"Mode": "100644",
"Name": "resnet18.pth",
"Path": "resnet18.pth",
"Revision": "09b68012b27de0048ba74003690a890af7aff192",
"Size": 46827520,
"Type": "blob"
}
model_file_info (str): The file description returned by get_model_files.
model_file_location (str): The location of the temporary file.
Raises:
NotImplementedError: _description_
Returns:
str: The location of the cached file.

View File

@@ -29,9 +29,14 @@ def model_id_to_group_owner_name(model_id):
def get_cache_dir(model_id: Optional[str] = None):
"""
cache dir precedence:
function parameter > enviroment > ~/.cache/modelscope/hub
"""cache dir precedence:
function parameter > environment > ~/.cache/modelscope/hub
Args:
model_id (str, optional): The model id.
Returns:
str: the model_id dir if model_id not None, otherwise cache root dir.
"""
default_cache_dir = get_default_cache_dir()
base_path = os.getenv('MODELSCOPE_CACHE',

View File

@@ -13,8 +13,7 @@ Input = Union[Dict[str, Tensor], Model]
class Head(ABC):
"""
The head base class is for the tasks head method definition
"""The head base class is for the tasks head method definition
"""

View File

@@ -19,6 +19,8 @@ Tensor = Union['torch.Tensor', 'tf.Tensor']
class Model(ABC):
"""Base model interface.
"""
def __init__(self, model_dir, *args, **kwargs):
self.model_dir = model_dir

View File

@@ -155,11 +155,12 @@ class MsDataset:
**config_kwargs,
) -> Union[dict, 'MsDataset']:
"""Load a MsDataset from the ModelScope Hub, Hugging Face Hub, urls, or a local dataset.
Args:
Args:
dataset_name (str): Path or name of the dataset.
namespace(str, optional): Namespace of the dataset. It should not be None if you load a remote dataset
from Hubs.modelscope,
namespace (str, optional):
Namespace of the dataset. It should not be None if you load a remote dataset
from Hubs.modelscope,
target (str, optional): Name of the column to output.
version (str, optional): Version of the dataset script to load:
subset_name (str, optional): Defining the subset_name of the dataset.
@@ -167,12 +168,12 @@ class MsDataset:
data_files (str or Sequence or Mapping, optional): Path(s) to source data file(s).
split (str, optional): Which split of the data to load.
hub (Hubs or str, optional): When loading from a remote hub, where it is from. default Hubs.modelscope
download_mode (DownloadMode or str, optional): How to treat existing datasets. default
DownloadMode.REUSE_DATASET_IF_EXISTS
**config_kwargs (additional keyword arguments): Keyword arguments to be passed
download_mode (DownloadMode or str, optional):
How to treat existing datasets. default DownloadMode.REUSE_DATASET_IF_EXISTS
config_kwargs (additional keyword arguments): Keyword arguments to be passed
Returns:
MsDataset (obj:`MsDataset`): MsDataset object for a certain dataset.
MsDataset (MsDataset): MsDataset object for a certain dataset.
"""
download_mode = DownloadMode(download_mode
or DownloadMode.REUSE_DATASET_IF_EXISTS)
@@ -645,15 +646,16 @@ class MsDataset:
auth_token: Optional[str] = None,
git_path: Optional[str] = None) -> None:
"""Clone meta-file of dataset from the ModelScope Hub.
Args:
dataset_work_dir (str): Current git working directory.
dataset_id (str): Dataset id, in the form of your-namespace/your-dataset-name .
revision(`Optional[str]`):
revision (str, optional):
revision of the model you want to clone from. Can be any of a branch, tag or commit hash
auth_token(`Optional[str]`):
auth_token (str, optional):
token obtained when calling `HubApi.login()`. Usually you can safely ignore the parameter
as the token is already saved when you log in the first time, if None, we will use saved token.
git_path:(`Optional[str]`):
git_path (str, optional):
The git command line path, if None, we use 'git'
Returns:
None

View File

@@ -39,6 +39,8 @@ logger = get_logger()
class Pipeline(ABC):
"""Pipeline base.
"""
def initiate_single_model(self, model):
if isinstance(model, str):
@@ -386,11 +388,10 @@ class DistributedPipeline(Pipeline):
2. Set the multiprocessing method to spawn
3. Open a multiprocessing pool of the world_size to instantiate model pieces.
4. Set the master port and ip
5. Call _instantiate_one to instantiate one model piece
This method should be implemented by the derived class.
6. After the forward method is called, do preprocess in main process
and call _forward_one to collect results, and do
post process in main process.
5. Call _instantiate_one to instantiate one model piece,
This method should be implemented by the derived class.
6. After the forward method is called, do preprocess in main process and
call _forward_one to collect results, and do post process in main process.
NOTE: _instantiate_one and _forward_one are class methods, any derived class should implement them and
store the model handler in the class field.

View File

@@ -137,6 +137,8 @@ PREPROCESSOR_MAP = {
class Preprocessor(ABC):
"""Base of preprocessors.
"""
def __init__(self, mode=ModeKeys.INFERENCE, *args, **kwargs):
self._mode = mode

View File

@@ -92,18 +92,21 @@ def _interval_based_sampling(vid_length, vid_fps, target_fps, clip_idx,
num_clips, num_frames, interval, minus_interval):
"""
Generates the frame index list using interval based sampling.
Args:
vid_length (int): the length of the whole video (valid selection range).
vid_fps (int): the original video fps
target_fps (int): the normalized video fps
clip_idx (int): -1 for random temporal sampling, and positive values for sampling specific
clip from the video
num_clips (int): the total clips to be sampled from each video.
combined with clip_idx, the sampled video is the "clip_idx-th" video from
"num_clips" videos.
num_frames (int): number of frames in each sampled clips.
interval (int): the interval to sample each frame.
vid_length (int): the length of the whole video (valid selection range).
vid_fps (int): the original video fps
target_fps (int): the normalized video fps
clip_idx (int):
-1 for random temporal sampling, and positive values for sampling specific
clip from the video
num_clips (int):
the total clips to be sampled from each video. combined with clip_idx,
the sampled video is the "clip_idx-th" video from "num_clips" videos.
num_frames (int): number of frames in each sampled clips.
interval (int): the interval to sample each frame.
minus_interval (bool): control the end index
Returns:
index (tensor): the sampled frame indexes
"""

View File

@@ -990,25 +990,20 @@ class EpochBasedTrainer(BaseTrainer):
def visualization(self, results, dataset, **kwargs):
""" visualization function for evaluation results.
Examples:
# draw list of images as numpy array
images = draw_images(num_of_visualization)
# set displayed name for each image
filenames = get_image_display_names()
vis_results = {'images': images, 'filenames' : filenames}
# visualization results will be displayed in group named eva_vis
self.visualization_buffer.output['eval_vis'] = vis_results
Args:
results (list(dict)): a list of result dict.
dataset (:obj:`Dataset`): torch dataset object to access original data.
Implementation Examples:
```python
# draw list of images as numpy array
images = draw_images(num_of_visualization)
# set displayed name for each image
filenames = get_image_display_names()
vis_results = {
'images': images,
'filenames' : filenames
}
# visualization results will be displayed in group named eva_vis
self.visualization_buffer.output['eval_vis'] = vis_results
```
dataset (Dataset): torch dataset object to access original data.
"""
# TODO @wenmeng.zwm add visualization support for cv evaluation
raise NotImplementedError(

View File

@@ -1,7 +1,7 @@
docutils>=0.16.0
myst_parser
recommonmark
sphinx>=4.0.2
sphinx>=5.3.0
sphinx-book-theme
sphinx-copybutton
sphinx_markdown_tables

View File

@@ -44,6 +44,7 @@ isolated: # test cases that may require excessive amount of GPU memory or run
- test_image_skychange.py
- test_video_super_resolution.py
- test_kws_nearfield_trainer.py
- test_gpt3_text_generation.py
envs:
default: # default env, case not in other env will in default, pytorch.