Merge branch merge_master_github_0626 into master
Title: merge_master_github_0626 Link: https://code.alibaba-inc.com/Ali-MaaS/MaaS-lib/codereview/13071322
.github/workflows/citest.yaml (vendored): 1 line changed
@@ -40,6 +40,7 @@ jobs:
   unittest:
     # The type of runner that the job will run on
     runs-on: [modelscope-self-hosted]
+    timeout-minutes: 240
     steps:
       - name: ResetFileMode
         shell: bash
@@ -1 +1,2 @@
 recursive-include modelscope/configs *.py *.cu *.h *.cpp
+recursive-include modelscope/cli/template *.tpl
README.md: 12 lines changed
@@ -203,12 +203,20 @@ To allow out-of-box usage for all the models on ModelScope, official docker imag
 
 CPU docker image
 ```shell
-registry.cn-hangzhou.aliyuncs.com/modelscope-repo/modelscope:ubuntu20.04-py37-torch1.11.0-tf1.15.5-1.3.0
+# py37
+registry.cn-hangzhou.aliyuncs.com/modelscope-repo/modelscope:ubuntu20.04-py37-torch1.11.0-tf1.15.5-1.6.1
+
+# py38
+registry.cn-hangzhou.aliyuncs.com/modelscope-repo/modelscope:ubuntu20.04-py38-torch1.11.0-tf1.15.5-1.6.1
 ```
 
 GPU docker image
 ```shell
-registry.cn-hangzhou.aliyuncs.com/modelscope-repo/modelscope:ubuntu20.04-cuda11.3.0-py37-torch1.11.0-tf1.15.5-1.3.0
+# py37
+registry.cn-hangzhou.aliyuncs.com/modelscope-repo/modelscope:ubuntu20.04-cuda11.3.0-py37-torch1.11.0-tf1.15.5-1.6.1
+
+# py38
+registry.cn-hangzhou.aliyuncs.com/modelscope-repo/modelscope:ubuntu20.04-cuda11.3.0-py38-torch1.11.0-tf1.15.5-1.6.1
 ```
 
 ## Setup Local Python Environment
README_zh.md: 12 lines changed
@@ -189,12 +189,20 @@ ModelScope Library目前支持tensorflow,pytorch深度学习框架进行模型
 
 CPU镜像
 ```shell
-registry.cn-hangzhou.aliyuncs.com/modelscope-repo/modelscope:ubuntu20.04-py37-torch1.11.0-tf1.15.5-1.3.0
+# py37
+registry.cn-hangzhou.aliyuncs.com/modelscope-repo/modelscope:ubuntu20.04-py37-torch1.11.0-tf1.15.5-1.6.1
+
+# py38
+registry.cn-hangzhou.aliyuncs.com/modelscope-repo/modelscope:ubuntu20.04-py38-torch1.11.0-tf1.15.5-1.6.1
 ```
 
 GPU镜像
 ```shell
-registry.cn-hangzhou.aliyuncs.com/modelscope-repo/modelscope:ubuntu20.04-cuda11.3.0-py37-torch1.11.0-tf1.15.5-1.3.0
+# py37
+registry.cn-hangzhou.aliyuncs.com/modelscope-repo/modelscope:ubuntu20.04-cuda11.3.0-py37-torch1.11.0-tf1.15.5-1.6.1
+
+# py38
+registry.cn-hangzhou.aliyuncs.com/modelscope-repo/modelscope:ubuntu20.04-cuda11.3.0-py38-torch1.11.0-tf1.15.5-1.6.1
 ```
 
 ## 搭建本地Python环境
@@ -13,8 +13,8 @@ from modelscope.utils.logger import get_logger
 
 logger = get_logger()
 
-curren_path = os.path.dirname(os.path.abspath(__file__))
-template_path = os.path.join(curren_path, 'template')
+current_path = os.path.dirname(os.path.abspath(__file__))
+template_path = os.path.join(current_path, 'template')
 
 
 def subparser_func(args):
@@ -8,8 +8,8 @@ from modelscope.utils.logger import get_logger
 
 logger = get_logger()
 
-curren_path = os.path.dirname(os.path.abspath(__file__))
-template_path = os.path.join(curren_path, 'template')
+current_path = os.path.dirname(os.path.abspath(__file__))
+template_path = os.path.join(current_path, 'template')
 
 
 def subparser_func(args):
@@ -50,7 +50,7 @@ def batch(reader, batch_size, drop_last=False):
     # Batch size check
     batch_size = int(batch_size)
     if batch_size <= 0:
-        raise ValueError('batch_size should be a positive integeral value, '
+        raise ValueError('batch_size should be a positive integer value, '
                          'but got batch_size={}'.format(batch_size))
 
     return batch_reader
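For context, batch() wraps a sample reader into a reader that yields fixed-size batches, and the corrected error message sits in its input validation. The sketch below is a minimal illustration of that pattern only; the inner generator, its name, and the drop_last handling are assumptions, not the library's actual implementation beyond the lines shown in the hunk.

```python
# Minimal sketch of a batching wrapper (illustrative, not the library code).
def batch(reader, batch_size, drop_last=False):
    # Batch size check
    batch_size = int(batch_size)
    if batch_size <= 0:
        raise ValueError('batch_size should be a positive integer value, '
                         'but got batch_size={}'.format(batch_size))

    def batch_reader():
        buf = []
        for sample in reader():
            buf.append(sample)
            if len(buf) == batch_size:
                yield buf
                buf = []
        # Emit the short final batch unless drop_last is requested.
        if buf and not drop_last:
            yield buf

    return batch_reader
```

Under these assumptions, calling batch(my_reader, 4)() yields lists of at most four samples, dropping a short final batch only when drop_last=True.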
@@ -344,7 +344,7 @@ class BPETextField(object):
             print(f'Saved {len(examples)} examples (elapsed {elapsed:.2f}s)')
         else:
             print(f"Saving examples to '{filename}' ...")
-            raise ValueError(f'Unsport file format: {filename}')
+            raise ValueError(f'Unsupport file format: {filename}')
 
     def load_examples(self, filename):
         start = time.time()
@@ -39,7 +39,7 @@ def run_auto_label(input_wav,
     if not os.path.exists(work_dir):
         raise ValueError(f'work_dir: {work_dir} not exists')
 
-    def _download_and_unzip_resousrce(model, model_revision=None):
+    def _download_and_unzip_resource(model, model_revision=None):
         if os.path.exists(model):
             model_cache_dir = model if os.path.isdir(
                 model) else os.path.dirname(model)
@@ -52,7 +52,7 @@ def run_auto_label(input_wav,
             revision=model_revision,
             user_agent={ThirdParty.KEY: 'speech_tts_autolabel'})
         if not os.path.exists(model_cache_dir):
-            raise ValueError(f'mdoel_cache_dir: {model_cache_dir} not exists')
+            raise ValueError(f'model_cache_dir: {model_cache_dir} not exists')
         zip_file = os.path.join(model_cache_dir, 'model.zip')
         if not os.path.exists(zip_file):
             raise ValueError(f'zip_file: {zip_file} not exists')
@@ -61,8 +61,8 @@ def run_auto_label(input_wav,
         target_resource = os.path.join(model_cache_dir, 'model')
         return target_resource
 
-    model_resource = _download_and_unzip_resousrce(resource_model_id,
-                                                    resource_revision)
+    model_resource = _download_and_unzip_resource(resource_model_id,
+                                                  resource_revision)
     auto_labeling = AutoLabeling(
         os.path.abspath(input_wav),
         model_resource,
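For orientation, the renamed helper resolves a model cache directory (a local path or a hub download), verifies a model.zip inside it, and returns an extracted model folder. The download step and its exact call signatures are not part of the diff, so the sketch below covers only the extraction portion implied by the shown checks; _unzip_model is a hypothetical name and the extraction target is an assumption matching the target_resource path above.

```python
import os
import zipfile


def _unzip_model(model_cache_dir):
    """Illustrative only: extract model.zip into a sibling 'model' directory."""
    zip_file = os.path.join(model_cache_dir, 'model.zip')
    if not os.path.exists(zip_file):
        raise ValueError(f'zip_file: {zip_file} not exists')
    target_resource = os.path.join(model_cache_dir, 'model')
    # Assumption: the archive contents land under target_resource.
    with zipfile.ZipFile(zip_file) as zf:
        zf.extractall(target_resource)
    return target_resource
```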
@@ -1,4 +1,5 @@
 # Copyright (c) Alibaba, Inc. and its affiliates.
+import random
 import tempfile
 import unittest
 
@@ -27,40 +28,13 @@ class ControllableImageGenerationTest(unittest.TestCase):
     @unittest.skipUnless(test_level() >= 0, 'skip test in current test level')
     def test_run_with_model_from_modelhub(self):
         output_image_path = tempfile.NamedTemporaryFile(suffix='.png').name
+        control_types = [
+            'canny', 'hough', 'hed', 'depth', 'normal', 'pose', 'seg',
+            'fake_scribble', 'scribble'
+        ]
+        control_type = random.choice(control_types)
         pipeline_ins = pipeline(
-            self.task, model=self.model_id, control_type='canny')
-        output = pipeline_ins(input=self.input)[OutputKeys.OUTPUT_IMG]
-
-        pipeline_ins = pipeline(
-            self.task, model=self.model_id, control_type='hough')
-        output = pipeline_ins(input=self.input)[OutputKeys.OUTPUT_IMG]
-
-        pipeline_ins = pipeline(
-            self.task, model=self.model_id, control_type='hed')
-        output = pipeline_ins(input=self.input)[OutputKeys.OUTPUT_IMG]
-
-        pipeline_ins = pipeline(
-            self.task, model=self.model_id, control_type='depth')
-        output = pipeline_ins(input=self.input)[OutputKeys.OUTPUT_IMG]
-
-        pipeline_ins = pipeline(
-            self.task, model=self.model_id, control_type='normal')
-        output = pipeline_ins(input=self.input)[OutputKeys.OUTPUT_IMG]
-
-        pipeline_ins = pipeline(
-            self.task, model=self.model_id, control_type='pose')
-        output = pipeline_ins(input=self.input)[OutputKeys.OUTPUT_IMG]
-
-        pipeline_ins = pipeline(
-            self.task, model=self.model_id, control_type='seg')
-        output = pipeline_ins(input=self.input)[OutputKeys.OUTPUT_IMG]
-
-        pipeline_ins = pipeline(
-            self.task, model=self.model_id, control_type='fake_scribble')
-        output = pipeline_ins(input=self.input)[OutputKeys.OUTPUT_IMG]
-
-        pipeline_ins = pipeline(
-            self.task, model=self.model_id, control_type='scribble')
+            self.task, model=self.model_id, control_type=control_type)
         output = pipeline_ins(input=self.input)[OutputKeys.OUTPUT_IMG]
         cv2.imwrite(output_image_path, output)
         print(
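This refactor exercises one randomly chosen control_type per run instead of all nine, trading per-type coverage for CI time. If exhaustive coverage is wanted elsewhere, unittest's subTest is one conventional way to keep it in a single method; the sketch below is illustrative only, not part of the commit, and replaces the real pipeline call with a placeholder assertion.

```python
import unittest


class ControlTypeCoverageSketch(unittest.TestCase):
    """Illustrative only; the real test builds a ModelScope pipeline."""

    control_types = [
        'canny', 'hough', 'hed', 'depth', 'normal', 'pose', 'seg',
        'fake_scribble', 'scribble'
    ]

    def test_every_control_type(self):
        for control_type in self.control_types:
            # Each control type becomes its own subtest, so one failure
            # does not hide the rest.
            with self.subTest(control_type=control_type):
                # Placeholder for:
                #   pipeline(self.task, model=self.model_id,
                #            control_type=control_type)
                self.assertIn(control_type, self.control_types)
```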