from azure.ai.ml import MLClient, Input
from azure.ai.ml.entities import Model
from azure.ai.ml.constants import AssetTypes
from azure.identity import DefaultAzureCredential
$schema: https://azuremlschemas.azureedge.net/latest/model.schema.json
name: local-file-example
path: mlflow-model/model.pkl
description: Model created from local file.
from azure.ai.ml.constants import AssetTypes
from azure.ai.ml.entities import Model

# Register a model that lives on the local filesystem; the SDK uploads the
# file to the workspace's default datastore as part of registration.
file_model = Model(
    name="local-file-example",
    type=AssetTypes.CUSTOM_MODEL,
    path="mlflow-model/model.pkl",
    description="Model created from local file.",
)
ml_client.models.create_or_update(file_model)
可使用以下任一受支持的 URI 格式从云路径创建模型。
from azure.ai.ml.constants import AssetTypes
from azure.ai.ml.entities import Model

# Register a model addressed by a cloud URI instead of a local path.
# `file_model.path` resolves to a full datastore URI of the form
# "azureml://subscriptions/XXXXXXXXXXXXXXXX/resourceGroups/XXXXXXXXXXX/workspaces/XXXXXXXXXXX/datastores/workspaceblobstore/paths/model.pkl";
# the shorthand "azureml://datastores/workspaceblobstore/paths/model.pkl"
# refers to the same location.
cloud_model = Model(
    name="cloud-path-example",
    type=AssetTypes.CUSTOM_MODEL,
    path=file_model.path,
    description="Model created from cloud path.",
)
ml_client.models.create_or_update(cloud_model)
from azure.ai.ml.constants import AssetTypes
from azure.ai.ml.entities import Model

# Register a model directly from an MLflow run's artifact output.
# Fixes vs. the original snippet:
#  - a comma was missing after the `path` argument (SyntaxError);
#  - `ModelType` does not exist in azure.ai.ml.constants — the v2 SDK
#    constant (used consistently elsewhere in this file) is
#    AssetTypes.MLFLOW_MODEL.
run_model = Model(
    path="runs:/<run-id>/model/",  # MLflow run URI: runs:/<run-id>/<artifact-path>
    name="run-model-example",
    type=AssetTypes.MLFLOW_MODEL,
    description="Model created from run.",
)
ml_client.models.create_or_update(run_model)
azureml 作业
此选项是一种 azureml job 引用 URI 格式,可帮助你从任何作业输出中的项目注册模型。 此格式与现有的 azureml 数据存储引用 URI 格式一致,并且还支持从作业的命名输出引用项目(不仅仅是默认项目位置)。 如果未使用 MLflow 在训练脚本中直接注册模型,可以在注册的模型和训练该模型的作业之间建立世系。
from azure.ai.ml import command
from azure.ai.ml.entities import Model
from azure.ai.ml import Input
from azure.ai.ml.constants import AssetTypes
from azure.ai.ml import MLClient

# Asset types accepted for a model input:
#   AssetTypes.MLFLOW_MODEL | AssetTypes.CUSTOM_MODEL | AssetTypes.TRITON_MODEL
# Path forms accepted for a model:
#   local:       mlflow-model/model.pkl
#   datastore:   azureml://datastores/<datastore-name>/paths/<path_on_datastore>
#   MLflow run:  runs:/<run-id>/<path-to-model-relative-to-the-root-of-the-artifact-location>
#   job output:  azureml://jobs/<job-name>/outputs/<output-name>/paths/<path-to-model-relative-to-the-named-output-location>
#   model asset: azureml:<my_model>:<version>

# Feed the registered MLflow model to the job as its single input.
model_input = Input(type=AssetTypes.MLFLOW_MODEL, path="mlflowmodel")
my_job_inputs = {"input_model": model_input}

# Minimal command job: list the contents of the mounted model folder.
job = command(
    code="./src",  # local path where the code is stored
    command="ls ${{inputs.input_model}}",
    inputs=my_job_inputs,
    environment="AzureML-sklearn-0.24-ubuntu18.04-py37-cpu:9",
    compute="cpu-cluster",
)

# Submit the command job.
returned_job = ml_client.jobs.create_or_update(job)
# Studio URL for tracking the job's status.
returned_job.services["Studio"].endpoint
from azure.ai.ml import command
from azure.ai.ml.entities import Model
from azure.ai.ml import Input, Output
from azure.ai.ml.constants import AssetTypes

# Asset types accepted for a model:
#   AssetTypes.MLFLOW_MODEL | AssetTypes.CUSTOM_MODEL | AssetTypes.TRITON_MODEL
# Path forms accepted for a model:
#   local:       mlflow-model/model.pkl
#   datastore:   azureml://datastores/<datastore-name>/paths/<path_on_datastore>
#   MLflow run:  runs:/<run-id>/<path-to-model-relative-to-the-root-of-the-artifact-location>
#   job output:  azureml://jobs/<job-name>/outputs/<output-name>/paths/<path-to-model-relative-to-the-named-output-location>
#   model asset: azureml:<my_model>:<version>

# Inputs: the MLflow model folder plus a sample payload file.
my_job_inputs = {
    "input_model": Input(type=AssetTypes.MLFLOW_MODEL, path="mlflow-model"),
    "input_data": Input(type=AssetTypes.URI_FILE, path="./mlflow-model/input_example.json"),
}
# Output: a folder the script writes the (re-saved) model into.
my_job_outputs = {"output_folder": Output(type=AssetTypes.CUSTOM_MODEL)}

# Command job that loads the input model and writes it to the named output.
job = command(
    code="./src",  # local path where the code is stored
    command="python load_write_model.py --input_model ${{inputs.input_model}} --output_folder ${{outputs.output_folder}}",
    inputs=my_job_inputs,
    outputs=my_job_outputs,
    environment="AzureML-sklearn-0.24-ubuntu18.04-py37-cpu:9",
    compute="cpu-cluster",
)

# Submit the command job.
returned_job = ml_client.create_or_update(job)
# Studio URL for tracking the job's status.
returned_job.services["Studio"].endpoint