from azure.ai.ml import MLClient, Input
from azure.ai.ml.entities import Model
from azure.ai.ml.constants import AssetTypes
from azure.identity import DefaultAzureCredential
# YAML definition (e.g. model.yml) for registering a model from a local file
# with `az ml model create --file model.yml`.
$schema: https://azuremlschemas.azureedge.net/latest/model.schema.json
name: local-file-example
path: mlflow-model/model.pkl
description: Model created from local file.
from azure.ai.ml.entities import Model
from azure.ai.ml.constants import AssetTypes

# Register a model asset whose content is a single file on the local disk.
# The file is uploaded to the workspace's default datastore on registration.
file_model = Model(
    name="local-file-example",
    type=AssetTypes.CUSTOM_MODEL,
    path="mlflow-model/model.pkl",
    description="Model created from local file.",
)
ml_client.models.create_or_update(file_model)
您可以使用下列任何一種支援的 URI 格式，從雲端路徑建立模型。
from azure.ai.ml.entities import Model
from azure.ai.ml.constants import AssetTypes

# Register a model directly from a cloud path — no local upload happens.
# `file_model.path` resolves to a fully-qualified datastore URI such as
# "azureml://subscriptions/XXXXXXXXXXXXXXXX/resourceGroups/XXXXXXXXXXX/workspaces/XXXXXXXXXXX/datastores/workspaceblobstore/paths/model.pkl";
# the shorthand "azureml://datastores/workspaceblobstore/paths/model.pkl"
# points at the same location.
cloud_model = Model(
    name="cloud-path-example",
    type=AssetTypes.CUSTOM_MODEL,
    path=file_model.path,
    description="Model created from cloud path.",
)
ml_client.models.create_or_update(cloud_model)
from azure.ai.ml.entities import Model
from azure.ai.ml.constants import AssetTypes

# Register a model from the artifacts of an MLflow run.
# Fixes vs. the original snippet:
#  * a comma was missing after the `path=` argument, which made the
#    snippet a SyntaxError (`path="..." name="..."`);
#  * `azure.ai.ml.constants` exposes `AssetTypes` (used by every other
#    snippet in this article), not `ModelType` — so the asset type is
#    expressed as AssetTypes.MLFLOW_MODEL.
run_model = Model(
    path="runs:/<run-id>/model/",
    name="run-model-example",
    description="Model created from run.",
    type=AssetTypes.MLFLOW_MODEL,
)
ml_client.models.create_or_update(run_model)
azureml 作業
此選項是 azureml job 參考 URI 格式，可協助您從任何作業輸出中的成品登錄模型。此格式與現有的 azureml 資料存放區參考 URI 格式一致，也支援從作業的具名輸出參考成品（不只是預設成品位置）。當您未在定型指令碼中使用 MLflow 直接登錄模型時，可藉此在已註冊的模型與其定型作業之間建立譜系。
from azure.ai.ml import command
from azure.ai.ml.entities import Model
from azure.ai.ml import Input
from azure.ai.ml.constants import AssetTypes
from azure.ai.ml import MLClient

# Possible Asset Types for Model (the original comment said "for Data",
# but the constants listed are model asset types):
# AssetTypes.MLFLOW_MODEL
# AssetTypes.CUSTOM_MODEL
# AssetTypes.TRITON_MODEL

# Possible Paths for Model:
# Local path: mlflow-model/model.pkl
# Azure Machine Learning Datastore: azureml://datastores/<datastore-name>/paths/<path_on_datastore>
# MLflow run: runs:/<run-id>/<path-to-model-relative-to-the-root-of-the-artifact-location>
# Job: azureml://jobs/<job-name>/outputs/<output-name>/paths/<path-to-model-relative-to-the-named-output-location>
# Model Asset: azureml:<my_model>:<version>

# Mount the local "mlflow-model" folder as a job input.
# (The original snippet said "mlflowmodel" — a typo for the "mlflow-model"
# folder used consistently throughout this article.)
my_job_inputs = {
    "input_model": Input(type=AssetTypes.MLFLOW_MODEL, path="mlflow-model")
}

job = command(
    code="./src",  # local path where the code is stored
    command="ls ${{inputs.input_model}}",
    inputs=my_job_inputs,
    environment="AzureML-sklearn-0.24-ubuntu18.04-py37-cpu:9",
    compute="cpu-cluster",
)

# submit the command
returned_job = ml_client.jobs.create_or_update(job)
# get a URL for the status of the job
returned_job.services["Studio"].endpoint
from azure.ai.ml import command
from azure.ai.ml.entities import Model
from azure.ai.ml import Input, Output
from azure.ai.ml.constants import AssetTypes

# Possible Asset Types for Model:
# AssetTypes.MLFLOW_MODEL
# AssetTypes.CUSTOM_MODEL
# AssetTypes.TRITON_MODEL

# Possible Paths for Model:
# Local path: mlflow-model/model.pkl
# Azure Machine Learning Datastore: azureml://datastores/<datastore-name>/paths/<path_on_datastore>
# MLflow run: runs:/<run-id>/<path-to-model-relative-to-the-root-of-the-artifact-location>
# Job: azureml://jobs/<job-name>/outputs/<output-name>/paths/<path-to-model-relative-to-the-named-output-location>
# Model Asset: azureml:<my_model>:<version>

# Job inputs: the model folder plus a sample payload file.
my_job_inputs = {
    "input_model": Input(type=AssetTypes.MLFLOW_MODEL, path="mlflow-model"),
    "input_data": Input(type=AssetTypes.URI_FILE, path="./mlflow-model/input_example.json"),
}

# Job outputs: the folder the script writes the (re-saved) model into.
my_job_outputs = {
    "output_folder": Output(type=AssetTypes.CUSTOM_MODEL)
}

job = command(
    code="./src",  # local path where the code is stored
    command="python load_write_model.py --input_model ${{inputs.input_model}} --output_folder ${{outputs.output_folder}}",
    inputs=my_job_inputs,
    outputs=my_job_outputs,
    environment="AzureML-sklearn-0.24-ubuntu18.04-py37-cpu:9",
    compute="cpu-cluster",
)

# submit the command — routed through the jobs operations group for
# consistency with the sibling snippet's `ml_client.jobs.create_or_update`.
returned_job = ml_client.jobs.create_or_update(job)
# get a URL for the status of the job
returned_job.services["Studio"].endpoint