I would like to update the code and/or other files uploaded to the code folder used by a specific scheduled job, while keeping everything else unchanged, in particular the schedule details (time of day, etc.), using the Python SDK v2.
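Before changing anything, it can help to inspect the existing schedule to see which parts will be kept (the trigger) and which will be replaced (create_job). Below is a minimal sketch; the workspace details and schedule name are placeholders, not values from the original setup.

from azure.ai.ml import MLClient
from azure.identity import InteractiveBrowserCredential

# Hypothetical workspace details; substitute your own.
ml_client = MLClient(
    credential=InteractiveBrowserCredential(),
    subscription_id="my-subscription-id",
    resource_group_name="my-resource-group",
    workspace_name="my-workspace",
)

existing = ml_client.schedules.get("my_schedule_name")
print(existing.trigger)     # recurrence / cron details that should be preserved
print(existing.create_job)  # the job definition that will be replaced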
# --------------------------------------------------------------------------------------------------
# Imports (Azure ML Python SDK v2)
# --------------------------------------------------------------------------------------------------
from azure.ai.ml import MLClient, command
from azure.ai.ml.dsl import pipeline
from azure.ai.ml.entities import JobSchedule
from azure.identity import DefaultAzureCredential, InteractiveBrowserCredential
@pipeline
def my_pipeline(
    # some args...
):
    # Define the command, pointing "code" at the folder that contains the
    # updated scripts, config files, etc. to upload
    my_first_component = command(
        # some args ...
        code="my_new_code_folder_to_upload",  # this is where the new code, config files, etc. are placed
    )
    # Run the component as a step of the pipeline
    my_first_step = my_first_component(
        # some args ...
    )
def update_schedule(schedule_name, experiment_name=None):
    # Connect to the AML workspace
    ml_client = MLClient(
        credential=InteractiveBrowserCredential(),
        subscription_id="my-subscription-id",
        resource_group_name="my-resource-group",
        workspace_name="my-workspace",
    )
    # Build the pipeline job from the definition above (i.e. with the new code folder)
    job_object = my_pipeline(
        # some args ...
    )
    # Submit the job, optionally under a specific experiment name
    job = (
        ml_client.jobs.create_or_update(job_object, experiment_name=experiment_name)
        if experiment_name is not None
        else ml_client.jobs.create_or_update(job_object)
    )
    # Fetch the existing schedule to reuse its trigger (time of day, recurrence, etc.)
    job_schedule = ml_client.schedules.get(schedule_name)
    # Recreate the schedule with the same name and trigger, but with the updated pipeline job
    job_schedule = JobSchedule(
        name=schedule_name, trigger=job_schedule.trigger, create_job=job_object
    )
    job_schedule = ml_client.schedules.begin_create_or_update(
        schedule=job_schedule
    ).result()
    # Stream the job logs; this blocks until the submitted job finishes.
    # Comment out the line below to return immediately instead.
    ml_client.jobs.stream(job.name)
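A minimal sketch of how the helper above could be invoked; the schedule and experiment names are placeholders. After it completes, fetching the schedule again should show the original trigger paired with the job built from the new code folder.

if __name__ == "__main__":
    # Hypothetical names; substitute your own schedule / experiment.
    update_schedule("my_schedule_name", experiment_name="my_experiment")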