forked from microsoft/MLHyperparameterTuning

azure-pipelines.yml
# MLHyperparameterTuning Pipeline
trigger:
  batch: true
  branches:
    include:
    - master
variables:
- group: AzureKeyVault
# estimators: 1
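# The AzureKeyVault variable group is expected to supply the service principal
# settings (sptenent, spidentity, spsecret), subscriptionid, and dsmaxruns
# referenced in the steps below.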
jobs:
- job: MLHyperparameterTuningJob
  timeoutInMinutes: 300
  cancelTimeoutInMinutes: 2
  pool:
    vmImage: 'Ubuntu-16.04'
  strategy:
    maxParallel: 3
    matrix: {"eastus": {"azlocation": "eastus", "azurergname": "mlhyptuneeastus"}, "southcentralus": {"azlocation": "southcentralus", "azurergname": "mlhyptunesouthctrl"}, "westus2": {"azlocation": "westus2", "azurergname": "mlhyptunewestus"}}
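  # Each matrix entry runs the full notebook sequence below in its own Azure
  # region and resource group, with up to maxParallel legs at a time.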
  steps:
  - bash: |
      source /usr/share/miniconda/etc/profile.d/conda.sh
      which conda
      conda env create -f environment.yml
      conda env list
      conda activate MLHyperparameterTuning
      conda env list
      echo Login Azure Account
      az login -t $(sptenent) --service-principal -u $(spidentity) --password $(spsecret)
      echo Try and figure out what account set takes
      az account set -h
      echo Try and set it.
      az account set --subscription $(subscriptionid)
      # papermill 01_Data_Prep.ipynb 01_Data_Prep_Output.ipynb --log-output --no-progress-bar -k python3
    displayName: 'Configuration'
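  # The remaining steps execute notebooks 00-08 in order with papermill; each
  # writes an *_Output.ipynb capturing the executed cells and their logs.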
  - bash: |
      source /usr/share/miniconda/etc/profile.d/conda.sh
      conda activate MLHyperparameterTuning
      echo Executing 00_Data_Prep.ipynb
      papermill 00_Data_Prep.ipynb 00_Data_Prep_Output.ipynb --log-output --no-progress-bar -k python3
    displayName: '00_Data_Prep.ipynb'
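  # The estimators notebook parameter is overridden to 1 here and in later steps,
  # presumably to keep these pipeline runs short.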
  - bash: |
      source /usr/share/miniconda/etc/profile.d/conda.sh
      conda activate MLHyperparameterTuning
      echo Executing 01_Training_Script.ipynb
      papermill 01_Training_Script.ipynb 01_Training_Script_Output.ipynb --log-output --no-progress-bar -k python3 -p estimators 1
    displayName: '01_Training_Script.ipynb'
  - bash: |
      source /usr/share/miniconda/etc/profile.d/conda.sh
      conda activate MLHyperparameterTuning
      echo Executing 02_Testing_Script.ipynb
      papermill 02_Testing_Script.ipynb 02_Testing_Script_Output.ipynb --log-output --no-progress-bar -k python3
    displayName: '02_Testing_Script.ipynb'
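  # The matrix leg's subscription, resource group, and region are passed to this
  # notebook as parameters.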
  - bash: |
      source /usr/share/miniconda/etc/profile.d/conda.sh
      conda activate MLHyperparameterTuning
      echo Executing 03_Run_Locally.ipynb
      papermill 03_Run_Locally.ipynb 03_Run_Locally_Output.ipynb --log-output --no-progress-bar -k python3 -p subscription_id $(subscriptionid) -p resource_group $(azurergname) -p location $(azlocation) -p estimators 1
    displayName: '03_Run_Locally.ipynb'
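  # max_total_runs is capped by the dsmaxruns variable to bound the hyperparameter search.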
  - bash: |
      source /usr/share/miniconda/etc/profile.d/conda.sh
      conda activate MLHyperparameterTuning
      echo Executing 04_Hyperparameter_Random_Search.ipynb
      papermill 04_Hyperparameter_Random_Search.ipynb 04_Hyperparameter_Random_Search_Output.ipynb --log-output --no-progress-bar -k python3 -p max_total_runs $(dsmaxruns) -p estimators 1
    displayName: '04_Hyperparameter_Random_Search.ipynb'
  - bash: |
      source /usr/share/miniconda/etc/profile.d/conda.sh
      conda activate MLHyperparameterTuning
      echo Executing 05_Train_Best_Model.ipynb
      papermill 05_Train_Best_Model.ipynb 05_Train_Best_Model_Output.ipynb --log-output --no-progress-bar -k python3 -p estimators 1
    displayName: '05_Train_Best_Model.ipynb'
  - bash: |
      source /usr/share/miniconda/etc/profile.d/conda.sh
      conda activate MLHyperparameterTuning
      echo Executing 06_Test_Best_Model.ipynb
      papermill 06_Test_Best_Model.ipynb 06_Test_Best_Model_Output.ipynb --log-output --no-progress-bar -k python3
    displayName: '06_Test_Best_Model.ipynb'
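  # The AML Pipeline tuning run below is likewise capped at dsmaxruns total runs.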
  - bash: |
      source /usr/share/miniconda/etc/profile.d/conda.sh
      conda activate MLHyperparameterTuning
      echo Executing 07_Train_With_AML_Pipeline.ipynb
      papermill 07_Train_With_AML_Pipeline.ipynb 07_Train_With_AML_Pipeline_Output.ipynb --log-output --no-progress-bar -k python3 -p max_total_runs $(dsmaxruns)
    displayName: '07_Train_With_AML_Pipeline.ipynb'
  - bash: |
      source /usr/share/miniconda/etc/profile.d/conda.sh
      conda activate MLHyperparameterTuning
      echo Executing 08_Tear_Down.ipynb
      papermill 08_Tear_Down.ipynb 08_Tear_Down_Output.ipynb --log-output --no-progress-bar -k python3
    displayName: '08_Tear_Down.ipynb'
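  # Safety net: if the run is canceled or fails before 08_Tear_Down completes,
  # delete the resource group here so no resources are left behind.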
  - bash: |
      source /usr/share/miniconda/etc/profile.d/conda.sh
      conda activate MLHyperparameterTuning
      echo Execute Resource Group Delete
      existResponse=$(az group exists -n $(azurergname))
      if [ "$existResponse" == "true" ]; then
        echo Deleting project resource group
        az group delete --name $(azurergname) --yes
      else
        echo Project resource group did not exist
      fi
      echo Done Cleanup
    displayName: 'Backup Cleanup'
    condition: or(canceled(),failed())
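  # On failure, file a work item in the team project so the broken build gets triaged.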
  - task: CreateWorkItem@1
    inputs:
      workItemType: 'Issue'
      title: $(System.TeamProject) - Build $(Build.BuildNumber) Failed
      assignedTo: 'Mario Bourgoin <[email protected]>'
      associate: true
      teamProject: $(System.TeamProject)
      fieldMappings: |
        Description=Branch: Branch $(Build.SourceBranch) failed to build. Go to Boards>WorkItems and tag the failure type.
    displayName: 'Create work item on failure'
    condition: failed()