@@ -32,7 +32,7 @@ param principalId string = ''
 param openAILocation string
 
 @description('Name of the OpenAI resource group. If not specified, the resource group name will be generated.')
-param openAiResourceGroupName string = ''
+param openAIResourceGroupName string = ''
 
 @description('Whether to deploy Azure OpenAI resources')
 param deployAzureOpenAI bool = true
@@ -47,15 +47,22 @@ param chatDeploymentName string = ''
 // https://learn.microsoft.com/azure/ai-services/openai/concepts/models#gpt-4-and-gpt-4-turbo-preview-models
 param chatDeploymentVersion string = ''
 
-param azureOpenAiAPIVersion string = '2024-03-01-preview'
+param azureOpenAIAPIVersion string = '2024-03-01-preview'
+@secure()
+param azureOpenAIKey string = ''
+@description('Azure OpenAI endpoint to use, if not using the one deployed here.')
+param azureOpenAIEndpoint string = ''
+
+@description('Whether to use Azure OpenAI (either deployed here or elsewhere) or OpenAI.com')
+var useAzureOpenAI = deployAzureOpenAI || !empty(azureOpenAIEndpoint)
 
 @description('Capacity of the GPT deployment')
 // You can increase this, but capacity is limited per model/region, so you will get errors if you go over
 // https://learn.microsoft.com/en-us/azure/ai-services/openai/quotas-limits
 param chatDeploymentCapacity int = 0
 var chatConfig = {
-  modelName: !empty(chatModelName) ? chatModelName : deployAzureOpenAI ? 'gpt-35-turbo' : 'gpt-3.5-turbo'
-  deploymentName: !empty(chatDeploymentName) ? chatDeploymentName : 'chat'
+  modelName: !empty(chatModelName) ? chatModelName : (useAzureOpenAI ? 'gpt-35-turbo' : 'gpt-3.5-turbo')
+  deploymentName: !empty(chatDeploymentName) ? chatDeploymentName : 'gpt-35-turbo'
   deploymentVersion: !empty(chatDeploymentVersion) ? chatDeploymentVersion : '0125'
   deploymentCapacity: chatDeploymentCapacity != 0 ? chatDeploymentCapacity : 30
 }
@@ -68,7 +75,7 @@ param embedDimensions int = 0
 
 var embedConfig = {
   modelName: !empty(embedModelName) ? embedModelName : 'text-embedding-ada-002'
-  deploymentName: !empty(embedDeploymentName) ? embedDeploymentName : 'embed'
+  deploymentName: !empty(embedDeploymentName) ? embedDeploymentName : 'text-embedding-ada-002'
   deploymentVersion: !empty(embedDeploymentVersion) ? embedDeploymentVersion : '2'
   deploymentCapacity: embedDeploymentCapacity != 0 ? embedDeploymentCapacity : 30
   dimensions: embedDimensions != 0 ? embedDimensions : 1536
@@ -183,64 +190,71 @@ module web 'web.bicep' = {
       }
       {
         name: 'OPENAI_CHAT_HOST'
-        value: deployAzureOpenAI ? 'azure' : 'openaicom'
+        value: useAzureOpenAI ? 'azure' : 'openaicom'
       }
       {
         name: 'AZURE_OPENAI_CHAT_DEPLOYMENT'
-        value: deployAzureOpenAI ? chatConfig.deploymentName : ''
+        value: useAzureOpenAI ? chatConfig.deploymentName : ''
       }
       {
         name: 'AZURE_OPENAI_CHAT_MODEL'
-        value: deployAzureOpenAI ? chatConfig.modelName : ''
+        value: useAzureOpenAI ? chatConfig.modelName : ''
       }
       {
         name: 'OPENAICOM_CHAT_MODEL'
-        value: deployAzureOpenAI ? '' : 'gpt-3.5-turbo'
+        value: useAzureOpenAI ? '' : 'gpt-3.5-turbo'
       }
       {
         name: 'OPENAI_EMBED_HOST'
-        value: deployAzureOpenAI ? 'azure' : 'openaicom'
+        value: useAzureOpenAI ? 'azure' : 'openaicom'
       }
       {
         name: 'OPENAICOM_EMBED_MODEL_DIMENSIONS'
-        value: deployAzureOpenAI ? '' : '1536'
+        value: useAzureOpenAI ? '' : '1536'
       }
       {
         name: 'OPENAICOM_EMBED_MODEL'
-        value: deployAzureOpenAI ? '' : 'text-embedding-ada-002'
+        value: useAzureOpenAI ? '' : 'text-embedding-ada-002'
       }
       {
         name: 'AZURE_OPENAI_EMBED_MODEL'
-        value: deployAzureOpenAI ? embedConfig.modelName : ''
+        value: useAzureOpenAI ? embedConfig.modelName : ''
       }
       {
         name: 'AZURE_OPENAI_EMBED_DEPLOYMENT'
-        value: deployAzureOpenAI ? embedConfig.deploymentName : ''
+        value: useAzureOpenAI ? embedConfig.deploymentName : ''
       }
       {
         name: 'AZURE_OPENAI_EMBED_MODEL_DIMENSIONS'
-        value: deployAzureOpenAI ? string(embedConfig.dimensions) : ''
+        value: useAzureOpenAI ? string(embedConfig.dimensions) : ''
       }
       {
         name: 'AZURE_OPENAI_ENDPOINT'
-        value: deployAzureOpenAI ? openAi.outputs.endpoint : ''
+        value: useAzureOpenAI ? (deployAzureOpenAI ? openAI.outputs.endpoint : azureOpenAIEndpoint) : ''
       }
       {
         name: 'AZURE_OPENAI_VERSION'
-        value: deployAzureOpenAI ? azureOpenAiAPIVersion : ''
+        value: useAzureOpenAI ? azureOpenAIAPIVersion : ''
+      }
+      {
+        name: 'AZURE_OPENAI_KEY'
+        secretRef: 'azure-openai-key'
       }
     ]
+    secrets: {
+      'azure-openai-key': azureOpenAIKey
+    }
   }
 }
 
-resource openAiResourceGroup 'Microsoft.Resources/resourceGroups@2021-04-01' existing =
-  if (!empty(openAiResourceGroupName)) {
-    name: !empty(openAiResourceGroupName) ? openAiResourceGroupName : resourceGroup.name
+resource openAIResourceGroup 'Microsoft.Resources/resourceGroups@2021-04-01' existing =
+  if (!empty(openAIResourceGroupName)) {
+    name: !empty(openAIResourceGroupName) ? openAIResourceGroupName : resourceGroup.name
   }
 
-module openAi 'core/ai/cognitiveservices.bicep' = {
+module openAI 'core/ai/cognitiveservices.bicep' = if (deployAzureOpenAI) {
   name: 'openai'
-  scope: openAiResourceGroup
+  scope: openAIResourceGroup
   params: {
     name: '${prefix}-openai'
     location: openAILocation
@@ -279,9 +293,9 @@ module openAi 'core/ai/cognitiveservices.bicep' = {
 }
 
 // USER ROLES
-module openAiRoleUser 'core/security/role.bicep' =
+module openAIRoleUser 'core/security/role.bicep' =
   if (empty(runningOnGh)) {
-    scope: openAiResourceGroup
+    scope: openAIResourceGroup
     name: 'openai-role-user'
     params: {
       principalId: principalId
@@ -291,8 +305,8 @@ module openAiRoleUser 'core/security/role.bicep' =
  }
 
 // Backend roles
-module openAiRoleBackend 'core/security/role.bicep' = {
-  scope: openAiResourceGroup
+module openAIRoleBackend 'core/security/role.bicep' = {
+  scope: openAIResourceGroup
   name: 'openai-role-backend'
   params: {
     principalId: web.outputs.SERVICE_WEB_IDENTITY_PRINCIPAL_ID
@@ -314,13 +328,13 @@ output SERVICE_WEB_NAME string = web.outputs.SERVICE_WEB_NAME
 output SERVICE_WEB_URI string = web.outputs.SERVICE_WEB_URI
 output SERVICE_WEB_IMAGE_NAME string = web.outputs.SERVICE_WEB_IMAGE_NAME
 
-output AZURE_OPENAI_ENDPOINT string = deployAzureOpenAI ? openAi.outputs.endpoint : ''
-output AZURE_OPENAI_VERSION string = deployAzureOpenAI ? azureOpenAiAPIVersion : ''
-output AZURE_OPENAI_CHAT_DEPLOYMENT string = deployAzureOpenAI ? chatConfig.deploymentName : ''
-output AZURE_OPENAI_EMBED_DEPLOYMENT string = deployAzureOpenAI ? embedConfig.deploymentName : ''
-output AZURE_OPENAI_CHAT_MODEL string = deployAzureOpenAI ? chatConfig.modelName : ''
-output AZURE_OPENAI_EMBED_MODEL string = deployAzureOpenAI ? embedConfig.modelName : ''
-output AZURE_OPENAI_EMBED_MODEL_DIMENSIONS int = deployAzureOpenAI ? embedConfig.dimensions : 0
+output AZURE_OPENAI_ENDPOINT string = useAzureOpenAI ? (deployAzureOpenAI ? openAI.outputs.endpoint : azureOpenAIEndpoint) : ''
+output AZURE_OPENAI_VERSION string = useAzureOpenAI ? azureOpenAIAPIVersion : ''
+output AZURE_OPENAI_CHAT_DEPLOYMENT string = useAzureOpenAI ? chatConfig.deploymentName : ''
+output AZURE_OPENAI_EMBED_DEPLOYMENT string = useAzureOpenAI ? embedConfig.deploymentName : ''
+output AZURE_OPENAI_CHAT_MODEL string = useAzureOpenAI ? chatConfig.modelName : ''
+output AZURE_OPENAI_EMBED_MODEL string = useAzureOpenAI ? embedConfig.modelName : ''
+output AZURE_OPENAI_EMBED_MODEL_DIMENSIONS int = useAzureOpenAI ? embedConfig.dimensions : 0
 
 output POSTGRES_HOST string = postgresServer.outputs.POSTGRES_DOMAIN_NAME
 output POSTGRES_USERNAME string = postgresEntraAdministratorName
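
For reference, the new parameters introduced above (deployAzureOpenAI, azureOpenAIEndpoint, azureOpenAIKey) could be supplied from environment variables with a .bicepparam file along these lines. This is only a sketch: the environment-variable names are assumptions, not values taken from this repository, which may instead use a main.parameters.json mapping.

using 'main.bicep'

// Deploy a new Azure OpenAI resource unless an existing endpoint is supplied (assumed env var names).
param deployAzureOpenAI = bool(readEnvironmentVariable('DEPLOY_AZURE_OPENAI', 'true'))
// Existing Azure OpenAI endpoint and key; leave empty when deploying a new resource here.
param azureOpenAIEndpoint = readEnvironmentVariable('AZURE_OPENAI_ENDPOINT', '')
param azureOpenAIKey = readEnvironmentVariable('AZURE_OPENAI_KEY', '')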