I am new to Azure Functions and I am exploring using Queue Storage with my function. My runtime is Python, and I deploy the function via VS Code. I am facing a challenge: when I add my configuration, such as my environment variables and endpoints, at the global (module) level and deploy the function, the function is no longer available. Here is my code:
import os
import requests
import logging
from datetime import datetime
from azure.storage.blob.aio import BlobServiceClient
import azure.functions as func
import azure.core.exceptions  # needed for the AzureError handler in transcribe_audio
import asyncio
import json
import aiohttp
from isodate import parse_duration
import numpy as np
from ruptures.detection import Pelt
import librosa
import tempfile
import time
# --- Azure Function Configuration ---
# Replace with your actual values
STORAGE_CONNECTION_STRING = os.environ["AzureWebJobsStorage"]  # Use single connection string if same
SOURCE_CONTAINER_NAME = os.environ["SOURCE_CONTAINER_NAME"]
DESTINATION_CONTAINER_NAME = os.environ["DESTINATION_CONTAINER_NAME"]
SPEECH_API_KEY = os.environ["SPEECH_API_KEY"]
YOUR_SAS_TOKEN = os.environ["YOUR_SAS_TOKEN"]
# --- Logging Configuration ---
logging.basicConfig(level=logging.INFO)
# --- Speech to Text API Configuration (with v3.2) ---
endpoint = "https://my-speech-name.cognitiveservices.azure.com/speechtotext/v3.2/transcriptions"
headers = {
    "Content-Type": "application/json",
    "Ocp-Apim-Subscription-Key": SPEECH_API_KEY
}
app = func.FunctionApp()
@app.function_name(name="sttqueue")
@app.queue_trigger(arg_name="msg",
                   queue_name="transcription-queue",
                   connection="AzureWebJobsStorage")
async def sttqueue(msg: func.QueueMessage):
    logging.info('Python Queue trigger function processed a queue item: %s', msg.get_body().decode('utf-8'))
    try:
        batch = json.loads(msg.get_body().decode('utf-8'))
        logging.info(f"Processing batch: {batch}")
        source_blob_client = BlobServiceClient.from_connection_string(STORAGE_CONNECTION_STRING)
        destination_blob_client = BlobServiceClient.from_connection_string(STORAGE_CONNECTION_STRING)
        async with source_blob_client, destination_blob_client:
            source_container_client = source_blob_client.get_container_client(SOURCE_CONTAINER_NAME)
            destination_container_client = destination_blob_client.get_container_client(DESTINATION_CONTAINER_NAME)
            async with aiohttp.ClientSession() as session:
                for blob_name in batch:
                    blob_url = f"https://{source_container_client.account_name}.blob.core.windows.net/{source_container_client.container_name}/{blob_name}?{YOUR_SAS_TOKEN}"
                    await transcribe_audio(blob_name, blob_url, session, destination_container_client, endpoint, headers)
    except Exception as e:
        logging.error(f"Error processing queue message: {e}")
async def transcribe_audio(blob_name, blob_url, session, destination_container_client, endpoint, headers):
    try:
        # ... transcription call using the endpoint and headers ...
        ...
    except aiohttp.ClientError as e:
        logging.error(f"Error communicating with Speech API: {e}")
    except azure.core.exceptions.AzureError as e:
        logging.error(f"Error accessing Azure Storage: {e}")
    except Exception as e:
        logging.error(f"Error transcribing {blob_name}: {e}")
However, when I comment out my environment variables and endpoints, the function deploys without any issues.
As you can see, after removing the config environment variables and the endpoint, the function is available along with its trigger. What am I doing wrong? Any suggestions to fix this issue?
I was unable to see the function in the Azure Function App until I added the environment variable details to the app settings. Once they were added, the function appeared. The reason is that a module-level lookup such as os.environ["SOURCE_CONTAINER_NAME"] raises a KeyError when the corresponding app setting does not exist in Azure; the exception is thrown while the host is indexing the function app, so the function never gets registered and does not show up in the portal.
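As a side note (my own sketch, not part of the original post): you can also make the app more resilient to a missing setting by reading configuration with os.environ.get() or only inside the function body, so a misconfigured app setting shows up as a logged error per invocation instead of the whole function disappearing. The helper name get_required_setting below is hypothetical; the trigger settings mirror the ones in the question.

import logging
import os

import azure.functions as func

app = func.FunctionApp()

def get_required_setting(name: str) -> str:
    # Hypothetical helper: fail with a descriptive error only when the value
    # is actually needed, instead of failing at import/indexing time.
    value = os.environ.get(name)
    if not value:
        raise RuntimeError(f"App setting '{name}' is not configured")
    return value

@app.function_name(name="sttqueue")
@app.queue_trigger(arg_name="msg",
                   queue_name="transcription-queue",
                   connection="AzureWebJobsStorage")
async def sttqueue(msg: func.QueueMessage):
    try:
        # Settings are resolved per invocation, so a missing value is logged
        # for this message rather than breaking indexing of the whole app.
        source_container = get_required_setting("SOURCE_CONTAINER_NAME")
        destination_container = get_required_setting("DESTINATION_CONTAINER_NAME")
        logging.info("Message %s will use containers %s -> %s",
                     msg.get_body().decode("utf-8"), source_container, destination_container)
        # ... rest of the processing logic ...
    except Exception as e:
        logging.error("Error processing queue message: %s", e)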
local.settings.json:
{
  "IsEncrypted": false,
  "Values": {
    "AzureWebJobsStorage": "<storageConneString>",
    "SOURCE_CONTAINER_NAME": "<sourceContainer>",
    "DESTINATION_CONTAINER_NAME": "<destinationContainer>",
    "YOUR_SAS_TOKEN": "<SAStoken>",
    "SPEECH_API_KEY": "<speechKey>"
  }
}
I added the same values in the portal under Azure Function App > Environment variables > App settings. After that, the function was visible in the Function App, and I successfully got the output logs under the function's Invocations.