Azure Functions Skill
Build serverless applications with Azure Functions for event-driven computing.
Triggers
Use this skill when you see:
- azure functions, function app, serverless azure
- http trigger, timer trigger, queue trigger
- durable functions, orchestration
- function binding, function.json
Instructions
Create Function App
bash
# Every Function App needs a storage account (state, triggers, logs)
az storage account create \
  --name myfuncstorage \
  --resource-group mygroup \
  --location eastus \
  --sku Standard_LRS

# Function App on the serverless Consumption plan (pay per execution)
az functionapp create \
  --name myfuncapp \
  --resource-group mygroup \
  --storage-account myfuncstorage \
  --consumption-plan-location eastus \
  --runtime python \
  --runtime-version 3.11 \
  --functions-version 4

# Elastic Premium plan: pre-warmed instances avoid cold starts
az functionapp plan create \
  --name mypremiumplan \
  --resource-group mygroup \
  --location eastus \
  --sku EP1

# Function App hosted on the Premium plan
az functionapp create \
  --name myfuncapp \
  --resource-group mygroup \
  --storage-account myfuncstorage \
  --plan mypremiumplan \
  --runtime node \
  --runtime-version 20 \
  --functions-version 4
Python Functions (v2 Programming Model)
HTTP Trigger
python
import azure.functions as func
import logging
import json

# Every HTTP endpoint in this app requires a function-level API key.
app = func.FunctionApp(http_auth_level=func.AuthLevel.FUNCTION)


@app.route(route="hello")
def hello_http(req: func.HttpRequest) -> func.HttpResponse:
    """Greet the caller by name, taken from the query string or JSON body."""
    logging.info('HTTP trigger function processed a request.')

    name = req.params.get('name')
    if not name:
        # Fall back to the request body; invalid JSON just leaves name unset.
        try:
            name = req.get_json().get('name')
        except ValueError:
            pass

    if not name:
        return func.HttpResponse(
            "Please pass a name on the query string or in the request body",
            status_code=400
        )
    return func.HttpResponse(f"Hello, {name}!")


@app.route(route="users/{id}", methods=["GET"])
def get_user(req: func.HttpRequest) -> func.HttpResponse:
    """Return the user identified by the {id} route parameter as JSON."""
    user_id = req.route_params.get('id')
    # Placeholder payload — fetch the user from a database in real code.
    body = json.dumps({"id": user_id, "name": "John"})
    return func.HttpResponse(body, mimetype="application/json")
Timer Trigger
python
@app.timer_trigger(schedule="0 */5 * * * *", arg_name="timer")
def timer_function(timer: func.TimerRequest) -> None:
    """Run every five minutes (NCRONTAB: second minute hour day month weekday)."""
    # past_due is set when the host missed the scheduled fire time.
    if timer.past_due:
        logging.info('The timer is past due!')
    logging.info('Timer trigger function executed.')
    # Run scheduled task
Queue Trigger
python
@app.queue_trigger(arg_name="msg", queue_name="myqueue",
                   connection="AzureWebJobsStorage")
def queue_processor(msg: func.QueueMessage) -> None:
    """Consume messages from 'myqueue' and hand the JSON payload off."""
    body = msg.get_body().decode()
    logging.info(f'Queue trigger processed: {body}')
    process_message(json.loads(body))


# Output binding: one function both answers the HTTP call and writes a
# message to the queue.
@app.route(route="enqueue")
@app.queue_output(arg_name="msg", queue_name="myqueue",
                  connection="AzureWebJobsStorage")
def enqueue_message(req: func.HttpRequest, msg: func.Out[str]) -> func.HttpResponse:
    """Accept a JSON body over HTTP and place it on 'myqueue'."""
    payload = req.get_json()
    msg.set(json.dumps(payload))
    return func.HttpResponse("Message enqueued", status_code=202)
Blob Trigger
python
@app.blob_trigger(arg_name="blob", path="container/{name}",
                  connection="AzureWebJobsStorage")
def blob_processor(blob: func.InputStream) -> None:
    """Process each new or updated blob under 'container/'."""
    logging.info(f'Blob trigger: {blob.name}, Size: {blob.length} bytes')
    process_blob(blob.read())


# Output binding: {rand-guid} gives every uploaded blob a unique name.
@app.route(route="upload")
@app.blob_output(arg_name="outputblob", path="container/{rand-guid}.txt",
                 connection="AzureWebJobsStorage")
def upload_blob(req: func.HttpRequest, outputblob: func.Out[str]) -> func.HttpResponse:
    """Store the raw HTTP request body as a text blob."""
    outputblob.set(req.get_body().decode())
    return func.HttpResponse("Blob created", status_code=201)
Cosmos DB Trigger
python
@app.cosmos_db_trigger(arg_name="documents",
                       container_name="items",
                       database_name="mydb",
                       connection="CosmosDBConnection",
                       lease_container_name="leases",
                       create_lease_container_if_not_exists=True)
def cosmos_trigger(documents: func.DocumentList) -> None:
    """React to inserts/updates in the 'items' container via the change feed."""
    # The lease container tracks change-feed progress across host instances.
    for doc in documents:
        logging.info(f'Document id: {doc["id"]}')
        process_document(doc)
TypeScript Functions (v4 Programming Model)
typescript
1import { app, HttpRequest, HttpResponseInit, InvocationContext } from "@azure/functions";
2
3// HTTP trigger
4app.http("hello", {
5 methods: ["GET", "POST"],
6 authLevel: "function",
7 handler: async (request: HttpRequest, context: InvocationContext): Promise<HttpResponseInit> => {
8 context.log(`Http function processed request for url "${request.url}"`);
9
10 const name = request.query.get("name") || (await request.text()) || "world";
11
12 return {
13 body: `Hello, ${name}!`,
14 };
15 },
16});
17
18// Timer trigger
19app.timer("timerTrigger", {
20 schedule: "0 */5 * * * *",
21 handler: async (timer: Timer, context: InvocationContext): Promise<void> => {
22 context.log("Timer trigger function executed");
23 },
24});
25
26// Queue trigger with output binding
27app.storageQueue("queueTrigger", {
28 queueName: "myqueue",
29 connection: "AzureWebJobsStorage",
30 handler: async (message: unknown, context: InvocationContext): Promise<void> => {
31 context.log(`Queue message: ${JSON.stringify(message)}`);
32 },
33});
Durable Functions
python
import azure.functions as func
import azure.durable_functions as df

# Fix: durable decorators (orchestration_trigger, activity_trigger,
# durable_client_input) require df.DFApp, a FunctionApp subclass that
# wraps orchestrator generators for the durable runtime.
app = df.DFApp(http_auth_level=func.AuthLevel.FUNCTION)


# Orchestrator: must be deterministic and use `yield` (not await)
# for durable tasks so the framework can replay it.
@app.orchestration_trigger(context_name="context")
def orchestrator(context: df.DurableOrchestrationContext):
    # Fan-out: schedule five activity calls in parallel...
    tasks = []
    for i in range(5):
        tasks.append(context.call_activity("activity_function", i))

    # ...fan-in: wait for all of them, then aggregate.
    results = yield context.task_all(tasks)
    return sum(results)


# Activity: a plain unit of work invoked by the orchestrator.
@app.activity_trigger(input_name="input")
def activity_function(input: int) -> int:
    return input * 2


# HTTP starter: launches an orchestration named by the route and
# returns the standard 202 status-query response with management URLs.
@app.route(route="orchestrators/{functionName}")
@app.durable_client_input(client_name="client")
async def http_start(req: func.HttpRequest, client: df.DurableOrchestrationClient) -> func.HttpResponse:
    function_name = req.route_params.get('functionName')
    instance_id = await client.start_new(function_name)

    return client.create_check_status_response(req, instance_id)
Application Settings
bash
# Set application settings (exposed to the app as environment variables)
az functionapp config appsettings set \
  --name myfuncapp \
  --resource-group mygroup \
  --settings "DatabaseConnection=connection-string" \
             "ApiKey=your-api-key"

# List current application settings
az functionapp config appsettings list \
  --name myfuncapp \
  --resource-group mygroup

# Reference a Key Vault secret instead of storing the value directly
az functionapp config appsettings set \
  --name myfuncapp \
  --resource-group mygroup \
  --settings "Secret=@Microsoft.KeyVault(SecretUri=https://myvault.vault.azure.net/secrets/mysecret/)"
Managed Identity
python
from azure.identity import DefaultAzureCredential
from azure.keyvault.secrets import SecretClient

# DefaultAzureCredential resolves to the Function App's managed identity
# when running in Azure (and to developer credentials locally).
credential = DefaultAzureCredential()
secret_client = SecretClient(
    vault_url="https://myvault.vault.azure.net",
    credential=credential,
)

secret = secret_client.get_secret("my-secret")
Deployment
bash
# Deploy the local project with Azure Functions Core Tools
func azure functionapp publish myfuncapp

# Deploy a pre-built zip package
az functionapp deployment source config-zip \
  --name myfuncapp \
  --resource-group mygroup \
  --src app.zip

# Continuous deployment from a GitHub repository
az functionapp deployment source config \
  --name myfuncapp \
  --resource-group mygroup \
  --repo-url https://github.com/org/repo \
  --branch main \
  --manual-integration
Best Practices
- Cold Starts: Use Premium plan for latency-sensitive apps
- Bindings: Use input/output bindings instead of SDK calls when possible
- Secrets: Use Key Vault references for sensitive settings
- Logging: Use structured logging with Application Insights
- Scaling: Configure host.json for optimal scaling
Common Workflows
API Backend
- Create Function App with HTTP triggers
- Implement CRUD operations
- Add authentication (Microsoft Entra ID — formerly Azure AD — or API keys)
- Configure CORS settings
- Enable Application Insights
Event Processing
- Set up queue/blob/Cosmos DB triggers
- Implement processing logic
- Configure dead-letter queues
- Add retry policies
- Monitor with alerts