OpenAI Function Calling Integration into Application
Function calling in the OpenAI API allows the model to invoke your functions in a structured way. The model decides when to call a function, and with which arguments, based on the function's description. This is the foundation for agent systems and for connecting to databases, APIs, and any other external services.
Basic Example
from openai import OpenAI
import json
client = OpenAI()
# Define functions
def _function_tool(name: str, description: str, properties: dict, required: list) -> dict:
    """Build one Chat Completions function-tool entry with a strict JSON schema."""
    return {
        "type": "function",
        "function": {
            "name": name,
            "description": description,
            "parameters": {
                "type": "object",
                "properties": properties,
                "required": required,
                "additionalProperties": False,
            },
            # "strict": True makes the API guarantee the generated
            # arguments conform exactly to the schema above.
            "strict": True,
        },
    }


# Tool definitions advertised to the model.
tools = [
    _function_tool(
        "get_order_status",
        "Get order status by its ID",
        {"order_id": {"type": "string", "description": "Order identifier"}},
        ["order_id"],
    ),
    _function_tool(
        "cancel_order",
        "Cancel order (only if status is pending or processing)",
        {
            "order_id": {"type": "string"},
            "reason": {"type": "string", "description": "Reason for cancellation"},
        },
        ["order_id"],
    ),
]
# Function implementations
def get_order_status(order_id: str) -> dict:
    """Return the status of an order (stub standing in for a database query)."""
    # Hard-coded payload for the tutorial; a real implementation would
    # fetch this row from the orders database.
    return {
        "order_id": order_id,
        "status": "processing",
        "items": 3,
    }
def cancel_order(order_id: str, reason: str = "") -> dict:
    """Cancel an order and report success (stub implementation).

    NOTE(review): `reason` is accepted to match the tool schema but is
    not used or persisted here.
    """
    confirmation = f"Order {order_id} cancelled"
    return {"success": True, "message": confirmation}
# Dispatch table: tool name emitted by the model -> local Python
# implementation invoked by the function-calling loop.
FUNCTION_MAP = {
    "get_order_status": get_order_status,
    "cancel_order": cancel_order,
}
def run_with_tools(user_message: str, conversation_history: list = None, max_rounds: int = 10) -> str:
    """Run the chat completion loop, executing tool calls until the model answers.

    Args:
        user_message: New user message appended to the conversation.
        conversation_history: Optional prior messages. When non-empty it is
            mutated in place (assistant turns and tool results are appended).
        max_rounds: Safety cap on model round-trips so a model that keeps
            requesting tools cannot loop forever.

    Returns:
        The assistant's final text reply.

    Raises:
        RuntimeError: If the model is still requesting tools after max_rounds.
    """
    messages = conversation_history or []
    messages.append({"role": "user", "content": user_message})
    for _ in range(max_rounds):
        response = client.chat.completions.create(
            model="gpt-4o",
            messages=messages,
            tools=tools,
            tool_choice="auto",
            parallel_tool_calls=True,  # Allow parallel calls
        )
        message = response.choices[0].message
        # Any finish without tool calls ("stop", but also "length" or
        # "content_filter") is final — the original only returned on "stop"
        # and would spin forever on the other finish reasons.
        if not message.tool_calls:
            return message.content
        # Record the assistant turn that requested the tool calls.
        messages.append(message.model_dump())
        for tool_call in message.tool_calls:
            func_name = tool_call.function.name
            func = FUNCTION_MAP.get(func_name)
            if func is None:
                # Feed the error back to the model instead of raising KeyError,
                # so it can recover (e.g. pick a valid tool or apologize).
                result = {"error": f"Unknown function: {func_name}"}
            else:
                try:
                    func_args = json.loads(tool_call.function.arguments)
                    result = func(**func_args)
                except (json.JSONDecodeError, TypeError) as exc:
                    # Malformed JSON or arguments not matching the signature.
                    result = {"error": str(exc)}
            messages.append({
                "role": "tool",
                "tool_call_id": tool_call.id,
                "content": json.dumps(result, ensure_ascii=False),
            })
    raise RuntimeError(f"Tool loop did not converge after {max_rounds} rounds")
Structured Outputs with Functions
from pydantic import BaseModel
from typing import Literal
class OrderAction(BaseModel):
    # Schema the model must fill in via function calling; the Literal
    # fields constrain it to an enumerated set of valid values.
    action: Literal["status_check", "cancellation", "escalation"]
    order_id: str
    priority: Literal["low", "medium", "high"]
    notes: str
# Enforce using a specific function with Pydantic.
# FIX: pydantic_function_tool is a module-level helper on the openai package,
# but only the OpenAI client class was imported above — referencing `openai`
# would raise NameError without this import.
import openai

response = client.chat.completions.create(
    model="gpt-4o",
    messages=[{"role": "user", "content": "Order #12345 has been stuck in processing for 3 days"}],
    tools=[openai.pydantic_function_tool(OrderAction)],
    tool_choice="required",  # the model must call a tool on this turn
)
# The strict schema lets us parse the arguments straight into the Pydantic model.
tool_call = response.choices[0].message.tool_calls[0]
action = OrderAction.model_validate_json(tool_call.function.arguments)
print(action.action, action.priority)
Parallel Calls for Data Aggregation
async def aggregate_customer_data(customer_id: str) -> dict:
    """Requests data from multiple sources in parallel.

    NOTE(review): this is an ``async def`` but it calls the *synchronous*
    client, which blocks the event loop — real code should use
    ``AsyncOpenAI`` and ``await`` the request. ``get_profile_tool``,
    ``get_orders_tool`` and ``get_tickets_tool`` are assumed to be defined
    elsewhere; they are not part of this snippet.
    """
    # GPT-4o with parallel_tool_calls=True will invoke all tools simultaneously
    response = client.chat.completions.create(
        model="gpt-4o",
        messages=[{
            "role": "user",
            "content": f"Gather complete information about customer {customer_id}: profile, orders, tickets"
        }],
        tools=[get_profile_tool, get_orders_tool, get_tickets_tool],
        # "required" forces at least one tool call on this turn.
        tool_choice="required",
        parallel_tool_calls=True,
    )
    # Model calls all three functions in one response
    # Then aggregates results
    ...
Practical Case Study: E-commerce Support Service
Functions: get_order, track_shipment, process_refund, update_address, get_product_info.
Result: 64% of requests are handled autonomously (statuses, tracking, simple refunds). Average resolution time: 45 min → 2 min.
Timeline
- Basic function calling loop: 1-2 days
- Parallel calls + Pydantic: 2-3 days
- Production with error handling: 1 week







