Performance Tuning
Azu is designed for high performance, but understanding how to optimize your application can significantly improve response times and throughput. This guide covers profiling, caching strategies, database optimization, and scaling techniques.
Overview
Performance Profiling
Built-in Profiling
# Lightweight timer-based profiler backed by class-level storage.
# Timers are keyed by name only, so concurrent requests that reuse the same
# key can overwrite each other's start times; use unique names (for example
# including a request ID) if you need per-request accuracy.
class PerformanceProfiler
  @@metrics = {} of String => Array(Float64)
  @@start_times = {} of String => Time

  def self.start_timer(name : String)
    @@start_times[name] = Time.utc
  end

  def self.end_timer(name : String) : Float64
    if start_time = @@start_times[name]?
      duration = (Time.utc - start_time).total_milliseconds
      @@metrics[name] ||= [] of Float64
      @@metrics[name] << duration
      @@start_times.delete(name)
      duration
    else
      0.0
    end
  end

  def self.get_metrics(name : String) : Hash(String, Float64)?
    if measurements = @@metrics[name]?
      {
        "count" => measurements.size.to_f,
        "avg"   => measurements.sum / measurements.size,
        "min"   => measurements.min,
        "max"   => measurements.max,
        "p95"   => percentile(measurements, 0.95),
        "p99"   => percentile(measurements, 0.99),
      }
    end
  end

  private def self.percentile(values : Array(Float64), p : Float64) : Float64
    sorted = values.sort
    index = (p * (sorted.size - 1)).round.to_i
    sorted[index]
  end
end
# Profiling middleware
class ProfilingMiddleware < Azu::Handler::Base
  def call(context : HTTP::Server::Context)
    PerformanceProfiler.start_timer("request_#{context.request.path}")
    begin
      call_next(context)
    ensure
      PerformanceProfiler.end_timer("request_#{context.request.path}")
    end
  end
end
# Usage in endpoints
struct ProfiledEndpoint
  include Endpoint(ProfiledRequest, ProfiledResponse)

  get "/api/profiled"

  def call : ProfiledResponse
    PerformanceProfiler.start_timer("database_query")
    # Simulate a database query
    sleep 0.1.seconds
    data = fetch_data_from_database
    PerformanceProfiler.end_timer("database_query")

    PerformanceProfiler.start_timer("data_processing")
    # Process data
    processed_data = process_data(data)
    PerformanceProfiler.end_timer("data_processing")

    ProfiledResponse.new(processed_data)
  end

  private def fetch_data_from_database : Array(String)
    ["item1", "item2", "item3"]
  end

  private def process_data(data : Array(String)) : Hash(String, String)
    {
      "count"     => data.size.to_s,
      "items"     => data.join(", "),
      "timestamp" => Time.utc.to_unix.to_s,
    }
  end
end

struct ProfiledRequest
  include Request

  def initialize
  end
end

struct ProfiledResponse
  include Response

  getter data : Hash(String, String)

  def initialize(@data)
  end

  def render : String
    data.to_json
  end
end

Performance Metrics Endpoint
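The timings collected by PerformanceProfiler can be exposed over HTTP so dashboards or alerting can read them. The sketch below follows the endpoint pattern shown above; MetricsEndpoint, MetricsRequest, MetricsResponse, and the timer names it reports are illustrative, not part of Azu's API.

# Sketch of an endpoint that reports the profiler's aggregated metrics.
struct MetricsEndpoint
  include Endpoint(MetricsRequest, MetricsResponse)

  get "/api/metrics"

  def call : MetricsResponse
    stats = {} of String => Hash(String, Float64)
    # Timer names used by ProfiledEndpoint above; extend as needed.
    ["database_query", "data_processing"].each do |name|
      if metrics = PerformanceProfiler.get_metrics(name)
        stats[name] = metrics
      end
    end
    MetricsResponse.new(stats)
  end
end

struct MetricsRequest
  include Request

  def initialize
  end
end

struct MetricsResponse
  include Response

  getter stats : Hash(String, Hash(String, Float64))

  def initialize(@stats)
  end

  def render : String
    stats.to_json
  end
end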
Caching Strategies
Memory Caching
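For data that is expensive to compute but tolerates short staleness, an in-process cache avoids repeated work entirely. Below is a minimal sketch of a TTL-based memory cache; the MemoryCache name and the TTL values are illustrative, and because it lives in process memory it is per-instance and cleared on restart.

class MemoryCache(V)
  def initialize(@ttl : Time::Span = 5.minutes)
    # key => {cached value, expiration time}
    @store = {} of String => {V, Time}
  end

  # Returns the cached value for key, or computes and stores the block result.
  def fetch(key : String, & : -> V) : V
    if entry = @store[key]?
      value, expires_at = entry
      return value if expires_at > Time.utc
      @store.delete(key)
    end
    value = yield
    @store[key] = {value, Time.utc + @ttl}
    value
  end
end

# Usage: cache an expensive lookup for two minutes
# (load_items is a hypothetical slow call).
# ITEMS_CACHE = MemoryCache(Array(String)).new(2.minutes)
# items = ITEMS_CACHE.fetch("items") { load_items }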
Redis Caching
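When several application instances need to share cached data, an external store such as Redis is the usual choice. The sketch below assumes the crystal-redis shard (Redis.new, get, and set with ex:); the RedisCache wrapper, the REDIS_URL fallback, and the key names are illustrative.

require "redis"
require "json"

class RedisCache
  def initialize(url : String = ENV.fetch("REDIS_URL", "redis://localhost:6379"))
    @redis = Redis.new(url: url)
  end

  # Read-through cache: return the cached JSON string or store the block result.
  def fetch(key : String, ttl : Int32 = 300, & : -> String) : String
    if cached = @redis.get(key)
      return cached
    end
    value = yield
    @redis.set(key, value, ex: ttl)
    value
  end
end

cache = RedisCache.new
payload = cache.fetch("reports:daily", ttl: 600) do
  {"generated_at" => Time.utc.to_unix}.to_json
end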
Database Optimization
Connection Pooling
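crystal-db maintains a connection pool for every DB.open, and the pool is tuned through query parameters on the connection URI. The sketch below assumes the crystal-pg driver and a placeholder database; the pool sizes are starting points to adjust under load.

require "db"
require "pg" # swap for the driver of your database

# Pool settings are passed in the connection URI.
DATABASE = DB.open("postgres://localhost/myapp?initial_pool_size=5&max_pool_size=25&max_idle_pool_size=10&checkout_timeout=5.0")

# Each query checks a connection out of the pool and returns it when done.
def user_names : Array(String)
  DATABASE.query_all("SELECT name FROM users", as: String)
end

max_pool_size caps concurrent connections per process; keep the sum across all application instances below your database server's connection limit.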
Query Optimization
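Two query-level habits pay off most often: fetch only the columns you need with a typed one-shot query, and replace per-row lookups (N+1 queries) with a single aggregated query. A sketch using crystal-db against a placeholder users/orders schema:

require "db"

# 1. Select only the needed columns and map them to types in one round trip.
def recent_users(db : DB::Database, limit : Int32 = 50)
  db.query_all(
    "SELECT id, name FROM users ORDER BY created_at DESC LIMIT $1",
    limit,
    as: {id: Int64, name: String}
  )
end

# 2. Avoid N+1: aggregate related rows in one query instead of querying per user.
def order_counts(db : DB::Database) : Hash(Int64, Int64)
  counts = {} of Int64 => Int64
  db.query_each("SELECT user_id, COUNT(*) FROM orders GROUP BY user_id") do |rs|
    counts[rs.read(Int64)] = rs.read(Int64)
  end
  counts
end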
Concurrency and Parallelism
Async Processing
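Independent I/O-bound calls don't need to run back to back; fibers and channels let them overlap so the request only waits for the slowest one. A minimal sketch, with fetch_user and fetch_orders as hypothetical stand-ins for real calls:

def fetch_user(id : Int64) : String
  sleep 50.milliseconds # stand-in for a database or HTTP call
  "user-#{id}"
end

def fetch_orders(id : Int64) : Array(String)
  sleep 80.milliseconds
  ["order-1", "order-2"]
end

def load_dashboard(user_id : Int64)
  user_channel = Channel(String).new
  orders_channel = Channel(Array(String)).new

  # Both calls run concurrently in their own fibers.
  spawn { user_channel.send(fetch_user(user_id)) }
  spawn { orders_channel.send(fetch_orders(user_id)) }

  {user: user_channel.receive, orders: orders_channel.receive}
end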
Background Jobs
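Work that doesn't have to finish before the response (emails, webhooks, cache warming) can be pushed to background workers. The sketch below is a minimal in-process queue built on a channel and worker fibers; jobs queued this way are lost on restart, so durable workloads usually belong in a dedicated job shard such as mosquito or sidekiq.cr.

class JobQueue
  def initialize(workers : Int32 = 4)
    @jobs = Channel(Proc(Nil)).new(capacity: 100)
    workers.times do |i|
      spawn(name: "job-worker-#{i}") do
        # receive? returns nil once the channel is closed, ending the worker.
        while job = @jobs.receive?
          job.call
        end
      end
    end
  end

  def enqueue(&job : -> Nil)
    @jobs.send(job)
  end

  def shutdown
    @jobs.close
  end
end

queue = JobQueue.new(workers: 2)
queue.enqueue { puts "sending welcome email" }
queue.enqueue { puts "warming cache" }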
Memory Optimization
Object Pooling
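Objects that are expensive to construct (large buffers, parsers, client instances) can be reused across requests instead of being allocated and garbage-collected each time. A minimal sketch of a channel-backed pool; the ObjectPool name and the IO::Memory example are illustrative.

class ObjectPool(T)
  def initialize(size : Int32, &factory : -> T)
    @pool = Channel(T).new(capacity: size)
    size.times { @pool.send(factory.call) }
  end

  # Checks an object out, yields it, and always returns it to the pool.
  def with(&)
    obj = @pool.receive
    begin
      yield obj
    ensure
      @pool.send(obj)
    end
  end
end

# Usage: reuse a small set of pre-allocated buffers.
buffers = ObjectPool(IO::Memory).new(8) { IO::Memory.new(4096) }
buffers.with do |buf|
  buf.clear
  buf << "rendered chunk"
end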
Performance Testing
Load Testing
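Dedicated tools such as wrk, hey, or k6 are the usual choice for sustained load tests. For quick smoke tests during development, a short Crystal script can also drive concurrent requests and report latencies; the URL and request counts below are placeholders.

require "http/client"

url = "http://localhost:4000/api/profiled"
concurrency = 20
requests_per_fiber = 50
total = concurrency * requests_per_fiber

latencies = Channel(Float64).new(capacity: total)

concurrency.times do
  spawn do
    requests_per_fiber.times do
      started = Time.monotonic
      HTTP::Client.get(url)
      latencies.send((Time.monotonic - started).total_milliseconds)
    end
  end
end

samples = Array(Float64).new(total) { latencies.receive }
samples.sort!
puts "requests: #{samples.size}"
puts "avg: #{(samples.sum / samples.size).round(2)} ms"
puts "p95: #{samples[(0.95 * (samples.size - 1)).to_i].round(2)} ms"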
Best Practices
1. Use Connection Pooling
2. Implement Caching Strategically
3. Use Parallel Processing
4. Monitor Performance
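Profiling only helps if the numbers are looked at. A simple option is a reporter fiber that periodically logs the aggregates collected by PerformanceProfiler; the interval and timer names below are illustrative, and in production you would typically ship the same numbers to your metrics backend.

require "log"

spawn(name: "metrics-reporter") do
  loop do
    sleep 60.seconds
    ["database_query", "data_processing"].each do |name|
      if stats = PerformanceProfiler.get_metrics(name)
        Log.info { "#{name}: avg=#{stats["avg"].round(2)}ms p95=#{stats["p95"].round(2)}ms" }
      end
    end
  end
end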
Next Steps
Environment Management - Configure performance settings per environment
File Uploads - Optimize file upload performance
Content Negotiation - Performance considerations for content types
API Reference - Explore performance-related APIs