@@ -1,10 +1,8 @@
 worker_processes auto;
 pid /var/run/nginx.pid;
-
 events {
     worker_connections 1024;
 }
-
 # Health-checker URL to get the latest provider list
 env CONFIG_HEALTH_CHECKER_URL;
 # custom local DNS servers to use for resolving the provider list
@@ -17,25 +15,41 @@ env GO_AUTH_SERVICE_URL;
 env AUTH_CONFIG_FILE;
 # Go Cache Service Socket (Unix socket)
 env GO_CACHE_SOCKET;
-
 http {
     resolver 1.1.1.1 8.8.8.8 valid=300s ipv6=off;
     resolver_timeout 5s;
     lua_package_path "/usr/local/openresty/nginx/lua/?.lua;/usr/local/openresty/lualib/?.lua;;";
-
-    lua_shared_dict providers 10m;
+
+    lua_shared_dict providers 10m;
     lua_shared_dict jwt_tokens 10m;  # Shared memory for JWT token validation
+    lua_shared_dict stats 10m;       # Shared memory for metrics
+
     client_body_buffer_size 10M;
     client_max_body_size 10M;
-
     access_log /dev/stdout;
     error_log /dev/stderr info;

     # Schedule periodic reload of the provider list
     init_worker_by_lua_file lua/init_worker.lua;
-
+
     server {
         listen 8080;
+
+        # Track status codes for all requests
+        log_by_lua_block {
+            local stats = ngx.shared.stats
+            local status = tonumber(ngx.var.status)
+
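+            -- Note: the third argument to incr() is an init value, so a missing
+            -- key is created as 0 before being incremented (no seeding needed)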
+            if status >= 200 and status < 300 then
+                stats:incr("requests_2xx", 1, 0)
+            elseif status >= 300 and status < 400 then
+                stats:incr("requests_3xx", 1, 0)
+            elseif status >= 400 and status < 500 then
+                stats:incr("requests_4xx", 1, 0)
+            elseif status >= 500 then
+                stats:incr("requests_5xx", 1, 0)
+            end
+        }

         # Auth endpoints - no authentication required, handled by Go service
         location /auth/ {
@@ -70,10 +84,76 @@ http {
             proxy_set_header Authorization $http_authorization;
         }

-        # Health check endpoint
+        # Connection metrics endpoint - Nginx's built-in stub_status module
         location /stub_status {
+            internal;
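+            # "internal" keeps this off the public surface: it is only reachable
+            # through internal subrequests, e.g. ngx.location.capture from /metrics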
             stub_status;
         }
+
+        # Prometheus metrics endpoint (combines stub_status + status codes)
+        location /metrics {
+            access_log off;
+
+            content_by_lua_block {
+                local stats = ngx.shared.stats
+
+                -- Read stub_status
+                local res = ngx.location.capture("/stub_status")
+
+                if res.status == 200 then
+                    -- Parse stub_status output
+                    local active = res.body:match("Active connections:%s*(%d+)")
+                    local accepts, handled, requests = res.body:match("%s*(%d+)%s+(%d+)%s+(%d+)")
+                    local reading, writing, waiting = res.body:match("Reading:%s*(%d+)%s+Writing:%s*(%d+)%s+Waiting:%s*(%d+)")
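+                    -- The three patterns above parse the standard stub_status body:
+                    --   Active connections: 2
+                    --   server accepts handled requests
+                    --    16 16 31
+                    --   Reading: 0 Writing: 1 Waiting: 1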
+
+                    -- Output metrics in Prometheus format (matching nginx-exporter format)
+                    ngx.say("# HELP nginx_connections_accepted Accepted client connections")
+                    ngx.say("# TYPE nginx_connections_accepted counter")
+                    ngx.say("nginx_connections_accepted ", accepts or 0)
+
+                    ngx.say("# HELP nginx_connections_active Active client connections")
+                    ngx.say("# TYPE nginx_connections_active gauge")
+                    ngx.say("nginx_connections_active ", active or 0)
+
+                    ngx.say("# HELP nginx_connections_handled Handled client connections")
+                    ngx.say("# TYPE nginx_connections_handled counter")
+                    ngx.say("nginx_connections_handled ", handled or 0)
+
+                    ngx.say("# HELP nginx_connections_reading Connections where NGINX is reading the request header")
+                    ngx.say("# TYPE nginx_connections_reading gauge")
+                    ngx.say("nginx_connections_reading ", reading or 0)
+
+                    ngx.say("# HELP nginx_connections_waiting Idle client connections")
+                    ngx.say("# TYPE nginx_connections_waiting gauge")
+                    ngx.say("nginx_connections_waiting ", waiting or 0)
+
+                    ngx.say("# HELP nginx_connections_writing Connections where NGINX is writing the response back to the client")
+                    ngx.say("# TYPE nginx_connections_writing gauge")
+                    ngx.say("nginx_connections_writing ", writing or 0)
+
+                    ngx.say("# HELP nginx_http_requests_total Total http requests")
+                    ngx.say("# TYPE nginx_http_requests_total counter")
+                    ngx.say("nginx_http_requests_total ", requests or 0)
+                end
+
+                -- Status code metrics
+                ngx.say("# HELP nginx_http_requests_by_status HTTP requests by status code class")
+                ngx.say("# TYPE nginx_http_requests_by_status counter")
+                ngx.say(string.format('nginx_http_requests_by_status{status="2xx"} %d',
+                    stats:get("requests_2xx") or 0))
+                ngx.say(string.format('nginx_http_requests_by_status{status="3xx"} %d',
+                    stats:get("requests_3xx") or 0))
+                ngx.say(string.format('nginx_http_requests_by_status{status="4xx"} %d',
+                    stats:get("requests_4xx") or 0))
+                ngx.say(string.format('nginx_http_requests_by_status{status="5xx"} %d',
+                    stats:get("requests_5xx") or 0))
+
+                -- Optional: Add a "scrape success" metric like nginx_up
+                ngx.say("# HELP nginx_up Status of the last metric scrape")
+                ngx.say("# TYPE nginx_up gauge")
+                ngx.say("nginx_up 1")
+            }
+        }

         # Cache metrics endpoints
         # Cache metrics endpoint - proxy to go-proxy-cache service
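
The lua/init_worker.lua referenced by init_worker_by_lua_file is not included in this diff. As a minimal sketch only, a worker timer that periodically refreshes the providers shared dict from CONFIG_HEALTH_CHECKER_URL could look something like the following; the 60-second interval, the fetch_providers helper, the "list" key, and the use of lua-resty-http are assumptions rather than the project's actual implementation.

-- lua/init_worker.lua (hypothetical sketch, not the file from this repository)
local http = require "resty.http"   -- assumes lua-resty-http is installed

local providers = ngx.shared.providers
local url = os.getenv("CONFIG_HEALTH_CHECKER_URL")

local function fetch_providers(premature)
    if premature then return end     -- nginx is shutting down
    local httpc = http.new()
    local res, err = httpc:request_uri(url)
    if not res or res.status ~= 200 then
        ngx.log(ngx.ERR, "provider refresh failed: ", err or res.status)
        return
    end
    -- cache the raw response body; request handlers decode it on demand
    providers:set("list", res.body)
end

-- refresh from one worker only to avoid duplicate fetches
if ngx.worker.id() == 0 then
    ngx.timer.every(60, fetch_providers)
end

Running the timer in worker 0 only keeps a single periodic request going to the health checker, while the shared dict makes the cached provider list visible to every worker.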