I'm testing my Nginx web server (configured with CentminMod) with https://www.blitz.io/, and the server starts returning timeouts once there are about 40 concurrent users.

I can't understand why! It's Nginx, not Apache! Can anyone help me?

This is my nginx.conf:

user nginx nginx;
worker_processes 1;
worker_priority -10;

worker_rlimit_nofile 260000;
timer_resolution 100ms;

error_log         logs/error.log;

pid               logs/nginx.pid;
pcre_jit on;

events {
    worker_connections  2048;
    accept_mutex on;
    accept_mutex_delay 100ms;
    use epoll;
    #multi_accept on;
}

http {
    include /usr/local/nginx/conf/pagespeedadmin.conf;
    include /usr/local/nginx/conf/fastcgi_param_https_map.conf;

    log_format      main    '$remote_addr - $remote_user [$time_local] $request '
    '"$status" $body_bytes_sent "$http_referer" '
    '"$http_user_agent" "$http_x_forwarded_for" "$gzip_ratio"'
    ' "$connection" "$connection_requests" "$request_time"';

    access_log logs/access.log combined buffer=32k;

    index  index.php index.html index.htm;
    include       mime.types;
    default_type  application/octet-stream;
    charset utf-8;

    sendfile on;
    #sendfile_max_chunk 1m;
    tcp_nopush  on;
    tcp_nodelay off;
    server_tokens off;
    server_name_in_redirect off;

    keepalive_timeout  10;
    keepalive_requests 100;
    lingering_time 20s;
    lingering_timeout 5s;
    keepalive_disable msie6;

    gzip on;
    gzip_vary   on;
    gzip_disable "MSIE [1-6]\.";
    gzip_static on;
    gzip_min_length   1400;
    gzip_buffers      32 8k;
    gzip_http_version 1.0;
    gzip_comp_level 5;
    gzip_proxied    any;
    gzip_types text/plain text/css text/xml application/javascript application/x-javascript application/xml application/xml+rss application/ecmascript application/json image/svg+xml;

    client_body_buffer_size 256k;
    client_body_in_file_only off;
    client_body_timeout 60s;
    client_header_buffer_size 64k;
    ## how long a connection has to complete sending
    ## its headers for the request to be processed
    client_header_timeout  20s;
    client_max_body_size 10m; 
    connection_pool_size  512;
    directio  4m;
    ignore_invalid_headers on;       
    large_client_header_buffers 8 64k;
    output_buffers   8 256k;
    postpone_output  1460;
    proxy_temp_path  /tmp/nginx_proxy/;
    request_pool_size  32k;
    reset_timedout_connection on;
    send_timeout     60s;
    types_hash_max_size 2048;
    server_names_hash_bucket_size 64;

    # for nginx proxy backends to prevent redirects to backend port 
    # port_in_redirect off;

    open_file_cache max=10000 inactive=30s;
    open_file_cache_valid 120s;
    open_file_cache_min_uses 2;
    open_file_cache_errors off;
    open_log_file_cache max=4096 inactive=30s min_uses=2;

    ## limit number of concurrent connections per ip to 16
    ## add the next line to your server {} section
    ## limit_conn limit_per_ip 16;
    ## uncommenting the line below allows 500K sessions
    # limit_conn_log_level error;
    #######################################
    # use limit_zone for Nginx v1.1.7 and lower
    # limit_zone $binary_remote_addr zone=limit_per_ip:16m;
    #######################################
    # use limit_conn_zone for Nginx v1.1.8 and higher
    # limit_conn_zone $binary_remote_addr zone=limit_per_ip:16m;
    #######################################

    include /usr/local/nginx/conf/conf.d/*.conf;
}
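
For reference, this is roughly what the commented limit_conn section above looks like once enabled (a sketch only; the zone name limit_per_ip, the 16m zone size, and the per-IP limit of 16 are the values suggested in the comments):

# in the http {} block (nginx 1.1.8 and later)
limit_conn_zone $binary_remote_addr zone=limit_per_ip:16m;

# in each server {} block that should be limited
limit_conn limit_per_ip 16;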

And this is my server block:

server {
    listen 8080;
    server_name beta.architetturaecosostenibile.it www.beta.architetturaecosostenibile.it;

    #pagespeed off;

    # ngx_pagespeed & ngx_pagespeed handler
    #include /usr/local/nginx/conf/pagespeed.conf;
    #include /usr/local/nginx/conf/pagespeedhandler.conf;
    #include /usr/local/nginx/conf/pagespeedstatslog.conf;

    # limit_conn limit_per_ip 16;
    # ssi  on;

    access_log /home/nginx/domains/beta.architetturaecosostenibile.it/log/access.log combined buffer=32k;
    error_log /home/nginx/domains/beta.architetturaecosostenibile.it/log/error.log debug;
    rewrite_log on;

    root /home/nginx/domains/beta.architetturaecosostenibile.it/public;

    # PageSpeed enhancements
    pagespeed EnableFilters remove_comments,collapse_whitespace;

    include /usr/local/nginx/conf/staticfiles.conf;
    include /usr/local/nginx/conf/php.conf;
    include /usr/local/nginx/conf/drop.conf;
    #include /usr/local/nginx/conf/errorpage.conf;
}
fede91it

1 Answer


nginx is merely a messenger vehicle and is unable to help your system to scale if the backend application itself is heavy. 40 concurrent users would be peanuts to any web server, after all.

Are you sure that the limit is not in your database / application level? I'm 100% sure the limit is there. Go find it.
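
One quick way to find out is to log how long PHP-FPM itself takes per request: add $upstream_response_time next to the $request_time you already log, re-run the test, and compare the two values. A minimal sketch (the format name "timing" and the log path are illustrative; the directives and variables are standard nginx):

log_format timing '$remote_addr [$time_local] "$request" $status '
                  '$request_time $upstream_response_time';
access_log logs/timing.log timing buffer=32k;

# $request_time           = total time nginx spent on the request
# $upstream_response_time = time spent waiting for the FastCGI/PHP-FPM backend;
#                           if this grows under load, the bottleneck is the application.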

Janne Pikkarainen
  • Can I share the phpinfo report? The MySQL timeout is set to 60 seconds, and the PHP execution time to 60... I also configured Varnish in front of Nginx, but the timeouts remain. Only 4 requests per second – fede91it Oct 01 '14 at 07:00
  • Just as an example: if processing the request takes two seconds and the requests come in at 100 requests per second, you will hit some limit, somewhere very soon. Varnish might or might not help you, depending on how dynamic your site is. phpinfo() itself does not help us very much. Does your site run some heavy stuff such as WordPress, Drupal, Magento...? Do you use PHP frameworks such as CakePHP? – Janne Pikkarainen Oct 01 '14 at 07:05
  • I run Joomla with Memcached. Opening the homepage takes 400 ms – fede91it Oct 01 '14 at 07:14
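
To put the arithmetic from these comments into concrete numbers (the worker count below is hypothetical; the real value is pm.max_children in the PHP-FPM pool config):

1 PHP-FPM worker at 400 ms/request              ≈ 2.5 requests/second
16 workers, for example                         ≈ 40 requests/second ceiling
40 concurrent users served at 4 requests/second ≈ 10 seconds per request under load (Little's law)

So under concurrency each request takes roughly 25 times longer than the idle 400 ms, which is consistent with the answer above: the limit sits in the PHP/MySQL layer, not in nginx.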