upstream ServiceInstance {
    # nginx round-robins across the service instances below by default
    server ***.**.***.***:9007;
    server ***.**.***.***:9008;
    server ***.**.***.***:9009;
}

server {
    listen       80;
    server_name  localhost;

    #charset koi8-r;
    #access_log  /var/log/nginx/host.access.log  main;

    location / {
        #root   /usr/share/nginx/html;
        #index  index.html index.htm;
        # incoming requests are forwarded to the upstream group
        proxy_pass http://ServiceInstance;
    }

    #error_page  404              /404.html;

    # redirect server error pages to the static page /50x.html
    #
    error_page   500 502 503 504  /50x.html;
    location = /50x.html {
        root   /usr/share/nginx/html;
    }

    # proxy the PHP scripts to Apache listening on 127.0.0.1:80
    #
    #location ~ \.php$ {
    #    proxy_pass   http://127.0.0.1;
    #}

    # pass the PHP scripts to FastCGI server listening on 127.0.0.1:9000
    #
    #location ~ \.php$ {
    #    root           html;
    #    fastcgi_pass   127.0.0.1:9000;
    #    fastcgi_index  index.php;
    #    fastcgi_param  SCRIPT_FILENAME  /scripts$fastcgi_script_name;
    #    include        fastcgi_params;
    #}

    # deny access to .htaccess files, if Apache's document root
    # concurs with nginx's one
    #
    #location ~ /\.ht {
    #    deny  all;
    #}
}
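If the .NET Core instances need to see the original client address, host, and scheme (for logging or link generation), the forwarding location can also pass those along. Here is a minimal sketch using the standard proxy_set_header directive; the header names shown are common conventions, and whether your application reads them is an assumption about your setup:

location / {
    proxy_pass http://ServiceInstance;
    # forward the original request details to the backend instance
    proxy_set_header Host              $host;
    proxy_set_header X-Real-IP         $remote_addr;
    proxy_set_header X-Forwarded-For   $proxy_add_x_forwarded_for;
    proxy_set_header X-Forwarded-Proto $scheme;
}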
Each request is now routed to a different service instance in turn, which is exactly the load balancing we were after!
We can also assign weights: the larger an instance's weight value, the more requests nginx sends to it.
upstream ServiceInstance {
    # weighted round-robin: requests are distributed in proportion to the weights below
    server ***.**.***.***:9007 weight=1;
    server ***.**.***.***:9008 weight=2;
    server ***.**.***.***:9009 weight=3;
}
Nginx is a very powerful tool, and it is well worth exploring further on your own! 😎
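Round-robin (with or without weights) is only one of the strategies nginx supports. As a small sketch of two built-in alternatives, least_conn and ip_hash can be enabled in the upstream block (the instance addresses are the same placeholders as above; only one strategy should be active at a time):

upstream ServiceInstance {
    # least_conn;   # send each request to the instance with the fewest active connections
    ip_hash;        # pin each client IP to the same instance (simple session stickiness)
    server ***.**.***.***:9007;
    server ***.**.***.***:9008;
    server ***.**.***.***:9009;
}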
Summary
That concludes this article on implementing load balancing for a .NET Core project with Nginx.