Добавление gzip
Теперь ответы сжимаются, что уменьшает объём передаваемых данных и ускоряет загрузку.
This commit is contained in:
109
Backend/WebServer/compression.go
Normal file
109
Backend/WebServer/compression.go
Normal file
@@ -0,0 +1,109 @@
|
||||
package webserver
|
||||
|
||||
import (
|
||||
"compress/gzip"
|
||||
"io"
|
||||
"net/http"
|
||||
"strings"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// compressibleTypes lists the MIME types worth gzip-compressing.
// Already-compressed formats (images, video, archives) are deliberately
// absent: recompressing them wastes CPU for no size benefit.
var compressibleTypes = map[string]bool{
	// Textual content.
	"text/html":       true,
	"text/plain":      true,
	"text/css":        true,
	"text/csv":        true,
	"text/xml":        true,
	"text/javascript": true,

	// Application payloads that are text under the hood.
	"application/javascript":    true,
	"application/json":          true,
	"application/manifest+json": true,
	"application/xml":           true,
	"application/xhtml+xml":     true,
	"application/rss+xml":       true,
	"application/atom+xml":      true,
	"application/wasm":          true,

	// SVG is XML text despite the image/ prefix.
	"image/svg+xml": true,
}
|
||||
|
||||
var gzipWriterPool = sync.Pool{
|
||||
New: func() interface{} {
|
||||
w, _ := gzip.NewWriterLevel(io.Discard, gzip.DefaultCompression)
|
||||
return w
|
||||
},
|
||||
}
|
||||
|
||||
// gzipResponseWriter wraps an http.ResponseWriter and transparently
// gzip-compresses the response body when the Content-Type set by the
// handler is listed in compressibleTypes.
type gzipResponseWriter struct {
	http.ResponseWriter
	gzWriter      *gzip.Writer // pooled writer, reset onto the wrapped ResponseWriter
	headerWritten bool         // set once headers have been inspected by detectAndSetHeaders
	compressed    bool         // true when Content-Encoding: gzip was chosen for this response
}
|
||||
|
||||
func (g *gzipResponseWriter) Write(data []byte) (int, error) {
|
||||
if !g.headerWritten {
|
||||
g.detectAndSetHeaders()
|
||||
}
|
||||
if g.compressed {
|
||||
return g.gzWriter.Write(data)
|
||||
}
|
||||
return g.ResponseWriter.Write(data)
|
||||
}
|
||||
|
||||
func (g *gzipResponseWriter) WriteHeader(statusCode int) {
|
||||
if !g.headerWritten {
|
||||
g.detectAndSetHeaders()
|
||||
}
|
||||
g.ResponseWriter.WriteHeader(statusCode)
|
||||
}
|
||||
|
||||
func (g *gzipResponseWriter) detectAndSetHeaders() {
|
||||
g.headerWritten = true
|
||||
contentType := g.ResponseWriter.Header().Get("Content-Type")
|
||||
if contentType == "" {
|
||||
return
|
||||
}
|
||||
|
||||
mimeType := strings.SplitN(contentType, ";", 2)[0]
|
||||
mimeType = strings.TrimSpace(mimeType)
|
||||
|
||||
if compressibleTypes[mimeType] {
|
||||
g.ResponseWriter.Header().Set("Content-Encoding", "gzip")
|
||||
g.ResponseWriter.Header().Add("Vary", "Accept-Encoding")
|
||||
g.ResponseWriter.Header().Del("Content-Length")
|
||||
g.compressed = true
|
||||
}
|
||||
}
|
||||
|
||||
func (g *gzipResponseWriter) Flush() {
|
||||
if g.compressed {
|
||||
g.gzWriter.Flush()
|
||||
}
|
||||
if flusher, ok := g.ResponseWriter.(http.Flusher); ok {
|
||||
flusher.Flush()
|
||||
}
|
||||
}
|
||||
|
||||
func (g *gzipResponseWriter) close() {
|
||||
if g.compressed {
|
||||
g.gzWriter.Close()
|
||||
gzipWriterPool.Put(g.gzWriter)
|
||||
}
|
||||
}
|
||||
|
||||
func clientAcceptsGzip(r *http.Request) bool {
|
||||
return strings.Contains(r.Header.Get("Accept-Encoding"), "gzip")
|
||||
}
|
||||
|
||||
func isAlreadyCompressed(header http.Header) bool {
|
||||
return header.Get("Content-Encoding") != ""
|
||||
}
|
||||
|
||||
func newGzipResponseWriter(w http.ResponseWriter) *gzipResponseWriter {
|
||||
gz := gzipWriterPool.Get().(*gzip.Writer)
|
||||
gz.Reset(w)
|
||||
return &gzipResponseWriter{
|
||||
ResponseWriter: w,
|
||||
gzWriter: gz,
|
||||
}
|
||||
}
|
||||
@@ -170,10 +170,19 @@ func isRootFileRoutingEnabled(host string) bool {
|
||||
return site.Root_file_routing
|
||||
}
|
||||
}
|
||||
// По умолчанию роутинг выключен
|
||||
return false
|
||||
}
|
||||
|
||||
// Проверяет включено ли сжатие для сайта
|
||||
func isSiteCompressionEnabled(host string) bool {
|
||||
for _, site := range config.ConfigData.Site_www {
|
||||
if site.Host == host {
|
||||
return site.IsCompressionEnabled()
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Проверка vAccess с обработкой ошибки
|
||||
// Возвращает true если доступ разрешён, false если заблокирован
|
||||
func checkVAccessAndHandle(w http.ResponseWriter, r *http.Request, filePath string, host string) bool {
|
||||
@@ -246,6 +255,13 @@ func handler(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
}
|
||||
|
||||
// Сжатие ответа (gzip)
|
||||
if isSiteCompressionEnabled(host) && clientAcceptsGzip(r) {
|
||||
gzw := newGzipResponseWriter(w)
|
||||
defer gzw.close()
|
||||
w = gzw
|
||||
}
|
||||
|
||||
// Проверяем существование директории сайта
|
||||
if _, err := os.Stat("WebServer/www/" + host + "/public_www"); err != nil {
|
||||
http.ServeFile(w, r, "WebServer/tools/error_page/index.html")
|
||||
|
||||
@@ -306,26 +306,30 @@ func StartHandlerProxy(w http.ResponseWriter, r *http.Request) (valid bool) {
|
||||
}
|
||||
}
|
||||
|
||||
// Сжатие ответа (gzip) — только если бэкенд не сжал сам
|
||||
var gzw *gzipResponseWriter
|
||||
if proxyConfig.IsCompressionEnabled() && clientAcceptsGzip(r) && !isAlreadyCompressed(resp.Header) {
|
||||
gzw = newGzipResponseWriter(w)
|
||||
defer gzw.close()
|
||||
w = gzw
|
||||
}
|
||||
|
||||
// Устанавливаем статус код
|
||||
w.WriteHeader(resp.StatusCode)
|
||||
|
||||
// Копируем тело ответа с поддержкой streaming (SSE, chunked responses)
|
||||
// Используем буферизированное копирование с принудительной отправкой данных
|
||||
flusher, canFlush := w.(http.Flusher)
|
||||
|
||||
// Буфер для чанков (32KB - оптимальный размер для баланса производительности)
|
||||
buffer := make([]byte, 32*1024)
|
||||
|
||||
for {
|
||||
n, err := resp.Body.Read(buffer)
|
||||
if n > 0 {
|
||||
// Записываем прочитанные данные
|
||||
if _, writeErr := w.Write(buffer[:n]); writeErr != nil {
|
||||
log.Printf("Ошибка записи тела ответа: %v", writeErr)
|
||||
break
|
||||
}
|
||||
|
||||
// Принудительно отправляем данные клиенту (критично для SSE)
|
||||
if canFlush {
|
||||
flusher.Flush()
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ type ProxyInfo struct {
|
||||
ServiceHTTPSuse bool `json:"ServiceHTTPSuse"`
|
||||
AutoHTTPS bool `json:"AutoHTTPS"`
|
||||
AutoCreateSSL bool `json:"AutoCreateSSL"`
|
||||
Compression *bool `json:"Compression"`
|
||||
Status string `json:"Status"`
|
||||
}
|
||||
|
||||
|
||||
@@ -156,6 +156,7 @@ func AddSiteToConfig(siteData SiteInfo) error {
|
||||
Status: siteData.Status,
|
||||
Root_file: siteData.RootFile,
|
||||
Root_file_routing: siteData.RootFileRouting,
|
||||
Compression: siteData.Compression,
|
||||
}
|
||||
|
||||
// Добавляем в массив
|
||||
|
||||
@@ -16,6 +16,7 @@ func GetSitesList() []SiteInfo {
|
||||
RootFile: site.Root_file,
|
||||
RootFileRouting: site.Root_file_routing,
|
||||
AutoCreateSSL: site.AutoCreateSSL,
|
||||
Compression: site.Compression,
|
||||
}
|
||||
sites = append(sites, siteInfo)
|
||||
}
|
||||
|
||||
@@ -8,5 +8,6 @@ type SiteInfo struct {
|
||||
RootFile string `json:"root_file"`
|
||||
RootFileRouting bool `json:"root_file_routing"`
|
||||
AutoCreateSSL bool `json:"auto_create_ssl"`
|
||||
Compression *bool `json:"Compression"`
|
||||
}
|
||||
|
||||
|
||||
@@ -22,6 +22,14 @@ type Site_www struct {
|
||||
Root_file string `json:"root_file"`
|
||||
Root_file_routing bool `json:"root_file_routing"`
|
||||
AutoCreateSSL bool `json:"AutoCreateSSL"`
|
||||
Compression *bool `json:"Compression"`
|
||||
}
|
||||
|
||||
func (s Site_www) IsCompressionEnabled() bool {
|
||||
if s.Compression == nil {
|
||||
return true
|
||||
}
|
||||
return *s.Compression
|
||||
}
|
||||
|
||||
type Soft_Settings struct {
|
||||
@@ -42,6 +50,14 @@ type Proxy_Service struct {
|
||||
ServiceHTTPSuse bool `json:"ServiceHTTPSuse"`
|
||||
AutoHTTPS bool `json:"AutoHTTPS"`
|
||||
AutoCreateSSL bool `json:"AutoCreateSSL"`
|
||||
Compression *bool `json:"Compression"`
|
||||
}
|
||||
|
||||
func (p Proxy_Service) IsCompressionEnabled() bool {
|
||||
if p.Compression == nil {
|
||||
return true
|
||||
}
|
||||
return *p.Compression
|
||||
}
|
||||
|
||||
func LoadConfig() {
|
||||
@@ -88,6 +104,20 @@ func migrateConfig(originalData []byte) {
|
||||
}
|
||||
}
|
||||
|
||||
// Проверяем Site_www на наличие Compression
|
||||
if rawSites, ok := rawConfig["Site_www"]; ok {
|
||||
var sites2 []map[string]interface{}
|
||||
if err := json.Unmarshal(rawSites, &sites2); err == nil {
|
||||
for i, site := range sites2 {
|
||||
if _, exists := site["Compression"]; !exists {
|
||||
needsSave = true
|
||||
compressionTrue := true
|
||||
ConfigData.Site_www[i].Compression = &compressionTrue
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Проверяем Proxy_Service
|
||||
if rawProxies, ok := rawConfig["Proxy_Service"]; ok {
|
||||
var proxies []map[string]interface{}
|
||||
@@ -101,6 +131,20 @@ func migrateConfig(originalData []byte) {
|
||||
}
|
||||
}
|
||||
|
||||
// Проверяем Proxy_Service на наличие Compression
|
||||
if rawProxies, ok := rawConfig["Proxy_Service"]; ok {
|
||||
var proxies2 []map[string]interface{}
|
||||
if err := json.Unmarshal(rawProxies, &proxies2); err == nil {
|
||||
for i, proxy := range proxies2 {
|
||||
if _, exists := proxy["Compression"]; !exists {
|
||||
needsSave = true
|
||||
compressionTrue := true
|
||||
ConfigData.Proxy_Service[i].Compression = &compressionTrue
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Проверяем Soft_Settings на наличие ACME_enabled
|
||||
if rawSettings, ok := rawConfig["Soft_Settings"]; ok {
|
||||
var settings map[string]interface{}
|
||||
|
||||
Reference in New Issue
Block a user