Compare commits (8 commits):

- 5e7599756f
- 5db50c1ca2
- 884507b45a
- 2574d0504e
- 9535abe314
- 8e6a60f684
- ead26ea16d
- 3d66c01d7b
CHANGELOG.md (new file, +26 lines)

@@ -0,0 +1,26 @@

# v2.6.2 Jun 4 2023

+ Added advance stats operation tab
+ Added statistic reset #13
+ Added statistic export to csv and json (please use json)
+ Make subdomain clickable (not vdir) #12
+ Added TCP Proxy
+ Updates SMTP setup UI to make it more straight forward to setup

# v2.6.1 May 31 2023

+ Added reverse proxy TLS skip verification
+ Added basic auth
+ Edit proxy settings
+ Whitelist
+ TCP Proxy (experimental)
+ Info (Utilities page)

# v2.6 May 27 2023

+ Basic auth
+ Support TLS verification skip (for self signed certs)
+ Added trend analysis
+ Added referer and file type analysis
+ Added cert expire day display
+ Moved subdomain proxy logic to dpcore
README.md (24 lines changed)

@@ -120,27 +120,13 @@ To start the module, go to System Settings > Modules > Subservice and enable it

(Several screenshot image links in this section were updated or removed; the image URLs did not survive extraction.)

 More screenshots on the wikipage [Screenshots](https://github.com/tobychui/zoraxy/wiki/Screenshots)!

 ## FAQ

 There is a wikipage with [Frequently-Asked-Questions](https://github.com/tobychui/zoraxy/wiki/FAQ---Frequently-Asked-Questions)!

 ## Global Area Network Controller

@@ -175,10 +161,6 @@ Loopback web ssh connection, by default, is disabled. This means that if you are

 ./zoraxy -sshlb=true
 ```

-## FAQ
-- [How to run Zoraxy as system daemon?](https://github.com/tobychui/zoraxy/issues/8#issuecomment-1561539919)

 ## License

 This project is open source under AGPL. I open source this project so everyone can check for security issues and benefit all users. **If your plans to use this project in commercial environment which violate the AGPL terms, please contact toby@imuslab.com for an alternative commercial license.**
img/screenshots (binary files): two existing screenshots were updated (66 KiB → 90 KiB and 71 KiB → 100 KiB), and img/screenshots/6.png was added as a new file (77 KiB).
@@ -121,6 +121,8 @@ func initAPIs() {

 	authRouter.HandleFunc("/api/analytic/list", AnalyticLoader.HandleSummaryList)
 	authRouter.HandleFunc("/api/analytic/load", AnalyticLoader.HandleLoadTargetDaySummary)
 	authRouter.HandleFunc("/api/analytic/loadRange", AnalyticLoader.HandleLoadTargetRangeSummary)
+	authRouter.HandleFunc("/api/analytic/exportRange", AnalyticLoader.HandleRangeExport)
+	authRouter.HandleFunc("/api/analytic/resetRange", AnalyticLoader.HandleRangeReset)

 	//Network utilities
 	authRouter.HandleFunc("/api/tools/ipscan", HandleIpScan)
@@ -9,8 +9,8 @@ require (

 	github.com/gorilla/sessions v1.2.1
 	github.com/gorilla/websocket v1.4.2
 	github.com/grandcat/zeroconf v1.0.0
+	github.com/microcosm-cc/bluemonday v1.0.24
 	github.com/oschwald/geoip2-golang v1.8.0
 	github.com/satori/go.uuid v1.2.0
-	golang.org/x/net v0.9.0 // indirect
-	golang.org/x/sys v0.7.0
+	golang.org/x/sys v0.8.0
 )
src/go.sum (16 lines changed)

@@ -1,3 +1,5 @@
+github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
+github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
 github.com/boltdb/bolt v1.3.1 h1:JQmyP4ZBrce+ZQu0dY660FMfatumYDLun9hBCUVIkF4=
 github.com/boltdb/bolt v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps=
 github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4=
@@ -10,6 +12,8 @@ github.com/go-ping/ping v1.1.0/go.mod h1:xIFjORFzTxqIV/tDVGO4eDy/bLuSyawEeojSm3G
 github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
 github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
 github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY=
+github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c=
 github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ=
 github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
 github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI=
@@ -18,6 +22,8 @@ github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0U
 github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
 github.com/grandcat/zeroconf v1.0.0 h1:uHhahLBKqwWBV6WZUDAT71044vwOTL+McW0mBJvo6kE=
 github.com/grandcat/zeroconf v1.0.0/go.mod h1:lTKmG1zh86XyCoUeIHSA4FJMBwCJiQmGfcP2PdzytEs=
+github.com/microcosm-cc/bluemonday v1.0.24 h1:NGQoPtwGVcbGkKfvyYk1yRqknzBuoMiUrO6R7uFTPlw=
+github.com/microcosm-cc/bluemonday v1.0.24/go.mod h1:ArQySAMps0790cHSkdPEJ7bGkF2VePWH773hsJNSHf8=
 github.com/miekg/dns v1.1.27 h1:aEH/kqUzUxGJ/UHcEKdJY+ugH6WEzsEBBSPa8zuy1aM=
 github.com/miekg/dns v1.1.27/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM=
 github.com/oschwald/geoip2-golang v1.8.0 h1:KfjYB8ojCEn/QLqsDU0AzrJ3R5Qa9vFlx3z6SLNcKTs=
@@ -52,8 +58,8 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v
 golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=
 golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
 golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
-golang.org/x/net v0.9.0 h1:aWJ/m6xSmxWBx+V0XRHTlrYrPG56jKsLdTFmsSsCzOM=
-golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
+golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M=
+golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -69,12 +75,12 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc
 golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220804214406-8e32c043e418/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.7.0 h1:3jlCCIQZPdOYu1h8BkNvLz8Kgwtae2cagcG/VamtZRU=
-golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU=
+golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
 golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
-golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
+golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
@@ -38,9 +38,9 @@ var ztAuthToken = flag.String("ztauth", "", "ZeroTier authtoken for the local no

 var ztAPIPort = flag.Int("ztport", 9993, "ZeroTier controller API port")
 var (
 	name        = "Zoraxy"
-	version     = "2.6.1"
+	version     = "2.6.3"
 	nodeUUID    = "generic"
-	development = true  //Set this to false to use embedded web fs
+	development = false //Set this to false to use embedded web fs
 	bootTime    = time.Now().Unix()

 /*
@@ -278,6 +278,12 @@ func addXForwardedForHeader(req *http.Request) {

 		clientIP = strings.Join(prior, ", ") + ", " + clientIP
 	}
 	req.Header.Set("X-Forwarded-For", clientIP)
+	if req.TLS != nil {
+		req.Header.Set("X-Forwarded-Proto", "https")
+	} else {
+		req.Header.Set("X-Forwarded-Proto", "http")
+	}
+
 	}
 }
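The added block sets `X-Forwarded-Proto` according to whether the inbound connection to the proxy used TLS, alongside the existing `X-Forwarded-For` and `X-Forwarded-Host` headers. A minimal sketch of how an upstream application sitting behind Zoraxy might consume these headers; the handler, route, and port below are illustrative assumptions, not part of this change:

```go
package main

import (
	"fmt"
	"log"
	"net/http"
)

// handler trusts the forwarded headers set by the reverse proxy in front of it.
func handler(w http.ResponseWriter, r *http.Request) {
	scheme := r.Header.Get("X-Forwarded-Proto") // "https" or "http", set by the proxy
	host := r.Header.Get("X-Forwarded-Host")    // original Host header seen by the proxy
	fmt.Fprintf(w, "client reached this backend via %s://%s\n", scheme, host)
}

func main() {
	http.HandleFunc("/", handler)
	log.Fatal(http.ListenAndServe(":8080", nil)) // backend port is an assumption for this example
}
```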
@@ -57,6 +57,7 @@ func (router *Router) rewriteURL(rooturl string, requestURL string) string {

 // Handle subdomain request
 func (h *ProxyHandler) subdomainRequest(w http.ResponseWriter, r *http.Request, target *ProxyEndpoint) {
 	r.Header.Set("X-Forwarded-Host", r.Host)
+	r.Header.Set("X-Forwarded-Server", "zoraxy-"+h.Parent.Option.HostUUID)
 	requestURL := r.URL.String()
 	if r.Header["Upgrade"] != nil && strings.ToLower(r.Header["Upgrade"][0]) == "websocket" {
 		//Handle WebSocket request. Forward the custom Upgrade header and rewrite origin

@@ -116,6 +117,7 @@ func (h *ProxyHandler) proxyRequest(w http.ResponseWriter, r *http.Request, targ

 	r.URL, _ = url.Parse(rewriteURL)

 	r.Header.Set("X-Forwarded-Host", r.Host)
+	r.Header.Set("X-Forwarded-Server", "zoraxy-"+h.Parent.Option.HostUUID)
 	if r.Header["Upgrade"] != nil && strings.ToLower(r.Header["Upgrade"][0]) == "websocket" {
 		//Handle WebSocket request. Forward the custom Upgrade header and rewrite origin
 		r.Header.Set("A-Upgrade", "websocket")
@@ -22,6 +22,7 @@ type ProxyHandler struct {
 }

 type RouterOption struct {
+	HostUUID           string
 	Port               int
 	UseTls             bool
 	ForceHttpsRedirect bool
src/mod/geodb/blacklist.go (new file, +91 lines)

@@ -0,0 +1,91 @@

 package geodb

 import "strings"

 /*
 	Blacklist.go

 	This script stores the blacklist related functions
 */

 //Geo Blacklist

 func (s *Store) AddCountryCodeToBlackList(countryCode string) {
 	countryCode = strings.ToLower(countryCode)
 	s.sysdb.Write("blacklist-cn", countryCode, true)
 }

 func (s *Store) RemoveCountryCodeFromBlackList(countryCode string) {
 	countryCode = strings.ToLower(countryCode)
 	s.sysdb.Delete("blacklist-cn", countryCode)
 }

 func (s *Store) IsCountryCodeBlacklisted(countryCode string) bool {
 	countryCode = strings.ToLower(countryCode)
 	var isBlacklisted bool = false
 	s.sysdb.Read("blacklist-cn", countryCode, &isBlacklisted)
 	return isBlacklisted
 }

 func (s *Store) GetAllBlacklistedCountryCode() []string {
 	bannedCountryCodes := []string{}
 	entries, err := s.sysdb.ListTable("blacklist-cn")
 	if err != nil {
 		return bannedCountryCodes
 	}
 	for _, keypairs := range entries {
 		ip := string(keypairs[0])
 		bannedCountryCodes = append(bannedCountryCodes, ip)
 	}

 	return bannedCountryCodes
 }

 //IP Blacklists

 func (s *Store) AddIPToBlackList(ipAddr string) {
 	s.sysdb.Write("blacklist-ip", ipAddr, true)
 }

 func (s *Store) RemoveIPFromBlackList(ipAddr string) {
 	s.sysdb.Delete("blacklist-ip", ipAddr)
 }

 func (s *Store) GetAllBlacklistedIp() []string {
 	bannedIps := []string{}
 	entries, err := s.sysdb.ListTable("blacklist-ip")
 	if err != nil {
 		return bannedIps
 	}

 	for _, keypairs := range entries {
 		ip := string(keypairs[0])
 		bannedIps = append(bannedIps, ip)
 	}

 	return bannedIps
 }

 func (s *Store) IsIPBlacklisted(ipAddr string) bool {
 	var isBlacklisted bool = false
 	s.sysdb.Read("blacklist-ip", ipAddr, &isBlacklisted)
 	if isBlacklisted {
 		return true
 	}

 	//Check for IP wildcard and CIDR rules
 	AllBlacklistedIps := s.GetAllBlacklistedIp()
 	for _, blacklistRule := range AllBlacklistedIps {
 		wildcardMatch := MatchIpWildcard(ipAddr, blacklistRule)
 		if wildcardMatch {
 			return true
 		}

 		cidrMatch := MatchIpCIDR(ipAddr, blacklistRule)
 		if cidrMatch {
 			return true
 		}
 	}

 	return false
 }
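A rough usage sketch of the new blacklist API. It assumes an already-initialized `*geodb.Store` (its constructor is not part of this diff) and borrows the module path from the sibling imports shown elsewhere in this comparison; the sample addresses are illustrative:

```go
package demo

import (
	"fmt"

	"imuslab.com/zoraxy/mod/geodb" // module path assumed from the other imports in this diff
)

// demoBlacklist shows the intended call pattern: exact-IP, wildcard, and CIDR
// rules all go through AddIPToBlackList and are matched inside IsIPBlacklisted.
func demoBlacklist(store *geodb.Store) {
	store.AddCountryCodeToBlackList("CN")  // stored lowercased as "cn"
	store.AddIPToBlackList("203.0.113.7")  // exact IP rule
	store.AddIPToBlackList("192.0.2.0/24") // CIDR rule, matched via MatchIpCIDR

	// 192.0.2.15 falls inside the 192.0.2.0/24 rule, so this prints true
	fmt.Println(store.IsIPBlacklisted("192.0.2.15"))
}
```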
@@ -3,8 +3,8 @@ package geodb

 import (
 	_ "embed"
 	"log"
+	"net"
 	"net/http"
-	"strings"

 	"imuslab.com/zoraxy/mod/database"
 )
@@ -112,170 +112,6 @@ func (s *Store) Close() {

 }

(164 lines removed here: the country-code and IP black/whitelist helpers formerly defined in geodb.go. Equivalent implementations now live in the new src/mod/geodb/blacklist.go and src/mod/geodb/whitelist.go files shown in this comparison.)

 /*
 	Check if a IP address is blacklisted, in either country or IP blacklist
 	IsBlacklisted default return is false (allow access)

@@ -341,6 +177,23 @@ func (s *Store) IsWhitelisted(ipAddr string) bool {

 	return false
 }
+
+// A helper function that check both blacklist and whitelist for access
+// for both geoIP and ip / CIDR ranges
+func (s *Store) AllowIpAccess(ipaddr string) bool {
+	if s.IsBlacklisted(ipaddr) {
+		return false
+	}
+
+	return s.IsWhitelisted(ipaddr)
+}
+
+func (s *Store) AllowConnectionAccess(conn net.Conn) bool {
+	if addr, ok := conn.RemoteAddr().(*net.TCPAddr); ok {
+		return s.AllowIpAccess(addr.IP.String())
+	}
+	return true
+}
+
 func (s *Store) GetRequesterCountryISOCode(r *http.Request) string {
 	ipAddr := GetRequesterIP(r)
 	if ipAddr == "" {
src/mod/geodb/whitelist.go (new file, +91 lines)

@@ -0,0 +1,91 @@

 package geodb

 import "strings"

 /*
 	Whitelist.go

 	This script handles whitelist related functions
 */

 //Geo Whitelist

 func (s *Store) AddCountryCodeToWhitelist(countryCode string) {
 	countryCode = strings.ToLower(countryCode)
 	s.sysdb.Write("whitelist-cn", countryCode, true)
 }

 func (s *Store) RemoveCountryCodeFromWhitelist(countryCode string) {
 	countryCode = strings.ToLower(countryCode)
 	s.sysdb.Delete("whitelist-cn", countryCode)
 }

 func (s *Store) IsCountryCodeWhitelisted(countryCode string) bool {
 	countryCode = strings.ToLower(countryCode)
 	var isWhitelisted bool = false
 	s.sysdb.Read("whitelist-cn", countryCode, &isWhitelisted)
 	return isWhitelisted
 }

 func (s *Store) GetAllWhitelistedCountryCode() []string {
 	whitelistedCountryCode := []string{}
 	entries, err := s.sysdb.ListTable("whitelist-cn")
 	if err != nil {
 		return whitelistedCountryCode
 	}
 	for _, keypairs := range entries {
 		ip := string(keypairs[0])
 		whitelistedCountryCode = append(whitelistedCountryCode, ip)
 	}

 	return whitelistedCountryCode
 }

 //IP Whitelist

 func (s *Store) AddIPToWhiteList(ipAddr string) {
 	s.sysdb.Write("whitelist-ip", ipAddr, true)
 }

 func (s *Store) RemoveIPFromWhiteList(ipAddr string) {
 	s.sysdb.Delete("whitelist-ip", ipAddr)
 }

 func (s *Store) IsIPWhitelisted(ipAddr string) bool {
 	var isWhitelisted bool = false
 	s.sysdb.Read("whitelist-ip", ipAddr, &isWhitelisted)
 	if isWhitelisted {
 		return true
 	}

 	//Check for IP wildcard and CIDR rules
 	AllWhitelistedIps := s.GetAllWhitelistedIp()
 	for _, whitelistRules := range AllWhitelistedIps {
 		wildcardMatch := MatchIpWildcard(ipAddr, whitelistRules)
 		if wildcardMatch {
 			return true
 		}

 		cidrMatch := MatchIpCIDR(ipAddr, whitelistRules)
 		if cidrMatch {
 			return true
 		}
 	}

 	return false
 }

 func (s *Store) GetAllWhitelistedIp() []string {
 	whitelistedIp := []string{}
 	entries, err := s.sysdb.ListTable("whitelist-ip")
 	if err != nil {
 		return whitelistedIp
 	}

 	for _, keypairs := range entries {
 		ip := string(keypairs[0])
 		whitelistedIp = append(whitelistedIp, ip)
 	}

 	return whitelistedIp
 }
src/mod/sshprox/embed.go (new file, +16 lines)

@@ -0,0 +1,16 @@

 //go:build (windows && amd64) || (linux && mipsle) || (linux && riscv64)
 // +build windows,amd64 linux,mipsle linux,riscv64

 package sshprox

 import "embed"

 /*
 	Binary embedding

 	Make sure when compile, gotty binary exists in static.gotty
 */
 var (
 	//go:embed gotty/LICENSE
 	gotty embed.FS
 )
src/mod/sshprox/embed_linux_386.go (new file, +18 lines)

@@ -0,0 +1,18 @@

 //go:build linux && 386
 // +build linux,386

 package sshprox

 import "embed"

 /*
 	Binary embedding for i386 builds

 	Make sure when compile, gotty binary exists in static.gotty
 */
 var (
 	//go:embed gotty/gotty_linux_386
 	//go:embed gotty/.gotty
 	//go:embed gotty/LICENSE
 	gotty embed.FS
 )
src/mod/sshprox/embed_linux_amd64.go (new file, +18 lines)

@@ -0,0 +1,18 @@

 //go:build linux && amd64
 // +build linux,amd64

 package sshprox

 import "embed"

 /*
 	Binary embedding for AMD64 builds

 	Make sure when compile, gotty binary exists in static.gotty
 */
 var (
 	//go:embed gotty/gotty_linux_amd64
 	//go:embed gotty/.gotty
 	//go:embed gotty/LICENSE
 	gotty embed.FS
 )
src/mod/sshprox/embed_linux_arm.go (new file, +18 lines)

@@ -0,0 +1,18 @@

 //go:build linux && arm
 // +build linux,arm

 package sshprox

 import "embed"

 /*
 	Binary embedding for ARM(v6/7) builds

 	Make sure when compile, gotty binary exists in static.gotty
 */
 var (
 	//go:embed gotty/gotty_linux_arm
 	//go:embed gotty/.gotty
 	//go:embed gotty/LICENSE
 	gotty embed.FS
 )
src/mod/sshprox/embed_linux_arm64.go (new file, +18 lines)

@@ -0,0 +1,18 @@

 //go:build linux && arm64
 // +build linux,arm64

 package sshprox

 import "embed"

 /*
 	Binary embedding for ARM64 builds

 	Make sure when compile, gotty binary exists in static.gotty
 */
 var (
 	//go:embed gotty/gotty_linux_arm64
 	//go:embed gotty/.gotty
 	//go:embed gotty/LICENSE
 	gotty embed.FS
 )
@@ -1,7 +1,6 @@

 package sshprox

 import (
-	"embed"
 	"errors"
 	"fmt"
 	"log"

@@ -28,16 +27,6 @@ import (

 	online ssh terminal
 */
-
-/*
-	Bianry embedding
-
-	Make sure when compile, gotty binary exists in static.gotty
-*/
-var (
-	//go:embed gotty/*
-	gotty embed.FS
-)

 type Manager struct {
 	StartingPort int
 	Instances    []*Instance
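The single `//go:embed gotty/*` directive removed here is replaced by the per-platform embed files added earlier in this comparison: each `embed_*.go` file carries build constraints so only the gotty binary for the target GOOS/GOARCH is compiled into the Zoraxy executable. A minimal sketch of the same pattern, with illustrative file and asset names and assuming the embedded asset exists at build time:

```go
// assets_linux_amd64.go (illustrative file name)
//go:build linux && amd64
// +build linux,amd64

package demo

import "embed"

// Only this file's embed directive is compiled for linux/amd64 builds; a
// sibling file with a different build constraint embeds a different binary,
// so callers can read from toolBinary without any runtime platform switch.
var (
	//go:embed assets/tool_linux_amd64
	toolBinary embed.FS
)
```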
@@ -1,10 +1,9 @@

 package analytic

 import (
-	"encoding/json"
+	"errors"
 	"net/http"
 	"strings"
-	"time"

 	"imuslab.com/zoraxy/mod/database"
 	"imuslab.com/zoraxy/mod/statistic"

@@ -24,105 +23,49 @@ func NewDataLoader(db *database.Database, sc *statistic.Collector) *DataLoader {

 	}
 }

(The HTTP handlers HandleSummaryList, HandleLoadTargetDaySummary, and HandleLoadTargetRangeSummary were removed from this file; they now live in the new src/mod/statistic/analytic/handlers.go shown below, rebuilt on top of the two helpers added here.)

+// GetAllStatisticSummaryInRange returns all the statistics within the time frame. The second array holds the keys (dates) of the statistics
+func (d *DataLoader) GetAllStatisticSummaryInRange(start, end string) ([]*statistic.DailySummaryExport, []string, error) {
+	dailySummaries := []*statistic.DailySummaryExport{}
+	collectedDates := []string{}
+	//Generate all the dates in between the range
+	keys, err := generateDateRange(start, end)
+	if err != nil {
+		return dailySummaries, collectedDates, err
+	}
+	//Load all the data from database
+	for _, key := range keys {
+		thisStat := statistic.DailySummaryExport{}
+		err = d.Database.Read("stats", key, &thisStat)
+		if err == nil {
+			dailySummaries = append(dailySummaries, &thisStat)
+			collectedDates = append(collectedDates, key)
+		}
+	}
+
+	return dailySummaries, collectedDates, nil
+}
+
+func (d *DataLoader) GetStartAndEndDatesFromRequest(r *http.Request) (string, string, error) {
+	// Get the start date from POST para
+	start, err := utils.GetPara(r, "start")
+	if err != nil {
+		return "", "", errors.New("start date cannot be empty")
+	}
+	if strings.Contains(start, "-") {
+		//Must be underscore
+		start = strings.ReplaceAll(start, "-", "_")
+	}
+	// Get end date from POST para
+	end, err := utils.GetPara(r, "end")
+	if err != nil {
+		return "", "", errors.New("end date cannot be empty")
+	}
+	if strings.Contains(end, "-") {
+		//Must be underscore
+		end = strings.ReplaceAll(end, "-", "_")
+	}
+
+	return start, end, nil
+}
src/mod/statistic/analytic/handlers.go (new file, +218 lines)

@@ -0,0 +1,218 @@

 package analytic

 import (
 	"encoding/csv"
 	"encoding/json"
 	"log"
 	"net/http"
 	"strconv"
 	"strings"
 	"time"

 	"imuslab.com/zoraxy/mod/statistic"
 	"imuslab.com/zoraxy/mod/utils"
 )

 func (d *DataLoader) HandleSummaryList(w http.ResponseWriter, r *http.Request) {
 	entries, err := d.Database.ListTable("stats")
 	if err != nil {
 		utils.SendErrorResponse(w, "unable to load data from database")
 		return
 	}

 	entryDates := []string{}
 	for _, keypairs := range entries {
 		entryDates = append(entryDates, string(keypairs[0]))
 	}

 	js, _ := json.MarshalIndent(entryDates, "", " ")
 	utils.SendJSONResponse(w, string(js))
 }

 func (d *DataLoader) HandleLoadTargetDaySummary(w http.ResponseWriter, r *http.Request) {
 	day, err := utils.GetPara(r, "id")
 	if err != nil {
 		utils.SendErrorResponse(w, "id cannot be empty")
 		return
 	}

 	if strings.Contains(day, "-") {
 		//Must be underscore
 		day = strings.ReplaceAll(day, "-", "_")
 	}

 	if !statistic.IsBeforeToday(day) {
 		utils.SendErrorResponse(w, "given date is in the future")
 		return
 	}

 	var targetDailySummary statistic.DailySummaryExport

 	if day == time.Now().Format("2006_01_02") {
 		targetDailySummary = *d.StatisticCollector.GetExportSummary()
 	} else {
 		//Not today data
 		err = d.Database.Read("stats", day, &targetDailySummary)
 		if err != nil {
 			utils.SendErrorResponse(w, "target day data not found")
 			return
 		}
 	}

 	js, _ := json.Marshal(targetDailySummary)
 	utils.SendJSONResponse(w, string(js))
 }

 func (d *DataLoader) HandleLoadTargetRangeSummary(w http.ResponseWriter, r *http.Request) {
 	start, end, err := d.GetStartAndEndDatesFromRequest(r)
 	if err != nil {
 		utils.SendErrorResponse(w, err.Error())
 		return
 	}

 	dailySummaries, _, err := d.GetAllStatisticSummaryInRange(start, end)
 	if err != nil {
 		utils.SendErrorResponse(w, err.Error())
 		return
 	}

 	//Merge the summaries into one
 	mergedSummary := mergeDailySummaryExports(dailySummaries)

 	js, _ := json.Marshal(struct {
 		Summary *statistic.DailySummaryExport
 		Records []*statistic.DailySummaryExport
 	}{
 		Summary: mergedSummary,
 		Records: dailySummaries,
 	})

 	utils.SendJSONResponse(w, string(js))
 }

 // Handle exporting of a given range statistics
 func (d *DataLoader) HandleRangeExport(w http.ResponseWriter, r *http.Request) {
 	start, end, err := d.GetStartAndEndDatesFromRequest(r)
 	if err != nil {
 		utils.SendErrorResponse(w, err.Error())
 		return
 	}

 	dailySummaries, dates, err := d.GetAllStatisticSummaryInRange(start, end)
 	if err != nil {
 		utils.SendErrorResponse(w, err.Error())
 		return
 	}

 	format, err := utils.GetPara(r, "format")
 	if err != nil {
 		format = "json"
 	}

 	if format == "csv" {
 		// Create a buffer to store CSV content
 		var csvContent strings.Builder

 		// Create a CSV writer
 		writer := csv.NewWriter(&csvContent)

 		// Write the header row
 		header := []string{"Date", "TotalRequest", "ErrorRequest", "ValidRequest", "ForwardTypes", "RequestOrigin", "RequestClientIp", "Referer", "UserAgent", "RequestURL"}
 		err := writer.Write(header)
 		if err != nil {
 			http.Error(w, err.Error(), http.StatusInternalServerError)
 			return
 		}

 		// Write each data row
 		for i, item := range dailySummaries {
 			row := []string{
 				dates[i],
 				strconv.FormatInt(item.TotalRequest, 10),
 				strconv.FormatInt(item.ErrorRequest, 10),
 				strconv.FormatInt(item.ValidRequest, 10),
 				// Convert map values to a comma-separated string
 				strings.Join(mapToStringSlice(item.ForwardTypes), ","),
 				strings.Join(mapToStringSlice(item.RequestOrigin), ","),
 				strings.Join(mapToStringSlice(item.RequestClientIp), ","),
 				strings.Join(mapToStringSlice(item.Referer), ","),
 				strings.Join(mapToStringSlice(item.UserAgent), ","),
 				strings.Join(mapToStringSlice(item.RequestURL), ","),
 			}
 			err = writer.Write(row)
 			if err != nil {
 				http.Error(w, err.Error(), http.StatusInternalServerError)
 				return
 			}
 		}

 		// Flush the CSV writer
 		writer.Flush()

 		// Check for any errors during writing
 		if err := writer.Error(); err != nil {
 			http.Error(w, err.Error(), http.StatusInternalServerError)
 			return
 		}

 		// Set the response headers
 		w.Header().Set("Content-Type", "text/csv")
 		w.Header().Set("Content-Disposition", "attachment; filename=analytics_"+start+"_to_"+end+".csv")

 		// Write the CSV content to the response writer
 		_, err = w.Write([]byte(csvContent.String()))
 		if err != nil {
 			http.Error(w, err.Error(), http.StatusInternalServerError)
 			return
 		}
 	} else if format == "json" {
 		type exportData struct {
 			Stats []*statistic.DailySummaryExport
 			Dates []string
 		}

 		results := exportData{
 			Stats: dailySummaries,
 			Dates: dates,
 		}

 		js, _ := json.MarshalIndent(results, "", " ")
 		w.Header().Set("Content-Disposition", "attachment; filename=analytics_"+start+"_to_"+end+".json")
 		utils.SendJSONResponse(w, string(js))
 	} else {
 		utils.SendErrorResponse(w, "Unsupported export format")
 	}
 }

 // Reset all the keys within the given time period
 func (d *DataLoader) HandleRangeReset(w http.ResponseWriter, r *http.Request) {
 	start, end, err := d.GetStartAndEndDatesFromRequest(r)
 	if err != nil {
 		utils.SendErrorResponse(w, err.Error())
 		return
 	}

 	if r.Method != http.MethodDelete {
 		http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
 		return
 	}

 	keys, err := generateDateRange(start, end)
 	if err != nil {
 		utils.SendErrorResponse(w, err.Error())
 		return
 	}

 	for _, key := range keys {
 		log.Println("DELETING statistics " + key)
 		d.Database.Delete("stats", key)

 		if isTodayDate(key) {
 			//It is today's date. Also reset statistic collector value
 			log.Println("RESETING today's in-memory statistics")
 			d.StatisticCollector.ResetSummaryOfDay()
 		}
 	}

 	utils.SendOK(w)
 }
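Note that the CSV export flattens the per-day maps with `mapToStringSlice` (added in the utils hunk just below), so only the map keys survive and the per-key hit counts are preserved only in the JSON export; this is presumably why the changelog says "please use json". A small illustration of that flattening, with made-up sample values:

```go
package main

import "fmt"

// mapToStringSlice mirrors the helper added in analytic/utils.go: it keeps
// only the map keys, discarding the per-key counters.
func mapToStringSlice(m map[string]int) []string {
	slice := make([]string, 0, len(m))
	for k := range m {
		slice = append(slice, k)
	}
	return slice
}

func main() {
	referers := map[string]int{"https://example.com/": 42, "https://news.ycombinator.com/": 7}
	// Prints the keys in arbitrary order, e.g. [https://example.com/ https://news.ycombinator.com/];
	// the counts 42 and 7 are dropped, so the CSV row only records which referers appeared.
	fmt.Println(mapToStringSlice(referers))
}
```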
@@ -70,3 +70,25 @@ func mergeDailySummaryExports(exports []*statistic.DailySummaryExport) *statisti

 	return mergedExport
 }
+
+func mapToStringSlice(m map[string]int) []string {
+	slice := make([]string, 0, len(m))
+	for k := range m {
+		slice = append(slice, k)
+	}
+	return slice
+}
+
+func isTodayDate(dateStr string) bool {
+	today := time.Now().Local().Format("2006-01-02")
+	inputDate, err := time.Parse("2006-01-02", dateStr)
+	if err != nil {
+		inputDate, err = time.Parse("2006_01_02", dateStr)
+		if err != nil {
+			fmt.Println("Invalid date format")
+			return false
+		}
+	}
+
+	return inputDate.Format("2006-01-02") == today
+}
@@ -6,6 +6,7 @@ import (

 	"sync"
 	"time"

+	"github.com/microcosm-cc/bluemonday"
 	"imuslab.com/zoraxy/mod/database"
 )

@@ -96,6 +97,11 @@ func (c *Collector) LoadSummaryOfDay(year int, month time.Month, day int) *Daily

 	return &targetSummary
 }

+// Reset today summary, for debug or restoring injections
+func (c *Collector) ResetSummaryOfDay() {
+	c.DailySummary = newDailySummary()
+}
+
 // This function gives the current slot in the 288- 5 minutes interval of the day
 func (c *Collector) GetCurrentRealtimeStatIntervalId() int {
 	now := time.Now()

@@ -160,11 +166,15 @@ func (c *Collector) RecordRequest(ri RequestInfo) {

 	}

 	//Record the referer
-	rf, ok := c.DailySummary.Referer.Load(ri.Referer)
+	p := bluemonday.StripTagsPolicy()
+	filteredReferer := p.Sanitize(
+		ri.Referer,
+	)
+	rf, ok := c.DailySummary.Referer.Load(filteredReferer)
 	if !ok {
-		c.DailySummary.Referer.Store(ri.Referer, 1)
+		c.DailySummary.Referer.Store(filteredReferer, 1)
 	} else {
-		c.DailySummary.Referer.Store(ri.Referer, rf.(int)+1)
+		c.DailySummary.Referer.Store(filteredReferer, rf.(int)+1)
 	}

 	//Record the UserAgent
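The Referer header is attacker-controlled, so it is now passed through bluemonday's strip-tags policy before being counted and later rendered in the statistics UI. A minimal sketch of what that policy does; the hostile sample input is illustrative:

```go
package main

import (
	"fmt"

	"github.com/microcosm-cc/bluemonday"
)

func main() {
	p := bluemonday.StripTagsPolicy()
	// A Referer value carrying embedded markup is reduced to safe plain text
	// before it is stored, so it cannot inject script into the stats page
	// when the recorded referers are displayed later.
	fmt.Println(p.Sanitize(`https://evil.example/<script>alert(1)</script>`))
}
```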
@@ -58,11 +58,23 @@ func forward(conn1 net.Conn, conn2 net.Conn, aTob *int64, bToa *int64) {

 	wg.Wait()
 }

-func accept(listener net.Listener) (net.Conn, error) {
+func (c *ProxyRelayConfig) accept(listener net.Listener) (net.Conn, error) {
 	conn, err := listener.Accept()
 	if err != nil {
 		return nil, err
 	}
+
+	//Check if connection in blacklist or whitelist
+	if addr, ok := conn.RemoteAddr().(*net.TCPAddr); ok {
+		if !c.parent.Options.AccessControlHandler(conn) {
+			time.Sleep(300 * time.Millisecond)
+			conn.Close()
+			log.Println("[x]", "Connection from "+addr.IP.String()+" rejected by access control policy")
+			return nil, errors.New("Connection from " + addr.IP.String() + " rejected by access control policy")
+		}
+	}
+
 	log.Println("[√]", "accept a new client. remote address:["+conn.RemoteAddr().String()+"], local address:["+conn.LocalAddr().String()+"]")
 	return conn, err
 }

@@ -203,7 +215,7 @@ func (c *ProxyRelayConfig) Port2port(port1 string, port2 string, stopChan chan b

 	}()

 	for {
-		conn1, err := accept(listen1)
+		conn1, err := c.accept(listen1)
 		if err != nil {
 			if !c.Running {
 				return nil

@@ -211,7 +223,7 @@ func (c *ProxyRelayConfig) Port2port(port1 string, port2 string, stopChan chan b

 			continue
 		}

-		conn2, err := accept(listen2)
+		conn2, err := c.accept(listen2)
 		if err != nil {
 			if !c.Running {
 				return nil

@@ -224,7 +236,7 @@ func (c *ProxyRelayConfig) Port2port(port1 string, port2 string, stopChan chan b

 			time.Sleep(time.Duration(c.Timeout) * time.Second)
 			continue
 		}
-		forward(conn1, conn2, &c.aTobAccumulatedByteTransfer, &c.bToaAccumulatedByteTransfer)
+		go forward(conn1, conn2, &c.aTobAccumulatedByteTransfer, &c.bToaAccumulatedByteTransfer)
 	}
 }

@@ -248,7 +260,7 @@ func (c *ProxyRelayConfig) Port2host(allowPort string, targetAddress string, sto

 	//Start blocking loop for accepting connections
 	for {
-		conn, err := accept(server)
+		conn, err := c.accept(server)
 		if conn == nil || err != nil {
 			if !c.Running {
 				//Terminate by stop chan. Exit listener loop

@@ -322,7 +334,7 @@ func (c *ProxyRelayConfig) Host2host(address1, address2 string, stopChan chan bo

 			return nil
 		}
 	}
-	forward(host1, host2, &c.aTobAccumulatedByteTransfer, &c.bToaAccumulatedByteTransfer)
+	go forward(host1, host2, &c.aTobAccumulatedByteTransfer, &c.bToaAccumulatedByteTransfer)
 	}

 	return nil
@@ -2,6 +2,7 @@ package tcpprox

 import (
 	"errors"
+	"net"

 	uuid "github.com/satori/go.uuid"
 	"imuslab.com/zoraxy/mod/database"

@@ -40,11 +41,14 @@ type ProxyRelayConfig struct {

 	stopChan                    chan bool //Stop channel to stop the listener
 	aTobAccumulatedByteTransfer int64     //Accumulated byte transfer from A to B
 	bToaAccumulatedByteTransfer int64     //Accumulated byte transfer from B to A
+
+	parent *Manager `json:"-"`
 }

 type Options struct {
 	Database       *database.Database
 	DefaultTimeout int
+	AccessControlHandler func(net.Conn) bool
 }

 type Manager struct {

@@ -59,16 +63,34 @@ type Manager struct {

 func NewTCProxy(options *Options) *Manager {
 	options.Database.NewTable("tcprox")
+
+	//Load relay configs from db
 	previousRules := []*ProxyRelayConfig{}
 	if options.Database.KeyExists("tcprox", "rules") {
 		options.Database.Read("tcprox", "rules", &previousRules)
 	}

-	return &Manager{
+	//Check if the AccessControlHandler is empty. If yes, set it to always allow access
+	if options.AccessControlHandler == nil {
+		options.AccessControlHandler = func(conn net.Conn) bool {
+			//Always allow access
+			return true
+		}
+	}
+
+	//Create a new proxy manager for TCP
+	thisManager := Manager{
 		Options:     options,
-		Configs:     previousRules,
 		Connections: 0,
 	}
+
+	//Inject manager into the rules
+	for _, rule := range previousRules {
+		rule.parent = &thisManager
+	}
+
+	thisManager.Configs = previousRules
+
+	return &thisManager
 }

 func (m *Manager) NewConfig(config *ProxyRelayOptions) string {

@@ -85,6 +107,8 @@ func (m *Manager) NewConfig(config *ProxyRelayOptions) string {

 		stopChan:                    nil,
 		aTobAccumulatedByteTransfer: 0,
 		bToaAccumulatedByteTransfer: 0,
+
+		parent: m,
 	}
 	m.Configs = append(m.Configs, &thisConfig)
 	m.SaveConfigToDatabase()
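With the new `Options.AccessControlHandler`, callers can veto TCP proxy connections before any bytes are relayed; when left nil, the constructor installs an always-allow handler, and Zoraxy's own startup (shown further below) passes `geodbStore.AllowConnectionAccess`. A sketch of a custom handler using the same option; the module import path, the timeout value, and the single blocked address are illustrative assumptions:

```go
package demo

import (
	"log"
	"net"

	"imuslab.com/zoraxy/mod/database" // module paths assumed from the imports in this diff
	"imuslab.com/zoraxy/mod/tcpprox"
)

// newRestrictedTCPProxy builds a TCP proxy manager that rejects one specific
// peer address; a real deployment would plug in geodbStore.AllowConnectionAccess instead.
func newRestrictedTCPProxy(sysdb *database.Database) *tcpprox.Manager {
	return tcpprox.NewTCProxy(&tcpprox.Options{
		Database:       sysdb,
		DefaultTimeout: 10, // seconds; value chosen only for the example
		AccessControlHandler: func(conn net.Conn) bool {
			if addr, ok := conn.RemoteAddr().(*net.TCPAddr); ok && addr.IP.String() == "203.0.113.7" {
				log.Println("blocking", addr.IP)
				return false // rejected inside accept() with a short delay before Close
			}
			return true
		},
	})
}
```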
@@ -4,6 +4,7 @@ import (
 	"encoding/json"
 	"log"
 	"net/http"
+	"strings"
 	"time"
 
 	"imuslab.com/zoraxy/mod/utils"

@@ -220,7 +221,24 @@ func getWebsiteStatusWithLatency(url string) (bool, int64, int) {
 func getWebsiteStatus(url string) (int, error) {
 	resp, err := http.Get(url)
 	if err != nil {
-		return 0, err
+		//Try replace the http with https and vise versa
+		rewriteURL := ""
+		if strings.Contains(url, "https://") {
+			rewriteURL = strings.ReplaceAll(url, "https://", "http://")
+		} else if strings.Contains(url, "http://") {
+			rewriteURL = strings.ReplaceAll(url, "http://", "https://")
+		}
+
+		resp, err = http.Get(rewriteURL)
+		if err != nil {
+			if strings.Contains(err.Error(), "http: server gave HTTP response to HTTPS client") {
+				//Invalid downstream reverse proxy settings, but it is online
+				//return SSL handshake failed
+				return 525, nil
+			}
+			return 0, err
+		}
+
 	}
 	status_code := resp.StatusCode
 	resp.Body.Close()
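The uptime probe no longer gives up when the configured scheme fails: it retries once with `http://` and `https://` swapped, and maps the mixed-protocol TLS error to status code 525 so the target can be reported as online but misconfigured. Here is a self-contained sketch of the same retry idea, not the project's `getWebsiteStatus` itself; the 525 mapping simply follows the convention used in the diff.

```
package main

import (
	"fmt"
	"net/http"
	"strings"
)

// probe sketches the fallback: if the configured scheme fails, try the
// opposite scheme once, and translate the classic "HTTP response to HTTPS
// client" error into 525 so the caller can treat the target as online but
// misconfigured instead of offline.
func probe(url string) (int, error) {
	resp, err := http.Get(url)
	if err != nil {
		alt := url
		if strings.HasPrefix(url, "https://") {
			alt = "http://" + strings.TrimPrefix(url, "https://")
		} else if strings.HasPrefix(url, "http://") {
			alt = "https://" + strings.TrimPrefix(url, "http://")
		}
		resp, err = http.Get(alt)
		if err != nil {
			if strings.Contains(err.Error(), "server gave HTTP response to HTTPS client") {
				return 525, nil // reachable, but the scheme in the proxy rule is wrong
			}
			return 0, err
		}
	}
	defer resp.Body.Close()
	return resp.StatusCode, nil
}

func main() {
	code, err := probe("https://example.com")
	fmt.Println(code, err)
}
```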
@@ -47,6 +47,7 @@ func ReverseProxtInit() {
 	}
 
 	dprouter, err := dynamicproxy.NewDynamicProxy(dynamicproxy.RouterOption{
+		HostUUID:           nodeUUID,
 		Port:               inboundPort,
 		UseTls:             useTls,
 		ForceHttpsRedirect: forceHttpsRedirect,

@@ -604,6 +605,10 @@ func HandleUpdateHttpsRedirect(w http.ResponseWriter, r *http.Request) {
 		js, _ := json.Marshal(currentRedirectToHttps)
 		utils.SendJSONResponse(w, string(js))
 	} else {
+		if dynamicProxyRouter.Option.Port == 80 {
+			utils.SendErrorResponse(w, "This option is not available when listening on port 80")
+			return
+		}
 		if useRedirect == "true" {
 			sysdb.Write("settings", "redirect", true)
 			log.Println("Updating force HTTPS redirection to true")
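`HandleUpdateHttpsRedirect` now refuses to enable forced HTTPS redirection while the proxy only listens on port 80, since there is no TLS listener to redirect to. A minimal hypothetical handler showing the same guard; `listeningPort`, `forceRedirect` and the response calls are stand-ins for the real `dynamicProxyRouter.Option.Port`, the sysdb-backed setting and `utils.SendErrorResponse`.

```
package main

import (
	"encoding/json"
	"net/http"
)

// Stand-ins for dynamicProxyRouter.Option.Port and the sysdb-backed flag.
var (
	listeningPort = 80
	forceRedirect = false
)

// handleUpdateRedirect sketches the guard: reading the setting is always
// allowed, but enabling the redirect is rejected while only port 80 is served.
func handleUpdateRedirect(w http.ResponseWriter, r *http.Request) {
	set := r.URL.Query().Get("set")
	if set == "" {
		js, _ := json.Marshal(forceRedirect)
		w.Header().Set("Content-Type", "application/json")
		w.Write(js)
		return
	}
	if listeningPort == 80 {
		http.Error(w, "This option is not available when listening on port 80", http.StatusBadRequest)
		return
	}
	forceRedirect = set == "true"
	w.Write([]byte("\"OK\""))
}

func main() {
	http.HandleFunc("/api/proxy/useHttpsRedirect", handleUpdateRedirect)
	// http.ListenAndServe(":8080", nil) // start a listener to try it out
}
```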
@@ -163,7 +163,8 @@ func startupSequence() {
 
 	//Create TCP Proxy Manager
 	tcpProxyManager = tcpprox.NewTCProxy(&tcpprox.Options{
 		Database:             sysdb,
+		AccessControlHandler: geodbStore.AllowConnectionAccess,
 	})
 
 	//Create WoL MAC storage table
@@ -15,8 +15,8 @@
 <input type="text" id="statsRangeEnd" placeholder="End date">
 </div>
 </div>
-<button onclick="handleLoadStatisticButtonPress();" class="ui basic button"><i class="blue search icon"></i> Search</button>
-<button onclick="clearStatisticDateRange();" class="ui yellow basic button"><i class="eraser icon"></i> Clear Range</button>
+<button onclick="handleLoadStatisticButtonPress();" class="ui basic button"><i class="blue search icon"></i> Load</button>
+<button onclick="clearStatisticDateRange();" class="ui basic button"><i class="eraser icon"></i> Clear Search</button>
 <br>
 <small>Leave end range as empty for showing starting day only statistic</small>
 </div>

@@ -193,7 +193,9 @@
 <canvas id="requestTrends"></canvas>
 </div>
 </div>
+<button onclick="showSideWrapper('snippet/advanceStatsOprs.html?t=' + Date.now() + '#' + encodeURIComponent(JSON.stringify(getStatisticDateRange())));" class="ui basic right floated black button"><i class="external square alternate icon"></i> Advance Operations</button>
 </div>
+
 <!-- <button class="ui icon right floated basic button" onclick="initStatisticSummery();"><i class="green refresh icon"></i> Refresh</button> -->
 <br><br>
 </div>
@@ -361,6 +363,28 @@
 initStatisticSummery(sd, ed);
 }
 
+function getStatisticDateRange(){
+    var sd = $("#statsRangeStart").val();
+    var ed = $("#statsRangeEnd").val();
+
+    if (ed == ""){
+        ed = sd;
+    }
+
+    if (sd == "" && ed == ""){
+        var sk = getTodayStatisticKey();
+        sd = sk;
+        ed = sk;
+    }
+
+    //Swap them if sd is later than ed
+    if (sd != "" && ed != "" && sd > ed) {
+        ed = [sd, sd = ed][0];
+    }
+
+    return [sd, ed];
+}
+
 function clearStatisticDateRange(){
 $("#statsRangeStart").val("");
 $("#statsRangeEnd").val("");
@@ -316,7 +316,16 @@
 data: {set: thisValue},
 success: function(data){
 if (data.error != undefined){
-    alert(data.error);
+    msgbox(data.error, false, 8000);
+
+    //Restore backend value to make sure the UI is always in sync
+    $.get("/api/proxy/useHttpsRedirect", function(data){
+        if (data == true){
+            $("#redirect").checkbox("set checked");
+        }else{
+            $("#redirect").checkbox("set unchecked");
+        }
+    });
 }else{
 //Updated
 msgbox("Setting Updated");
@@ -42,8 +42,9 @@
 if (subd.RequireTLS){
 tlsIcon = `<i class="green lock icon" title="TLS Mode"></i>`;
 }
+
 $("#subdList").append(`<tr eptuuid="${subd.RootOrMatchingDomain}" payload="${subdData}" class="subdEntry">
-<td data-label="" editable="false">${subd.RootOrMatchingDomain}</td>
+<td data-label="" editable="false"><a href="//${subd.RootOrMatchingDomain}" target="_blank">${subd.RootOrMatchingDomain}</a></td>
 <td data-label="" editable="true" datatype="domain">${subd.Domain} ${tlsIcon}</td>
 <td data-label="" editable="true" datatype="skipver">${!subd.SkipCertValidations?`<i class="ui green check icon"></i>`:`<i class="ui yellow exclamation circle icon" title="TLS/SSL Verification will be skipped on this host"></i>`}</td>
 <td data-label="" editable="true" datatype="basicauth">${subd.RequireBasicAuth?`<i class="ui green check icon"></i>`:`<i class="ui grey remove icon"></i>`}</td>
@@ -4,7 +4,7 @@
 <p>Proxy traffic flow on layer 3 via TCP/IP</p>
 </div>
 <button class="ui basic orange button" id="addProxyConfigButton"><i class="ui add icon"></i> Add Proxy Config</button>
-<button class="ui basic circular right floated icon button" title="Refresh List"><i class="ui green refresh icon"></i></button>
+<button class="ui basic circular right floated icon button" onclick="initProxyConfigList();" title="Refresh List"><i class="ui green refresh icon"></i></button>
 <div class="ui divider"></div>
 <div class="ui basic segment" id="addproxyConfig" style="display:none;">
 <h3>TCP Proxy Config</h3>
@@ -230,11 +230,13 @@
 } else {
 
 proxyConfigs.forEach(function(config) {
-var runningLogo = '<i class="red circle icon"></i>';
+var runningLogo = 'Stopped';
+var runningClass = "stopped";
 var startButton = `<button onclick="startTcpProx('${config.UUID}');" class="ui button" title="Start Proxy"><i class="play icon"></i> Start Proxy</button>`;
 if (config.Running){
-runningLogo = '<i class="green circle icon"></i>';
+runningLogo = 'Running';
 startButton = `<button onclick="stopTcpProx('${config.UUID}');" class="ui button" title="Start Proxy"><i class="red stop icon"></i> Stop Proxy</button>`;
+runningClass = "running"
 }
 
 var modeText = "Unknown";

@@ -248,8 +250,10 @@
 
 var thisConfig = encodeURIComponent(JSON.stringify(config));
 
-var row = $(`<tr class="tcproxConfig" uuid="${config.UUID}" config="${thisConfig}">`);
-row.append($('<td>').html(runningLogo + config.Name));
+var row = $(`<tr class="tcproxConfig ${runningClass}" uuid="${config.UUID}" config="${thisConfig}">`);
+row.append($('<td>').html(`
+${config.Name}
+<div class="statusText">${runningLogo}</div>`));
 row.append($('<td>').text(config.PortA));
 row.append($('<td>').text(config.PortB));
 row.append($('<td>').text(modeText));
@@ -109,7 +109,13 @@
 }
 ontimeRate++;
 }else{
-dotType = "offline";
+if (thisStatus.StatusCode >= 500 && thisStatus.StatusCode < 600){
+    //Special type of error, cause by downstream reverse proxy
+    dotType = "error";
+}else{
+    dotType = "offline";
+}
+
 }
 
 let datetime = format_time(thisStatus.Timestamp);

@@ -126,12 +132,20 @@
 //Check of online status now
 let currentOnlineStatus = "Unknown";
 let onlineStatusCss = ``;
+let reminderEle = ``;
 if (value[value.length - 1].Online){
 currentOnlineStatus = `<i class="circle icon"></i> Online`;
 onlineStatusCss = `color: #3bd671;`;
 }else{
-currentOnlineStatus = `<i class="circle icon"></i> Offline`;
-onlineStatusCss = `color: #df484a;`;
+if (value[value.length - 1].StatusCode >= 500 && value[value.length - 1].StatusCode < 600){
+    currentOnlineStatus = `<i class="exclamation circle icon"></i> Misconfigured`;
+    onlineStatusCss = `color: #f38020;`;
+    reminderEle = `<small style="${onlineStatusCss}">Downstream proxy server is online with misconfigured settings</small>`;
+}else{
+    currentOnlineStatus = `<i class="circle icon"></i> Offline`;
+    onlineStatusCss = `color: #df484a;`;
+}
+
 }
 
 //Generate the html

@@ -151,6 +165,7 @@
 <div class="status" style="marign-top: 1em;">
 ${statusDotList}
 </div>
+${reminderEle}
 <div class="ui divider"></div>
 </div>`);
 }
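With the change above, the uptime page distinguishes three states instead of two: online, offline, and a 5xx probe result (such as the synthetic 525) that means the downstream host answered but the proxy rule is misconfigured. A tiny illustrative sketch of that three-way classification, with made-up function names:

```
package main

import "fmt"

// classify mirrors the three-way status used by the uptime UI: online,
// "error" (host reachable but the downstream proxy rule is misconfigured,
// signalled by a 5xx such as the synthetic 525), or plain offline.
func classify(online bool, statusCode int) string {
	if online {
		return "online"
	}
	if statusCode >= 500 && statusCode < 600 {
		return "error" // reachable, but pointed at the wrong scheme or port
	}
	return "offline"
}

func main() {
	fmt.Println(classify(true, 200))  // online
	fmt.Println(classify(false, 525)) // error
	fmt.Println(classify(false, 0))   // offline
}
```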
@@ -117,6 +117,7 @@
 <p>Results: <div id="ipRangeOutput">N/A</div></p>
 </div>
 
+<!-- System Information -->
 <div class="ui divider"></div>
 <div id="zoraxyinfo">
 <h3 class="ui header">
BIN src/web/img/public/bg.jpg Normal file | After Size: 818 KiB | Before Size: 4.5 MiB
BIN src/web/img/public/bg2.jpg Normal file | After Size: 1.1 MiB | Before Size: 9.4 MiB
@@ -23,7 +23,7 @@
 width: 100%;
 opacity: 0.8;
 z-index: -99;
-background-image: url("img/public/bg.png");
+background-image: url("img/public/bg.jpg");
 background-size: auto 100%;
 background-position: right top;
 background-repeat: no-repeat;
@@ -459,6 +459,33 @@ body{
 user-select: none;
 }
 
+/*
+    TCP Proxy
+*/
+
+.tcproxConfig td:first-child{
+    position: relative;
+}
+
+.tcproxConfig.running td:first-child{
+    border-left: 0.6em solid #21ba45 !important;
+}
+
+.tcproxConfig.stopped td:first-child{
+    border-left: 0.6em solid #414141 !important;
+}
+
+.tcproxConfig td:first-child .statusText{
+    position: absolute;
+    bottom: 0.3em;
+    left: 0.2em;
+    font-size: 2em;
+    color:rgb(224, 224, 224);
+    opacity: 0.7;
+    pointer-events: none;
+    user-select: none;
+}
+
 /*
 Uptime Monitor
 */
@@ -23,7 +23,7 @@
 width: 100%;
 opacity: 0.8;
 z-index: -99;
-background-image: url("img/public/bg2.png");
+background-image: url("img/public/bg2.jpg");
 background-size: auto 100%;
 background-position: right top;
 background-repeat: no-repeat;
148 src/web/snippet/advanceStatsOprs.html Normal file

@@ -0,0 +1,148 @@
<!DOCTYPE html>
<html>
    <head>
        <!-- Notes: This should be open in its original path-->
        <link rel="stylesheet" href="../script/semantic/semantic.min.css">
        <script src="../script/jquery-3.6.0.min.js"></script>
        <script src="../script/semantic/semantic.min.js"></script>
    </head>
    <body>
        <br>
        <div class="ui container">
            <div class="ui header">
                <div class="content">
                    Advance Statistics Operations
                    <div class="sub header">Selected Range: <span id="daterange"></span></div>
                </div>
            </div>
            <div class="ui divider"></div>
            <h3>Export Data</h3>
            <p>You can export the statistics collected by Zoraxy in the selected range for further analysis</p>
            <button class="ui basic teal button" onclick="handleExportAsCSV();"><i class="download icon"></i> Export CSV</button>
            <button class="ui basic pink button" onclick="handleExportAsJSON();"><i class="download icon"></i> Export JSON</button>
            <div class="ui divider"></div>
            <h3>Reset Statistics</h3>
            <p>You can reset the statistics within the selected time range for debug purpose. Note that this operation is irreversible.</p>
            <button class="ui basic red button" onclick="handleResetStats();"><i class="trash icon"></i> RESET STATISTICS</button>
            <br><br>
            <button class="ui basic button iframeOnly" style="float: right;" onclick="parent.hideSideWrapper();"><i class="remove icon"></i> Cancel</button>
        </div>
        <script>
            let startDate = "";
            let endDate = "";

            /*
                Actions Handler
            */

            function handleExportAsJSON(){
                window.open(`/api/analytic/exportRange?start=${startDate}&end=${endDate}&format=json`, 'download');
            }

            function handleExportAsCSV(){
                window.open(`/api/analytic/exportRange?start=${startDate}&end=${endDate}&format=csv`, 'download');
            }

            function handleResetStats(){
                if (confirm("Confirm remove statistics from " + startDate + " to " + endDate +"?")){
                    $.ajax({
                        url: "/api/analytic/resetRange?start=" + startDate + "&end=" + endDate,
                        method: "DELETE",
                        success: function(data){
                            if (data.error != undefined){
                                parent.msgbox(data.error, false, 5000);
                            }else{
                                parent.msgbox("Statistic Cleared");
                                parent.hideSideWrapper();
                            }
                        }
                    })
                }
            }

            /*
                Data Loading
            */
            function loadDateRange(){
                if (window.location.hash.length > 1){
                    try{
                        var dateRange = JSON.parse(decodeURIComponent(window.location.hash.substr(1)));
                        startDate = dateRange[0].trim();
                        endDate = dateRange[1].trim();

                        //Check if they are valid dates
                        if (!isValidDateFormat(startDate)){
                            alert("Start date is not a valid date: " + startDate);
                            return
                        }

                        if (!isValidDateFormat(endDate)){
                            alert("End date is not a valid date: " + endDate);
                            return
                        }

                        //Sort the two dates if they are placed in invalid orders
                        var [s, e] = sortDates(startDate, endDate);
                        startDate = s;
                        endDate = e;

                        $("#daterange").html(startDate + ` <i class="arrow right icon" style="margin-right: 0;"></i> ` + endDate);
                    }catch(ex){
                        alert("Invalid usage: Invalid date range given");
                    }
                }
            }
            loadDateRange();

            function isValidDateFormat(dateString) {
                if (dateString.indexOf("_") >= 0){
                    //Replace all the _ to -
                    dateString = dateString.split("_").join("-");
                }
                // Create a regular expression pattern for the yyyy-mm-dd format
                const pattern = /^\d{4}-\d{2}-\d{2}$/;

                // Check if the input string matches the pattern
                if (!pattern.test(dateString)) {
                    return false; // Invalid format
                }

                // Parse the date components
                const year = parseInt(dateString.substring(0, 4), 10);
                const month = parseInt(dateString.substring(5, 7), 10);
                const day = parseInt(dateString.substring(8, 10), 10);

                // Check if the parsed components represent a valid date
                const date = new Date(year, month - 1, day);
                if (
                    date.getFullYear() !== year ||
                    date.getMonth() + 1 !== month ||
                    date.getDate() !== day
                ) {
                    return false; // Invalid date
                }

                return true; // Valid date in yyyy-mm-dd format
            }

            function sortDates(date1, date2) {
                // Parse the date strings
                const parsedDate1 = new Date(date1);
                const parsedDate2 = new Date(date2);

                // Compare the parsed dates
                if (parsedDate1 > parsedDate2) {
                    // Swap the dates
                    const temp = date1;
                    date1 = date2;
                    date2 = temp;
                }

                // Return the swapped dates
                return [date1, date2];
            }


        </script>
    </body>
</html>
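The snippet validates its date range client-side with `isValidDateFormat` and `sortDates` before calling the export and reset APIs. For comparison, an illustrative Go equivalent of those two helpers (not code from the repository), leaning on `time.Parse` to reject impossible dates:

```
package main

import (
	"fmt"
	"strings"
	"time"
)

// validDateKey is an illustrative counterpart to isValidDateFormat: it
// accepts yyyy-mm-dd or yyyy_mm_dd keys and lets time.Parse reject
// impossible dates such as 2023-02-30.
func validDateKey(key string) bool {
	key = strings.ReplaceAll(key, "_", "-")
	_, err := time.Parse("2006-01-02", key)
	return err == nil
}

// sortRange mirrors sortDates: it returns the two keys in chronological
// order, assuming both keys have already passed validDateKey.
func sortRange(a, b string) (string, string) {
	ta, _ := time.Parse("2006-01-02", strings.ReplaceAll(a, "_", "-"))
	tb, _ := time.Parse("2006-01-02", strings.ReplaceAll(b, "_", "-"))
	if ta.After(tb) {
		return b, a
	}
	return a, b
}

func main() {
	fmt.Println(validDateKey("2023_06_04")) // true
	fmt.Println(validDateKey("2023-02-30")) // false
	fmt.Println(sortRange("2023-06-04", "2023-05-31"))
}
```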
@@ -117,6 +117,7 @@ func GetUptimeTargetsFromReverseProxyRules(dp *dynamicproxy.Router) []*uptime.Target {
 			url = "https://" + target.Domain
 			protocol = "https"
 		}
+
 		UptimeTargets = append(UptimeTargets, &uptime.Target{
 			ID:   subd,
 			Name: subd,