2019-08-21 04:39:37 -07:00
|
|
|
package home
|
|
|
|
|
|
|
|
import (
|
|
|
|
"encoding/json"
|
|
|
|
"fmt"
|
|
|
|
"io/ioutil"
|
2019-11-06 05:13:31 -07:00
|
|
|
"net"
|
2019-08-21 04:39:37 -07:00
|
|
|
"net/http"
|
2019-09-04 04:12:00 -07:00
|
|
|
"net/url"
|
2019-08-21 04:39:37 -07:00
|
|
|
"os"
|
2020-03-05 04:37:43 -07:00
|
|
|
"path/filepath"
|
2019-08-21 04:39:37 -07:00
|
|
|
"strings"
|
2019-09-04 04:12:00 -07:00
|
|
|
"time"
|
2019-08-21 04:39:37 -07:00
|
|
|
|
2020-10-30 03:32:02 -07:00
|
|
|
"github.com/AdguardTeam/AdGuardHome/internal/util"
|
2019-08-21 04:39:37 -07:00
|
|
|
"github.com/AdguardTeam/golibs/log"
|
2019-11-06 05:13:31 -07:00
|
|
|
"github.com/miekg/dns"
|
2019-08-21 04:39:37 -07:00
|
|
|
)
|
|
|
|
|
2020-08-03 04:09:47 -07:00
|
|
|
// isValidURL - return TRUE if URL or file path is valid
|
|
|
|
func isValidURL(rawurl string) bool {
|
2020-03-05 04:37:43 -07:00
|
|
|
if filepath.IsAbs(rawurl) {
|
|
|
|
// this is a file path
|
|
|
|
return util.FileExists(rawurl)
|
|
|
|
}
|
|
|
|
|
2019-09-04 04:12:00 -07:00
|
|
|
url, err := url.ParseRequestURI(rawurl)
|
2019-08-21 04:39:37 -07:00
|
|
|
if err != nil {
|
2020-11-05 03:59:57 -07:00
|
|
|
return false // Couldn't even parse the rawurl
|
2019-08-21 04:39:37 -07:00
|
|
|
}
|
2019-09-04 04:12:00 -07:00
|
|
|
if len(url.Scheme) == 0 {
|
2020-11-05 03:59:57 -07:00
|
|
|
return false // No Scheme found
|
2019-08-21 04:39:37 -07:00
|
|
|
}
|
2019-09-04 04:12:00 -07:00
|
|
|
return true
|
|
|
|
}
|
|
|
|
|
|
|
|
// filterAddJSON is the request body of the filtering/add_url HTTP API.
type filterAddJSON struct {
	// Name is the human-readable name of the filter list.
	Name string `json:"name"`
	// URL is the source of the list: an HTTP(S) URL or an absolute file
	// path (see isValidURL).
	URL string `json:"url"`
	// Whitelist, if true, adds the list to the allowlist collection
	// instead of the blocklist one.
	Whitelist bool `json:"whitelist"`
}
|
|
|
|
|
2020-03-17 05:00:40 -07:00
|
|
|
func (f *Filtering) handleFilteringAddURL(w http.ResponseWriter, r *http.Request) {
|
2019-09-04 04:12:00 -07:00
|
|
|
fj := filterAddJSON{}
|
|
|
|
err := json.NewDecoder(r.Body).Decode(&fj)
|
2019-08-21 04:39:37 -07:00
|
|
|
if err != nil {
|
|
|
|
httpError(w, http.StatusBadRequest, "Failed to parse request body json: %s", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2020-08-03 04:09:47 -07:00
|
|
|
if !isValidURL(fj.URL) {
|
2020-03-05 04:37:43 -07:00
|
|
|
http.Error(w, "Invalid URL or file path", http.StatusBadRequest)
|
2019-08-21 04:39:37 -07:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// Check for duplicates
|
2019-09-04 04:12:00 -07:00
|
|
|
if filterExists(fj.URL) {
|
|
|
|
httpError(w, http.StatusBadRequest, "Filter URL already added -- %s", fj.URL)
|
2019-08-21 04:39:37 -07:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// Set necessary properties
|
2020-03-17 05:00:40 -07:00
|
|
|
filt := filter{
|
2019-09-04 04:12:00 -07:00
|
|
|
Enabled: true,
|
|
|
|
URL: fj.URL,
|
|
|
|
Name: fj.Name,
|
2020-02-26 09:58:25 -07:00
|
|
|
white: fj.Whitelist,
|
2019-09-04 04:12:00 -07:00
|
|
|
}
|
2020-03-17 05:00:40 -07:00
|
|
|
filt.ID = assignUniqueFilterID()
|
2019-08-21 04:39:37 -07:00
|
|
|
|
|
|
|
// Download the filter contents
|
2020-03-17 05:00:40 -07:00
|
|
|
ok, err := f.update(&filt)
|
2019-08-21 04:39:37 -07:00
|
|
|
if err != nil {
|
2020-03-17 05:00:40 -07:00
|
|
|
httpError(w, http.StatusBadRequest, "Couldn't fetch filter from url %s: %s", filt.URL, err)
|
2019-08-21 04:39:37 -07:00
|
|
|
return
|
|
|
|
}
|
|
|
|
if !ok {
|
2020-03-17 05:00:40 -07:00
|
|
|
httpError(w, http.StatusBadRequest, "Filter at the url %s is invalid (maybe it points to blank page?)", filt.URL)
|
2019-08-21 04:39:37 -07:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// URL is deemed valid, append it to filters, update config, write new filter file and tell dns to reload it
|
2020-03-17 05:00:40 -07:00
|
|
|
if !filterAdd(filt) {
|
|
|
|
httpError(w, http.StatusBadRequest, "Filter URL already added -- %s", filt.URL)
|
2019-08-21 04:39:37 -07:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2019-10-09 09:51:26 -07:00
|
|
|
onConfigModified()
|
|
|
|
enableFilters(true)
|
2019-08-21 04:39:37 -07:00
|
|
|
|
2020-03-17 05:00:40 -07:00
|
|
|
_, err = fmt.Fprintf(w, "OK %d rules\n", filt.RulesCount)
|
2019-08-21 04:39:37 -07:00
|
|
|
if err != nil {
|
|
|
|
httpError(w, http.StatusInternalServerError, "Couldn't write body: %s", err)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-03-17 05:00:40 -07:00
|
|
|
func (f *Filtering) handleFilteringRemoveURL(w http.ResponseWriter, r *http.Request) {
|
2019-08-21 04:39:37 -07:00
|
|
|
type request struct {
|
2020-02-26 09:58:25 -07:00
|
|
|
URL string `json:"url"`
|
|
|
|
Whitelist bool `json:"whitelist"`
|
2019-08-21 04:39:37 -07:00
|
|
|
}
|
2020-11-05 03:59:57 -07:00
|
|
|
|
2019-08-21 04:39:37 -07:00
|
|
|
req := request{}
|
|
|
|
err := json.NewDecoder(r.Body).Decode(&req)
|
|
|
|
if err != nil {
|
2020-11-05 03:59:57 -07:00
|
|
|
httpError(w, http.StatusBadRequest, "failed to parse request body json: %s", err)
|
2019-08-21 04:39:37 -07:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
config.Lock()
|
2020-02-26 09:58:25 -07:00
|
|
|
filters := &config.Filters
|
|
|
|
if req.Whitelist {
|
|
|
|
filters = &config.WhitelistFilters
|
|
|
|
}
|
2020-11-05 03:59:57 -07:00
|
|
|
|
|
|
|
var deleted filter
|
|
|
|
var newFilters []filter
|
|
|
|
for _, f := range *filters {
|
|
|
|
if f.URL != req.URL {
|
|
|
|
newFilters = append(newFilters, f)
|
|
|
|
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
deleted = f
|
|
|
|
path := f.Path()
|
|
|
|
err = os.Rename(path, path+".old")
|
|
|
|
if err != nil {
|
|
|
|
log.Error("deleting filter %q: %s", path, err)
|
2019-08-21 04:39:37 -07:00
|
|
|
}
|
|
|
|
}
|
2020-11-05 03:59:57 -07:00
|
|
|
|
2020-02-26 09:58:25 -07:00
|
|
|
*filters = newFilters
|
2019-08-21 04:39:37 -07:00
|
|
|
config.Unlock()
|
2019-10-09 09:51:26 -07:00
|
|
|
|
|
|
|
onConfigModified()
|
|
|
|
enableFilters(true)
|
|
|
|
|
2020-11-05 03:59:57 -07:00
|
|
|
// NOTE: The old files "filter.txt.old" aren't deleted. It's not really
|
|
|
|
// necessary, but will require the additional complicated code to run
|
|
|
|
// after enableFilters is done.
|
|
|
|
//
|
|
|
|
// TODO(a.garipov): Make sure the above comment is true.
|
|
|
|
|
|
|
|
_, err = fmt.Fprintf(w, "OK %d rules\n", deleted.RulesCount)
|
|
|
|
if err != nil {
|
|
|
|
httpError(w, http.StatusInternalServerError, "couldn't write body: %s", err)
|
|
|
|
}
|
2019-08-21 04:39:37 -07:00
|
|
|
}
|
|
|
|
|
2019-09-04 04:12:00 -07:00
|
|
|
// filterURLJSON holds the new properties of a filter list in a set_url
// request.
type filterURLJSON struct {
	// Name is the new human-readable name of the list.
	Name string `json:"name"`
	// URL is the new source URL or file path of the list.
	URL string `json:"url"`
	// Enabled is the new enabled state of the list.
	Enabled bool `json:"enabled"`
}
|
2019-08-21 04:39:37 -07:00
|
|
|
|
2019-11-06 05:56:29 -07:00
|
|
|
// filterURLReq is the request body of the filtering/set_url HTTP API.
type filterURLReq struct {
	// URL identifies the existing filter list to modify — presumably its
	// current URL; confirm against filterSetProperties.
	URL string `json:"url"`
	// Whitelist selects between the allowlist and blocklist collections.
	Whitelist bool `json:"whitelist"`
	// Data holds the new properties to apply to the list.
	Data filterURLJSON `json:"data"`
}
|
|
|
|
|
2020-03-17 05:00:40 -07:00
|
|
|
// handleFilteringSetURL is the handler for modifying the properties of an
// existing filter list: its name, URL, and enabled state.
func (f *Filtering) handleFilteringSetURL(w http.ResponseWriter, r *http.Request) {
	fj := filterURLReq{}
	err := json.NewDecoder(r.Body).Decode(&fj)
	if err != nil {
		httpError(w, http.StatusBadRequest, "json decode: %s", err)
		return
	}

	if !isValidURL(fj.Data.URL) {
		http.Error(w, "invalid URL or file path", http.StatusBadRequest)
		return
	}

	// The new properties to apply to the list identified by fj.URL.
	filt := filter{
		Enabled: fj.Data.Enabled,
		Name:    fj.Data.Name,
		URL:     fj.Data.URL,
	}
	status := f.filterSetProperties(fj.URL, filt, fj.Whitelist)
	if (status & statusFound) == 0 {
		http.Error(w, "URL doesn't exist", http.StatusBadRequest)
		return
	}
	if (status & statusURLExists) != 0 {
		http.Error(w, "URL already exists", http.StatusBadRequest)
		return
	}

	onConfigModified()
	restart := false
	if (status & statusEnabledChanged) != 0 {
		// The enabled state changed: we must add or remove filter rules,
		// which requires restarting the filtering.
		restart = true
	}
	if (status&statusUpdateRequired) != 0 && fj.Data.Enabled {
		// The list needs re-downloading and is enabled: download new
		// filter and apply its rules.
		flags := filterRefreshBlocklists
		if fj.Whitelist {
			flags = filterRefreshAllowlists
		}
		nUpdated, _ := f.refreshFilters(flags, true)
		// If at least 1 filter has been updated, refreshFilters() restarts
		// the filtering automatically; if not, we restart the filtering
		// ourselves.
		restart = false
		if nUpdated == 0 {
			restart = true
		}
	}
	if restart {
		enableFilters(true)
	}
}
|
|
|
|
|
2020-03-17 05:00:40 -07:00
|
|
|
func (f *Filtering) handleFilteringSetRules(w http.ResponseWriter, r *http.Request) {
|
2020-11-23 04:14:08 -07:00
|
|
|
// This use of ReadAll is safe, because request's body is now limited.
|
2019-09-04 04:12:00 -07:00
|
|
|
body, err := ioutil.ReadAll(r.Body)
|
2019-08-21 04:39:37 -07:00
|
|
|
if err != nil {
|
2019-09-04 04:12:00 -07:00
|
|
|
httpError(w, http.StatusBadRequest, "Failed to read request body: %s", err)
|
2019-08-21 04:39:37 -07:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2019-09-04 04:12:00 -07:00
|
|
|
config.UserRules = strings.Split(string(body), "\n")
|
2019-10-30 01:52:58 -07:00
|
|
|
onConfigModified()
|
2019-10-09 09:51:26 -07:00
|
|
|
enableFilters(true)
|
2019-09-04 04:12:00 -07:00
|
|
|
}
|
2019-08-21 04:39:37 -07:00
|
|
|
|
2020-03-17 05:00:40 -07:00
|
|
|
// handleFilteringRefresh is the handler for force-refreshing either the
// blocklist or the allowlist filters.  It responds with the number of
// filters that were updated.
func (f *Filtering) handleFilteringRefresh(w http.ResponseWriter, r *http.Request) {
	type Req struct {
		White bool `json:"whitelist"`
	}
	type Resp struct {
		Updated int `json:"updated"`
	}
	resp := Resp{}
	var err error

	req := Req{}
	err = json.NewDecoder(r.Body).Decode(&req)
	if err != nil {
		httpError(w, http.StatusBadRequest, "json decode: %s", err)
		return
	}

	// Release the global control lock while refreshing — presumably so the
	// potentially slow download doesn't block other control handlers; the
	// lock is reacquired below.  NOTE(review): confirm this inversion is
	// still required.
	Context.controlLock.Unlock()
	flags := filterRefreshBlocklists
	if req.White {
		flags = filterRefreshAllowlists
	}
	resp.Updated, err = f.refreshFilters(flags|filterRefreshForce, false)
	Context.controlLock.Lock()
	if err != nil {
		httpError(w, http.StatusInternalServerError, "%s", err)
		return
	}

	js, err := json.Marshal(resp)
	if err != nil {
		httpError(w, http.StatusInternalServerError, "json encode: %s", err)
		return
	}
	w.Header().Set("Content-Type", "application/json")
	_, _ = w.Write(js)
}
|
|
|
|
|
|
|
|
// filterJSON is the JSON representation of a single filter list.
type filterJSON struct {
	ID         int64  `json:"id"`
	Enabled    bool   `json:"enabled"`
	URL        string `json:"url"`
	Name       string `json:"name"`
	RulesCount uint32 `json:"rules_count"`
	// LastUpdated is in RFC 3339 format, or empty if the filter has never
	// been updated (see filterToJSON).
	LastUpdated string `json:"last_updated"`
}
|
|
|
|
|
|
|
|
// filteringConfig is both the response of the filtering/status HTTP API and
// the request body of the filtering/config one.
type filteringConfig struct {
	Enabled  bool   `json:"enabled"`
	Interval uint32 `json:"interval"` // in hours; see checkFiltersUpdateIntervalHours
	// Filters and WhitelistFilters are the block and allow lists.
	Filters          []filterJSON `json:"filters"`
	WhitelistFilters []filterJSON `json:"whitelist_filters"`
	// UserRules are the user-defined rules, one rule per element.
	UserRules []string `json:"user_rules"`
}
|
|
|
|
|
|
|
|
func filterToJSON(f filter) filterJSON {
|
|
|
|
fj := filterJSON{
|
|
|
|
ID: f.ID,
|
|
|
|
Enabled: f.Enabled,
|
|
|
|
URL: f.URL,
|
|
|
|
Name: f.Name,
|
|
|
|
RulesCount: uint32(f.RulesCount),
|
|
|
|
}
|
|
|
|
|
|
|
|
if !f.LastUpdated.IsZero() {
|
|
|
|
fj.LastUpdated = f.LastUpdated.Format(time.RFC3339)
|
|
|
|
}
|
|
|
|
|
|
|
|
return fj
|
2019-09-04 04:12:00 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
// Get filtering configuration
|
2020-03-17 05:00:40 -07:00
|
|
|
func (f *Filtering) handleFilteringStatus(w http.ResponseWriter, r *http.Request) {
|
2019-09-04 04:12:00 -07:00
|
|
|
resp := filteringConfig{}
|
|
|
|
config.RLock()
|
|
|
|
resp.Enabled = config.DNS.FilteringEnabled
|
|
|
|
resp.Interval = config.DNS.FiltersUpdateIntervalHours
|
|
|
|
for _, f := range config.Filters {
|
2020-02-26 09:58:25 -07:00
|
|
|
fj := filterToJSON(f)
|
2019-09-04 04:12:00 -07:00
|
|
|
resp.Filters = append(resp.Filters, fj)
|
2019-08-21 04:39:37 -07:00
|
|
|
}
|
2020-02-26 09:58:25 -07:00
|
|
|
for _, f := range config.WhitelistFilters {
|
|
|
|
fj := filterToJSON(f)
|
|
|
|
resp.WhitelistFilters = append(resp.WhitelistFilters, fj)
|
|
|
|
}
|
2019-09-04 04:12:00 -07:00
|
|
|
resp.UserRules = config.UserRules
|
|
|
|
config.RUnlock()
|
2019-08-21 04:39:37 -07:00
|
|
|
|
2019-09-04 04:12:00 -07:00
|
|
|
jsonVal, err := json.Marshal(resp)
|
|
|
|
if err != nil {
|
|
|
|
httpError(w, http.StatusInternalServerError, "json encode: %s", err)
|
2019-08-21 04:39:37 -07:00
|
|
|
return
|
|
|
|
}
|
2019-09-04 04:12:00 -07:00
|
|
|
w.Header().Set("Content-Type", "application/json")
|
|
|
|
_, err = w.Write(jsonVal)
|
|
|
|
if err != nil {
|
|
|
|
httpError(w, http.StatusInternalServerError, "http write: %s", err)
|
|
|
|
}
|
2019-08-21 04:39:37 -07:00
|
|
|
}
|
|
|
|
|
2019-09-04 04:12:00 -07:00
|
|
|
// Set filtering configuration
|
2020-03-17 05:00:40 -07:00
|
|
|
func (f *Filtering) handleFilteringConfig(w http.ResponseWriter, r *http.Request) {
|
2019-09-04 04:12:00 -07:00
|
|
|
req := filteringConfig{}
|
|
|
|
err := json.NewDecoder(r.Body).Decode(&req)
|
2019-08-21 04:39:37 -07:00
|
|
|
if err != nil {
|
2019-09-04 04:12:00 -07:00
|
|
|
httpError(w, http.StatusBadRequest, "json decode: %s", err)
|
2019-08-21 04:39:37 -07:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2019-09-04 04:12:00 -07:00
|
|
|
if !checkFiltersUpdateIntervalHours(req.Interval) {
|
|
|
|
httpError(w, http.StatusBadRequest, "Unsupported interval")
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
config.DNS.FilteringEnabled = req.Enabled
|
|
|
|
config.DNS.FiltersUpdateIntervalHours = req.Interval
|
2019-10-09 09:51:26 -07:00
|
|
|
onConfigModified()
|
|
|
|
enableFilters(true)
|
2019-08-21 04:39:37 -07:00
|
|
|
}
|
|
|
|
|
2020-12-17 03:32:46 -07:00
|
|
|
// checkHostRespRule is a single matched rule in a check_host response.
type checkHostRespRule struct {
	// FilterListID is the ID of the filter list containing the rule.
	FilterListID int64 `json:"filter_list_id"`
	// Text is the text of the matched rule.
	Text string `json:"text"`
}
|
|
|
|
|
2019-11-06 05:13:31 -07:00
|
|
|
// checkHostResp is the response body of the filtering/check_host HTTP API.
type checkHostResp struct {
	Reason string `json:"reason"`

	// FilterID is the ID of the rule's filter list.
	//
	// Deprecated: Use Rules[*].FilterListID.
	FilterID int64 `json:"filter_id"`

	// Rule is the text of the matched rule.
	//
	// Deprecated: Use Rules[*].Text.
	Rule string `json:"rule"`

	// Rules contains all matched rules.
	Rules []*checkHostRespRule `json:"rules"`

	// SvcName is set for FilteredBlockedService results.
	SvcName string `json:"service_name"`

	// CanonName and IPList are set for Rewrite results.
	CanonName string   `json:"cname"`    // CNAME value
	IPList    []net.IP `json:"ip_addrs"` // list of IP addresses
}
|
|
|
|
|
2020-03-17 05:00:40 -07:00
|
|
|
func (f *Filtering) handleCheckHost(w http.ResponseWriter, r *http.Request) {
|
2019-11-06 05:13:31 -07:00
|
|
|
q := r.URL.Query()
|
|
|
|
host := q.Get("name")
|
|
|
|
|
|
|
|
setts := Context.dnsFilter.GetConfig()
|
|
|
|
setts.FilteringEnabled = true
|
2020-02-18 10:17:35 -07:00
|
|
|
Context.dnsFilter.ApplyBlockedServices(&setts, nil, true)
|
2019-11-06 05:13:31 -07:00
|
|
|
result, err := Context.dnsFilter.CheckHost(host, dns.TypeA, &setts)
|
|
|
|
if err != nil {
|
|
|
|
httpError(w, http.StatusInternalServerError, "couldn't apply filtering: %s: %s", host, err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
resp := checkHostResp{}
|
|
|
|
resp.Reason = result.Reason.String()
|
|
|
|
resp.SvcName = result.ServiceName
|
|
|
|
resp.CanonName = result.CanonName
|
|
|
|
resp.IPList = result.IPList
|
2020-12-17 03:32:46 -07:00
|
|
|
|
2020-12-21 07:48:07 -07:00
|
|
|
if len(result.Rules) > 0 {
|
|
|
|
resp.FilterID = result.Rules[0].FilterListID
|
|
|
|
resp.Rule = result.Rules[0].Text
|
|
|
|
}
|
|
|
|
|
2020-12-17 03:32:46 -07:00
|
|
|
resp.Rules = make([]*checkHostRespRule, len(result.Rules))
|
|
|
|
for i, r := range result.Rules {
|
|
|
|
resp.Rules[i] = &checkHostRespRule{
|
|
|
|
FilterListID: r.FilterListID,
|
|
|
|
Text: r.Text,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-06 05:13:31 -07:00
|
|
|
js, err := json.Marshal(resp)
|
|
|
|
if err != nil {
|
|
|
|
httpError(w, http.StatusInternalServerError, "json encode: %s", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
w.Header().Set("Content-Type", "application/json")
|
|
|
|
_, _ = w.Write(js)
|
|
|
|
}
|
|
|
|
|
2019-09-04 04:12:00 -07:00
|
|
|
// RegisterFilteringHandlers - register handlers
|
2020-03-17 05:00:40 -07:00
|
|
|
func (f *Filtering) RegisterFilteringHandlers() {
|
|
|
|
httpRegister("GET", "/control/filtering/status", f.handleFilteringStatus)
|
|
|
|
httpRegister("POST", "/control/filtering/config", f.handleFilteringConfig)
|
|
|
|
httpRegister("POST", "/control/filtering/add_url", f.handleFilteringAddURL)
|
|
|
|
httpRegister("POST", "/control/filtering/remove_url", f.handleFilteringRemoveURL)
|
|
|
|
httpRegister("POST", "/control/filtering/set_url", f.handleFilteringSetURL)
|
|
|
|
httpRegister("POST", "/control/filtering/refresh", f.handleFilteringRefresh)
|
|
|
|
httpRegister("POST", "/control/filtering/set_rules", f.handleFilteringSetRules)
|
|
|
|
httpRegister("GET", "/control/filtering/check_host", f.handleCheckHost)
|
2019-09-04 04:12:00 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
// checkFiltersUpdateIntervalHours returns true if i is one of the supported
// filter-update intervals: disabled (0), 1 hour, 12 hours, or 1, 3, or 7
// days.
func checkFiltersUpdateIntervalHours(i uint32) bool {
	switch i {
	case 0, 1, 12, 1 * 24, 3 * 24, 7 * 24:
		return true
	default:
		return false
	}
}
|