2014-06-11 16:40:54 -07:00
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
2014-10-13 02:18:13 -07:00
|
|
|
"crypto/tls"
|
2014-06-11 16:40:54 -07:00
|
|
|
"encoding/json"
|
|
|
|
"flag"
|
|
|
|
"fmt"
|
2014-06-28 00:46:03 -07:00
|
|
|
"html/template"
|
2014-06-11 16:40:54 -07:00
|
|
|
"io"
|
2014-06-28 00:46:03 -07:00
|
|
|
"io/ioutil"
|
2014-06-11 16:40:54 -07:00
|
|
|
"log"
|
|
|
|
"net/http"
|
|
|
|
"os"
|
|
|
|
"path"
|
|
|
|
"path/filepath"
|
2014-06-28 00:46:03 -07:00
|
|
|
"regexp"
|
|
|
|
"strings"
|
2014-06-11 16:40:54 -07:00
|
|
|
"sync"
|
|
|
|
"time"
|
|
|
|
)
|
|
|
|
|
|
|
|
// Command-line configuration and the parsed index template.
var (
	// keyFile is the path to the TLS private key file.
	keyFile = flag.String("key", "", "Key file")
	// certFile is the path to the TLS certificate file.
	certFile = flag.String("cert", "", "Certificate file")
	// dbDir is the directory where incoming reports are stored, one
	// subdirectory per day (yyyymmdd).
	dbDir = flag.String("db", "", "Database directory")
	// port is the TLS listen port.
	port = flag.Int("port", 8443, "Listen port")
	// tpl is the parsed index.html template; set once in main before
	// the server starts, read-only afterwards.
	tpl *template.Template
)
|
|
|
|
|
2014-06-28 00:46:03 -07:00
|
|
|
// funcs holds the helper functions exposed to the index template.
// commatize and number are formatting helpers defined elsewhere in
// this package.
var funcs = map[string]interface{}{
	"commatize": commatize,
	"number": number,
}
|
|
|
|
|
2014-06-11 16:40:54 -07:00
|
|
|
func main() {
|
|
|
|
flag.Parse()
|
|
|
|
log.SetFlags(log.Ldate | log.Ltime | log.Lshortfile)
|
|
|
|
|
2014-06-28 00:46:03 -07:00
|
|
|
fd, err := os.Open("static/index.html")
|
|
|
|
if err != nil {
|
|
|
|
log.Fatal(err)
|
|
|
|
}
|
|
|
|
bs, err := ioutil.ReadAll(fd)
|
|
|
|
if err != nil {
|
|
|
|
log.Fatal(err)
|
|
|
|
}
|
|
|
|
fd.Close()
|
|
|
|
tpl = template.Must(template.New("index.html").Funcs(funcs).Parse(string(bs)))
|
|
|
|
|
|
|
|
http.HandleFunc("/", rootHandler)
|
2014-06-11 16:40:54 -07:00
|
|
|
http.HandleFunc("/newdata", newDataHandler)
|
|
|
|
http.HandleFunc("/report", reportHandler)
|
2014-06-28 00:46:03 -07:00
|
|
|
http.Handle("/static/", http.StripPrefix("/static/", http.FileServer(http.Dir("static"))))
|
2014-06-11 16:40:54 -07:00
|
|
|
|
2014-10-13 02:18:13 -07:00
|
|
|
cert, err := tls.LoadX509KeyPair(*certFile, *keyFile)
|
|
|
|
if err != nil {
|
|
|
|
log.Fatal(err)
|
|
|
|
}
|
|
|
|
|
|
|
|
cfg := &tls.Config{
|
|
|
|
Certificates: []tls.Certificate{cert},
|
|
|
|
SessionTicketsDisabled: true,
|
|
|
|
}
|
|
|
|
|
|
|
|
listener, err := tls.Listen("tcp", fmt.Sprintf(":%d", *port), cfg)
|
|
|
|
if err != nil {
|
|
|
|
log.Fatal(err)
|
|
|
|
}
|
|
|
|
|
|
|
|
srv := http.Server{
|
|
|
|
ReadTimeout: 5 * time.Second,
|
|
|
|
WriteTimeout: 5 * time.Second,
|
|
|
|
}
|
|
|
|
|
|
|
|
err = srv.Serve(listener)
|
2014-06-11 16:40:54 -07:00
|
|
|
if err != nil {
|
|
|
|
log.Fatal(err)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-06-28 00:46:03 -07:00
|
|
|
func rootHandler(w http.ResponseWriter, r *http.Request) {
|
|
|
|
if r.URL.Path == "/" || r.URL.Path == "/index.html" {
|
|
|
|
k := timestamp()
|
|
|
|
rep := getReport(k)
|
|
|
|
|
|
|
|
w.Header().Set("Content-Type", "text/html; charset=utf-8")
|
|
|
|
err := tpl.Execute(w, rep)
|
|
|
|
if err != nil {
|
|
|
|
log.Println(err)
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
http.Error(w, "Not found", 404)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func reportHandler(w http.ResponseWriter, r *http.Request) {
|
|
|
|
w.Header().Set("Content-Type", "application/json")
|
|
|
|
k := timestamp()
|
|
|
|
rep := getReport(k)
|
|
|
|
json.NewEncoder(w).Encode(rep)
|
|
|
|
}
|
|
|
|
|
2014-06-11 16:40:54 -07:00
|
|
|
func newDataHandler(w http.ResponseWriter, r *http.Request) {
|
|
|
|
today := time.Now().Format("20060102")
|
|
|
|
dir := filepath.Join(*dbDir, today)
|
|
|
|
ensureDir(dir, 0700)
|
|
|
|
|
|
|
|
var m map[string]interface{}
|
|
|
|
lr := &io.LimitedReader{R: r.Body, N: 10240}
|
|
|
|
err := json.NewDecoder(lr).Decode(&m)
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
log.Println(err)
|
|
|
|
http.Error(w, err.Error(), 500)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
id, ok := m["uniqueID"]
|
|
|
|
if ok {
|
|
|
|
idStr, ok := id.(string)
|
|
|
|
if !ok {
|
|
|
|
if err != nil {
|
|
|
|
log.Println("No ID")
|
|
|
|
http.Error(w, "No ID", 500)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-12-02 06:26:32 -07:00
|
|
|
// The ID is base64 encoded, so can contain slashes. Replace those with dots instead.
|
|
|
|
idStr = strings.Replace(idStr, "/", ".", -1)
|
|
|
|
|
2014-06-11 16:40:54 -07:00
|
|
|
f, err := os.Create(path.Join(dir, idStr+".json"))
|
|
|
|
if err != nil {
|
|
|
|
log.Println(err)
|
|
|
|
http.Error(w, err.Error(), 500)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
json.NewEncoder(f).Encode(m)
|
|
|
|
f.Close()
|
|
|
|
} else {
|
|
|
|
log.Println("No ID")
|
|
|
|
http.Error(w, "No ID", 500)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// report mirrors the JSON usage report submitted by a single node.
// Size and memory fields are in MiB as reported; getReport converts
// them to bytes for display.
type report struct {
	UniqueID string
	Version string
	Platform string // e.g. "linux-amd64"; the part before "-" is treated as the OS
	NumRepos int
	NumNodes int
	TotFiles int
	RepoMaxFiles int
	TotMiB int
	RepoMaxMiB int
	MemoryUsageMiB int
	SHA256Perf float64 // hashing performance; presumably MiB/s — getReport scales by 1<<20
	MemorySize int // system memory in MiB
}
|
|
|
|
|
2014-06-16 02:14:01 -07:00
|
|
|
func fileList() ([]string, error) {
|
|
|
|
files := make(map[string]string)
|
|
|
|
t0 := time.Now().Add(-24 * time.Hour).Format("20060102")
|
|
|
|
t1 := time.Now().Format("20060102")
|
|
|
|
|
|
|
|
dir := filepath.Join(*dbDir, t0)
|
|
|
|
gr, err := filepath.Glob(filepath.Join(dir, "*.json"))
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
for _, f := range gr {
|
|
|
|
bn := filepath.Base(f)
|
|
|
|
files[bn] = f
|
|
|
|
}
|
|
|
|
|
|
|
|
dir = filepath.Join(*dbDir, t1)
|
|
|
|
gr, err = filepath.Glob(filepath.Join(dir, "*.json"))
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
for _, f := range gr {
|
|
|
|
bn := filepath.Base(f)
|
|
|
|
files[bn] = f
|
|
|
|
}
|
|
|
|
|
|
|
|
l := make([]string, 0, len(files))
|
|
|
|
for _, f := range files {
|
2014-06-28 02:24:25 -07:00
|
|
|
si, err := os.Stat(f)
|
|
|
|
if err != nil {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if time.Since(si.ModTime()) < 24*time.Hour {
|
|
|
|
l = append(l, f)
|
|
|
|
}
|
2014-06-16 02:14:01 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
return l, nil
|
|
|
|
}
|
|
|
|
|
2014-06-28 02:24:25 -07:00
|
|
|
// category is one statistics row in the rendered report: the
// aggregated values plus presentation metadata for the template.
type category struct {
	Values [4]float64 // aggregate statistics as produced by statsForInts/statsForFloats
	Key string
	Descr string // human-readable description of the row
	Unit string // unit suffix, e.g. "B" or "B/s"; empty for plain counts
	Binary bool // true when binary (1024-based) prefixes should be used
}
|
|
|
|
|
2014-06-28 00:46:03 -07:00
|
|
|
// reportCache holds the most recently generated report; its "key"
// entry records the hour timestamp it was built for. Guarded by
// reportMutex.
var reportCache map[string]interface{}

// reportMutex serializes report generation and cache access.
var reportMutex sync.Mutex
|
|
|
|
|
|
|
|
func getReport(key string) map[string]interface{} {
|
|
|
|
reportMutex.Lock()
|
|
|
|
defer reportMutex.Unlock()
|
|
|
|
|
|
|
|
if k := reportCache["key"]; k == key {
|
|
|
|
return reportCache
|
|
|
|
}
|
2014-06-11 16:40:54 -07:00
|
|
|
|
2014-06-28 00:46:03 -07:00
|
|
|
files, err := fileList()
|
|
|
|
if err != nil {
|
|
|
|
return nil
|
|
|
|
}
|
2014-06-11 16:40:54 -07:00
|
|
|
|
2014-06-28 00:46:03 -07:00
|
|
|
nodes := 0
|
|
|
|
var versions []string
|
|
|
|
var platforms []string
|
|
|
|
var oses []string
|
|
|
|
var numRepos []int
|
|
|
|
var numNodes []int
|
|
|
|
var totFiles []int
|
|
|
|
var maxFiles []int
|
|
|
|
var totMiB []int
|
|
|
|
var maxMiB []int
|
|
|
|
var memoryUsage []int
|
|
|
|
var sha256Perf []float64
|
|
|
|
var memorySize []int
|
2014-06-11 16:40:54 -07:00
|
|
|
|
2014-06-28 00:46:03 -07:00
|
|
|
for _, fn := range files {
|
|
|
|
f, err := os.Open(fn)
|
2014-06-11 16:40:54 -07:00
|
|
|
if err != nil {
|
2014-06-28 00:46:03 -07:00
|
|
|
continue
|
2014-06-11 16:40:54 -07:00
|
|
|
}
|
|
|
|
|
2014-06-28 00:46:03 -07:00
|
|
|
var rep report
|
|
|
|
err = json.NewDecoder(f).Decode(&rep)
|
|
|
|
if err != nil {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
f.Close()
|
2014-06-11 16:40:54 -07:00
|
|
|
|
2014-06-28 00:46:03 -07:00
|
|
|
nodes++
|
|
|
|
versions = append(versions, transformVersion(rep.Version))
|
|
|
|
platforms = append(platforms, rep.Platform)
|
|
|
|
ps := strings.Split(rep.Platform, "-")
|
|
|
|
oses = append(oses, ps[0])
|
|
|
|
if rep.NumRepos > 0 {
|
|
|
|
numRepos = append(numRepos, rep.NumRepos)
|
|
|
|
}
|
|
|
|
if rep.NumNodes > 0 {
|
|
|
|
numNodes = append(numNodes, rep.NumNodes)
|
|
|
|
}
|
|
|
|
if rep.TotFiles > 0 {
|
|
|
|
totFiles = append(totFiles, rep.TotFiles)
|
|
|
|
}
|
|
|
|
if rep.RepoMaxFiles > 0 {
|
|
|
|
maxFiles = append(maxFiles, rep.RepoMaxFiles)
|
|
|
|
}
|
|
|
|
if rep.TotMiB > 0 {
|
2014-06-28 02:24:25 -07:00
|
|
|
totMiB = append(totMiB, rep.TotMiB*(1<<20))
|
2014-06-28 00:46:03 -07:00
|
|
|
}
|
|
|
|
if rep.RepoMaxMiB > 0 {
|
2014-06-28 02:24:25 -07:00
|
|
|
maxMiB = append(maxMiB, rep.RepoMaxMiB*(1<<20))
|
2014-06-11 16:40:54 -07:00
|
|
|
}
|
2014-06-28 00:46:03 -07:00
|
|
|
if rep.MemoryUsageMiB > 0 {
|
2014-06-28 02:24:25 -07:00
|
|
|
memoryUsage = append(memoryUsage, rep.MemoryUsageMiB*(1<<20))
|
2014-06-28 00:46:03 -07:00
|
|
|
}
|
|
|
|
if rep.SHA256Perf > 0 {
|
2014-06-28 02:24:25 -07:00
|
|
|
sha256Perf = append(sha256Perf, rep.SHA256Perf*(1<<20))
|
2014-06-28 00:46:03 -07:00
|
|
|
}
|
|
|
|
if rep.MemorySize > 0 {
|
2014-06-28 02:24:25 -07:00
|
|
|
memorySize = append(memorySize, rep.MemorySize*(1<<20))
|
2014-06-28 00:46:03 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-06-28 02:24:25 -07:00
|
|
|
var categories []category
|
|
|
|
categories = append(categories, category{
|
|
|
|
Values: statsForInts(totFiles),
|
|
|
|
Descr: "Files Managed per Node",
|
|
|
|
})
|
|
|
|
|
|
|
|
categories = append(categories, category{
|
|
|
|
Values: statsForInts(maxFiles),
|
|
|
|
Descr: "Files in Largest Repo",
|
|
|
|
})
|
|
|
|
|
|
|
|
categories = append(categories, category{
|
|
|
|
Values: statsForInts(totMiB),
|
|
|
|
Descr: "Data Managed per Node",
|
|
|
|
Unit: "B",
|
|
|
|
Binary: true,
|
|
|
|
})
|
|
|
|
|
|
|
|
categories = append(categories, category{
|
|
|
|
Values: statsForInts(maxMiB),
|
|
|
|
Descr: "Data in Largest Repo",
|
|
|
|
Unit: "B",
|
|
|
|
Binary: true,
|
|
|
|
})
|
|
|
|
|
|
|
|
categories = append(categories, category{
|
|
|
|
Values: statsForInts(numNodes),
|
|
|
|
Descr: "Number of Nodes in Cluster",
|
|
|
|
})
|
|
|
|
|
|
|
|
categories = append(categories, category{
|
|
|
|
Values: statsForInts(numRepos),
|
|
|
|
Descr: "Number of Repositories Configured",
|
|
|
|
})
|
|
|
|
|
|
|
|
categories = append(categories, category{
|
|
|
|
Values: statsForInts(memoryUsage),
|
|
|
|
Descr: "Memory Usage",
|
|
|
|
Unit: "B",
|
|
|
|
Binary: true,
|
|
|
|
})
|
|
|
|
|
|
|
|
categories = append(categories, category{
|
|
|
|
Values: statsForInts(memorySize),
|
|
|
|
Descr: "System Memory",
|
|
|
|
Unit: "B",
|
|
|
|
Binary: true,
|
|
|
|
})
|
|
|
|
|
|
|
|
categories = append(categories, category{
|
|
|
|
Values: statsForFloats(sha256Perf),
|
|
|
|
Descr: "SHA-256 Hashing Performance",
|
|
|
|
Unit: "B/s",
|
|
|
|
Binary: true,
|
|
|
|
})
|
|
|
|
|
2014-06-28 00:46:03 -07:00
|
|
|
r := make(map[string]interface{})
|
|
|
|
r["key"] = key
|
|
|
|
r["nodes"] = nodes
|
|
|
|
r["categories"] = categories
|
|
|
|
r["versions"] = analyticsFor(versions)
|
|
|
|
r["platforms"] = analyticsFor(platforms)
|
|
|
|
r["os"] = analyticsFor(oses)
|
|
|
|
|
|
|
|
reportCache = r
|
2014-06-11 16:40:54 -07:00
|
|
|
|
2014-06-28 00:46:03 -07:00
|
|
|
return r
|
|
|
|
}
|
|
|
|
|
2014-06-11 16:40:54 -07:00
|
|
|
func ensureDir(dir string, mode int) {
|
|
|
|
fi, err := os.Stat(dir)
|
|
|
|
if os.IsNotExist(err) {
|
|
|
|
os.MkdirAll(dir, 0700)
|
|
|
|
} else if mode >= 0 && err == nil && int(fi.Mode()&0777) != mode {
|
|
|
|
os.Chmod(dir, os.FileMode(mode))
|
|
|
|
}
|
|
|
|
}
|
2014-06-28 00:46:03 -07:00
|
|
|
|
2014-08-07 05:55:13 -07:00
|
|
|
// vRe matches a release version (optionally with a lowercase
// pre-release word such as -beta1) followed by a separator that
// introduces extra build metadata, e.g. "v0.8.15+13-gabcdef".
var vRe = regexp.MustCompile(`^(v\d+\.\d+\.\d+(?:-[a-z]\w+)?)[+\.-]`)

// transformVersion returns a version number formatted correctly, with all
// development versions aggregated into one.
func transformVersion(v string) string {
	if v == "unknown-dev" {
		return v
	}
	if !strings.HasPrefix(v, "v") {
		v = "v" + v
	}

	m := vRe.FindStringSubmatch(v)
	if len(m) == 0 {
		// Plain release version; leave it as-is.
		return v
	}
	// Anything with trailing build metadata counts as a dev build of
	// the matched base version.
	return m[1] + " (+dev)"
}
|
|
|
|
|
|
|
|
// timestamp returns a time stamp for the current hour, to be used as a cache key
|
|
|
|
func timestamp() string {
|
|
|
|
return time.Now().Format("20060102T15")
|
|
|
|
}
|