diff --git a/.gitignore b/.gitignore
index 91abe37..556eb12 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,6 +5,7 @@ artifacts
 *.dll
 *.so
 *.dylib
+coverage
 
 # Test binary, built with `go test -c`
 *.test
diff --git a/Makefile b/Makefile
index 98771f9..6054b94 100644
--- a/Makefile
+++ b/Makefile
@@ -1,6 +1,7 @@
 # Inspired from https://dustinspecker.com/posts/go-combined-unit-integration-code-coverage/ and https://netdevops.me/2023/test-coverage-for-go-integration-tests/
 BIN_DIR = $(CURDIR)/artifacts
 BINARY = $(BIN_DIR)/gocustomurls
+COVERAGE_DIR = $(CURDIR)/coverage
 CURRENT_DIR = $(shell pwd)
 CUR_TAG = $(shell git tag | sort -g | tail -1 | cut -c 2-)
 VERSION_NUMBER ?= 0.0.0
@@ -29,4 +30,30 @@ lint:
 
 .PHONY: build
 build:
-	go build -o $(BINARY)
\ No newline at end of file
+	go build -o $(BINARY)
+
+.PHONY: build-debug
+build-debug:
+	mkdir -p $(BIN_DIR)
+	go build -cover -o $(BINARY) .
+
+.PHONY: test
+test: build-debug
+	rm -rf $(COVERAGE_DIR)
+	mkdir -p $(COVERAGE_DIR)
+	go test -cover ./... -args -test.gocoverdir="$(COVERAGE_DIR)"
+
+.PHONY: coverage-full
+coverage-full: test
+	go tool covdata textfmt -i=$(COVERAGE_DIR) -o $(COVERAGE_DIR)/coverage.out
+	go tool cover -func=$(COVERAGE_DIR)/coverage.out
+
+.PHONY: coverage-integration
+coverage-integration:
+	go test ./... -run Integration -covermode=count -coverprofile=$(COVERAGE_DIR)/integration.out
+	go tool cover -func=$(COVERAGE_DIR)/integration.out
+
+.PHONY: coverage-html
+coverage-html: coverage-full
+	go tool cover -html=./coverage/coverage.out -o ./coverage/coverage.html
+	# open ./coverage/coverage.html
\ No newline at end of file
diff --git a/README.md b/README.md
index c35d74a..6f38c81 100644
--- a/README.md
+++ b/README.md
@@ -23,7 +23,7 @@ You can test with
 
 ```sh
 $ http --body "https://{domain.name}/{package}?go-get=1"
-...truncated output
+...Truncated output
 ```
 
 (b) With the go-get command
@@ -41,7 +41,7 @@ Initial
 ```json
 ~/.gocustomurls/logs/app.log {
    size 20M
-   copytruncate
+   copyTruncate
    compress
    notifempty
    delaycompress
diff --git a/app.go b/app.go
index 714876d..9913c2d 100644
--- a/app.go
+++ b/app.go
@@ -12,7 +12,7 @@ type Application struct {
 	Log *LogFile
 }
 
-func (app *Application) routes() {
+func (app *Application) Routes() {
 	m := http.NewServeMux()
 
 	m.HandleFunc("/healthcheck", healthcheck)
@@ -23,7 +23,7 @@ func (app *Application) routes() {
 }
 
 func (app *Application) Setup(port string) *http.Server {
-	app.routes()
+	app.Routes()
 	return &http.Server{
 		Addr:    fmt.Sprintf(":%s", port),
 		Handler: app.Mux,
diff --git a/bytesize.go b/bytesize.go
new file mode 100644
index 0000000..33bcbc1
--- /dev/null
+++ b/bytesize.go
@@ -0,0 +1,79 @@
+package main
+
+import (
+	"fmt"
+	"math"
+	"strconv"
+	"strings"
+)
+
+var units []string = []string{"KB", "MB", "GB", "B"}
+
+// ByteSize represents a number of bytes
+type ByteSize struct {
+	HumanRep  string
+	NumberRep int64
+}
+
+// Byte size suffixes.
+const (
+	B  int64 = 1
+	KB int64 = 1 << (10 * iota)
+	MB
+	GB
+)
+
+// Used to convert user input to ByteSize
+var unitMap = map[string]int64{
+	"B":  B,
+	"KB": KB,
+	"MB": MB,
+	"GB": GB,
+}
+
+// Converts a string representation of a size into a ByteSize
+func (b *ByteSize) ParseFromString(s string) error {
+	s = strings.TrimSpace(s)
+	b.HumanRep = s
+	var fragments []string
+	unitFound := ""
+
+	for _, unit := range units {
+		fragments = strings.Split(s, unit)
+		if len(fragments) == 2 {
+			unitFound = unit
+			break
+		}
+	}
+
+	if len(unitFound) == 0 {
+		return fmt.Errorf("unrecognized size suffix")
+	}
+
+	value, err := strconv.ParseFloat(fragments[0], 64)
+	if err != nil {
+		return err
+	}
+
+	unit, ok := unitMap[strings.ToUpper(unitFound)]
+	if !ok {
+		return fmt.Errorf("unrecognized size suffix %s", unitFound)
+	}
+
+	b.NumberRep = int64(value * float64(unit))
+	return nil
+}
+
+// Converts a number of bytes into ByteSize
+func (b *ByteSize) ParseFromNumber(n int64) {
+	b.NumberRep = n
+	bf := float64(n)
+	for _, unit := range []string{"", "K", "M", "G"} {
+		if math.Abs(bf) < 1024.0 {
+			b.HumanRep = fmt.Sprintf("%3.1f%sB", bf, unit)
+			return
+		}
+		bf /= 1024.0
+	}
+	b.HumanRep = fmt.Sprintf("%.1fTB", bf)
+}
diff --git a/bytesize_test.go b/bytesize_test.go
new file mode 100644
index 0000000..c508b08
--- /dev/null
+++ b/bytesize_test.go
@@ -0,0 +1,46 @@
+package main
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestParseFromString(t *testing.T) {
+	tests := map[string]struct {
+		input1 string
+		input2 int64
+	}{
+		"KB": {input1: "5.5KB", input2: 5632},
+		"MB": {input1: "6.7MB", input2: 7025459},
+		"GB": {input1: "7.5GB", input2: 8053063680},
+	}
+
+	for name, tc := range tests {
+		t.Run(name, func(t *testing.T) {
+			by := ByteSize{}
+			err := by.ParseFromString(tc.input1)
+			assert.Equal(t, err, nil)
+			assert.EqualValues(t, by.NumberRep, tc.input2)
+		})
+	}
+}
+
+func TestParseFromNumber(t *testing.T) {
+	tests := map[string]struct {
+		input1 int64
+		input2 string
+	}{
+		"KB": {input1: 528870, input2: "516.5KB"},
+		"MB": {input1: 7025459, input2: "6.7MB"},
+		"GB": {input1: 8053063680, input2: "7.5GB"},
+	}
+
+	for name, tc := range tests {
+		t.Run(name, func(t *testing.T) {
+			by := ByteSize{}
+			by.ParseFromNumber(tc.input1)
+			assert.EqualValues(t, by.HumanRep, tc.input2)
+		})
+	}
+}
diff --git a/conf.go b/conf.go
index 65c0eea..181fc34 100644
--- a/conf.go
+++ b/conf.go
@@ -6,9 +6,10 @@ import (
 	"os"
 	"path/filepath"
 	"strings"
+	"sync"
 )
 
-type parsedConf struct {
+type ParsedConf struct {
 	RulesFp      string `json:"rulesPath"`
 	LogFp        string `json:"logPath"`
 	Compression  bool   `json:"compress"`
@@ -19,6 +20,7 @@ type parsedConf struct {
 type Config struct {
 	MappingFilePath string
 	MappingRules    ImportRulesMappings
+	sync.Mutex
 }
 
 type ImportRulesMappings struct {
@@ -73,6 +75,7 @@ func (c *Config) LoadMappingFile(fp string) error {
 	return nil
 }
 
+// generate default locations for all the config files
 func getDefaults() (map[string]string, error) {
 	m := make(map[string]string)
 	confDir, err := os.UserConfigDir()
@@ -85,13 +88,21 @@ func getDefaults() (map[string]string, error) {
 	}
 	m["rulesFp"] = filepath.Join(confDir, "gocustomcurls", "rules.json")
 	m["confFp"] = filepath.Join(confDir, "gocustomcurls", "config.json")
-	m["logfp"] = filepath.Join(homeDir, ".gocustomurls", "logs", "app.log")
+	m["logFp"] = filepath.Join(homeDir, ".gocustomurls", "logs", "app.log")
 	return m, nil
 }
 
-func generateDefaultConfigFile() (parsedConf, error) {
-	var p parsedConf
-	defaults, err := getDefaults()
+// generate the default config.json values and write them to the given location
+func generateDefaultConfigFile(defaultObj map[string]string) (ParsedConf, error) {
+	var p ParsedConf
+	var err error
+	var defaults map[string]string
+	if len(defaultObj) == 0 {
+		defaults, err = getDefaults()
+	} else {
+		defaults = defaultObj
+	}
+
 	if err != nil {
 		return p, err
 	}
@@ -127,21 +138,23 @@ func checkIfSizeIsConfigured(fsize string) (bool, error) {
 		}
 	}
 	if len(found) == 0 {
-		return false, fmt.Errorf("%s has the incorrect suffix, Please use one of this suffixes {\"K\", \"KB\",\"M\", \"MB\", \"G\", \"GB\"}", fsize)
+		return false, fmt.Errorf("%s has an incorrect suffix, please use one of these suffixes: {\"KB\", \"MB\", \"GB\"}", fsize)
 	}
 	return true, nil
 }
 
 // load the main config file
-func (c *Config) LoadMainConfigFile(fp string) (parsedConf, error) {
-	var conf parsedConf
+func (c *Config) LoadMainConfigFile(fp string) (ParsedConf, error) {
+	var conf ParsedConf
 	var err error
+	c.Lock()
+	defer c.Unlock()
 
 	ok := isFile(fp)
 	if !ok {
 		// generate config file
 		errorLog.Println("Warning, generating default config file")
-		conf, err = generateDefaultConfigFile()
+		conf, err = generateDefaultConfigFile(map[string]string{})
 		if err != nil {
 			return conf, err
 		}
diff --git a/conf_test.go b/conf_test.go
new file mode 100644
index 0000000..2b78a59
--- /dev/null
+++ b/conf_test.go
@@ -0,0 +1,154 @@
+package main
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestLoadMappingFile(t *testing.T) {
+
+	t.Run("load the mapping file correctly - initial load", func(t *testing.T) {
+		tmpDir := t.TempDir()
+
+		mkDirForTest(t, fmt.Sprintf("%s/tmp", tmpDir))
+		rulesJsonFp := "testData/rules.json"
+
+		expected := fmt.Sprintf("%s/tmp/rules.json", tmpDir)
+
+		cpFileForTest(t, rulesJsonFp, expected)
+
+		cfg := &Config{}
+
+		assert.Equal(t, cfg.MappingFilePath, "")
+		assert.Empty(t, cfg.MappingRules)
+		err := cfg.LoadMappingFile(expected)
+		assert.Equal(t, err, nil)
+		assert.Equal(t, cfg.MappingFilePath, expected)
+		assert.NotEmpty(t, cfg.MappingRules)
+	})
+
+	t.Run("load the mapping file correctly - reload", func(t *testing.T) {
+		tmpDir := t.TempDir()
+
+		mkDirForTest(t, fmt.Sprintf("%s/tmp", tmpDir))
+		rulesJsonFp := "testData/rules.json"
+
+		expected := fmt.Sprintf("%s/tmp/rules.json", tmpDir)
+
+		cpFileForTest(t, rulesJsonFp, expected)
+
+		cfg := &Config{}
+
+		assert.Equal(t, cfg.MappingFilePath, "")
+		assert.Empty(t, cfg.MappingRules)
+		err := cfg.LoadMappingFile(expected)
+		assert.Equal(t, err, nil)
+		assert.Equal(t, cfg.MappingFilePath, expected)
+		assert.NotEmpty(t, cfg.MappingRules)
+		oldRules := cfg.MappingRules
+
+		rulesJsonFp = "testData/rules2.json"
+
+		cpFileForTest(t, rulesJsonFp, expected)
+
+		err = cfg.LoadMappingFile(expected)
+		assert.Equal(t, err, nil)
+		assert.Equal(t, cfg.MappingFilePath, expected)
+		assert.NotEmpty(t, cfg.MappingRules)
+
+		newRules := cfg.MappingRules
+
+		assert.NotEqualValues(t, oldRules, newRules)
+	})
+}
+
+func TestGetDefaults(t *testing.T) {
+	actual, err := getDefaults()
+	assert.Equal(t, err, nil)
+	assert.Contains(t, actual, "rulesFp")
+	assert.Contains(t, actual, "logFp")
+	assert.Contains(t, actual, "confFp")
+}
+
+func TestSizeIsConfigured(t *testing.T) {
+	tests := map[string]struct {
+		input string
+		want  bool
+	}{
+		"wrong input - K":   {input: "5677.45K", want: false},
+		"wrong input - KiB": {input: "5677.45KiB", want: false},
+		"wrong input - M":   {input: "9.45M", want: false},
+		"wrong input 
- G": {input: "9.45G", want: false}, + "correct input - KB": {input: "5.45KB", want: true}, + "correct input - MB": {input: "5677.45MB", want: true}, + "correct input - GB": {input: "9.45GB", want: true}, + } + + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + got, _ := checkIfSizeIsConfigured(tc.input) + assert.Equal(t, got, tc.want) + }) + } +} + +func TestGenerateConfigFile(t *testing.T) { + tmpDir := t.TempDir() + + tempLogsDir := fmt.Sprintf("%s/tmp/logs", tmpDir) + tempConfigDir := fmt.Sprintf("%s/tmp/config", tmpDir) + + mkDirForTest(t, tempLogsDir) + mkDirForTest(t, tempConfigDir) + + defaultConfig := map[string]string{ + "rulesFp": fmt.Sprintf("%s/rules.json", tempConfigDir), + "confFp": fmt.Sprintf("%s/config.json", tempConfigDir), + "logFp": fmt.Sprintf("%s/app.log", tempLogsDir), + } + + ok := IsDirEmpty(t, tempConfigDir) + assert.Equal(t, ok, true) + ok = IsDirEmpty(t, tempLogsDir) + assert.Equal(t, ok, true) + + expected, err := generateDefaultConfigFile(defaultConfig) + assert.Equal(t, err, nil) + assert.NotEmpty(t, expected) + + ok = IsDirEmpty(t, tempConfigDir) + assert.Equal(t, ok, false) + ok = IsDirEmpty(t, tempLogsDir) + assert.Equal(t, ok, true) + +} + +func TestLoadMainConfigFileFromProvidedLocation(t *testing.T) { + tmpDir := t.TempDir() + + tempConfigDir := fmt.Sprintf("%s/tmp", tmpDir) + tempLogsDir := fmt.Sprintf("%s/tmp/logs", tmpDir) + + mkDirForTest(t, tempConfigDir) + + defaultConfigJson := map[string]any{ + "rulesPath": fmt.Sprintf("%s/rules.json", tempConfigDir), + "logPath": fmt.Sprintf("%s/app.log", tempLogsDir), + "port": "9005", + "compress": false, + "sizeToRotate": "50MB", + } + + tempConfigFile := fmt.Sprintf("%s/confg.json", tempConfigDir) + + writeJsonForTest(t, defaultConfigJson, tempConfigFile) + + cfg := &Config{} + assert.Equal(t, cfg.MappingFilePath, "") + conf, err := cfg.LoadMainConfigFile(tempConfigFile) + assert.Equal(t, err, nil) + assert.NotEmpty(t, conf) + assert.NotEmpty(t, cfg.MappingFilePath) +} diff --git a/embed.go b/embed.go index a99889d..a74ede7 100644 --- a/embed.go +++ b/embed.go @@ -10,7 +10,7 @@ import ( //go:embed templates/* var tmpls embed.FS -func GetServeHtml() (*template.Template, error) { +func getServeHtml() (*template.Template, error) { var t *template.Template data, err := tmpls.ReadFile("templates/success.html") if err != nil { @@ -19,7 +19,7 @@ func GetServeHtml() (*template.Template, error) { return template.New("").Parse(string(data)) } -func GetDefaultHtml() ([]byte, error) { +func getDefaultHtml() ([]byte, error) { var data []byte var err error data, err = tmpls.ReadFile("templates/default.html") diff --git a/go.mod b/go.mod index 341eca9..726011e 100644 --- a/go.mod +++ b/go.mod @@ -1,3 +1,14 @@ module gocustomurls go 1.20 + +require ( + github.com/stretchr/testify v1.9.0 + golang.org/x/net v0.26.0 +) + +require ( + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..0a13cef --- /dev/null +++ b/go.sum @@ -0,0 +1,12 @@ +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/testify v1.9.0 
h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ= +golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/handlers.go b/handlers.go index 9b097bc..cc4d85c 100644 --- a/handlers.go +++ b/handlers.go @@ -3,6 +3,7 @@ package main import ( "bytes" "fmt" + "net" "net/http" "strings" ) @@ -15,8 +16,7 @@ func reloadRules(c *Config) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { err := c.LoadMappingFile("") if err != nil { - e := fmt.Errorf("annot reload rules: %+v", err) - // errorLog.Printf("Cannot reload rules: %+v", err) + e := fmt.Errorf("cannot reload rules: %+v", err) http.Error(w, e.Error(), http.StatusInternalServerError) return } @@ -26,6 +26,7 @@ func reloadRules(c *Config) http.HandlerFunc { func serveRules(c *Config) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { + var nameOfPkg string if r.Method != http.MethodGet { http.Error(w, http.StatusText(http.StatusMethodNotAllowed), http.StatusInternalServerError) return @@ -33,7 +34,7 @@ func serveRules(c *Config) http.HandlerFunc { // if go-get param is absent, return nothing if r.FormValue("go-get") != "1" { - data, err := GetDefaultHtml() + data, err := getDefaultHtml() if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return @@ -42,25 +43,44 @@ func serveRules(c *Config) http.HandlerFunc { return } - nameOfPkg := r.Host + r.URL.Path + ipFragments := strings.Split(r.Host, ":") + if len(ipFragments) > 1 { + ipAddr := net.ParseIP(ipFragments[0]) + if ipAddr.IsLoopback() { + nameOfPkg = ipAddr.String() + r.URL.Path + } + } else { + nameOfPkg = r.Host + r.URL.Path + } + var found bool var vanityUrl, proto, repoUrl string for _, rule := range c.MappingRules.Mappings { if strings.HasPrefix(strings.ToLower(rule.VanityUrl+"/"), strings.Trim(strings.ToLower(nameOfPkg), " ")) { vanityUrl = rule.VanityUrl repoUrl = rule.RealUrl proto = rule.Protocol - + found = true break } } + if !found { + data, err := getDefaultHtml() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + w.Write(data) + return + } + d := ImportRuleStruct{ VanityUrl: vanityUrl, Proto: proto, RepoUrl: repoUrl, } - tmpl, err := GetServeHtml() + tmpl, err := getServeHtml() if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return diff --git a/handlers_test.go b/handlers_test.go new file mode 100644 index 0000000..03f3070 --- /dev/null +++ b/handlers_test.go @@ -0,0 +1,263 @@ +package main + +import ( + "encoding/json" + "fmt" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "golang.org/x/net/html" +) + +func TestIntegrationReloadRules(t *testing.T) { + if testing.Short() { + t.Skip("skipping reload rules integration test") + } + + c := &Config{} + + tmpDir := t.TempDir() + + tempLogsDir := fmt.Sprintf("%s/tmp/logs", tmpDir) + tempConfigDir := fmt.Sprintf("%s/tmp/config", tmpDir) + + confFile := fmt.Sprintf("%s/config.json", tempConfigDir) + rulesFile := 
fmt.Sprintf("%s/rules.json", tempConfigDir) + lFp := fmt.Sprintf("%s/app.log", tempLogsDir) + + mkDirForTest(t, tempConfigDir) + mkDirForTest(t, tempLogsDir) + + rulesJsonFp := "testData/rules.json" + cpFileForTest(t, rulesJsonFp, rulesFile) + + p := ParsedConf{ + RulesFp: rulesFile, + LogFp: lFp, + Port: "9050", + Compression: true, + SizeToRotate: "5MB", + } + jsonString, _ := json.Marshal(p) + writeForTest(t, confFile, jsonString) + + _, err := c.LoadMainConfigFile(confFile) + assert.Equal(t, err, nil) + + l, err := newFileLogger(p.LogFp, p.SizeToRotate, p.Compression) + assert.NotEmpty(t, l) + assert.Equal(t, err, nil) + + isEmpty := isFileEmpty(t, lFp) + assert.Equal(t, isEmpty, true) + + app := newTestApp(t, c, l) + app.Routes() + ts := newTestServer(t, app.Mux) + defer ts.Close() + + code, _, body := ts.get(t, "/reloadRules") + assert.Equal(t, code, http.StatusOK) + assert.Equal(t, string(body), "ok") + + isEmpty = isFileEmpty(t, lFp) + assert.Equal(t, isEmpty, true) + + oldRules := app.Config.MappingRules.Mappings + assert.NotEmpty(t, oldRules) + + nRulesJsonFp := "testData/rules2.json" + cpFileForTest(t, nRulesJsonFp, rulesFile) + + code, _, body = ts.get(t, "/reloadRules") + assert.Equal(t, code, http.StatusOK) + assert.Equal(t, string(body), "ok") + + isEmpty = isFileEmpty(t, lFp) + assert.Equal(t, isEmpty, true) + + newRules := app.Config.MappingRules.Mappings + assert.NotEmpty(t, newRules) + + assert.NotEqualValues(t, oldRules, newRules) +} + +func TestIntegrationServeRules(t *testing.T) { + if testing.Short() { + t.Skip("skipping serve rules integration test") + } + + t.Run("return default html if go-get is missing", func(t *testing.T) { + c := &Config{} + + tmpDir := t.TempDir() + + tempLogsDir := fmt.Sprintf("%s/tmp/logs", tmpDir) + tempConfigDir := fmt.Sprintf("%s/tmp/config", tmpDir) + + confFile := fmt.Sprintf("%s/config.json", tempConfigDir) + rulesFile := fmt.Sprintf("%s/rules.json", tempConfigDir) + lFp := fmt.Sprintf("%s/app.log", tempLogsDir) + + mkDirForTest(t, tempConfigDir) + mkDirForTest(t, tempLogsDir) + + rulesJsonFp := "testData/rules2.json" + cpFileForTest(t, rulesJsonFp, rulesFile) + + p := ParsedConf{ + RulesFp: rulesFile, + LogFp: lFp, + Port: "9050", + Compression: true, + SizeToRotate: "5MB", + } + jsonString, _ := json.Marshal(p) + writeForTest(t, confFile, jsonString) + + _, err := c.LoadMainConfigFile(confFile) + assert.Equal(t, err, nil) + + err = c.LoadMappingFile(p.RulesFp) + assert.Equal(t, err, nil) + + l, err := newFileLogger(p.LogFp, p.SizeToRotate, p.Compression) + assert.NotEmpty(t, l) + assert.Equal(t, err, nil) + + isEmpty := isFileEmpty(t, lFp) + assert.Equal(t, isEmpty, true) + + app := newTestApp(t, c, l) + app.Routes() + + ts := newTestServer(t, app.Mux) + defer ts.Close() + + code, _, body := ts.get(t, "/touche") + assert.Equal(t, code, http.StatusOK) + expected, _ := getDefaultHtml() + assert.Equal(t, string(body), string(expected)) + + isEmpty = isFileEmpty(t, lFp) + assert.Equal(t, isEmpty, false) + + }) + + t.Run("return normal html if go-get is included and the package exists", func(t *testing.T) { + c := &Config{} + + tmpDir := t.TempDir() + + tempLogsDir := fmt.Sprintf("%s/tmp/logs", tmpDir) + tempConfigDir := fmt.Sprintf("%s/tmp/config", tmpDir) + + confFile := fmt.Sprintf("%s/config.json", tempConfigDir) + rulesFile := fmt.Sprintf("%s/rules.json", tempConfigDir) + lFp := fmt.Sprintf("%s/app.log", tempLogsDir) + + mkDirForTest(t, tempConfigDir) + mkDirForTest(t, tempLogsDir) + + rulesJsonFp := "testData/rules3.json" + 
cpFileForTest(t, rulesJsonFp, rulesFile) + + p := ParsedConf{ + RulesFp: rulesFile, + LogFp: lFp, + Port: "9050", + Compression: true, + SizeToRotate: "5MB", + } + jsonString, _ := json.Marshal(p) + writeForTest(t, confFile, jsonString) + + _, err := c.LoadMainConfigFile(confFile) + assert.Equal(t, err, nil) + + err = c.LoadMappingFile(p.RulesFp) + assert.Equal(t, err, nil) + + l, err := newFileLogger(p.LogFp, p.SizeToRotate, p.Compression) + assert.NotEmpty(t, l) + assert.Equal(t, err, nil) + + isEmpty := isFileEmpty(t, lFp) + assert.Equal(t, isEmpty, true) + + app := newTestApp(t, c, l) + app.Routes() + ts := newTestServer(t, app.Mux) + defer ts.Close() + + code, _, body := ts.get(t, "/x/touche?go-get=1") + assert.Equal(t, code, http.StatusOK) + assert.NotEmpty(t, string(body)) + + _, err = html.Parse(strings.NewReader(string(body))) + assert.Equal(t, err, nil) + + t.Logf("Printing returned html => %s", string(body)) + isEmpty = isFileEmpty(t, lFp) + assert.Equal(t, isEmpty, false) + }) + + t.Run("return default html if go-get is included and the package does not exists", func(t *testing.T) { + c := &Config{} + + tmpDir := t.TempDir() + + tempLogsDir := fmt.Sprintf("%s/tmp/logs", tmpDir) + tempConfigDir := fmt.Sprintf("%s/tmp/config", tmpDir) + + confFile := fmt.Sprintf("%s/config.json", tempConfigDir) + rulesFile := fmt.Sprintf("%s/rules.json", tempConfigDir) + lFp := fmt.Sprintf("%s/app.log", tempLogsDir) + + mkDirForTest(t, tempConfigDir) + mkDirForTest(t, tempLogsDir) + + rulesJsonFp := "testData/rules3.json" + cpFileForTest(t, rulesJsonFp, rulesFile) + + p := ParsedConf{ + RulesFp: rulesFile, + LogFp: lFp, + Port: "9050", + Compression: true, + SizeToRotate: "5MB", + } + jsonString, _ := json.Marshal(p) + writeForTest(t, confFile, jsonString) + + _, err := c.LoadMainConfigFile(confFile) + assert.Equal(t, err, nil) + + err = c.LoadMappingFile(p.RulesFp) + assert.Equal(t, err, nil) + + l, err := newFileLogger(p.LogFp, p.SizeToRotate, p.Compression) + assert.NotEmpty(t, l) + assert.Equal(t, err, nil) + + isEmpty := isFileEmpty(t, lFp) + assert.Equal(t, isEmpty, true) + + app := newTestApp(t, c, l) + app.Routes() + ts := newTestServer(t, app.Mux) + defer ts.Close() + + code, _, body := ts.get(t, "/x/fuckoff?go-get=1") + assert.Equal(t, code, http.StatusOK) + expected, _ := getDefaultHtml() + assert.Equal(t, string(body), string(expected)) + + isEmpty = isFileEmpty(t, lFp) + assert.Equal(t, isEmpty, false) + + t.Logf("Printing returned html => %s", string(body)) + }) +} diff --git a/logger.go b/logger.go index 46dc2ee..76a1e1f 100644 --- a/logger.go +++ b/logger.go @@ -7,7 +7,6 @@ import ( "fmt" "io" "log" - "math" "net/http" "os" "path/filepath" @@ -46,10 +45,13 @@ var ( ) type LogFile struct { - handle *os.File - logger *log.Logger - path string - fileLock sync.Mutex + handle *os.File + logger *log.Logger + path string + fileLock sync.Mutex + canCompress bool + maxSize ByteSize + curSize ByteSize } type LogFileRec struct { @@ -58,7 +60,7 @@ type LogFileRec struct { Url string `json:"url"` } -func (lf *LogFile) makeCopyTo(dst string) error { +func (lf *LogFile) MakeCopyTo(dst string) error { var err error r, err := os.Open(lf.path) if err != nil { @@ -80,7 +82,7 @@ func (lf *LogFile) makeCopyTo(dst string) error { return err } -func (lf *LogFile) truncate() error { +func (lf *LogFile) Truncate() error { fd, err := os.OpenFile(lf.path, os.O_TRUNC, 0666) if err != nil { return fmt.Errorf("could not open file %q for truncation: %v", lf.path, err) @@ -92,17 +94,6 @@ func (lf *LogFile) 
truncate() error { return nil } -func prettyByteSize(b int64) string { - bf := float64(b) - for _, unit := range []string{"", "K", "M", "G", "T", "P", "E", "Z"} { - if math.Abs(bf) < 1024.0 { - return fmt.Sprintf("%3.1f%sB", bf, unit) - } - bf /= 1024.0 - } - return fmt.Sprintf("%.1fYB", bf) -} - func compressOldFile(fname string) error { reader, err := os.Open(fname) if err != nil { @@ -139,11 +130,10 @@ func compressOldFile(fname string) error { return nil } -func (lf *LogFile) rotate() error { - lf.fileLock.Lock() - defer lf.fileLock.Unlock() +func (lf *LogFile) Rotate() error { - prefix := fmt.Sprintf("%s.%s", lf.handle.Name(), time.Now().Format("2006-01-02")) + // new file + newFilePrefix := fmt.Sprintf("%s.%s", lf.handle.Name(), time.Now().Format("2006-01-02")) // close file to allow for read-only access err := lf.handle.Close() @@ -152,74 +142,83 @@ func (lf *LogFile) rotate() error { } // make a copy of the old log file - err = lf.makeCopyTo(prefix) + err = lf.MakeCopyTo(newFilePrefix) if err != nil { return err } // compress the new log file - err = compressOldFile(prefix) - if err != nil { - return err - } - - // truncate the old log file - err = lf.truncate() - if err != nil { - return err - } - - f, err := os.OpenFile(lf.path, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0666) - if err != nil { - return err - } - - lf.handle = f - return nil -} - -func (lf *LogFile) open(maxSize string) error { - f, err := os.OpenFile(lf.path, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0666) - if err != nil { - return err - } - finfo, err := f.Stat() - if err != nil { - return err - } - curSize := prettyByteSize(finfo.Size()) - if curSize > maxSize { - err = lf.rotate() + if lf.canCompress { + err = compressOldFile(newFilePrefix) if err != nil { return err } } + + // Truncate the old log file + err = lf.Truncate() + if err != nil { + return err + } + + f, err := os.OpenFile(lf.path, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0666) + if err != nil { + return err + } + lf.handle = f - lf.logger = log.New(f, "", 0) return nil } -func newFileLogger(path string, maxSize string) (*LogFile, error) { +func (lf *LogFile) Open() error { + lf.fileLock.Lock() + defer lf.fileLock.Unlock() + f, err := os.OpenFile(lf.path, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0666) + if err != nil { + return err + } + lf.handle = f + finfo, err := f.Stat() + if err != nil { + return err + } + curSize := finfo.Size() + if lf.maxSize.NumberRep != 0 && curSize >= lf.maxSize.NumberRep { + err = lf.Rotate() + if err != nil { + return err + } + } + lf.logger = log.New(f, "", 0) + finfo, err = lf.handle.Stat() + if err != nil { + return err + } + by := ByteSize{} + by.ParseFromNumber(finfo.Size()) + lf.curSize = by + return nil +} + +func newFileLogger(path string, maxSize string, canCompress bool) (*LogFile, error) { requestedFile := filepath.Clean(filepath.Join("/", path)) parentDir := filepath.Dir(requestedFile) err := os.MkdirAll(parentDir, 0755) if err != nil { return nil, err } - lf := &LogFile{ - path: path, + by := ByteSize{} + err = by.ParseFromString(maxSize) + if err != nil { + return nil, err } - err = lf.open(maxSize) + lf := &LogFile{ + path: path, + canCompress: canCompress, + maxSize: by, + } + err = lf.Open() return lf, err - // f, err := os.OpenFile(requestedFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0666) - // if err != nil { - // return nil, err - // } - // return &LogFile{ - // handle: f, - // logger: log.New(f, "", 0), - // path: path, - // }, nil } func (f *LogFile) Close() error { @@ -291,12 +290,13 @@ func getCurrentDate() 
string { return dt.Format(time.RFC3339Nano) } -func (f *LogFile) WriteLog(r *http.Request) error { - if f == nil { +func (lf *LogFile) WriteLog(r *http.Request) error { + if lf == nil { return nil } - f.fileLock.Lock() - defer f.fileLock.Unlock() + lf.fileLock.Lock() + defer lf.fileLock.Unlock() + var rec = make(map[string]string) rec["method"] = r.Method rec["requestUri"] = r.RequestURI @@ -314,6 +314,26 @@ func (f *LogFile) WriteLog(r *http.Request) error { if err != nil { return err } - f.logger.Println(string(b)) + lf.logger.Println(string(b)) + + finfo, err := lf.handle.Stat() + if err != nil { + return err + } + curSize := finfo.Size() + + if lf.maxSize.NumberRep != 0 && curSize > lf.maxSize.NumberRep { + err = lf.Rotate() + if err != nil { + return err + } + } + finfo, err = lf.handle.Stat() + if err != nil { + return err + } + by := ByteSize{} + by.ParseFromNumber(finfo.Size()) + lf.curSize = by return nil } diff --git a/logger_test.go b/logger_test.go new file mode 100644 index 0000000..127ab1d --- /dev/null +++ b/logger_test.go @@ -0,0 +1,257 @@ +package main + +import ( + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "os" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTruncate(t *testing.T) { + tmpDir := t.TempDir() + + mkDirForTest(t, fmt.Sprintf("%s/tmp", tmpDir)) + rulesJsonFp := "testData/app_over_size.log" + + tmpLf := fmt.Sprintf("%s/tmp/app.log", tmpDir) + + cpFileForTest(t, rulesJsonFp, tmpLf) + + lf := &LogFile{ + path: tmpLf, + } + + lf.path = tmpLf + // Continue from here + isEmpty := isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, false) + + err := lf.Truncate() + assert.Equal(t, err, nil) + isEmpty = isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, true) +} + +func TestMakeCopyTo(t *testing.T) { + tmpDir := t.TempDir() + + mkDirForTest(t, fmt.Sprintf("%s/tmp", tmpDir)) + rulesJsonFp := "testData/app_over_size.log" + + tmpLf := fmt.Sprintf("%s/tmp/app.log", tmpDir) + + cpFileForTest(t, rulesJsonFp, tmpLf) + + newLocation := fmt.Sprintf("%s/tmp/app.1.log", tmpDir) + + lf := &LogFile{ + path: tmpLf, + } + + lf.path = tmpLf + + isEmpty := isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, false) + + err := lf.MakeCopyTo(newLocation) + assert.Equal(t, err, nil) + isEmpty = isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, false) + ok := areFilesTheSame(t, lf.path, newLocation) + assert.Equal(t, ok, true) +} + +func TestCompressFile(t *testing.T) { + tmpDir := t.TempDir() + + mkDirForTest(t, fmt.Sprintf("%s/tmp", tmpDir)) + rulesJsonFp := "testData/app_over_size.log" + + tmpLf := fmt.Sprintf("%s/tmp/app.log", tmpDir) + + cpFileForTest(t, rulesJsonFp, tmpLf) + + expected := fmt.Sprintf("%s/tmp/app.log.gz", tmpDir) + + err := compressOldFile(tmpLf) + assert.Equal(t, err, nil) + exists := doesFileExist(tmpLf) + assert.Equal(t, exists, false) + exists = doesFileExist(expected) + assert.Equal(t, exists, true) +} + +func TestRotate(t *testing.T) { + tmpDir := t.TempDir() + + mkDirForTest(t, fmt.Sprintf("%s/tmp", tmpDir)) + rulesJsonFp := "testData/app_over_size.log" + + tmpLf := fmt.Sprintf("%s/tmp/app.log", tmpDir) + + cpFileForTest(t, rulesJsonFp, tmpLf) + + lf := &LogFile{ + path: tmpLf, + canCompress: true, + } + + fd, _ := os.Open(tmpLf) + lf.handle = fd + + isEmpty := isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, false) + err := lf.Rotate() + assert.Equal(t, err, nil) + exists := doesFileExist(tmpLf) + assert.Equal(t, exists, true) + isEmpty = isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, true) + + expected := walkMatch(t, tmpDir, 
"*.gz") + assert.NotEmpty(t, expected) + t.Logf("%+v", expected) + + assert.NotEqual(t, lf.handle, fd) +} + +func TestNewLogger(t *testing.T) { + t.Run("load logging file - do not Rotate", func(t *testing.T) { + + tmpDir := t.TempDir() + + mkDirForTest(t, fmt.Sprintf("%s/tmp", tmpDir)) + rulesJsonFp := "testData/app_under_size.log" + + tmpLf := fmt.Sprintf("%s/tmp/app.log", tmpDir) + + cpFileForTest(t, rulesJsonFp, tmpLf) + + lf, err := newFileLogger(tmpLf, "6KB", true) + expected := walkMatch(t, tmpDir, "*.gz") + assert.Equal(t, err, nil) + assert.Empty(t, expected) + assert.Equal(t, lf.path, tmpLf) + assert.NotEmpty(t, lf.handle) + assert.NotEmpty(t, lf.logger) + + assert.FileExists(t, tmpLf) + isEmpty := isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, false) + }) + + t.Run("load logging file - Rotate", func(t *testing.T) { + tmpDir := t.TempDir() + + mkDirForTest(t, fmt.Sprintf("%s/tmp", tmpDir)) + rulesJsonFp := "testData/app_over_size.log" + + tmpLf := fmt.Sprintf("%s/tmp/app.log", tmpDir) + cpFileForTest(t, rulesJsonFp, tmpLf) + + lf, err := newFileLogger(tmpLf, "4KB", true) + expected := walkMatch(t, tmpDir, "*.gz") + assert.Equal(t, err, nil) + assert.NotEmpty(t, expected) + assert.Equal(t, lf.path, tmpLf) + assert.NotEmpty(t, lf.handle) + assert.NotEmpty(t, lf.logger) + + assert.FileExists(t, tmpLf) + isEmpty := isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, true) + }) + + t.Run("create new logging file", func(t *testing.T) { + tmpDir := t.TempDir() + + tmpLf := fmt.Sprintf("%s/tmp/app.log", tmpDir) + + lf, err := newFileLogger(tmpLf, "4KB", true) + + expected := walkMatch(t, tmpDir, "*.gz") + assert.Equal(t, err, nil) + assert.Empty(t, expected) + assert.Equal(t, lf.path, tmpLf) + assert.NotEmpty(t, lf.handle) + assert.NotEmpty(t, lf.logger) + + assert.FileExists(t, tmpLf) + isEmpty := isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, true) + }) +} + +func TestWriteLog(t *testing.T) { + t.Run("write to logging file - do not Rotate", func(t *testing.T) { + req := httptest.NewRequest(http.MethodGet, "/{package}?go-get=1", nil) + + tmpDir := t.TempDir() + + tmpLf := fmt.Sprintf("%s/tmp/app.log", tmpDir) + + lf, err := newFileLogger(tmpLf, "4KB", true) + assert.Equal(t, err, nil) + assert.FileExists(t, tmpLf) + isEmpty := isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, true) + + err = lf.WriteLog(req) + assert.Equal(t, err, nil) + + expected := walkMatch(t, tmpDir, "*.gz") + assert.Empty(t, expected) + + isEmpty = isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, false) + + b := readTestFile(t, tmpLf) + m := make(map[string]string) + _ = json.Unmarshal(b, &m) + keys := make([]string, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + assert.Contains(t, keys, "requestUri") + assert.Contains(t, keys, "Host") + assert.Contains(t, keys, "method") + assert.Contains(t, keys, "ipAddr") + assert.Contains(t, keys, "requestDate") + }) + + t.Run("write to logging file - Rotate", func(t *testing.T) { + req := httptest.NewRequest(http.MethodGet, "/{package}?go-get=1", nil) + + tmpDir := t.TempDir() + + mkDirForTest(t, fmt.Sprintf("%s/tmp", tmpDir)) + rulesJsonFp := "testData/app_under_size.log" + + tmpLf := fmt.Sprintf("%s/tmp/app.log", tmpDir) + cpFileForTest(t, rulesJsonFp, tmpLf) + + lf, err := newFileLogger(tmpLf, "5KB", true) + assert.Equal(t, err, nil) + assert.FileExists(t, tmpLf) + isEmpty := isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, false) + + err = lf.WriteLog(req) + assert.Equal(t, err, nil) + err = lf.WriteLog(req) + assert.Equal(t, err, nil) + // t.Logf("%s\n", 
lf.curSize) + + expected := walkMatch(t, tmpDir, "*.gz") + assert.NotEmpty(t, expected) + + assert.FileExists(t, tmpLf) + isEmpty = isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, true) + + }) +} diff --git a/main.go b/main.go index b6171d1..e1f2a27 100644 --- a/main.go +++ b/main.go @@ -31,29 +31,6 @@ func flagsSet(flags *flag.FlagSet) map[string]bool { return s } -// generateDefaults - generate the default values for the rules.json and log files -// func generateDefaults(rulesfp string, logfp string) (string, string, error) { -// var newlogfp, newrulesfp string -// var err error -// newlogfp = logfp -// newrulesfp = rulesfp -// if len(newrulesfp) == 0 { -// dir, err := os.UserConfigDir() -// if err != nil { -// return newrulesfp, newlogfp, err -// } -// newrulesfp = filepath.Join(dir, "gocustomcurls", "rules.json") -// } -// if len(newlogfp) == 0 { -// dir, err := os.UserHomeDir() -// if err != nil { -// return newrulesfp, newlogfp, err -// } -// newlogfp = filepath.Join(dir, ".gocustomurls", "logs", "app.log") -// } -// return newrulesfp, newlogfp, err -// } - // isValidPort returns true if the port is valid // following the RFC https://datatracker.ietf.org/doc/html/rfc6056#section-2.1 func isValidPort(port int) bool { @@ -120,42 +97,12 @@ func main() { errorLog.Println(err) os.Exit(1) } - l, err := newFileLogger(pConf.LogFp, pConf.SizeToRotate) + l, err := newFileLogger(pConf.LogFp, pConf.SizeToRotate, pConf.Compression) if err != nil { errorLog.Println(err) os.Exit(1) } - // var rulesFile string - - // if allSetFlags["config"] { - // rulesFile = *rulesFileFlag - // } - - // var logFile string - // if allSetFlags["logFile"] { - // logFile = *logFileFlag - // } - - // rFile, lFile, err := generateDefaults(logFile, rulesFile) - // if err != nil { - // errorLog.Println(err) - // os.Exit(1) - // } - - // // load rules mapping - // c := &Config{} - // err = c.LoadMappingFile(rFile) - // if err != nil { - // errorLog.Println(err) - // os.Exit(1) - // } - // l, err := newFileLogger(lFile) - // if err != nil { - // errorLog.Println(err) - // os.Exit(1) - // } - app := &Application{ Config: c, Log: l, diff --git a/testData/app_over_size.log b/testData/app_over_size.log new file mode 100644 index 0000000..22cec9c --- /dev/null +++ b/testData/app_over_size.log @@ -0,0 +1,66 @@ +Stopped serving new connections. +Graceful shutdown complete. +Stopped serving new connections. +Graceful shutdown complete. +gocustomurls: Starting +Stopped serving new connections. +Graceful shutdown complete. +gocustomurls: Starting +Stopped serving new connections. +Graceful shutdown complete. +2024-05-28T14:29:35.767371211-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:42408","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T14:33:59.491487682-04:00 Stopped serving new connections. +2024-05-28T14:33:59.491691358-04:00 Graceful shutdown complete. +2024-05-28T14:48:29.818981344-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:39974","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T15:17:42.692146237-04:00 Stopped serving new connections. +2024-05-28T15:17:42.692268027-04:00 Graceful shutdown complete. 
+2024-05-28T15:18:11.625875598-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:38550","method":"GET","requestUri":"/x/touche?go-get=1"} +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:49398","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T15:40:09.870951532-04:00 Stopped serving new connections. +2024-05-28T15:40:09.871085499-04:00 Graceful shutdown complete. +2024-05-28T15:40:24.154320042-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:58664","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T16:52:38.449545322-04:00 Stopped serving new connections. +2024-05-28T16:52:38.449782355-04:00 Graceful shutdown complete. +2024-05-28T16:52:47.19006644-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:35120","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T16:54:39.537910226-04:00 Stopped serving new connections. +2024-05-28T16:54:39.538042526-04:00 Graceful shutdown complete. +2024-05-28T16:54:43.991755643-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:42694","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T17:00:37.549759919-04:00 Stopped serving new connections. +2024-05-28T17:00:37.549895593-04:00 Graceful shutdown complete. +2024-05-28T17:00:39.564002213-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:40190","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T17:01:53.902149407-04:00 Stopped serving new connections. +2024-05-28T17:01:53.902326423-04:00 Graceful shutdown complete. +2024-05-28T17:01:55.853543897-04:00 Starting +2024-05-28T17:01:57.187935277-04:00 Stopped serving new connections. +2024-05-28T17:01:57.188065162-04:00 Graceful shutdown complete. +2024-05-28T17:02:02.342827445-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:55876","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T17:04:36.661802337-04:00 Stopped serving new connections. +2024-05-28T17:04:36.661918402-04:00 Graceful shutdown complete. +2024-05-28T17:04:42.004255484-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:42466","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T17:13:11.636985677-04:00 Stopped serving new connections. +2024-05-28T17:13:11.637150614-04:00 Graceful shutdown complete. +2024-05-28T17:13:19.225477323-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:35882","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T17:13:37.590697118-04:00 Stopped serving new connections. +2024-05-28T17:13:37.59075443-04:00 Graceful shutdown complete. +2024-05-28T17:14:30.964387887-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:35074","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T17:14:40.412222772-04:00 Stopped serving new connections. 
+2024-05-28T17:14:40.412407892-04:00 Graceful shutdown complete. +2024-05-28T17:19:09.579082129-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:52204","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T17:19:32.491870213-04:00 Stopped serving new connections. +2024-05-28T17:19:32.492012026-04:00 Graceful shutdown complete. +2024-05-28T17:20:12.700323661-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:42938","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T18:08:26.484225491-04:00 Stopped serving new connections. +2024-05-28T18:08:26.484389145-04:00 Graceful shutdown complete. diff --git a/testData/app_under_size.log b/testData/app_under_size.log new file mode 100644 index 0000000..169e9dd --- /dev/null +++ b/testData/app_under_size.log @@ -0,0 +1,56 @@ +2024-05-28T14:29:35.767371211-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:42408","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T14:33:59.491487682-04:00 Stopped serving new connections. +2024-05-28T14:33:59.491691358-04:00 Graceful shutdown complete. +2024-05-28T14:48:29.818981344-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:39974","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T15:17:42.692146237-04:00 Stopped serving new connections. +2024-05-28T15:17:42.692268027-04:00 Graceful shutdown complete. +2024-05-28T15:18:11.625875598-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:38550","method":"GET","requestUri":"/x/touche?go-get=1"} +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:49398","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T15:40:09.870951532-04:00 Stopped serving new connections. +2024-05-28T15:40:09.871085499-04:00 Graceful shutdown complete. +2024-05-28T15:40:24.154320042-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:58664","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T16:52:38.449545322-04:00 Stopped serving new connections. +2024-05-28T16:52:38.449782355-04:00 Graceful shutdown complete. +2024-05-28T16:52:47.19006644-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:35120","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T16:54:39.537910226-04:00 Stopped serving new connections. +2024-05-28T16:54:39.538042526-04:00 Graceful shutdown complete. +2024-05-28T16:54:43.991755643-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:42694","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T17:00:37.549759919-04:00 Stopped serving new connections. +2024-05-28T17:00:37.549895593-04:00 Graceful shutdown complete. 
+2024-05-28T17:00:39.564002213-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:40190","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T17:01:53.902149407-04:00 Stopped serving new connections. +2024-05-28T17:01:53.902326423-04:00 Graceful shutdown complete. +2024-05-28T17:01:55.853543897-04:00 Starting +2024-05-28T17:01:57.187935277-04:00 Stopped serving new connections. +2024-05-28T17:01:57.188065162-04:00 Graceful shutdown complete. +2024-05-28T17:02:02.342827445-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:55876","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T17:04:36.661802337-04:00 Stopped serving new connections. +2024-05-28T17:04:36.661918402-04:00 Graceful shutdown complete. +2024-05-28T17:04:42.004255484-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:42466","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T17:13:11.636985677-04:00 Stopped serving new connections. +2024-05-28T17:13:11.637150614-04:00 Graceful shutdown complete. +2024-05-28T17:13:19.225477323-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:35882","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T17:13:37.590697118-04:00 Stopped serving new connections. +2024-05-28T17:13:37.59075443-04:00 Graceful shutdown complete. +2024-05-28T17:14:30.964387887-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:35074","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T17:14:40.412222772-04:00 Stopped serving new connections. +2024-05-28T17:14:40.412407892-04:00 Graceful shutdown complete. +2024-05-28T17:19:09.579082129-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:52204","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T17:19:32.491870213-04:00 Stopped serving new connections. +2024-05-28T17:19:32.492012026-04:00 Graceful shutdown complete. +2024-05-28T17:20:12.700323661-04:00 Starting +{"Accept":"*/*","Accept-Encoding":"gzip, deflate","Host":"localhost:7070","User-Agent":"HTTPie/3.2.1","ipAddr":"[::1]:42938","method":"GET","requestUri":"/x/touche?go-get=1"} +2024-05-28T18:08:26.484225491-04:00 Stopped serving new connections. +2024-05-28T18:08:26.484389145-04:00 Graceful shutdown complete. 
diff --git a/testData/rules.json b/testData/rules.json new file mode 100644 index 0000000..2403123 --- /dev/null +++ b/testData/rules.json @@ -0,0 +1,14 @@ +{ + "mappings": [ + { + "vanity_url":"scale.dev/x/migrate", + "protocol":"git", + "real_url":"https://github.com/scale/migrate" + }, + { + "vanity_url":"localhost:7070/x/touche", + "protocol":"git", + "real_url":"https://github.com/mine/touche" + } + ] +} diff --git a/testData/rules2.json b/testData/rules2.json new file mode 100644 index 0000000..aebbb19 --- /dev/null +++ b/testData/rules2.json @@ -0,0 +1,19 @@ +{ + "mappings": [ + { + "vanity_url":"scale.dev/x/migrate", + "protocol":"git", + "real_url":"https://github.com/scale/migrate" + }, + { + "vanity_url":"codeberg.org/woodpecker-plugins/plugin-gitea-release", + "protocol":"git", + "real_url":"https://codeberg.org/woodpecker-plugins/gitea-release" + }, + { + "vanity_url":"localhost:7070/x/touche", + "protocol":"git", + "real_url":"https://github.com/mine/touche" + } + ] +} diff --git a/testData/rules3.json b/testData/rules3.json new file mode 100644 index 0000000..df28cc4 --- /dev/null +++ b/testData/rules3.json @@ -0,0 +1,14 @@ +{ + "mappings": [ + { + "vanity_url":"codeberg.org/woodpecker-plugins/plugin-gitea-release", + "protocol":"svn", + "real_url":"https://codeberg.org/woodpecker-plugins/gitea-release" + }, + { + "vanity_url":"127.0.0.1/x/touche", + "protocol":"git", + "real_url":"https://github.com/mine/touche" + } + ] +} diff --git a/testUtils_test.go b/testUtils_test.go new file mode 100644 index 0000000..925c12b --- /dev/null +++ b/testUtils_test.go @@ -0,0 +1,260 @@ +package main + +import ( + "bytes" + "encoding/json" + "errors" + "io" + "io/fs" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "testing" +) + +func mkDirForTest(t *testing.T, fp string) { + err := os.MkdirAll(fp, os.ModePerm) + if err != nil { + t.Fatal(err) + } +} + +func cpFileForTest(t *testing.T, src string, dst string) { + var srcfd *os.File + var dstfd *os.File + var err error + var srcinfo os.FileInfo + srcfd, err = os.Open(src) + if err != nil { + if errors.Is(err, fs.ErrNotExist) { + return + } else { + t.Fatal(err) + } + + } + // if err != nil { + // t.Fatal(err) + // } + defer srcfd.Close() + dstfd, err = os.Create(dst) + if err != nil { + t.Fatal(err) + } + + defer dstfd.Close() + _, err = io.Copy(dstfd, srcfd) + if err != nil { + t.Fatal(err) + } + srcinfo, err = os.Stat(src) + if err != nil { + t.Fatal(err) + } + err = os.Chmod(dst, srcinfo.Mode()) + if err != nil { + t.Fatal(err) + } +} + +func writeForTest(t *testing.T, fp string, data []byte) { + err := os.WriteFile(fp, data, 0666) + if err != nil { + t.Fatal(err) + } +} + +func IsDirEmpty(t *testing.T, name string) bool { + f, err := os.Open(name) + if err != nil { + t.Fatal(err) + } + defer f.Close() + + // read in ONLY one file + _, err = f.Readdir(1) + + // and if the file is EOF... well, the dir is empty. 
+ return err == io.EOF +} + +func doesFileExist(name string) bool { + _, err := os.ReadFile(name) + // defer fp.Close() + return err == nil +} + +// Derived from here (https://stackoverflow.com/a/55300382) +func walkMatch(t *testing.T, root, pattern string) []string { + var matches []string + err := filepath.Walk(root, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if info.IsDir() { + return nil + } + if matched, err := filepath.Match(pattern, filepath.Base(path)); err != nil { + return err + } else if matched { + matches = append(matches, path) + } + return nil + }) + if err != nil { + t.Fatal(err) + } + return matches +} + +// func doesFileExist(name string) bool { +// _, err := os.Stat(name) +// return !errors.Is(err, fs.ErrNotExist) +// } + +func removeFileForTest(t *testing.T, name string) { + err := os.Remove(name) + if err != nil { + t.Fatal(err) + } +} + +func writeJsonForTest(t *testing.T, data map[string]any, fp string) { + jsonString, _ := json.Marshal(data) + err := os.WriteFile(fp, jsonString, os.ModePerm) + if err != nil { + t.Fatal(err) + } +} + +func isFileEmpty(t *testing.T, name string) bool { + fd, err := os.Open(name) + if err != nil { + t.Fatal(err) + } + defer fd.Close() + finfo, err := fd.Stat() + if err != nil { + t.Fatal(err) + } + return finfo.Size() < 1 +} + +// Derived from here (https://stackoverflow.com/a/73411967) +func areFilesTheSame(t *testing.T, fp_1 string, fp_2 string) bool { + chunkSize := 4 * 1024 + + // shortcuts: check file metadata + finfo_1, err := os.Stat(fp_1) + if err != nil { + t.Fatal(err) + } + + finfo_2, err := os.Stat(fp_2) + if err != nil { + t.Fatal(err) + } + + // are inputs are literally the same file? + if os.SameFile(finfo_1, finfo_2) { + return true + } + + // do inputs at least have the same size? + if finfo_1.Size() != finfo_2.Size() { + return false + } + + // long way: compare contents + fd_1, err := os.Open(fp_1) + if err != nil { + t.Fatal(err) + } + + defer fd_1.Close() + + fd_2, err := os.Open(fp_2) + if err != nil { + t.Fatal(err) + } + defer fd_2.Close() + + bfd_1 := make([]byte, chunkSize) + bfd_2 := make([]byte, chunkSize) + for { + n1, err1 := io.ReadFull(fd_1, bfd_1) + n2, err2 := io.ReadFull(fd_2, bfd_2) + + // https://pkg.go.dev/io#Reader + // > Callers should always process the n > 0 bytes returned + // > before considering the error err. Doing so correctly + // > handles I/O errors that happen after reading some bytes + // > and also both of the allowed EOF behaviors. 
+ + if !bytes.Equal(bfd_1[:n1], bfd_2[:n2]) { + return false + } + + if (err1 == io.EOF && err2 == io.EOF) || (err1 == io.ErrUnexpectedEOF && err2 == io.ErrUnexpectedEOF) { + return true + } + + // some other error, like a dropped network connection or a bad transfer + if err1 != nil { + t.Fatal(err1) + } + if err2 != nil { + t.Fatal(err2) + } + } +} + +func readTestFile(t *testing.T, fp string) []byte { + f, err := os.ReadFile(fp) + if err != nil { + t.Fatal(err) + } + return f +} + +// func writeTestConfFile(t *testing.T, rulesPath string, logPath string) { + +// writeJsonForTest(t, p, ) + +// } + +// Integration tests + +func newTestApp(t *testing.T, cfg *Config, lfg *LogFile) *Application { + return &Application{ + Config: cfg, + Log: lfg, + } +} + +type testServer struct { + *httptest.Server +} + +func newTestServer(t *testing.T, h http.Handler) *testServer { + ts := httptest.NewServer(h) + return &testServer{ts} +} + +func (ts *testServer) get(t *testing.T, urlPath string) (int, http.Header, []byte) { + rs, err := ts.Client().Get(ts.URL + urlPath) + if err != nil { + t.Fatal(err) + } + + defer rs.Body.Close() + + body, err := io.ReadAll(rs.Body) + if err != nil { + t.Fatal(err) + } + + body = bytes.TrimSpace(body) + return rs.StatusCode, rs.Header, body +}
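A minimal usage sketch of the new ByteSize helpers introduced in this patch (the standalone main wrapper and printed values are illustrative only; the numbers mirror the cases in bytesize_test.go):

```go
package main

import "fmt"

func main() {
	// Parse a human-readable size, e.g. the "sizeToRotate" value from config.json.
	var max ByteSize
	if err := max.ParseFromString("5.5KB"); err != nil {
		panic(err)
	}
	fmt.Println(max.NumberRep) // 5632, i.e. 5.5 * 1024

	// Render a raw byte count back into a human-readable string.
	var cur ByteSize
	cur.ParseFromNumber(7025459)
	fmt.Println(cur.HumanRep) // 6.7MB
}
```

The same string form is what newFileLogger(path, maxSize, canCompress) parses into LogFile.maxSize, which Open and WriteLog compare against the current file size to decide when to rotate the log.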