From f7ff8eb6b7c755080c691e5d5932e50b63b0409a Mon Sep 17 00:00:00 2001 From: iratusmachina Date: Sun, 30 Jun 2024 13:18:26 -0400 Subject: [PATCH] WIP --- bytesize.go | 77 +++++++++++++++++++++++++ bytesize_test.go | 70 +++++++++++++++++++++++ logger.go | 56 +++++++++---------- logger_test.go | 139 ++++++++++++++++++++++++++++++++++++---------- testUtils_test.go | 8 +++ 5 files changed, 294 insertions(+), 56 deletions(-) create mode 100644 bytesize.go create mode 100644 bytesize_test.go diff --git a/bytesize.go b/bytesize.go new file mode 100644 index 0000000..ee5bd8f --- /dev/null +++ b/bytesize.go @@ -0,0 +1,77 @@ +package main + +import ( + "fmt" + "math" + "strconv" + "strings" +) + +var units []string = []string{"KB", "MB", "GB", "B"} + +// ByteSize represents a number of bytes +type ByteSize struct { + HumanRep string + NumberRep int64 +} + +// Byte size size suffixes. +const ( + B int64 = 1 + KB int64 = 1 << (10 * iota) + MB + GB +) + +// Used to convert user input to ByteSize +var unitMap = map[string]int64{ + "B": B, + "KB": KB, + "MB": MB, + "GB": GB, +} + +func (b *ByteSize) parseFromString(s string) error { + s = strings.TrimSpace(s) + b.HumanRep = s + var fragments []string + unitFound := "" + + for _, unit := range units { + fragments = strings.Split(s, unit) + if len(fragments) == 2 { + unitFound = unit + break + } + } + + if len(unitFound) == 0 { + return fmt.Errorf("unrecognized size suffix") + } + + value, err := strconv.ParseFloat(fragments[0], 64) + if err != nil { + return err + } + + unit, ok := unitMap[strings.ToUpper(unitFound)] + if !ok { + return fmt.Errorf("unrecognized size suffix %s", fragments[1]) + } + + b.NumberRep = int64(value * float64(unit)) + return nil +} + +func (b *ByteSize) parseFromNumber(n int64) { + b.NumberRep = n + bf := float64(n) + for _, unit := range []string{"", "K", "M", "G"} { + if math.Abs(bf) < 1024.0 { + b.HumanRep = fmt.Sprintf("%3.1f%sB", bf, unit) + return + } + bf /= 1024.0 + } + b.HumanRep = fmt.Sprintf("%.1fTB", bf) +} diff --git a/bytesize_test.go b/bytesize_test.go new file mode 100644 index 0000000..20ec501 --- /dev/null +++ b/bytesize_test.go @@ -0,0 +1,70 @@ +package main + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestParseFromString(t *testing.T) { + tests := map[string]struct { + input1 string + input2 int64 + }{ + "KB": {input1: "5.5KB", input2: 5632}, + "MB": {input1: "6.7MB", input2: 7025459}, + "GB": {input1: "7.5GB", input2: 8053063680}, + } + + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + by := ByteSize{} + err := by.parseFromString(tc.input1) + assert.Equal(t, err, nil) + assert.EqualValues(t, by.NumberRep, tc.input2) + }) + } +} + +func TestParseFromNumber(t *testing.T) { + tests := map[string]struct { + input1 int64 + input2 string + }{ + "KB": {input1: 528870, input2: "516.5KB"}, + "MB": {input1: 7025459, input2: "6.7MB"}, + "GB": {input1: 8053063680, input2: "7.5GB"}, + } + + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + by := ByteSize{} + by.parseFromNumber(tc.input1) + assert.EqualValues(t, by.HumanRep, tc.input2) + }) + } +} + +// func TestByteSizeMath(t *testing.T) { +// tests := map[string]struct { +// operation string +// input1 ByteSize +// input2 ByteSize +// want bool +// }{ +// "Greater than": {operation: ">", input1: ByteSize(5675), input2: ByteSize(5775), want: false}, +// "Less Than": {operation: "<", input1: ByteSize(5675), input2: ByteSize(5775), want: true}, +// } +// for name, tc := range tests { +// t.Run(name, func(t 
*testing.T) { +// switch tc.operation { +// case ">": +// ok := tc.input1 > tc.input2 +// assert.Equal(t, ok, tc.want) +// case "<": +// ok := tc.input1 < tc.input2 +// assert.Equal(t, ok, tc.want) +// } +// }) +// } +// } diff --git a/logger.go b/logger.go index de2f8a6..5d0d0f4 100644 --- a/logger.go +++ b/logger.go @@ -7,7 +7,6 @@ import ( "fmt" "io" "log" - "math" "net/http" "os" "path/filepath" @@ -51,7 +50,8 @@ type LogFile struct { path string fileLock sync.Mutex canCompress bool - maxSize string + maxSize ByteSize + curSize ByteSize } type LogFileRec struct { @@ -94,17 +94,6 @@ func (lf *LogFile) truncate() error { return nil } -func prettyByteSize(b int64) string { - bf := float64(b) - for _, unit := range []string{"", "K", "M", "G", "T", "P"} { - if math.Abs(bf) < 1024.0 { - return fmt.Sprintf("%3.1f%sB", bf, unit) - } - bf /= 1024.0 - } - return fmt.Sprintf("%.1fEB", bf) -} - func compressOldFile(fname string) error { reader, err := os.Open(fname) if err != nil { @@ -188,19 +177,26 @@ func (lf *LogFile) open() error { if err != nil { return err } + lf.handle = f finfo, err := f.Stat() if err != nil { return err } - curSize := prettyByteSize(finfo.Size()) - if len(strings.TrimSpace(lf.maxSize)) != 0 && curSize > lf.maxSize { + curSize := finfo.Size() + if lf.maxSize.NumberRep != 0 && curSize >= lf.maxSize.NumberRep { err = lf.rotate() if err != nil { return err } } - lf.handle = f lf.logger = log.New(f, "", 0) + finfo, err = lf.handle.Stat() + if err != nil { + return err + } + by := ByteSize{} + by.parseFromNumber(finfo.Size()) + lf.curSize = by return nil } @@ -211,22 +207,18 @@ func newFileLogger(path string, maxSize string, canCompress bool) (*LogFile, err if err != nil { return nil, err } + by := ByteSize{} + err = by.parseFromString(maxSize) + if err != nil { + return nil, err + } lf := &LogFile{ path: path, canCompress: canCompress, - maxSize: maxSize, + maxSize: by, } err = lf.open() return lf, err - // f, err := os.OpenFile(requestedFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0666) - // if err != nil { - // return nil, err - // } - // return &LogFile{ - // handle: f, - // logger: log.New(f, "", 0), - // path: path, - // }, nil } func (f *LogFile) Close() error { @@ -328,12 +320,20 @@ func (lf *LogFile) WriteLog(r *http.Request) error { if err != nil { return err } - curSize := prettyByteSize(finfo.Size()) - if len(strings.TrimSpace(lf.maxSize)) != 0 && curSize > lf.maxSize { + curSize := finfo.Size() + + if lf.maxSize.NumberRep != 0 && curSize > lf.maxSize.NumberRep { err = lf.rotate() if err != nil { return err } } + finfo, err = lf.handle.Stat() + if err != nil { + return err + } + by := ByteSize{} + by.parseFromNumber(finfo.Size()) + lf.curSize = by return nil } diff --git a/logger_test.go b/logger_test.go index 774fe91..bf1b3c9 100644 --- a/logger_test.go +++ b/logger_test.go @@ -1,7 +1,10 @@ package main import ( + "encoding/json" "fmt" + "net/http" + "net/http/httptest" "os" "testing" @@ -62,26 +65,6 @@ func TestMakeCopyTo(t *testing.T) { assert.Equal(t, ok, true) } -func TestPrettyByteSize(t *testing.T) { - tests := map[string]struct { - input int - want string - }{ - "KB": {input: 5675, want: "5.5KB"}, - "MB": {input: 7060600, want: "6.7MB"}, - "GB": {input: 8000000000, want: "7.5GB"}, - "TB": {input: 1300007000000, want: "1.2TB"}, - "EB": {input: 1300007000000000000, want: "1.1EB"}, - } - - for name, tc := range tests { - t.Run(name, func(t *testing.T) { - got := prettyByteSize(int64(tc.input)) - assert.Equal(t, got, tc.want) - }) - } -} - func TestCompressFile(t 
*testing.T) { tmpDir := t.TempDir() @@ -142,33 +125,133 @@ func TestNewLogger(t *testing.T) { tmpDir := t.TempDir() mkDirForTest(t, fmt.Sprintf("%s/tmp", tmpDir)) - rulesJsonFp := "testData/app_over_size.log" + rulesJsonFp := "testData/app_under_size.log" tmpLf := fmt.Sprintf("%s/tmp/app.log", tmpDir) - }) + cpFileForTest(t, rulesJsonFp, tmpLf) + + lf, err := newFileLogger(tmpLf, "6KB", true) + expected := walkMatch(t, tmpDir, "*.gz") + assert.Equal(t, err, nil) + assert.Empty(t, expected) + assert.Equal(t, lf.path, tmpLf) + assert.NotEmpty(t, lf.handle) + assert.NotEmpty(t, lf.logger) + + assert.FileExists(t, tmpLf) + isEmpty := isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, false) + }) t.Run("load logging file - rotate", func(t *testing.T) { - tmpDir := t.TempDir() mkDirForTest(t, fmt.Sprintf("%s/tmp", tmpDir)) rulesJsonFp := "testData/app_over_size.log" tmpLf := fmt.Sprintf("%s/tmp/app.log", tmpDir) + cpFileForTest(t, rulesJsonFp, tmpLf) + + lf, err := newFileLogger(tmpLf, "4KB", true) + expected := walkMatch(t, tmpDir, "*.gz") + assert.Equal(t, err, nil) + assert.NotEmpty(t, expected) + assert.Equal(t, lf.path, tmpLf) + assert.NotEmpty(t, lf.handle) + assert.NotEmpty(t, lf.logger) + + assert.FileExists(t, tmpLf) + isEmpty := isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, true) }) - - t.Run("create new logging file", func(t *testing.T) { + tmpDir := t.TempDir() + + tmpLf := fmt.Sprintf("%s/tmp/app.log", tmpDir) + + lf, err := newFileLogger(tmpLf, "4KB", true) + + expected := walkMatch(t, tmpDir, "*.gz") + assert.Equal(t, err, nil) + assert.Empty(t, expected) + assert.Equal(t, lf.path, tmpLf) + assert.NotEmpty(t, lf.handle) + assert.NotEmpty(t, lf.logger) + + assert.FileExists(t, tmpLf) + isEmpty := isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, true) + }) +} + +func TestWriteLog(t *testing.T) { + t.Run("write to logging file - do not rotate", func(t *testing.T) { + req := httptest.NewRequest(http.MethodGet, "/{package}?go-get=1", nil) + + tmpDir := t.TempDir() + + tmpLf := fmt.Sprintf("%s/tmp/app.log", tmpDir) + + lf, err := newFileLogger(tmpLf, "4KB", true) + assert.Equal(t, err, nil) + assert.FileExists(t, tmpLf) + isEmpty := isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, true) + + err = lf.WriteLog(req) + assert.Equal(t, err, nil) + + expected := walkMatch(t, tmpDir, "*.gz") + assert.Empty(t, expected) + + isEmpty = isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, false) + + b := readTestFile(t, tmpLf) + m := make(map[string]string) + _ = json.Unmarshal(b, &m) + keys := make([]string, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + assert.Contains(t, keys, "requestUri") + assert.Contains(t, keys, "Host") + assert.Contains(t, keys, "method") + assert.Contains(t, keys, "ipAddr") + assert.Contains(t, keys, "requestDate") + }) + + t.Run("write to logging file - rotate", func(t *testing.T) { + req := httptest.NewRequest(http.MethodGet, "/{package}?go-get=1", nil) tmpDir := t.TempDir() mkDirForTest(t, fmt.Sprintf("%s/tmp", tmpDir)) - rulesJsonFp := "testData/app_over_size.log" + rulesJsonFp := "testData/app_under_size.log" tmpLf := fmt.Sprintf("%s/tmp/app.log", tmpDir) + cpFileForTest(t, rulesJsonFp, tmpLf) + + lf, err := newFileLogger(tmpLf, "5KB", true) + assert.Equal(t, err, nil) + assert.FileExists(t, tmpLf) + isEmpty := isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, false) + + err = lf.WriteLog(req) + assert.Equal(t, err, nil) + err = lf.WriteLog(req) + assert.Equal(t, err, nil) + // t.Logf("%s\n", lf.curSize) + + expected := walkMatch(t, tmpDir, "*.gz") + 
assert.NotEmpty(t, expected) + + assert.FileExists(t, tmpLf) + isEmpty = isFileEmpty(t, tmpLf) + assert.Equal(t, isEmpty, true) + }) } -} -} diff --git a/testUtils_test.go b/testUtils_test.go index 9051323..a2cb2d6 100644 --- a/testUtils_test.go +++ b/testUtils_test.go @@ -207,3 +207,11 @@ func areFilesTheSame(t *testing.T, fp_1 string, fp_2 string) bool { } } } + +func readTestFile(t *testing.T, fp string) []byte { + f, err := os.ReadFile(fp) + if err != nil { + t.Fatal(err) + } + return f +}
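
Note (illustrative only, not part of the patch): a minimal sketch of how the new ByteSize type introduced in bytesize.go is meant to be used by newFileLogger and the rotation checks in logger.go, assuming it lives in the same main package as the files above. The function name exampleByteSizeUsage is hypothetical and exists only for demonstration.

package main

import "fmt"

func exampleByteSizeUsage() {
	// Parse a human-readable limit the way newFileLogger now does with its
	// maxSize argument; "6KB" becomes NumberRep == 6144 and HumanRep == "6KB".
	var max ByteSize
	if err := max.parseFromString("6KB"); err != nil {
		fmt.Println("bad size:", err)
		return
	}

	// Convert a raw byte count the way open() and WriteLog populate curSize;
	// 7025459 bytes renders as "6.7MB", matching bytesize_test.go.
	var cur ByteSize
	cur.parseFromNumber(7025459)

	// Same comparison WriteLog uses to decide whether to rotate the log file.
	if cur.NumberRep > max.NumberRep {
		fmt.Printf("%s exceeds limit %s, would rotate\n", cur.HumanRep, max.HumanRep)
	}
}

Because both the parsed limit and the current file size are kept as int64 in NumberRep, the rotation check is a plain numeric comparison rather than the lexical string comparison that the removed prettyByteSize-based code relied on.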