Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Sparkline #110

Merged
merged 12 commits into from
Dec 18, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions cmd/commands.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ var commands []*cli.Command = []*cli.Command{
filterCommand(),
histogramCommand(),
heatmapCommand(),
sparkCommand(),
bargraphCommand(),
analyzeCommand(),
tabulateCommand(),
Expand Down
119 changes: 119 additions & 0 deletions cmd/spark.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,119 @@
package cmd

import (
"fmt"
"rare/cmd/helpers"
"rare/pkg/aggregation"
"rare/pkg/color"
"rare/pkg/csv"
"rare/pkg/expressions"
"rare/pkg/multiterm"
"rare/pkg/multiterm/termrenderers"

"github.com/urfave/cli/v2"
)

func sparkFunction(c *cli.Context) error {
var (
delim = c.String("delim")
numRows = c.Int("num")
numCols = c.Int("cols")
noTruncate = c.Bool("notruncate")
scalerName = c.String(helpers.ScaleFlag.Name)
sortRows = c.String("sort-rows")
sortCols = c.String("sort-cols")
)

counter := aggregation.NewTable(delim)

batcher := helpers.BuildBatcherFromArguments(c)
ext := helpers.BuildExtractorFromArguments(c, batcher)
rowSorter := helpers.BuildSorterOrFail(sortRows)
colSorter := helpers.BuildSorterOrFail(sortCols)

vt := helpers.BuildVTermFromArguments(c)
writer := termrenderers.NewSpark(vt, numRows, numCols)
writer.Scaler = helpers.BuildScalerOrFail(scalerName)

helpers.RunAggregationLoop(ext, counter, func() {

// Trim unused data from the data store (keep memory tidy!)
if !noTruncate {
if keepCols := counter.OrderedColumns(colSorter); len(keepCols) > numCols {
keepCols = keepCols[len(keepCols)-numCols:]
keepLookup := make(map[string]struct{})
for _, item := range keepCols {
keepLookup[item] = struct{}{}
}
counter.Trim(func(col, row string, val int64) bool {
zix99 marked this conversation as resolved.
Show resolved Hide resolved
_, ok := keepLookup[col]
return !ok
})
}
}

// Write spark
writer.WriteTable(counter, rowSorter, colSorter)
writer.WriteFooter(0, helpers.FWriteExtractorSummary(ext, counter.ParseErrors(),
fmt.Sprintf("(R: %v; C: %v)", color.Wrapi(color.Yellow, counter.RowCount()), color.Wrapi(color.BrightBlue, counter.ColumnCount()))))
writer.WriteFooter(1, batcher.StatusString())
})

// Not deferred intentionally
writer.Close()

if err := helpers.TryWriteCSV(c, counter, csv.WriteTable); err != nil {
return err
}

return helpers.DetermineErrorState(batcher, ext, counter)
}

// sparkCommand builds the `spark` CLI command definition, which renders
// rows of sparkline graphs from table-like extracted input.
func sparkCommand() *cli.Command {
	return helpers.AdaptCommandForExtractor(cli.Command{
		Name:    "spark",
		Aliases: []string{"sparkline", "s"},
		Usage:   "Create rows of sparkline graphs",
		// Fixed user-facing typo: "sparkkline" -> "sparkline"
		Description: `Create rows of a sparkline graph, all scaled equally
based on a table like input`,
		Category: cmdCatVisualize,
		Action:   sparkFunction,
		Flags: []cli.Flag{
			&cli.StringFlag{
				Name:  "delim",
				Usage: "Character to tabulate on. Use {$} helper by default",
				Value: expressions.ArraySeparatorString,
			},
			&cli.IntFlag{
				Name:    "num",
				Aliases: []string{"rows", "n"},
				Usage:   "Number of elements (rows) to display",
				Value:   20,
			},
			&cli.IntFlag{
				Name:  "cols",
				Usage: "Number of columns to display",
				// NOTE(review): may be <= 0 on very narrow terminals -- confirm NewSpark tolerates that
				Value: multiterm.TermCols() - 15,
			},
			&cli.BoolFlag{
				Name: "notruncate",
				// Fixed user-facing typo: "doesnt" -> "doesn't"
				Usage: "Disable truncating data that doesn't fit in the sparkline",
				Value: false,
			},
			&cli.StringFlag{
				Name:  "sort-rows",
				Usage: helpers.DefaultSortFlag.Usage,
				Value: "value",
			},
			&cli.StringFlag{
				Name:  "sort-cols",
				Usage: helpers.DefaultSortFlag.Usage,
				Value: "numeric",
			},
			helpers.SnapshotFlag,
			helpers.NoOutFlag,
			helpers.CSVFlag,
			helpers.ScaleFlag,
		},
	})
}
22 changes: 22 additions & 0 deletions cmd/spark_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
package cmd

import (
"testing"

"github.com/stretchr/testify/assert"
)

// TestSparkline runs the spark command end-to-end against testdata,
// both with default output and with CSV output to stdout (-o -)
func TestSparkline(t *testing.T) {
	testCommandSet(t, sparkCommand(),
		`-m "(.+) (\d+)" -e "{$ {1} {2}}" testdata/graph.txt`,
		`-o - -m "(.+) (\d+)" -e "{$ {1} {2}}" testdata/graph.txt`,
	)
}

// TestSparklineWithTrim verifies that limiting output with --cols 2 trims
// older columns, keeping only the right-most columns in the rendered snapshot
func TestSparklineWithTrim(t *testing.T) {
	out, eout, err := testCommandCapture(sparkCommand(), `--snapshot -m "(.+) (.+)" -e {1} -e {2} --cols 2 testdata/heat.txt`)

	assert.NoError(t, err)
	assert.Empty(t, eout)
	assert.Contains(t, out, " First bc Last \ny 1 _█ 2 \nx 1 __ 1 \nMatched: 10 / 10 (R: 2; C: 2)")
}
7 changes: 6 additions & 1 deletion docs/images/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,12 @@
nvm use --lts
npm install -g terminalizer
terminalizer record -k output.yml
# Do any yaml cleanup/delays
terminalizer render -o temp.gif output.yml
gifsicle -O3 --colors 128 -i temp.gif -o output.gif
```

Note on environment: Make sure bashrc when terminalizer starts is set by changing `command: bash --rcfile ~/terminalizer/bashrc` in config yaml
```bash
export PS1="$ "
export PATH="./:$PATH"
Expand Down Expand Up @@ -62,6 +63,10 @@ rare table -m '\[(.+?)\].*" (\d+)' -e '{buckettime {1} year}' -e '{2}' access.lo

rare heatmap -m '\[(.+?)\].*" (\d+)' -e "{timeattr {time {1}} yearweek}" -e "{2}" access.log

### Sparkline

rare spark -m '\[(.+?)\].*" (\d+)' -e "{timeattr {time {1}} yearweek}" -e "{2}" access.log

### Analyze bytes sent, only looking at 200's

rare analyze -m '(\d{3}) (\d+)' -e '{2}' -i '{neq {1} 200}' access.log
Expand Down
Binary file added docs/images/rare-spark.gif
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
35 changes: 35 additions & 0 deletions docs/usage/aggregators.md
Original file line number Diff line number Diff line change
Expand Up @@ -217,6 +217,41 @@ Matched: 1,035,666 / 1,035,666 (R: 8; C: 61)

![Gif of heatmap](../images/heatmap.gif)

## Sparkline

```
rare help sparkline
```

### Summary

Creates one or more sparklines based on table-style input. Provide
multiple inputs using the `{$ a b}` helper. Unlike other output styles,
columns in the spark graph are right-aligned to always show
the most recent data on the right side.

Supports [alternative scales](#alternative-scales)

### Example

```bash
$ rare spark -m '\[(.+?)\].*" (\d+)' \
-e "{timeattr {time {1}} yearweek}" -e "{2}" access.log

First 2019-34................................................2020-9 Last
404 15,396 ..._._-.^_._.._..________.____.__.___.____________.______.___ 5,946
200 7,146 _____________________________________________________________ 4,938
400 162 _____________________________________________________________ 522
405 6 _____________________________________________________________ 6
408 0 _____________________________________________________________ 6
304 0 _____________________________________________________________ 0
301 6 _____________________________________________________________ 0
206 0 _____________________________________________________________ 0
Matched: 1,034,166 / 1,034,166 (R: 8; C: 61)
```

![Gif of sparkline](../images/rare-spark.gif)

## Reduce

```
Expand Down
61 changes: 42 additions & 19 deletions pkg/aggregation/table.go
Original file line number Diff line number Diff line change
Expand Up @@ -120,32 +120,26 @@ func (s *TableAggregator) OrderedRows(sorter sorting.NameValueSorter) []*TableRo
return rows
}

func (s *TableAggregator) ComputeMin() (ret int64) {
ret = math.MaxInt64
func (s *TableAggregator) ComputeMinMax() (min, max int64) {
min, max = math.MaxInt64, math.MinInt64

for _, r := range s.rows {
for colKey := range s.cols {
if val := r.cols[colKey]; val < ret {
ret = val
val := r.cols[colKey]
if val < min {
min = val
}
if val > max {
max = val
}
}
}
if ret == math.MaxInt64 {
return 0
}
return
}

func (s *TableAggregator) ComputeMax() (ret int64) {
ret = math.MinInt64
for _, r := range s.rows {
for colKey := range s.cols {
if val := r.cols[colKey]; val > ret {
ret = val
}
}
if min == math.MaxInt64 {
min = 0
}
if ret == math.MinInt64 {
return 0
if max == math.MinInt64 {
max = 0
}
return
}
Expand All @@ -155,13 +149,42 @@ func (s *TableAggregator) ColTotal(k string) int64 {
return s.cols[k]
}

// Sum returns the grand total of the table (the sum of all column totals)
func (s *TableAggregator) Sum() int64 {
	var total int64
	for _, colTotal := range s.cols {
		total += colTotal
	}
	return total
}

// Trim removes every cell for which predicate returns true, and returns the
// number of fields trimmed.
//
// The predicate is invoked once per (column, row) pair; a row without a value
// for the column is passed the zero value from the map access. Rows and
// columns left with no data are removed from the table entirely.
func (s *TableAggregator) Trim(predicate func(col, row string, val int64) bool) int {
	trimmed := 0

	for colName := range s.cols {

		// Track whether every row dropped this column; if so, the column
		// total itself is removed after the row pass
		removeAllInCol := true
		for rowName, row := range s.rows {
			if predicate(colName, rowName, row.cols[colName]) {
				// Deleting during range is well-defined in Go
				delete(row.cols, colName)
				trimmed++
			} else {
				removeAllInCol = false
			}

			// Drop rows that no longer hold any data
			if len(row.cols) == 0 {
				delete(s.rows, rowName)
			}
		}

		if removeAllInCol {
			delete(s.cols, colName)
		}
	}

	return trimmed
}

// Name returns the row's name (its key in the table)
func (s *TableRow) Name() string {
	return s.name
}
Expand Down
55 changes: 51 additions & 4 deletions pkg/aggregation/table_test.go
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
package aggregation

import (
"fmt"
"rare/pkg/aggregation/sorting"
"strconv"
"testing"

"github.com/stretchr/testify/assert"
Expand Down Expand Up @@ -44,8 +46,9 @@ func TestSimpleTable(t *testing.T) {
assert.Equal(t, int64(5), table.Sum())

// Minmax
assert.Equal(t, int64(0), table.ComputeMin())
assert.Equal(t, int64(3), table.ComputeMax())
min, max := table.ComputeMinMax()
assert.Equal(t, int64(0), min)
assert.Equal(t, int64(3), max)
}

func TestTableMultiIncrement(t *testing.T) {
Expand All @@ -72,8 +75,16 @@ func TestTableMultiIncrement(t *testing.T) {
assert.Equal(t, int64(6), table.Sum())

// Minmax
assert.Equal(t, int64(0), table.ComputeMin())
assert.Equal(t, int64(5), table.ComputeMax())
min, max := table.ComputeMinMax()
assert.Equal(t, int64(0), min)
assert.Equal(t, int64(5), max)
}

// A table with no samples must normalize both min and max to zero
func TestEmptyTableMinMax(t *testing.T) {
	tbl := NewTable(" ")

	minVal, maxVal := tbl.ComputeMinMax()

	assert.Equal(t, int64(0), minVal)
	assert.Equal(t, int64(0), maxVal)
}

func TestSingleRowTable(t *testing.T) {
Expand All @@ -91,3 +102,39 @@ func TestSingleRowTable(t *testing.T) {
assert.Equal(t, int64(2), rows[0].Value("a"))
assert.Equal(t, int64(1), rows[0].Value("b"))
}

// Verifies Trim removes the requested cells and cleans up emptied rows/columns
func TestTrimData(t *testing.T) {
	tbl := NewTable(" ")
	for i := 0; i < 10; i++ {
		tbl.Sample(fmt.Sprintf("%d a", i))
		tbl.Sample(fmt.Sprintf("%d b", i))
	}

	assert.Len(t, tbl.Columns(), 10)

	// Drop all of row "b", plus any column whose numeric key is below 5
	removed := tbl.Trim(func(col, row string, val int64) bool {
		if row == "b" {
			return true
		}
		colNum, _ := strconv.Atoi(col)
		return colNum < 5
	})

	assert.ElementsMatch(t, []string{"5", "6", "7", "8", "9"}, tbl.Columns())
	assert.Equal(t, 15, removed)
	assert.Len(t, tbl.Rows(), 1)
	assert.Len(t, tbl.Rows()[0].cols, 5)
}

// BenchmarkMinMax measures a full min/max scan of a small 10x2 table.
// Recorded result:
// BenchmarkMinMax-4 1020728 1234 ns/op 0 B/op 0 allocs/op
func BenchmarkMinMax(b *testing.B) {
	table := NewTable(" ")
	for i := 0; i < 10; i++ {
		table.Sample(fmt.Sprintf("%d a", i))
		table.Sample(fmt.Sprintf("%d b", i))
	}

	for i := 0; i < b.N; i++ {
		table.ComputeMinMax()
	}
}
Loading
Loading