services: add new service for fetching blocks from NeoFS
Close #3496

Signed-off-by: Ekaterina Pavlova <[email protected]>
commit 29559e3 (1 parent: 7766168)

Showing 16 changed files with 1,043 additions and 78 deletions.
@@ -1,102 +1,112 @@

This hunk rewrites the whole file: the JSON storage-change dump helpers (dump, blockDump, newDump, add, tryPersist, readFile and getPath, together with the encoding/json, storage and dboper imports) are removed, and a binary block dumper (dumpBin with its saveBlockToFile helper) is added in their place.

Removed (old lines 1-102):

package server

import (
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"

	"github.com/nspcc-dev/neo-go/pkg/core/storage"
	"github.com/nspcc-dev/neo-go/pkg/core/storage/dboper"
	"github.com/urfave/cli/v2"
)

type dump []blockDump

type blockDump struct {
	Block   uint32             `json:"block"`
	Size    int                `json:"size"`
	Storage []dboper.Operation `json:"storage"`
}

func newDump() *dump {
	return new(dump)
}

func (d *dump) add(index uint32, batch *storage.MemBatch) {
	ops := storage.BatchToOperations(batch)
	*d = append(*d, blockDump{
		Block:   index,
		Size:    len(ops),
		Storage: ops,
	})
}

func (d *dump) tryPersist(prefix string, index uint32) error {
	if len(*d) == 0 {
		return nil
	}
	path, err := getPath(prefix, index)
	if err != nil {
		return err
	}
	old, err := readFile(path)
	if err == nil {
		*old = append(*old, *d...)
	} else {
		old = d
	}
	f, err := os.Create(path)
	if err != nil {
		return err
	}
	defer f.Close()

	enc := json.NewEncoder(f)
	enc.SetIndent("", " ")
	if err := enc.Encode(*old); err != nil {
		return err
	}

	*d = (*d)[:0]

	return nil
}

func readFile(path string) (*dump, error) {
	data, err := os.ReadFile(path)
	if err != nil {
		return nil, err
	}
	d := newDump()
	if err := json.Unmarshal(data, d); err != nil {
		return nil, err
	}
	return d, err
}

// getPath returns filename for storing blocks up to index.
// Directory structure is the following:
// https://github.com/NeoResearch/neo-storage-audit#folder-organization-where-to-find-the-desired-block
// Dir `BlockStorage_$DIRNO` contains blocks up to $DIRNO (from $DIRNO-100k)
// Inside it there are files grouped by 1k blocks.
// File dump-block-$FILENO.json contains blocks from $FILENO-999, $FILENO
// Example: file `BlockStorage_100000/dump-block-6000.json` contains blocks from 5001 to 6000.
func getPath(prefix string, index uint32) (string, error) {
	dirN := ((index + 99999) / 100000) * 100000
	dir := fmt.Sprintf("BlockStorage_%d", dirN)

	path := filepath.Join(prefix, dir)
	info, err := os.Stat(path)
	if os.IsNotExist(err) {
		err := os.MkdirAll(path, os.ModePerm)
		if err != nil {
			return "", err
		}
	} else if !info.IsDir() {
		return "", fmt.Errorf("file `%s` is not a directory", path)
	}

	fileN := ((index + 999) / 1000) * 1000
	file := fmt.Sprintf("dump-block-%d.json", fileN)
	return filepath.Join(path, file), nil
}

Added (new lines 1-112):

package server

import (
	"fmt"
	"os"
	"path/filepath"

	"github.com/nspcc-dev/neo-go/cli/cmdargs"
	"github.com/nspcc-dev/neo-go/cli/options"
	"github.com/nspcc-dev/neo-go/pkg/core/block"
	"github.com/nspcc-dev/neo-go/pkg/io"
	"github.com/urfave/cli/v2"
)

func dumpBin(ctx *cli.Context) error {
	var (
		err error
	)
	if err := cmdargs.EnsureNone(ctx); err != nil {
		return err
	}
	cfg, err := options.GetConfigFromContext(ctx)
	if err != nil {
		return cli.Exit(err, 1)
	}
	log, _, logCloser, err := options.HandleLoggingParams(ctx.Bool("debug"), cfg.ApplicationConfiguration)
	if err != nil {
		return cli.Exit(err, 1)
	}
	if logCloser != nil {
		defer func() { _ = logCloser() }()
	}
	count := uint32(ctx.Uint("count"))
	start := uint32(ctx.Uint("start"))

	chain, prometheus, pprof, err := initBCWithMetrics(cfg, log)
	if err != nil {
		return err
	}
	defer func() {
		pprof.ShutDown()
		prometheus.ShutDown()
		chain.Close()
	}()

	chainCount := chain.BlockHeight() + 1
	if start+count > chainCount {
		return cli.Exit(fmt.Errorf("chain is not that high (%d) to dump %d blocks starting from %d", chainCount-1, count, start), 1)
	}
	if count == 0 {
		count = chainCount - start
	}

	testDir := "./test/"
	if _, err = os.Stat(testDir); os.IsNotExist(err) {
		if err = os.MkdirAll(testDir, 0755); err != nil {
			return cli.Exit(fmt.Sprintf("failed to create directory %s: %v", testDir, err), 1)
		}
	}

	for i := start; i < start+count; i++ {
		bh := chain.GetHeaderHash(i)
		blk, err2 := chain.GetBlock(bh)
		if err2 != nil {
			return cli.Exit(fmt.Sprintf("failed to get block %d: %v", i, err2), 1)
		}
		filePath := filepath.Join(testDir, fmt.Sprintf("block-%d.bin", i))
		if err = saveBlockToFile(blk, filePath); err != nil {
			return cli.Exit(fmt.Sprintf("failed to save block %d to file: %v", i, err), 1)
		}
	}

	return nil
}

func saveBlockToFile(blk *block.Block, filePath string) error {
	file, err := os.Create(filePath)
	if err != nil {
		return err
	}
	defer file.Close()

	writer := io.NewBinWriterFromIO(file)

	var buf = io.NewBufBinWriter()
	blk.EncodeBinary(buf.BinWriter)
	bytes := buf.Bytes()

	writer.WriteU32LE(uint32(len(bytes)))
	blk.EncodeBinary(writer)
	if writer.Err != nil {
		return writer.Err
	}
	//file.Close()

	//file_1, err := os.Open(filePath)
	//
	//defer file_1.Close()
	//
	//br := io.NewBinReaderFromIO(file_1)
	//
	//blk_new := new(block.Block)
	//blk_new.DecodeBinary(br)
	//
	//var size = br.ReadU32LE()
	//
	//buf := make([]byte, size)
	//
	//br.ReadBytes(buf)

	return nil
}
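The commented-out code at the end of saveBlockToFile hints at the intended readback path but decodes the block before consuming the length prefix. Below is a minimal readback sketch, not part of the commit, assuming the on-disk format written above (a little-endian uint32 payload length followed by the serialized block) and neo-go's io.BinReader API; readBlockFromFile is a hypothetical helper name.

package main

import (
	"fmt"
	"os"

	"github.com/nspcc-dev/neo-go/pkg/core/block"
	"github.com/nspcc-dev/neo-go/pkg/io"
)

// readBlockFromFile is a hypothetical counterpart to saveBlockToFile: it
// consumes the uint32 length prefix first and then decodes the block itself.
func readBlockFromFile(filePath string) (*block.Block, error) {
	f, err := os.Open(filePath)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	br := io.NewBinReaderFromIO(f)
	size := br.ReadU32LE() // length of the serialized block that follows
	if br.Err != nil {
		return nil, br.Err
	}

	// new(block.Block) mirrors the commented-out readback in the commit;
	// chains with StateRootInHeader enabled may need extra setup before decoding.
	blk := new(block.Block)
	blk.DecodeBinary(br)
	if br.Err != nil {
		return nil, fmt.Errorf("decoding block of %d bytes: %w", size, br.Err)
	}
	return blk, nil
}

func main() {
	// Reads the first block dumped by dumpBin into ./test/.
	blk, err := readBlockFromFile("./test/block-0.bin")
	if err != nil {
		panic(err)
	}
	fmt.Println(blk.Index, blk.Hash().StringLE())
}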
@@ -0,0 +1,14 @@ (new file adding the blockfetcher configuration type)

package config

import "time"

// NeoFS represents the configuration for the blockfetcher service.
type (
	NeoFS struct {
		Nodes       []string      `yaml:"Nodes"`
		Timeout     time.Duration `yaml:"Timeout"`
		ContainerID string        `yaml:"ContainerID"`
		DumpDir     string        `yaml:"DumpDir"`
		Restore     bool          `yaml:"Restore"`
	}
)
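The yaml tags above imply keys Nodes, Timeout, ContainerID, DumpDir and Restore in the node's YAML configuration. For illustration only, a sketch of filling such a value in Go, assuming the type ends up in neo-go's pkg/config package (the file path is not shown in this view); the node addresses and container ID are made-up placeholders.

package main

import (
	"fmt"
	"time"

	"github.com/nspcc-dev/neo-go/pkg/config"
)

func main() {
	// All values are placeholders, not taken from the commit.
	cfg := config.NeoFS{
		Nodes:       []string{"st1.example.org:8080", "st2.example.org:8080"},
		Timeout:     10 * time.Minute,
		ContainerID: "<container-id>",
		DumpDir:     "./dump",
		Restore:     true,
	}
	fmt.Printf("%+v\n", cfg)
}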