You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

139 lines
3.3 KiB
Go

package chunk
import (
"archive/tar"
"crypto/sha256"
"encoding/json"
"fmt"
"io"
"os"
"time"
"git.cheetah.cat/worksucc/gma-puzzles/common"
"github.com/klauspost/compress/zstd"
)
// PoolRecoveryData is the JSON sidecar describing one chunk/pool archive.
// It carries everything needed to validate the packed chunk file
// (hash + size, consumed by ChunkReader.CheckIntegrity) and to re-ingest
// its contents (item list plus per-file recovery records).
type PoolRecoveryData struct {
	PoolID       string           `json:"_key"`         // pool identifier; "_key" suggests an ArangoDB document key — TODO confirm
	Size         uint64           `json:"size"`         // expected byte size of the packed chunk file
	Created      time.Time        `json:"date"`         // creation timestamp of the pool
	Hash         string           `json:"hash"`         // expected hex SHA-256 of the packed chunk file
	ItemCount    int              `json:"itemCount"`    // number of entries in Items
	Items        []string         `json:"items"`        // item identifiers contained in the pool
	RecoveryData []common.DB_File `json:"recoveryData"` // per-file database records for re-ingestion
}
// ChunkReader reads a packed chunk file (a zstd-compressed tar) and can
// verify it against expectations loaded from a recovery sidecar file.
// ExpectedHash and ExpectedSize are nil until LoadRecoveryFile succeeds.
type ChunkReader struct {
	FileHandle   *os.File // open handle to the packed chunk file
	ExpectedHash *string  // expected hex SHA-256 of the whole file; nil if not loaded
	ExpectedSize *uint64  // expected file size in bytes; nil if not loaded
}
// NewChunkReader opens the chunk file at fileName and returns a
// ChunkReader backed by it.
func NewChunkReader(fileName string) (_ ChunkReader, err error) {
	var reader ChunkReader
	return reader.NewReader(fileName)
}
// NewReader opens the chunk file at fileName, stores the handle on a copy
// of the receiver, and returns that copy. On failure the (nil) handle is
// still assigned and the open error is returned alongside.
func (r ChunkReader) NewReader(fileName string) (_ ChunkReader, err error) {
	handle, openErr := os.Open(fileName)
	r.FileHandle = handle
	if openErr != nil {
		return r, openErr
	}
	return r, nil
}
// NewReaderFrom attaches an already-open file handle to a copy of the
// receiver and returns it. This variant cannot fail; the error result
// exists only for signature symmetry with NewReader.
func (r ChunkReader) NewReaderFrom(fileHandle *os.File) (_ ChunkReader, err error) {
	r.FileHandle = fileHandle
	return r, nil
}
// LoadRecoveryFile reads the JSON recovery sidecar at fileName and stores
// its hash and size on the receiver as the expectations for a subsequent
// CheckIntegrity call. It returns a wrapped error if the file cannot be
// read or parsed.
func (r *ChunkReader) LoadRecoveryFile(fileName string) (err error) {
	// os.ReadFile replaces the original Open + io.ReadAll + Close sequence
	// and cannot leak the handle.
	readBytes, err := os.ReadFile(fileName)
	if err != nil {
		return fmt.Errorf("reading recovery file %q: %w", fileName, err)
	}
	var poolRecoveryData PoolRecoveryData
	if err := json.Unmarshal(readBytes, &poolRecoveryData); err != nil {
		return fmt.Errorf("parsing recovery file %q: %w", fileName, err)
	}
	r.ExpectedHash = &poolRecoveryData.Hash
	r.ExpectedSize = &poolRecoveryData.Size
	return nil
}
// CheckIntegrity re-reads the whole chunk file and validates it against the
// expectations loaded by LoadRecoveryFile:
//  1. the SHA-256 of the compressed file must equal ExpectedHash,
//  2. the on-disk file size must equal ExpectedSize,
//  3. inside the zstd-compressed tar, every entry's content length must
//     match its header and its content SHA-256 must match its entry name
//     (entries are named by their checksum).
//
// It returns an error describing the first mismatch, or nil when all
// checks pass.
func (r *ChunkReader) CheckIntegrity() (err error) {
	// Guard against dereferencing nil expectations when LoadRecoveryFile
	// was never called (the original code would panic here).
	if r.ExpectedHash == nil || r.ExpectedSize == nil {
		return fmt.Errorf("integrity check requires expected hash/size: call LoadRecoveryFile first")
	}
	// Pass 1: hash the complete compressed file from the beginning.
	// The original ignored this Seek error; a failed seek would silently
	// hash from the wrong offset.
	if _, err := r.FileHandle.Seek(0, io.SeekStart); err != nil {
		return err
	}
	shaHasher := sha256.New()
	hashedBytes, err := io.Copy(shaHasher, r.FileHandle)
	if err != nil {
		return err
	}
	readHash := fmt.Sprintf("%x", shaHasher.Sum(nil))
	if readHash != *r.ExpectedHash {
		return fmt.Errorf("WORM Hash %s != Hash %s", readHash, *r.ExpectedHash)
	}
	packFileStats, err := r.FileHandle.Stat()
	if err != nil {
		return err
	}
	readSize := packFileStats.Size()
	if readSize != int64(*r.ExpectedSize) {
		return fmt.Errorf("WORM Copy FileSize %d != FileSize %d", readSize, *r.ExpectedSize)
	}
	// Pass 2: validate the written tar-chunk entry by entry.
	if _, err := r.FileHandle.Seek(0, io.SeekStart); err != nil {
		return err
	}
	decompressor, err := zstd.NewReader(r.FileHandle, zstd.WithDecoderConcurrency(8))
	if err != nil {
		return err
	}
	defer decompressor.Close()
	tarFileCheckReader := tar.NewReader(decompressor)
	for {
		header, err := tarFileCheckReader.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return err
		}
		hasher := sha256.New()
		// Named entryBytes (not hashedBytes) so it no longer shadows the
		// whole-file byte count used in the final check below.
		entryBytes, err := io.Copy(hasher, tarFileCheckReader)
		if err != nil {
			return err
		}
		readBackChecksum := fmt.Sprintf("%x", hasher.Sum(nil))
		if entryBytes != header.Size {
			return fmt.Errorf("validation on output archive, incorrect size file %s has %d should be %d", header.Name, entryBytes, header.Size)
		}
		if header.Name != readBackChecksum {
			return fmt.Errorf("validation on output archive, incorrect checksum file %s has %s", header.Name, readBackChecksum)
		}
	}
	// hashedBytes is the byte count from pass 1; with the shadowing removed
	// this check is visibly redundant with the Stat()-based size check, but
	// it is kept to preserve the original behavior.
	if hashedBytes != int64(*r.ExpectedSize) {
		return fmt.Errorf("WORM Copy HashedBytes %d != FileSize %d", hashedBytes, *r.ExpectedSize)
	}
	return nil
}
// Close closes the underlying chunk file. The close error is deliberately
// discarded, matching the method's error-free signature.
func (r *ChunkReader) Close() {
	_ = r.FileHandle.Close()
}