first commit
commit cd14f2e2ee
@@ -0,0 +1,17 @@
package commands

type BaseCommand struct {
    Help HelpCommand `command:"help" description:"Print this help message"`

    PrepareBin PrepareBinCommand `command:"prepare" alias:"p" description:"Prepare a binary file and attach the proprietary header to it"`
    DecryptDBF DecryptDBFCommand `command:"decrypt-dbf" description:"decrypt dbf-data"`
    DecryptDES DecryptDESCommand `command:"decrypt-des" description:"decrypt des-data"`

    MparUnpack MparUnpackCommand `command:"mpar-unpack" description:"extracts the raw bin from an mpar file"`
    MparPack   MparPackCommand   `command:"mpar-pack" description:"creates an mpar file from the binary file and header info"`

    S19Tree        S19TreeCommand              `command:"s19-tree" description:"dump s19 tree"`
    S19IsolatePart S19IsolatePartitionCommand  `command:"s19-isolate-part" description:"isolate a segment from an s19"`
}

var MotoCLI BaseCommand
@@ -0,0 +1,46 @@
package commands

import (
    "io"
    "os"

    "git.cheetah.cat/cheetah/moto-flash-data/motolol/cpe"
)

type DecryptDBFCommand struct {
    SourceFileName string `long:"src" short:"i" required:"true" description:"input-filename"`
    DestFileName   string `long:"dest" short:"o" required:"true" description:"output-filename"`
}

func (command *DecryptDBFCommand) Execute(args []string) error {
    inputFile, err := os.Open(command.SourceFileName)
    if err != nil {
        return err
    }
    defer inputFile.Close()
    outputFile, err := os.Create(command.DestFileName)
    if err != nil {
        return err
    }
    defer outputFile.Close()

    inputFileStat, err := inputFile.Stat()
    if err != nil {
        return err
    }

    pdata := make([]byte, inputFileStat.Size())
    _, err = io.ReadFull(inputFile, pdata)
    if err != nil {
        return err
    }

    decrypted := cpe.DecryptDBFFile(pdata)

    _, err = outputFile.Write(decrypted)
    if err != nil {
        return err
    }

    return nil
}
@@ -0,0 +1,54 @@
package commands

import (
    "crypto/cipher"
    "crypto/des"
    "io"
    "os"
)

type DecryptDESCommand struct {
    SourceFileName string `long:"src" short:"i" required:"true" description:"input-filename"`
    DestFileName   string `long:"dest" short:"o" required:"true" description:"output-filename"`
}

func (command *DecryptDESCommand) Execute(args []string) error {
    inputFile, err := os.Open(command.SourceFileName)
    if err != nil {
        return err
    }
    defer inputFile.Close()
    outputFile, err := os.Create(command.DestFileName)
    if err != nil {
        return err
    }
    defer outputFile.Close()

    inputFileStat, err := inputFile.Stat()
    if err != nil {
        return err
    }

    pdata := make([]byte, inputFileStat.Size())
    _, err = io.ReadFull(inputFile, pdata)
    if err != nil {
        return err
    }

    block, err := des.NewTripleDESCipher([]byte("HAVNCPSCMTTUNERAIRTRACER"))
    if err != nil {
        return err
    }
    iv := []byte("VEDKDJSP")

    decrypter := cipher.NewCBCDecrypter(block, iv)
    decrypted := make([]byte, len(pdata))
    decrypter.CryptBlocks(decrypted, pdata)

    _, err = outputFile.Write(decrypted)
    if err != nil {
        return err
    }

    return nil
}
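Note: cipher.NewCBCDecrypter's CryptBlocks panics unless the input length is a multiple of the 8-byte DES block size. For reference, a minimal sketch of the matching encrypt direction with the same hard-coded key and IV (hypothetical helper, not part of this commit):

func encryptDES(plain []byte) ([]byte, error) {
    // CryptBlocks panics unless len(plain) is a multiple of des.BlockSize (8).
    block, err := des.NewTripleDESCipher([]byte("HAVNCPSCMTTUNERAIRTRACER"))
    if err != nil {
        return nil, err
    }
    encrypter := cipher.NewCBCEncrypter(block, []byte("VEDKDJSP"))
    out := make([]byte, len(plain))
    encrypter.CryptBlocks(out, plain)
    return out, nil
}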
@@ -0,0 +1,13 @@
package commands

import (
    "errors"
)

var ErrShowHelpMessage = errors.New("help command invoked")

type HelpCommand struct{}

func (command *HelpCommand) Execute(args []string) error {
    return ErrShowHelpMessage
}
@@ -0,0 +1,88 @@
package commands

import (
    "encoding/hex"
    "errors"
    "fmt"
    "io"
    "os"

    "git.cheetah.cat/cheetah/moto-flash-data/flashpart"
    "github.com/rs/zerolog/log"
)

type MparPackCommand struct {
    SourceFileName     string `long:"src" short:"i" required:"true" description:"input-filename"`
    OriginalFullHeader string `long:"full-header" required:"true" description:"original full header hex"`

    DestFileName string `long:"dest" short:"o" required:"true" description:"output-filename"`
}

func (command *MparPackCommand) Execute(args []string) error {
    inputFile, err := os.Open(command.SourceFileName)
    if err != nil {
        return err
    }
    defer inputFile.Close()
    outputFile, err := os.Create(command.DestFileName)
    if err != nil {
        return err
    }
    defer outputFile.Close()

    inputFileStat, err := inputFile.Stat()
    if err != nil {
        return err
    }

    log.Info().Int64("fileSize", inputFileStat.Size()).Msg("Reading raw file...")
    rawData := make([]byte, inputFileStat.Size())
    _, err = io.ReadFull(inputFile, rawData)
    if err != nil {
        return err
    }

    originalHeader, err := hex.DecodeString(command.OriginalFullHeader)
    if err != nil {
        return err
    }
    if len(originalHeader) != 32 {
        return errors.New("header invalid")
    }

    fpHeader := flashpart.ParseFlashPartHeader(originalHeader)
    log.Info().Msg(
        fmt.Sprintf("Media-ID: 0x%02X, Partition Size: 0x%08X / %d bytes, Partition Tag: %s",
            fpHeader.MediaID,
            fpHeader.TotalSize,
            fpHeader.TotalSize,
            fpHeader.VersionText,
        ),
    )
    log.Info().Msg(
        fmt.Sprintf("Original-Full-Header-Hex: %s", hex.EncodeToString(fpHeader.GetBytes())),
    )
    fpHeader.AdjustRawSize(uint32(len(rawData)))

    log.Info().Msg(
        fmt.Sprintf("New-Full-Header-Hex: %s", hex.EncodeToString(fpHeader.GetBytes())),
    )

    fpHeaderNew := flashpart.ParseFlashPartHeader(fpHeader.GetBytes())
    log.Info().Msg(
        fmt.Sprintf("Media-ID: 0x%02X, Partition Size: 0x%08X / %d bytes, Partition Tag: %s",
            fpHeaderNew.MediaID,
            fpHeaderNew.TotalSize,
            fpHeaderNew.TotalSize,
            fpHeaderNew.VersionText,
        ),
    )

    _, err = outputFile.Write(fpHeaderNew.GetBytes())
    if err != nil {
        return err
    }
    _, err = outputFile.Write(rawData)
    if err != nil {
        return err
    }

    return nil
}
@@ -0,0 +1,69 @@
package commands

import (
    "encoding/hex"
    "fmt"
    "io"
    "os"

    "git.cheetah.cat/cheetah/moto-flash-data/flashpart"
    "github.com/rs/zerolog/log"
)

type MparUnpackCommand struct {
    SourceFileName string `long:"src" short:"i" required:"true" description:"input-filename"`
    DestFileName   string `long:"dest" short:"o" required:"true" description:"output-filename"`
}

func (command *MparUnpackCommand) Execute(args []string) error {
    inputFile, err := os.Open(command.SourceFileName)
    if err != nil {
        return err
    }
    defer inputFile.Close()
    outputFile, err := os.Create(command.DestFileName)
    if err != nil {
        return err
    }
    defer outputFile.Close()

    inputFileStat, err := inputFile.Stat()
    if err != nil {
        return err
    }

    log.Info().Int64("fileSize", inputFileStat.Size()).Msg("Reading raw file...")
    pdata := make([]byte, inputFileStat.Size())
    _, err = io.ReadFull(inputFile, pdata)
    if err != nil {
        return err
    }

    fpHeader := flashpart.ParseFlashPartHeader(pdata[:32])
    log.Info().Msg(
        fmt.Sprintf("Media-ID: 0x%02X, Partition Size: 0x%08X / %d bytes, Partition Tag: %s",
            fpHeader.MediaID,
            fpHeader.TotalSize,
            fpHeader.TotalSize,
            fpHeader.VersionText,
        ),
    )
    log.Info().Msg(
        fmt.Sprintf("Original-Full-Header-Hex: %s", hex.EncodeToString(pdata[:32])),
    )

    // sanity check: the payload is everything after the 32-byte header
    if len(pdata[32:]) != len(pdata)-32 {
        panic("payload length mismatch")
    }
    if len(pdata[32:]) != int(fpHeader.TotalSize)-32 {
        log.Error().Uint32("header ts", fpHeader.TotalSize).Uint32("len pdata", uint32(len(pdata))).Msg("size mismatch")
        return nil
    }

    _, err = outputFile.Write(pdata[32:])
    if err != nil {
        return err
    }

    return nil
}
@@ -0,0 +1,74 @@
package commands

import (
    "bufio"
    "encoding/binary"
    "io"
    "os"
)

type PrepareBinCommand struct {
    Version        string `long:"version" short:"V" required:"true" description:"a version string of max 16 chars"`
    SourceFileName string `long:"src" short:"i" required:"true" description:"input-filename"`
    DestFileName   string `long:"dest" short:"o" required:"true" description:"output-filename"`
    MediaID        byte   `long:"mediaid" short:"m" required:"true" description:"media identifier"`
}

func (command *PrepareBinCommand) Execute(args []string) error {
    inputFile, err := os.Open(command.SourceFileName)
    if err != nil {
        return err
    }
    defer inputFile.Close()
    inputFileStat, err := inputFile.Stat()
    if err != nil {
        return err
    }
    headerFileSize := inputFileStat.Size() + 32

    outputFile, err := os.Create(command.DestFileName)
    if err != nil {
        return err
    }
    defer outputFile.Close()

    outputWriter := bufio.NewWriter(outputFile)
    // Header: media id followed by the 0x32 0xF6 0x0C magic bytes
    headerBytes := []byte{command.MediaID, 0x32, 0xF6, 0x0C}
    _, err = outputWriter.Write(headerBytes)
    if err != nil {
        return err
    }

    // Total size (header + payload), little endian
    totalSizeBytes := make([]byte, 4)
    binary.LittleEndian.PutUint32(totalSizeBytes, uint32(headerFileSize))
    _, err = outputWriter.Write(totalSizeBytes)
    if err != nil {
        return err
    }

    // 8 filler bytes
    fillerBytes := make([]byte, 8)
    _, err = outputWriter.Write(fillerBytes)
    if err != nil {
        return err
    }
    // 16-byte version text, space padded
    versionTextBytes := make([]byte, 16)
    for i := 0; i < 16; i++ {
        if i < len(command.Version) {
            versionTextBytes[i] = command.Version[i]
        } else {
            versionTextBytes[i] = 0x20
        }
    }
    _, err = outputWriter.Write(versionTextBytes)
    if err != nil {
        return err
    }

    _, err = io.Copy(outputWriter, inputFile)
    if err != nil {
        return err
    }

    // flush the buffered writer so the data actually reaches the file
    return outputWriter.Flush()
}
@@ -0,0 +1,86 @@
package commands

import (
    "fmt"
    "os"

    "git.cheetah.cat/cheetah/moto-flash-data/flashpart"
    "git.cheetah.cat/cheetah/moto-flash-data/s19"
    "github.com/rs/zerolog/log"
)

type S19IsolatePartitionCommand struct {
    FileName       string `long:"file" short:"f" required:"true" description:"s19 filename"`
    PartitionIndex *int   `long:"part-index" short:"i" description:"partition-number"`

    OutputFileName string `long:"out" short:"o" required:"true" description:"output-filename, format: mpar"`
}

func (command *S19IsolatePartitionCommand) Execute(args []string) error {
    log.Info().Str("fileName", command.FileName).Msg("S19-Isolate")

    s19File, err := s19.NewS19Reader(command.FileName)
    if err != nil {
        return err
    }
    defer s19File.Close()
    log.Info().Int64("fileSize", s19File.OsSize).Msg("Reading file...")

    records, err := s19File.ReadAllRecords()
    if err != nil {
        return err
    }
    records = records[1:] // skip the first record (S0 header)
    ranges := s19File.DetectAddressRanges(records)

    if command.PartitionIndex != nil {
        partIndex := *command.PartitionIndex
        for index, segment := range ranges {
            if index != partIndex {
                continue
            }

            log.Info().Msg(
                fmt.Sprintf("Index: %d, Start: 0x%08X, End: 0x%08X, Size: %d bytes, Slice: %d-%d",
                    index,
                    segment.StartAddress, segment.EndAddress,
                    segment.Size,
                    segment.SliceStart, segment.SliceEnd,
                ),
            )

            fpHeader := flashpart.ParseFlashPartHeader(records[segment.SliceStart].Data)
            log.Info().Msg(
                fmt.Sprintf("Index: %d, Media-ID: 0x%02X, Partition Size: 0x%08X / %d bytes, Partition Tag: %s",
                    index,
                    fpHeader.MediaID,
                    fpHeader.TotalSize,
                    fpHeader.TotalSize,
                    fpHeader.VersionText,
                ),
            )

            if fpHeader.TotalSize == 0 {
                log.Error().Msg("cannot extract a partition that is empty")
                return nil
            }

            {
                outMBarFile, err := os.Create(command.OutputFileName)
                if err != nil {
                    return err
                }
                defer outMBarFile.Close()
                for _, record := range records[segment.SliceStart : segment.SliceEnd+1] {
                    outMBarFile.Write(record.Data)
                }
            }
            break
        }
    }

    return nil
}
@@ -0,0 +1,57 @@
package commands

import (
    "fmt"

    "git.cheetah.cat/cheetah/moto-flash-data/flashpart"
    "git.cheetah.cat/cheetah/moto-flash-data/s19"
    "github.com/rs/zerolog/log"
)

type S19TreeCommand struct {
    FileName string `long:"file" short:"f" required:"true" description:"filename"`
}

func (command *S19TreeCommand) Execute(args []string) error {
    log.Info().Str("fileName", command.FileName).Msg("S19-Tree")

    s19File, err := s19.NewS19Reader(command.FileName)
    if err != nil {
        return err
    }
    defer s19File.Close()
    log.Info().Int64("fileSize", s19File.OsSize).Msg("Reading file...")

    records, err := s19File.ReadAllRecords()
    if err != nil {
        return err
    }
    records = records[1:] // skip the first record (S0 header)
    ranges := s19File.DetectAddressRanges(records)

    for index, segment := range ranges {
        log.Info().Msg(
            fmt.Sprintf("Index: %d, Start: 0x%08X, End: 0x%08X, Size: %d bytes, Slice: %d-%d",
                index,
                segment.StartAddress, segment.EndAddress,
                segment.Size,
                segment.SliceStart, segment.SliceEnd,
            ),
        )

        fpHeader := flashpart.ParseFlashPartHeader(records[segment.SliceStart].Data)
        log.Info().Msg(
            fmt.Sprintf("Index: %d, Media-ID: 0x%02X, Partition Size: 0x%08X / %d bytes, Partition Tag: %s",
                index,
                fpHeader.MediaID,
                fpHeader.TotalSize,
                fpHeader.TotalSize,
                fpHeader.VersionText,
            ),
        )
        //log.Debug().Msg(fmt.Sprint(fpHeader))
    }

    return nil
}
@@ -0,0 +1,58 @@
package flashpart

import "encoding/binary"

type FlashPartHeader struct {
    MediaID     byte
    Byte1       byte
    Byte2       byte
    Byte3       byte
    TotalSize   uint32
    RawSize     uint32
    VersionText string // 16 bytes

    raw []byte
}

func ParseFlashPartHeader(data []byte) (fph *FlashPartHeader) {
    if len(data) != 32 {
        panic("flash part header len != 32")
    }
    fph = &FlashPartHeader{
        MediaID: data[0],
        Byte1:   data[1],
        Byte2:   data[2],
        Byte3:   data[3],

        TotalSize: binary.LittleEndian.Uint32(data[4:8]),
        // bytes 16-31 hold the version text (the prepare command writes it at offset 16)
        VersionText: string(data[16:]),

        raw: data,
    }
    fph.RawSize = fph.TotalSize - 32

    // copyright string in header space (ASCII "Copy...")
    if fph.MediaID == 67 && fph.Byte1 == 111 && fph.Byte2 == 112 && fph.Byte3 == 121 {
        fph.VersionText = string(data[:])
        fph.TotalSize = 0
        fph.RawSize = 0
    }
    return fph
}

func (fph *FlashPartHeader) AdjustRawSize(size uint32) {
    fph.RawSize = size
    fph.AdjustTotalSize(size + 32)
}
func (fph *FlashPartHeader) AdjustTotalSize(size uint32) {
    fph.TotalSize = size
    totalSizeBytes := make([]byte, 4)
    binary.LittleEndian.PutUint32(totalSizeBytes, fph.TotalSize)
    // write the new size back into the raw header bytes
    for i, b := range totalSizeBytes {
        fph.raw[4+i] = b
    }
}
func (fph *FlashPartHeader) GetBytes() []byte {
    return fph.raw
}
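The 32-byte header layout implied by ParseFlashPartHeader and the prepare command: byte 0 media id, bytes 1-3 the 0x32 0xF6 0x0C magic, bytes 4-7 little-endian total size (header + payload), bytes 8-15 filler, bytes 16-31 space-padded version text. A hypothetical helper built on that reading (a sketch, not part of this commit):

func buildFlashPartHeader(mediaID byte, payloadLen uint32, version string) []byte {
    h := make([]byte, 32)
    h[0] = mediaID
    copy(h[1:4], []byte{0x32, 0xF6, 0x0C})
    binary.LittleEndian.PutUint32(h[4:8], payloadLen+32) // total size = 32-byte header + payload
    for i := 16; i < 32; i++ {
        h[i] = 0x20 // space padding
    }
    copy(h[16:], version) // copy truncates anything beyond 16 bytes
    return h
}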
@@ -0,0 +1,14 @@
module git.cheetah.cat/cheetah/moto-flash-data

go 1.20

require (
    github.com/jessevdk/go-flags v1.5.0
    github.com/rs/zerolog v1.33.0
)

require (
    github.com/mattn/go-colorable v0.1.14 // indirect
    github.com/mattn/go-isatty v0.0.20 // indirect
    golang.org/x/sys v0.29.0 // indirect
)
@@ -0,0 +1,21 @@
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/jessevdk/go-flags v1.5.0 h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc=
github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8=
github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
@@ -0,0 +1,50 @@
package main

import (
    "fmt"
    "os"

    "git.cheetah.cat/cheetah/moto-flash-data/commands"
    "github.com/jessevdk/go-flags"
    "github.com/rs/zerolog"
    "github.com/rs/zerolog/log"
)

func main() {
    zerolog.TimeFieldFormat = zerolog.TimeFormatUnix
    zerolog.SetGlobalLevel(zerolog.DebugLevel)

    consoleWriter := zerolog.ConsoleWriter{Out: os.Stdout}
    log.Logger = log.Output(consoleWriter)

    parser := flags.NewParser(&commands.MotoCLI, flags.HelpFlag|flags.PassDoubleDash)
    parser.NamespaceDelimiter = "-"

    helpParser := flags.NewParser(&commands.MotoCLI, flags.HelpFlag)
    helpParser.NamespaceDelimiter = "-"

    _, err := parser.Parse()
    handleError(helpParser, err)
}
func handleError(helpParser *flags.Parser, err error) {
    if err != nil {
        if err == commands.ErrShowHelpMessage {
            showHelp(helpParser)
        } else if flagsErr, ok := err.(*flags.Error); ok && flagsErr.Type == flags.ErrCommandRequired {
            showHelp(helpParser)
        } else if flagsErr, ok := err.(*flags.Error); ok && flagsErr.Type == flags.ErrHelp {
            fmt.Println(err)
            os.Exit(0)
        } else {
            fmt.Printf("error: %s\n", err)
        }

        os.Exit(1)
    }
}

func showHelp(helpParser *flags.Parser) {
    helpParser.ParseArgs([]string{"-h"})
    helpParser.WriteHelp(os.Stdout)
    os.Exit(0)
}
@@ -0,0 +1,204 @@
package cp

import (
    "bufio"
    "encoding/binary"
    "errors"
    "fmt"
    "io"
    "os"
)

type CPReader struct {
    FileHandle   *os.File
    streamReader *bufio.Reader
    cursorOffset uint32
    //h header
}

const (
    CPE_FILE_IDENTIFIER string = "CPDFile"
    CPE_FILE_IDENTIFIER_SIZE int = 7
    CPE_FILE_VER int = 2
    CPE_FILE_VER_SIZE int = 1
    CPE_ENCRYPTED_FILE_VER int = 3
    CPE_FILE_SUBVER int = 0
    CPE_FILE_SUBVER_SIZE int = 1
    CPE_FILE_HEADER_LENGTH int = 9
    CPE_SECTION_NAME_SIZE int = 3
    CPE_SECTION_LEN_SIZE int = 4
    CPE_SECTION_COUNT int = 3
    CPE_SECTION_EXT string = "EXT"
    CPE_SECTION_CP string = "CPS"
    CPE_SECTION_LLP string = "LLP"
    CPE_SECTION_RADIOPASS string = "RPP"
    CPE_SECTION_TONEFILE string = "TON"
)

func NewReader(fileName string) (_ CPReader, err error) {
    return CPReader{}.NewReader(fileName)
}
func (r CPReader) NewReader(fileName string) (_ CPReader, err error) {
    r.FileHandle, err = os.Open(fileName)
    if err != nil {
        return r, err
    }
    r.streamReader = bufio.NewReader(r.FileHandle)
    return r, nil
}
func (r *CPReader) Close() {
    r.FileHandle.Close()
}
func (r *CPReader) read32Bit() (value int, err error) {
    bytes := make([]byte, 4)
    _, err = io.ReadFull(r.streamReader, bytes)
    if err != nil {
        return 0, err
    }
    r.cursorOffset += 4
    return int(binary.LittleEndian.Uint32(bytes)), nil
}
func (r *CPReader) Test() (err error) {
    firstInt, _ := r.read32Bit()
    fmt.Printf("first int is %d\n", firstInt)
    if firstInt == 0 {
        return errors.New("table empty")
    }
    tableIndex := 0
    if tableIndex < firstInt {
        recordType, _ := r.read32Bit()
        fmt.Printf("recordType is %d\n", recordType)

    }
    return nil
}

/*
func (r *CPEReader) readFileMetadata() (GMAFileMetadata, error) {
    metadata := GMAFileMetadata{}

    // Read the file name
    fileName, err := r.gmaStreamReader.ReadString(byte(0))
    if err != nil {
        return metadata, err
    }
    //fmt.Printf("bufio ReadString(byte(0)) = len(%d) data=%x\n", len(fileName), fileName)

    fileName = fileName[:len(fileName)-1] // remove nullbyte that causes go string fuckyness
    r.cursorOffset += uint32(len(fileName) + 1) // Add name length + null byte
    metadata.FileName = fileName

    // Read the file size
    fileSizeBytes := make([]byte, 8)
    _, err = io.ReadFull(r.gmaStreamReader, fileSizeBytes)
    if err != nil {
        return metadata, err
    }
    r.cursorOffset += 8
    //fmt.Printf("bufio Read([]byte(4)]) fileSizeBytes = bytesRead(%d) data=%x\n", bytesRead, fileSizeBytes)
    metadata.FileSize = int64(binary.LittleEndian.Uint64(fileSizeBytes))

    // Read the file crc
    crcBytes := make([]byte, 4)
    _, err = io.ReadFull(r.gmaStreamReader, crcBytes)
    if err != nil {
        return metadata, err
    }
    r.cursorOffset += 4
    //fmt.Printf("bufio Read([]byte(4)]) crcBytes = bytesRead(%d) data=%x\n", bytesRead, crcBytes)
    metadata.CRC = binary.LittleEndian.Uint32(crcBytes)

    // Read the next type
    nextTypeBytes := make([]byte, 4)
    _, err = io.ReadFull(r.gmaStreamReader, nextTypeBytes)
    if err != nil {
        return metadata, err
    }
    r.cursorOffset += 4
    metadata.NextType = binary.LittleEndian.Uint32(nextTypeBytes)
    //fmt.Printf("bufio Read([]byte(4)]) nextTypeBytes = bytesRead(%d) data=%x\n", bytesRead, nextTypeBytes)

    return metadata, nil
}
func (r *CPEReader) ReadAddonCRC(lastOffset int64) (crc uint32, err error) {

    limitReader := io.NewSectionReader(r.FileHandle, int64(r.cursorOffset)+lastOffset, int64(4))

    crcBytes := make([]byte, 4)
    _, err = limitReader.Read(crcBytes)
    if err != nil {
        return 0, err
    }
    r.cursorOffset += 4
    CRC := binary.LittleEndian.Uint32(crcBytes)
    return CRC, nil
}

func (r *CPEReader) ReadFiles() (firstType int32, files []GMAFileMetadata, err error) {
    // read nType 4byte
    firstTypeBytes := make([]byte, 4)
    _, err = r.gmaStreamReader.Read(firstTypeBytes)
    if err != nil {
        return 0, files, err
    }
    r.cursorOffset += 4
    firstType = int32(binary.LittleEndian.Uint32(firstTypeBytes))

    if firstType == 0 {
        return 0, files, nil
    }
    fileOffset := int64(0)
    fileNumber := int32(1)
    for {
        fileMeta, err := r.readFileMetadata()
        if err != nil {
            if err == io.EOF {
                break
            }
            return firstType, files, err
        }
        fileMeta.FileNumber = fileNumber
        fileMeta.Offset = fileOffset
        //fmt.Printf("%s CRC: %d Offset: %d Size: %d\n", fileMeta.FileName, fileMeta.CRC, fileMeta.Offset, fileMeta.FileSize)
        //fmt.Printf("[% x]\n", fileMeta.FileName)
        files = append(files, fileMeta)
        fileOffset += fileMeta.FileSize
        fileNumber++
        if fileMeta.NextType == 0 {
            break
        }
    }

    return firstType, files, nil
}
func (r *CPEReader) GetOffset() (offset uint32) {
    return r.cursorOffset
}
func (r *CPEReader) ExtractFileTo(fileMeta GMAFileMetadata, writer io.Writer) (extractMeta GMAExtractionMeta, err error) {
    extractMeta.OriginalMeta = fileMeta
    // Seek to the specified offset in the reader
    limitReader := io.NewSectionReader(r.FileHandle, int64(r.cursorOffset)+fileMeta.Offset, int64(fileMeta.FileSize))
    // Copy the specified length of data from the reader to the output file
    buf := bytes.NewBuffer(nil)
    _, err = io.CopyN(buf, limitReader, int64(fileMeta.FileSize))
    if err != nil {
        return extractMeta, err
    }
    shaHasher := sha256.New()

    extractMeta.ExtractedCRC = crc32.Checksum(buf.Bytes(), crc32.MakeTable(crc32.IEEE))
    shaHasher.Write(buf.Bytes())
    extractMeta.ExtractedSHA256 = fmt.Sprintf("%x", shaHasher.Sum(nil))
    buf.WriteTo(writer)

    return extractMeta, nil
}
func (r *CPEReader) GetSHA256() (hash string, err error) {
    shaHasher := sha256.New()
    if _, err := io.Copy(shaHasher, r.FileHandle); err != nil {
        return "", err
    }
    return fmt.Sprintf("%x", shaHasher.Sum(nil)), nil
}

*/
@@ -0,0 +1,8 @@
package cp

type Table struct {
}

func NewTable(fileName string) (_ CPReader, err error) {
    return CPReader{}.NewReader(fileName)
}
@@ -0,0 +1,251 @@
package cpe

import (
    "bufio"
    "encoding/binary"
    "errors"
    "fmt"
    "io"
    "os"
)

type CPEFileSection struct {
    Name string
    Size int
    Data []byte
}
type CPEMetadata struct {
    Segments map[string]CPEFileSection
}
type CPEReader struct {
    FileHandle   *os.File
    streamReader *bufio.Reader
    cursorOffset uint32
    Metadata     CPEMetadata
    //h header
}

const (
    CPE_FILE_IDENTIFIER string = "CPDFile"
    CPE_FILE_IDENTIFIER_SIZE int = 7
    CPE_FILE_VER int = 2
    CPE_FILE_VER_SIZE int = 1
    CPE_ENCRYPTED_FILE_VER int = 3
    CPE_FILE_SUBVER int = 0
    CPE_FILE_SUBVER_SIZE int = 1
    CPE_FILE_HEADER_LENGTH int = 9
    CPE_SECTION_NAME_SIZE int = 3
    CPE_SECTION_LEN_SIZE int = 4
    CPE_SECTION_COUNT int = 3
    CPE_SECTION_EXT string = "EXT"
    CPE_SECTION_CP string = "CPS"
    CPE_SECTION_LLP string = "LLP"
    CPE_SECTION_RADIOPASS string = "RPP"
    CPE_SECTION_TONEFILE string = "TON"
)

func NewReader(fileName string) (_ CPEReader, err error) {
    return CPEReader{}.NewReader(fileName)
}
func (r CPEReader) NewReader(fileName string) (_ CPEReader, err error) {
    r.FileHandle, err = os.Open(fileName)
    if err != nil {
        return r, err
    }
    r.streamReader = bufio.NewReader(r.FileHandle)
    return r, nil
}
func (r *CPEReader) Close() {
    r.FileHandle.Close()
}
func (r *CPEReader) ValidateHeader() (err error) {
    first7Chars := make([]byte, 7)
    _, err = io.ReadFull(r.streamReader, first7Chars)
    if err != nil {
        return err
    }
    if string(first7Chars) != "CPDFile" {
        return fmt.Errorf("missing 'CPDFile' header")
    }
    versionByte, err := r.streamReader.ReadByte()
    if err != nil {
        return err
    }
    if int(versionByte) != CPE_FILE_VER {
        if int(versionByte) == CPE_ENCRYPTED_FILE_VER {
            return fmt.Errorf("file is encrypted (version %d)", versionByte)
        }
        return fmt.Errorf("file-version %d not supported", versionByte)
    }
    // skip the sub-version byte
    r.streamReader.Discard(1)
    return nil
}
func (r *CPEReader) readSegment() (fileSection CPEFileSection, err error) {
    sectionNameBytes := make([]byte, 3)
    _, err = io.ReadFull(r.streamReader, sectionNameBytes)
    if err != nil {
        return fileSection, err
    }
    fileSection.Name = string(sectionNameBytes)

    sectionSizeBytes := make([]byte, 4)
    _, err = io.ReadFull(r.streamReader, sectionSizeBytes)
    if err != nil {
        return fileSection, err
    }
    fileSection.Size = int(binary.LittleEndian.Uint32(sectionSizeBytes))

    fileSection.Data = make([]byte, fileSection.Size)
    _, err = io.ReadFull(r.streamReader, fileSection.Data)
    if err != nil {
        return fileSection, err
    }

    return fileSection, nil
}
func (r *CPEReader) ReadData() (err error) {
    r.Metadata.Segments = make(map[string]CPEFileSection)
    for {
        nextSegment, err := r.readSegment()
        if err != nil {
            if errors.Is(err, io.EOF) {
                return nil
            }
            return err
        }
        r.Metadata.Segments[nextSegment.Name] = nextSegment
        //fmt.Printf("Segment Found: %s (%d bytes)\n", nextSegment.Name, nextSegment.Size)
    }
}

/*
func (r *CPEReader) readFileMetadata() (GMAFileMetadata, error) {
    metadata := GMAFileMetadata{}

    // Read the file name
    fileName, err := r.gmaStreamReader.ReadString(byte(0))
    if err != nil {
        return metadata, err
    }
    //fmt.Printf("bufio ReadString(byte(0)) = len(%d) data=%x\n", len(fileName), fileName)

    fileName = fileName[:len(fileName)-1] // remove nullbyte that causes go string fuckyness
    r.cursorOffset += uint32(len(fileName) + 1) // Add name length + null byte
    metadata.FileName = fileName

    // Read the file size
    fileSizeBytes := make([]byte, 8)
    _, err = io.ReadFull(r.gmaStreamReader, fileSizeBytes)
    if err != nil {
        return metadata, err
    }
    r.cursorOffset += 8
    //fmt.Printf("bufio Read([]byte(4)]) fileSizeBytes = bytesRead(%d) data=%x\n", bytesRead, fileSizeBytes)
    metadata.FileSize = int64(binary.LittleEndian.Uint64(fileSizeBytes))

    // Read the file crc
    crcBytes := make([]byte, 4)
    _, err = io.ReadFull(r.gmaStreamReader, crcBytes)
    if err != nil {
        return metadata, err
    }
    r.cursorOffset += 4
    //fmt.Printf("bufio Read([]byte(4)]) crcBytes = bytesRead(%d) data=%x\n", bytesRead, crcBytes)
    metadata.CRC = binary.LittleEndian.Uint32(crcBytes)

    // Read the next type
    nextTypeBytes := make([]byte, 4)
    _, err = io.ReadFull(r.gmaStreamReader, nextTypeBytes)
    if err != nil {
        return metadata, err
    }
    r.cursorOffset += 4
    metadata.NextType = binary.LittleEndian.Uint32(nextTypeBytes)
    //fmt.Printf("bufio Read([]byte(4)]) nextTypeBytes = bytesRead(%d) data=%x\n", bytesRead, nextTypeBytes)

    return metadata, nil
}
func (r *CPEReader) ReadAddonCRC(lastOffset int64) (crc uint32, err error) {

    limitReader := io.NewSectionReader(r.FileHandle, int64(r.cursorOffset)+lastOffset, int64(4))

    crcBytes := make([]byte, 4)
    _, err = limitReader.Read(crcBytes)
    if err != nil {
        return 0, err
    }
    r.cursorOffset += 4
    CRC := binary.LittleEndian.Uint32(crcBytes)
    return CRC, nil
}

func (r *CPEReader) ReadFiles() (firstType int32, files []GMAFileMetadata, err error) {
    // read nType 4byte
    firstTypeBytes := make([]byte, 4)
    _, err = r.gmaStreamReader.Read(firstTypeBytes)
    if err != nil {
        return 0, files, err
    }
    r.cursorOffset += 4
    firstType = int32(binary.LittleEndian.Uint32(firstTypeBytes))

    if firstType == 0 {
        return 0, files, nil
    }
    fileOffset := int64(0)
    fileNumber := int32(1)
    for {
        fileMeta, err := r.readFileMetadata()
        if err != nil {
            if err == io.EOF {
                break
            }
            return firstType, files, err
        }
        fileMeta.FileNumber = fileNumber
        fileMeta.Offset = fileOffset
        //fmt.Printf("%s CRC: %d Offset: %d Size: %d\n", fileMeta.FileName, fileMeta.CRC, fileMeta.Offset, fileMeta.FileSize)
        //fmt.Printf("[% x]\n", fileMeta.FileName)
        files = append(files, fileMeta)
        fileOffset += fileMeta.FileSize
        fileNumber++
        if fileMeta.NextType == 0 {
            break
        }
    }

    return firstType, files, nil
}
func (r *CPEReader) GetOffset() (offset uint32) {
    return r.cursorOffset
}
func (r *CPEReader) ExtractFileTo(fileMeta GMAFileMetadata, writer io.Writer) (extractMeta GMAExtractionMeta, err error) {
    extractMeta.OriginalMeta = fileMeta
    // Seek to the specified offset in the reader
    limitReader := io.NewSectionReader(r.FileHandle, int64(r.cursorOffset)+fileMeta.Offset, int64(fileMeta.FileSize))
    // Copy the specified length of data from the reader to the output file
    buf := bytes.NewBuffer(nil)
    _, err = io.CopyN(buf, limitReader, int64(fileMeta.FileSize))
    if err != nil {
        return extractMeta, err
    }
    shaHasher := sha256.New()

    extractMeta.ExtractedCRC = crc32.Checksum(buf.Bytes(), crc32.MakeTable(crc32.IEEE))
    shaHasher.Write(buf.Bytes())
    extractMeta.ExtractedSHA256 = fmt.Sprintf("%x", shaHasher.Sum(nil))
    buf.WriteTo(writer)

    return extractMeta, nil
}
func (r *CPEReader) GetSHA256() (hash string, err error) {
    shaHasher := sha256.New()
    if _, err := io.Copy(shaHasher, r.FileHandle); err != nil {
        return "", err
    }
    return fmt.Sprintf("%x", shaHasher.Sum(nil)), nil
}

*/
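For reference, a hypothetical writer counterpart for the section format read above (3-byte name, 4-byte little-endian length, payload); a sketch only, not part of this commit:

func writeSegment(w io.Writer, name string, data []byte) error {
    if len(name) != CPE_SECTION_NAME_SIZE {
        return fmt.Errorf("section name must be %d bytes, got %q", CPE_SECTION_NAME_SIZE, name)
    }
    if _, err := io.WriteString(w, name); err != nil {
        return err
    }
    sizeBytes := make([]byte, CPE_SECTION_LEN_SIZE)
    binary.LittleEndian.PutUint32(sizeBytes, uint32(len(data)))
    if _, err := w.Write(sizeBytes); err != nil {
        return err
    }
    _, err := w.Write(data)
    return err
}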
@@ -0,0 +1,9 @@
package cpe

/*
func {
    r, _ := zlib.NewReader(bytes.NewReader(z))
    result, _ := ioutil.ReadAll(r)
}

*/
@@ -0,0 +1,19 @@
package cpe

func EncryptDBFFile(pdata []byte) []byte {
    length := len(pdata)
    for index := 0; index < length; index++ {
        num3 := byte((byte(pdata[index]) + byte(length-index)) ^ byte(index*111))
        pdata[index] = num3
    }
    return pdata
}
func DecryptDBFFile(pdata []byte) []byte {
    length := len(pdata)

    for index := 0; index < length; index++ {
        num3 := byte((byte(pdata[index]) ^ byte(index*111)) - byte(length-index))
        pdata[index] = num3
    }
    return pdata
}
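The two transforms above are byte-wise inverses of each other (a position-dependent add then XOR, undone in reverse order), and both mutate the slice in place. A hypothetical round-trip check in a package-internal _test.go (assumes "bytes" and "testing" imports; not part of this commit):

func TestDBFRoundTrip(t *testing.T) {
    original := []byte("example payload")
    buf := append([]byte(nil), original...) // work on a copy, both functions mutate in place
    EncryptDBFFile(buf)
    DecryptDBFFile(buf)
    if !bytes.Equal(buf, original) {
        t.Fatalf("round trip mismatch: %x != %x", buf, original)
    }
}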
@@ -0,0 +1,12 @@
package s19

import "strconv"

func validateChecksum(line string, checksum byte) bool {
    sum := byte(0)
    for i := 2; i < len(line)-2; i += 2 {
        value, _ := strconv.ParseUint(line[i:i+2], 16, 8)
        sum += byte(value)
    }
    return ^sum == checksum
}
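validateChecksum sums the count, address and data byte pairs (skipping the leading "S?" type and the trailing checksum pair) and compares the one's complement of the low byte against the record's checksum. Worked example using the classic SREC sample record: the bytes 13 7A F0 0A 0A 0D 00...00 sum to 0x19E, the low byte is 0x9E, and ^0x9E = 0x61, which matches the final pair. A hypothetical internal test (assumes a "testing" import; not part of this commit):

func TestValidateChecksum(t *testing.T) {
    line := "S1137AF00A0A0D0000000000000000000000000061"
    if !validateChecksum(line, 0x61) {
        t.Fatal("expected checksum 0x61 to validate")
    }
}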
@@ -0,0 +1,171 @@
package s19

import (
    "bufio"
    "encoding/hex"
    "errors"
    "fmt"
    "os"
    "sort"
    "strconv"
)

type S19Reader struct {
    FilePath string
    File     *os.File
    OsSize   int64
}

func NewS19Reader(filePath string) (*S19Reader, error) {
    file, err := os.Open(filePath)
    if err != nil {
        return nil, err
    }
    stat, err := file.Stat()
    if err != nil {
        return nil, err
    }

    return &S19Reader{
        FilePath: filePath,
        File:     file,
        OsSize:   stat.Size(),
    }, nil
}

func (fr *S19Reader) ReadAllRecords() (records []*SRecord, err error) {
    _, err = fr.File.Seek(0, 0)
    if err != nil {
        return nil, err
    }

    scanner := bufio.NewScanner(fr.File)
    for scanner.Scan() {
        if len(scanner.Text()) < 10 {
            continue
        }
        record, err := parseSRecord(scanner.Text())
        if err != nil {
            return nil, err
        }

        records = append(records, record)
        //log.Debug().Msg(fmt.Sprint(record))
    }
    if err := scanner.Err(); err != nil {
        return nil, err
    }
    return records, nil
}

func parseSRecord(line string) (*SRecord, error) {
    if len(line) < 10 || line[0] != 'S' {
        return nil, fmt.Errorf("invalid S-record format '%s'", line)
    }

    record := &SRecord{}
    record.Type = line[:2]

    // Extract byte count (hexadecimal, 2 characters, 0x00-0xFF)
    count, err := strconv.ParseUint(line[2:4], 16, 8)
    if err != nil {
        return nil, fmt.Errorf("invalid byte count: %v", err)
    }
    record.Count = int(count)

    // Address length depends on the type
    var addressLength int
    switch record.Type {
    case "S0", "S1", "S9": // Address is 2 bytes
        addressLength = 4
    case "S2", "S8": // Address is 3 bytes
        addressLength = 6
    case "S3", "S7": // Address is 4 bytes
        addressLength = 8
    default:
        return nil, fmt.Errorf("unsupported record type: %s", record.Type)
    }

    // Extract and parse the address
    if len(line) < 4+addressLength {
        return nil, errors.New("record is too short for address")
    }
    address, err := strconv.ParseUint(line[4:4+addressLength], 16, 32)
    if err != nil {
        return nil, fmt.Errorf("invalid address: %v", err)
    }
    record.Address = uint32(address)

    // Extract data and checksum
    dataStart := 4 + addressLength
    dataEnd := len(line) - 2 // Exclude checksum
    if dataEnd > dataStart {
        record.Data, err = hex.DecodeString(line[dataStart:dataEnd])
        if err != nil {
            return nil, fmt.Errorf("invalid data: %v", err)
        }
    }

    // Validate checksum
    checksum, err := strconv.ParseUint(line[len(line)-2:], 16, 8)
    if err != nil {
        return nil, fmt.Errorf("invalid checksum: %v", err)
    }
    if !validateChecksum(line, byte(checksum)) {
        return nil, errors.New("checksum validation failed")
    }

    return record, nil
}

func (fr *S19Reader) Close() error {
    return fr.File.Close()
}

func (_ *S19Reader) DetectAddressRanges(records []*SRecord) []AddressRange {
    // Sort records by address
    sort.Slice(records, func(i, j int) bool {
        return records[i].Address < records[j].Address
    })

    var ranges []AddressRange
    var currentRange *AddressRange

    for sliceIndex, record := range records {
        if currentRange == nil {
            // Start a new range
            currentRange = &AddressRange{
                SliceStart:   uint32(sliceIndex),
                SliceEnd:     uint32(sliceIndex),
                StartAddress: record.Address,
                EndAddress:   record.Address + uint32(len(record.Data)) - 1,
                Size:         uint32(len(record.Data)),
            }
            continue
        }

        // Check if the current record is contiguous with the previous range
        if record.Address == currentRange.EndAddress+1 {
            currentRange.EndAddress = record.Address + uint32(len(record.Data)) - 1
            currentRange.Size += uint32(len(record.Data))
            currentRange.SliceEnd = uint32(sliceIndex)
        } else {
            // Save the current range and start a new one
            ranges = append(ranges, *currentRange)
            currentRange = &AddressRange{
                SliceStart:   uint32(sliceIndex),
                SliceEnd:     uint32(sliceIndex),
                StartAddress: record.Address,
                EndAddress:   record.Address + uint32(len(record.Data)) - 1,
                Size:         uint32(len(record.Data)),
            }
        }
    }

    // Append the last range
    if currentRange != nil {
        ranges = append(ranges, *currentRange)
    }

    return ranges
}
@@ -0,0 +1,36 @@
package s19_test

import (
    "fmt"
    "testing"

    "git.cheetah.cat/cheetah/moto-flash-data/s19"
    "github.com/rs/zerolog/log"
)

func TestReadLuca(t *testing.T) {
    s19File, err := s19.NewS19Reader("../test-data/srec/luca.s19")
    if err != nil {
        t.Fatal(err)
    }
    defer s19File.Close()

    records, err := s19File.ReadAllRecords()
    if err != nil {
        t.Fatal(err)
    }
    records = records[1:] // skip the first record (S0 header)
    ranges := s19File.DetectAddressRanges(records)

    for index, segment := range ranges {
        log.Info().Msg(fmt.Sprintf("Index: %d, Start: 0x%08X, End: 0x%08X, Size: %d bytes", index, segment.StartAddress, segment.EndAddress, segment.Size))
    }

    if len(ranges) != 32 {
        t.Fail()
    }
    if ranges[31].Size != 13400 {
        t.Fail()
    }
}
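A second, self-contained sketch of how DetectAddressRanges groups records (hypothetical test alongside the one above, not part of this commit): records at contiguous addresses collapse into one range, and a gap in the address space starts a new one.

func TestDetectAddressRangesSketch(t *testing.T) {
    recs := []*s19.SRecord{
        {Type: "S3", Address: 0x1000, Data: make([]byte, 16)},
        {Type: "S3", Address: 0x1010, Data: make([]byte, 16)}, // contiguous with the first
        {Type: "S3", Address: 0x8000, Data: make([]byte, 16)}, // gap -> new range
    }
    ranges := (&s19.S19Reader{}).DetectAddressRanges(recs)
    if len(ranges) != 2 || ranges[0].Size != 32 || ranges[1].StartAddress != 0x8000 {
        t.Fatalf("unexpected ranges: %+v", ranges)
    }
}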
@@ -0,0 +1,18 @@
package s19

type SRecord struct {
    Type    string
    Count   int
    Address uint32
    Data    []byte
}

type AddressRange struct {
    // for internal access
    SliceStart uint32
    SliceEnd   uint32
    //
    StartAddress uint32
    EndAddress   uint32
    Size         uint32
}
@@ -0,0 +1,107 @@
package s19

import (
    "encoding/hex"
    "fmt"
    "os"
)

// writeSRecord creates a formatted S-record line
func writeSRecord(record SRecord) (string, error) {
    var addressLength int
    switch record.Type {
    case "S1": // 2-byte address
        addressLength = 2
    case "S2": // 3-byte address
        addressLength = 3
    case "S3": // 4-byte address
        addressLength = 4
    case "S9": // Termination record with a 2-byte start address
        addressLength = 2
    default:
        return "", fmt.Errorf("unsupported record type: %s", record.Type)
    }

    // Calculate the byte count: address bytes + data bytes + checksum byte
    byteCount := 1 + addressLength + len(record.Data)

    // Create the record string
    addressFormat := fmt.Sprintf("%%0%dX", addressLength*2)
    address := fmt.Sprintf(addressFormat, record.Address)

    // Convert data to hexadecimal string
    data := hex.EncodeToString(record.Data)

    // Combine all components
    body := fmt.Sprintf("%02X%s%s", byteCount, address, data)

    // Compute checksum
    checksum := computeChecksum(body)

    // Construct the final S-record
    return fmt.Sprintf("S%s%s%02X", record.Type[1:], body, checksum), nil
}

// computeChecksum calculates the checksum for an S-record
func computeChecksum(body string) byte {
    sum := byte(0)
    for i := 0; i < len(body); i += 2 {
        value, _ := hex.DecodeString(body[i : i+2])
        sum += value[0]
    }
    return ^sum
}

// writeS19File creates an S19 file with the provided records
func writeS19File(filename string, records []SRecord) error {
    file, err := os.Create(filename)
    if err != nil {
        return err
    }
    defer file.Close()

    for _, record := range records {
        line, err := writeSRecord(record)
        if err != nil {
            return err
        }
        _, err = file.WriteString(line + "\n")
        if err != nil {
            return err
        }
    }
    return nil
}

func main() {
    // Example data to write
    data := []byte("Hello, S19 world!")

    // Create a list of records
    records := []SRecord{
        {
            Type:    "S1",
            Address: 0x1000,
            Data:    data[:8], // First chunk
        },
        {
            Type:    "S1",
            Address: 0x1008,
            Data:    data[8:], // Second chunk
        },
        {
            Type:    "S9",
            Address: 0x1000, // Start address for the program
            Data:    nil,
        },
    }

    // Write the records to a file
    err := writeS19File("output.s19", records)
    if err != nil {
        fmt.Printf("Error writing S19 file: %v\n", err)
        return
    }

    fmt.Println("S19 file written successfully.")
}