(Feat): Initial Commit

This commit is contained in:
2025-11-16 19:48:50 +00:00
commit a00f70a7fe
17 changed files with 1654 additions and 0 deletions

View File

@@ -0,0 +1,35 @@
package archiver
import (
	"fmt"

	"zipprine/internal/models"
)
// Compress creates an archive at config.OutputPath from config.SourcePath,
// dispatching on config.ArchiveType to the format-specific writer.
//
// An unrecognized archive type is reported as an error; the previous
// behavior of silently returning nil hid misconfiguration from callers.
func Compress(config *models.CompressConfig) error {
	switch config.ArchiveType {
	case models.ZIP:
		return createZip(config)
	case models.TARGZ:
		return createTarGz(config)
	case models.TAR:
		return createTar(config)
	case models.GZIP:
		return createGzip(config)
	default:
		return fmt.Errorf("unsupported archive type: %v", config.ArchiveType)
	}
}
// Extract unpacks the archive described by config into config.DestPath,
// dispatching on config.ArchiveType to the format-specific reader.
//
// An unrecognized archive type is reported as an error; the previous
// behavior of silently returning nil hid misconfiguration from callers.
func Extract(config *models.ExtractConfig) error {
	switch config.ArchiveType {
	case models.ZIP:
		return extractZip(config)
	case models.TARGZ:
		return extractTarGz(config)
	case models.TAR:
		return extractTar(config)
	case models.GZIP:
		return extractGzip(config)
	default:
		return fmt.Errorf("unsupported archive type: %v", config.ArchiveType)
	}
}

107
internal/archiver/detect.go Normal file
View File

@@ -0,0 +1,107 @@
package archiver
import (
"bytes"
"compress/gzip"
"io"
"os"
"path/filepath"
"strings"
"zipprine/internal/models"
)
// DetectArchiveType determines the archive format of the file at path.
// It first consults the file extension; when the extension is not
// recognized, it falls back to sniffing magic bytes from the file
// content. models.AUTO is returned when neither method identifies the
// format.
func DetectArchiveType(path string) (models.ArchiveType, error) {
	// Extension-based detection.
	lower := strings.ToLower(path)
	switch filepath.Ext(lower) {
	case ".zip":
		return models.ZIP, nil
	case ".gz":
		if strings.HasSuffix(lower, ".tar.gz") {
			return models.TARGZ, nil
		}
		return models.GZIP, nil
	case ".tar":
		return models.TAR, nil
	case ".tgz":
		return models.TARGZ, nil
	}

	// Content-based detection: sniff up to 512 bytes from the start.
	f, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer f.Close()

	sniff := make([]byte, 512)
	n, err := f.Read(sniff)
	if err != nil && err != io.EOF {
		return "", err
	}
	sniff = sniff[:n]

	switch {
	case bytes.HasPrefix(sniff, []byte{0x50, 0x4B}):
		// "PK" signature — ZIP.
		return models.ZIP, nil
	case bytes.HasPrefix(sniff, []byte{0x1F, 0x8B}):
		// GZIP signature. Peek inside the stream: a "ustar" marker at
		// offset 257 of the decompressed data means it is really tar.gz.
		f.Seek(0, 0)
		if gz, gzErr := gzip.NewReader(f); gzErr == nil {
			defer gz.Close()
			probe := make([]byte, 512)
			if read, _ := gz.Read(probe); read >= 257 {
				if bytes.Equal(probe[257:262], []byte("ustar")) {
					return models.TARGZ, nil
				}
			}
		}
		return models.GZIP, nil
	case len(sniff) >= 262 && bytes.Equal(sniff[257:262], []byte("ustar")):
		// Bare tar: "ustar" magic at offset 257.
		return models.TAR, nil
	}
	return models.AUTO, nil
}
// Analyze inspects the archive at path and returns summary information
// (type, sizes, and a file listing where available).
//
// For plain GZIP files only basic size information is produced, since a
// gzip stream wraps a single unnamed payload. When the archive type
// cannot be detected, Analyze returns (nil, nil); callers must check for
// a nil result.
func Analyze(path string) (*models.ArchiveInfo, error) {
	archiveType, err := DetectArchiveType(path)
	if err != nil {
		return nil, err
	}
	switch archiveType {
	case models.ZIP:
		return analyzeZip(path)
	case models.TARGZ:
		return analyzeTar(path, true)
	case models.TAR:
		return analyzeTar(path, false)
	case models.GZIP:
		file, err := os.Open(path)
		if err != nil {
			return nil, err
		}
		defer file.Close()
		// Propagate the Stat error: the old `fileStat, _ :=` pattern
		// could leave fileStat nil and panic on Size().
		fileStat, err := file.Stat()
		if err != nil {
			return nil, err
		}
		return &models.ArchiveInfo{
			Type:           models.GZIP,
			CompressedSize: fileStat.Size(),
			FileCount:      1,
			Files:          []models.FileInfo{},
		}, nil
	default:
		// DetectArchiveType yields models.AUTO for unknown formats;
		// preserve the existing "no info, no error" contract here.
		return nil, nil
	}
}

281
internal/archiver/tar.go Normal file
View File

@@ -0,0 +1,281 @@
package archiver
import (
"archive/tar"
"compress/gzip"
"crypto/sha256"
"fmt"
"io"
"os"
"path/filepath"
"zipprine/internal/models"
"zipprine/pkg/fileutil"
)
// createTar writes an uncompressed tar archive of config.SourcePath to
// config.OutputPath.
func createTar(config *models.CompressConfig) error {
	outFile, err := os.Create(config.OutputPath)
	if err != nil {
		return err
	}
	defer outFile.Close()

	tarWriter := tar.NewWriter(outFile)
	if err := addToTar(tarWriter, config); err != nil {
		tarWriter.Close()
		return err
	}
	// Close explicitly and return its error: a deferred close would
	// discard the flush error and report a truncated archive as success.
	return tarWriter.Close()
}
// createTarGz writes a gzip-compressed tar archive of config.SourcePath
// to config.OutputPath, using config.CompressionLevel for the gzip layer.
func createTarGz(config *models.CompressConfig) error {
	outFile, err := os.Create(config.OutputPath)
	if err != nil {
		return err
	}
	defer outFile.Close()

	gzWriter, err := gzip.NewWriterLevel(outFile, config.CompressionLevel)
	if err != nil {
		return err
	}
	tarWriter := tar.NewWriter(gzWriter)

	if err := addToTar(tarWriter, config); err != nil {
		tarWriter.Close()
		gzWriter.Close()
		return err
	}
	// Close in order (tar, then gzip) and propagate errors: deferred
	// closes would silently drop flush failures from either layer.
	if err := tarWriter.Close(); err != nil {
		gzWriter.Close()
		return err
	}
	return gzWriter.Close()
}
// createGzip compresses the single file at config.SourcePath into
// config.OutputPath using config.CompressionLevel.
func createGzip(config *models.CompressConfig) error {
	inFile, err := os.Open(config.SourcePath)
	if err != nil {
		return err
	}
	defer inFile.Close()

	outFile, err := os.Create(config.OutputPath)
	if err != nil {
		return err
	}
	defer outFile.Close()

	gzWriter, err := gzip.NewWriterLevel(outFile, config.CompressionLevel)
	if err != nil {
		return err
	}
	if _, err := io.Copy(gzWriter, inFile); err != nil {
		gzWriter.Close()
		return err
	}
	// Close flushes buffered compressed data; its error must be returned
	// or a truncated .gz file could pass as a success.
	return gzWriter.Close()
}
// addToTar walks config.SourcePath and writes every included file and
// directory into tarWriter, using paths relative to the source root.
// Exclusion/inclusion filtering is delegated to fileutil.ShouldInclude.
func addToTar(tarWriter *tar.Writer, config *models.CompressConfig) error {
	return filepath.Walk(config.SourcePath, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if !fileutil.ShouldInclude(path, config.ExcludePaths, config.IncludePaths) {
			if info.IsDir() {
				// Prune the entire excluded subtree.
				return filepath.SkipDir
			}
			return nil
		}
		relPath, err := filepath.Rel(config.SourcePath, path)
		if err != nil {
			return err
		}
		// NOTE(review): symlinks get an empty link target here, so they
		// are not archived faithfully; handling them would need
		// os.Readlink — confirm whether symlink support is expected.
		header, err := tar.FileInfoHeader(info, "")
		if err != nil {
			return err
		}
		// Tar entry names conventionally use forward slashes, while
		// filepath.Rel yields OS-specific separators (backslashes on
		// Windows); normalize so archives are portable.
		header.Name = filepath.ToSlash(relPath)
		if err := tarWriter.WriteHeader(header); err != nil {
			return err
		}
		if info.IsDir() {
			return nil
		}
		fmt.Printf(" → %s\n", relPath)
		file, err := os.Open(path)
		if err != nil {
			return err
		}
		defer file.Close()
		_, err = io.Copy(tarWriter, file)
		return err
	})
}
// extractTar opens the tar archive at config.ArchivePath and extracts
// its contents into config.DestPath.
func extractTar(config *models.ExtractConfig) error {
	archive, err := os.Open(config.ArchivePath)
	if err != nil {
		return err
	}
	defer archive.Close()
	return extractFromTar(tar.NewReader(archive), config)
}
// extractTarGz opens the gzip-compressed tar archive at
// config.ArchivePath and extracts its contents into config.DestPath.
func extractTarGz(config *models.ExtractConfig) error {
	archive, err := os.Open(config.ArchivePath)
	if err != nil {
		return err
	}
	defer archive.Close()

	gz, err := gzip.NewReader(archive)
	if err != nil {
		return err
	}
	defer gz.Close()

	return extractFromTar(tar.NewReader(gz), config)
}
// extractGzip decompresses a single .gz file into config.DestPath,
// naming the output after the archive with its ".gz" extension removed.
func extractGzip(config *models.ExtractConfig) error {
	inFile, err := os.Open(config.ArchivePath)
	if err != nil {
		return err
	}
	defer inFile.Close()

	gzReader, err := gzip.NewReader(inFile)
	if err != nil {
		return err
	}
	defer gzReader.Close()

	// Strip the ".gz" extension only when it is actually present: the
	// old blind outPath[:len(outPath)-3] corrupted output names for
	// archives like "data.tgz" or anything not ending in ".gz".
	base := filepath.Base(config.ArchivePath)
	if filepath.Ext(base) == ".gz" {
		base = base[:len(base)-len(".gz")]
	}
	outPath := filepath.Join(config.DestPath, base)

	outFile, err := os.Create(outPath)
	if err != nil {
		return err
	}
	if _, err := io.Copy(outFile, gzReader); err != nil {
		outFile.Close()
		return err
	}
	// Return the close error so a failed final write is not lost.
	return outFile.Close()
}
// extractFromTar streams entries from tarReader into config.DestPath,
// honoring the OverwriteAll and PreservePerms options. Only directories
// and regular files are extracted; other entry types are skipped.
func extractFromTar(tarReader *tar.Reader, config *models.ExtractConfig) error {
	for {
		header, err := tarReader.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return err
		}
		// Reject entries (e.g. "../../etc/passwd" or absolute paths)
		// that would escape the destination directory — the classic
		// tar-slip attack on untrusted archives.
		if !filepath.IsLocal(filepath.FromSlash(header.Name)) {
			return fmt.Errorf("tar entry %q escapes destination directory", header.Name)
		}
		destPath := filepath.Join(config.DestPath, header.Name)
		switch header.Typeflag {
		case tar.TypeDir:
			if err := os.MkdirAll(destPath, os.ModePerm); err != nil {
				return err
			}
		case tar.TypeReg:
			if !config.OverwriteAll {
				// Skip files that already exist unless overwriting.
				if _, err := os.Stat(destPath); err == nil {
					fmt.Printf(" ⚠️ Skipping: %s\n", header.Name)
					continue
				}
			}
			fmt.Printf(" → Extracting: %s\n", header.Name)
			if err := os.MkdirAll(filepath.Dir(destPath), os.ModePerm); err != nil {
				return err
			}
			outFile, err := os.Create(destPath)
			if err != nil {
				return err
			}
			if _, err := io.Copy(outFile, tarReader); err != nil {
				outFile.Close()
				return err
			}
			// Propagate the close error: a failed flush means the
			// extracted file may be incomplete.
			if err := outFile.Close(); err != nil {
				return err
			}
			if config.PreservePerms {
				// Best-effort chmod, matching the original behavior.
				os.Chmod(destPath, os.FileMode(header.Mode))
			}
		}
	}
	return nil
}
// analyzeTar gathers metadata for the tar (or tar.gz, when isGzipped is
// true) archive at path: SHA-256 checksum of the on-disk bytes,
// compressed and total sizes, entry count, and up to 100 entry records.
func analyzeTar(path string, isGzipped bool) (*models.ArchiveInfo, error) {
	file, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer file.Close()

	info := &models.ArchiveInfo{
		Type:  models.TAR,
		Files: []models.FileInfo{},
	}
	if isGzipped {
		info.Type = models.TARGZ
	}

	// Propagate the Stat error instead of ignoring it: a nil fileStat
	// would panic on Size().
	fileStat, err := file.Stat()
	if err != nil {
		return nil, err
	}
	info.CompressedSize = fileStat.Size()

	// Checksum is computed over the on-disk (possibly compressed) bytes.
	hash := sha256.New()
	if _, err := io.Copy(hash, file); err != nil {
		return nil, err
	}
	info.Checksum = fmt.Sprintf("%x", hash.Sum(nil))

	// Rewind so the same handle can be read again as a tar stream.
	if _, err := file.Seek(0, io.SeekStart); err != nil {
		return nil, err
	}
	var tarReader *tar.Reader
	if isGzipped {
		gzReader, err := gzip.NewReader(file)
		if err != nil {
			return nil, err
		}
		defer gzReader.Close()
		tarReader = tar.NewReader(gzReader)
	} else {
		tarReader = tar.NewReader(file)
	}
	for {
		header, err := tarReader.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, err
		}
		info.FileCount++
		info.TotalSize += header.Size
		// Cap the listing at 100 entries to bound memory on huge archives.
		if len(info.Files) < 100 {
			info.Files = append(info.Files, models.FileInfo{
				Name:    header.Name,
				Size:    header.Size,
				IsDir:   header.Typeflag == tar.TypeDir,
				ModTime: header.ModTime.Format("2006-01-02 15:04:05"),
			})
		}
	}
	if info.TotalSize > 0 {
		info.CompressionRatio = (1 - float64(info.CompressedSize)/float64(info.TotalSize)) * 100
	}
	return info, nil
}

174
internal/archiver/zip.go Normal file
View File

@@ -0,0 +1,174 @@
package archiver
import (
"archive/zip"
"compress/flate"
"crypto/sha256"
"fmt"
"io"
"os"
"path/filepath"
"zipprine/internal/models"
"zipprine/pkg/fileutil"
)
// createZip writes a ZIP archive of config.SourcePath to
// config.OutputPath. Directories are not stored as explicit entries;
// only regular files are added.
func createZip(config *models.CompressConfig) error {
	outFile, err := os.Create(config.OutputPath)
	if err != nil {
		return err
	}
	defer outFile.Close()

	zipWriter := zip.NewWriter(outFile)

	// Honor the requested compression level for Deflate entries.
	if config.CompressionLevel > 0 {
		zipWriter.RegisterCompressor(zip.Deflate, func(out io.Writer) (io.WriteCloser, error) {
			return flate.NewWriter(out, config.CompressionLevel)
		})
	}

	walkErr := filepath.Walk(config.SourcePath, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if !fileutil.ShouldInclude(path, config.ExcludePaths, config.IncludePaths) {
			if info.IsDir() {
				// Prune the entire excluded subtree.
				return filepath.SkipDir
			}
			return nil
		}
		relPath, err := filepath.Rel(config.SourcePath, path)
		if err != nil {
			return err
		}
		if info.IsDir() {
			return nil
		}
		fmt.Printf(" → %s\n", relPath)
		header, err := zip.FileInfoHeader(info)
		if err != nil {
			return err
		}
		// The ZIP format requires forward slashes in entry names;
		// filepath.Rel yields OS-specific separators on Windows.
		header.Name = filepath.ToSlash(relPath)
		header.Method = zip.Deflate
		writer, err := zipWriter.CreateHeader(header)
		if err != nil {
			return err
		}
		file, err := os.Open(path)
		if err != nil {
			return err
		}
		defer file.Close()
		_, err = io.Copy(writer, file)
		return err
	})
	if walkErr != nil {
		zipWriter.Close()
		return walkErr
	}
	// Close explicitly: the central directory is written here, and a
	// deferred close would discard its error, yielding a corrupt archive
	// reported as success.
	return zipWriter.Close()
}
// extractZip extracts the ZIP archive at config.ArchivePath into
// config.DestPath, honoring the OverwriteAll and PreservePerms options.
func extractZip(config *models.ExtractConfig) error {
	r, err := zip.OpenReader(config.ArchivePath)
	if err != nil {
		return err
	}
	defer r.Close()
	for _, f := range r.File {
		// Reject entries (e.g. "../../etc/passwd" or absolute paths)
		// that would escape the destination directory — the classic
		// zip-slip attack on untrusted archives.
		if !filepath.IsLocal(filepath.FromSlash(f.Name)) {
			return fmt.Errorf("zip entry %q escapes destination directory", f.Name)
		}
		destPath := filepath.Join(config.DestPath, f.Name)
		if f.FileInfo().IsDir() {
			if err := os.MkdirAll(destPath, os.ModePerm); err != nil {
				return err
			}
			continue
		}
		if !config.OverwriteAll {
			// Skip files that already exist unless overwriting.
			if _, err := os.Stat(destPath); err == nil {
				fmt.Printf(" ⚠️ Skipping: %s (already exists)\n", f.Name)
				continue
			}
		}
		fmt.Printf(" → Extracting: %s\n", f.Name)
		if err := os.MkdirAll(filepath.Dir(destPath), os.ModePerm); err != nil {
			return err
		}
		outFile, err := os.Create(destPath)
		if err != nil {
			return err
		}
		rc, err := f.Open()
		if err != nil {
			outFile.Close()
			return err
		}
		_, copyErr := io.Copy(outFile, rc)
		closeErr := outFile.Close()
		rc.Close()
		if copyErr != nil {
			return copyErr
		}
		// Propagate the close error too: a failed flush means the
		// extracted file may be incomplete.
		if closeErr != nil {
			return closeErr
		}
		if config.PreservePerms {
			// Best-effort chmod, matching the original behavior.
			os.Chmod(destPath, f.Mode())
		}
	}
	return nil
}
// analyzeZip gathers metadata for the ZIP archive at path: SHA-256
// checksum of the on-disk bytes, compressed and total sizes, entry
// count, and up to 100 entry records.
func analyzeZip(path string) (*models.ArchiveInfo, error) {
	r, err := zip.OpenReader(path)
	if err != nil {
		return nil, err
	}
	defer r.Close()

	info := &models.ArchiveInfo{
		Type:  models.ZIP,
		Files: []models.FileInfo{},
	}

	// Open the raw file to size and checksum it. The old code ignored
	// the Open error, so a failure left file nil and panicked at
	// fileStat.Size().
	file, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer file.Close()
	fileStat, err := file.Stat()
	if err != nil {
		return nil, err
	}
	info.CompressedSize = fileStat.Size()

	hash := sha256.New()
	if _, err := io.Copy(hash, file); err != nil {
		return nil, err
	}
	info.Checksum = fmt.Sprintf("%x", hash.Sum(nil))

	for _, f := range r.File {
		info.FileCount++
		info.TotalSize += int64(f.UncompressedSize64)
		// Cap the listing at 100 entries to bound memory on huge archives.
		if len(info.Files) < 100 {
			info.Files = append(info.Files, models.FileInfo{
				Name:    f.Name,
				Size:    int64(f.UncompressedSize64),
				IsDir:   f.FileInfo().IsDir(),
				ModTime: f.Modified.Format("2006-01-02 15:04:05"),
			})
		}
	}
	if info.TotalSize > 0 {
		info.CompressionRatio = (1 - float64(info.CompressedSize)/float64(info.TotalSize)) * 100
	}
	return info, nil
}