Compare commits
10 Commits
35f042be34
...
3caca0dfcb
| Author | SHA1 | Date | |
|---|---|---|---|
| 3caca0dfcb | |||
| 8081085f87 | |||
| bb47c427f9 | |||
| 53c2180b9e | |||
| 1037031d3a | |||
| c8e4c691e6 | |||
| b0788b8aa7 | |||
| 8297721514 | |||
| 795b56cbe6 | |||
| b2afdd96e9 |
61
CHANGELOG.md
Normal file
61
CHANGELOG.md
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
# Changelog
|
||||||
|
|
||||||
|
All notable changes to this project will be documented in this file.
|
||||||
|
|
||||||
|
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||||
|
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
|
## [1.0.3] - 2025-11-22
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **RAR Support**: Added extraction support for RAR archives (v4 and v5)
|
||||||
|
- Magic byte detection for RAR files
|
||||||
|
- Full extraction with permission preservation
|
||||||
|
- Analysis capabilities for RAR archives
|
||||||
|
- Note: RAR compression not supported due to proprietary format
|
||||||
|
- **Semantic Versioning**: Implemented proper semantic versioning system
|
||||||
|
- Version module with Major.Minor.Patch format
|
||||||
|
- `--version` flag to display version information
|
||||||
|
- Version displayed in interactive TUI mode
|
||||||
|
- **Command-Line Interface (CLI)**: Added non-interactive CLI mode for automation
|
||||||
|
- `--compress` flag for compression operations
|
||||||
|
- `--extract` flag for extraction operations
|
||||||
|
- `--analyze` flag for archive analysis
|
||||||
|
- `--output` flag for specifying output paths
|
||||||
|
- `--type` flag for archive type selection
|
||||||
|
- `--level` flag for compression level control
|
||||||
|
- `--overwrite` flag for overwrite control
|
||||||
|
- `--preserve-perms` flag for permission preservation
|
||||||
|
- `--exclude` and `--include` flags for filtering
|
||||||
|
- `--verify` flag for integrity verification
|
||||||
|
- `--help` flag for usage information
|
||||||
|
- **Remote URL Fetching**: Added ability to download and extract archives from URLs
|
||||||
|
- `--url` flag for remote archive fetching
|
||||||
|
- HTTP/HTTPS support
|
||||||
|
- Progress tracking during download
|
||||||
|
- Automatic format detection and extraction
|
||||||
|
- Support for all archive formats via URL
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Updated README.md with comprehensive documentation for new features
|
||||||
|
- Enhanced main.go to support both CLI and interactive modes
|
||||||
|
- Improved archive type detection to include RAR format
|
||||||
|
|
||||||
|
### Dependencies
|
||||||
|
|
||||||
|
- Added `github.com/nwaples/rardecode` v1.1.3 for RAR extraction support
|
||||||
|
|
||||||
|
## [0.x.x] - Previous Versions
|
||||||
|
|
||||||
|
Previous versions included:
|
||||||
|
|
||||||
|
- ZIP, TAR, TAR.GZ, and GZIP support
|
||||||
|
- Interactive TUI mode
|
||||||
|
- Batch operations
|
||||||
|
- Archive comparison
|
||||||
|
- Format conversion
|
||||||
|
- Compression levels
|
||||||
|
- Include/exclude patterns
|
||||||
|
- Integrity verification
|
||||||
16
Makefile
16
Makefile
@@ -5,7 +5,7 @@
|
|||||||
|
|
||||||
# Application name
|
# Application name
|
||||||
BINARY_NAME=zipprine
|
BINARY_NAME=zipprine
|
||||||
VERSION?=1.0.0
|
VERSION?=1.0.3
|
||||||
BUILD_DIR=build
|
BUILD_DIR=build
|
||||||
RELEASE_DIR=releases
|
RELEASE_DIR=releases
|
||||||
|
|
||||||
@@ -41,7 +41,7 @@ help: ## Display this help screen
|
|||||||
@echo "$(CYAN)║ 🗜️ zipprine Build System 🚀 ║$(NC)"
|
@echo "$(CYAN)║ 🗜️ zipprine Build System 🚀 ║$(NC)"
|
||||||
@echo "$(CYAN)╚═══════════════════════════════════════════════════╝$(NC)"
|
@echo "$(CYAN)╚═══════════════════════════════════════════════════╝$(NC)"
|
||||||
@echo ""
|
@echo ""
|
||||||
@awk 'BEGIN {FS = ":.*##"; printf "Usage:\n make $(CYAN)<target>$(NC)\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " $(CYAN)%-15s$(NC) %s\n", $1, $2 } /^##@/ { printf "\n$(MAGENTA)%s$(NC)\n", substr($0, 5) } ' $(MAKEFILE_LIST)
|
@awk 'BEGIN {FS = ":.*##"; printf "Usage:\n make $(CYAN)<target>$(NC)\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " $(CYAN)%-15s$(NC) %s\n", $$1, $$2 } /^##@/ { printf "\n$(MAGENTA)%s$(NC)\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
|
||||||
|
|
||||||
##@ Development
|
##@ Development
|
||||||
|
|
||||||
@@ -232,9 +232,15 @@ release: clean build-all ## Create release packages
|
|||||||
|
|
||||||
checksums: ## Generate SHA256 checksums for releases
|
checksums: ## Generate SHA256 checksums for releases
|
||||||
@echo "$(BLUE)🔐 Generating checksums...$(NC)"
|
@echo "$(BLUE)🔐 Generating checksums...$(NC)"
|
||||||
@cd $(RELEASE_DIR) && shasum -a 256 * > SHA256SUMS
|
@mkdir -p $(RELEASE_DIR)
|
||||||
@echo "$(GREEN)✅ Checksums generated: $(RELEASE_DIR)/SHA256SUMS$(NC)"
|
@rm -f $(RELEASE_DIR)/SHA256SUMS
|
||||||
@cat $(RELEASE_DIR)/SHA256SUMS
|
@if [ -n "$$(ls -A $(RELEASE_DIR) 2>/dev/null)" ]; then \
|
||||||
|
cd $(RELEASE_DIR) && shasum -a 256 * > SHA256SUMS && \
|
||||||
|
echo "$(GREEN)✅ Checksums generated: $(RELEASE_DIR)/SHA256SUMS$(NC)" && \
|
||||||
|
cat SHA256SUMS; \
|
||||||
|
else \
|
||||||
|
echo "$(YELLOW)⚠️ No files found in $(RELEASE_DIR). Run 'make release' first.$(NC)"; \
|
||||||
|
fi
|
||||||
|
|
||||||
##@ Docker (Bonus)
|
##@ Docker (Bonus)
|
||||||
|
|
||||||
|
|||||||
97
README.md
97
README.md
@@ -1,6 +1,8 @@
|
|||||||
# 🗜️ Zipprine - TUI zipping tool
|
# 🗜️ Zipprine - TUI/CLI Archiving Tool
|
||||||
|
|
||||||
Zipprine is a modern TUI application for managing archives with support for multiple formats including ZIP, TAR, TAR.GZ, and GZIP.
|
Zipprine is a modern TUI/CLI application for managing archives with support for multiple formats including ZIP, TAR, TAR.GZ, GZIP, and RAR (extraction only).
|
||||||
|
|
||||||
|
**Version:** 1.0.3
|
||||||
|
|
||||||
## ✨ Features
|
## ✨ Features
|
||||||
|
|
||||||
@@ -10,10 +12,13 @@ Zipprine is a modern TUI application for managing archives with support for mult
|
|||||||
- **Compression levels**: Fast, Balanced, Best
|
- **Compression levels**: Fast, Balanced, Best
|
||||||
- **Smart filtering**: Include/exclude patterns with wildcards
|
- **Smart filtering**: Include/exclude patterns with wildcards
|
||||||
- **Integrity verification**: SHA256 checksums and validation
|
- **Integrity verification**: SHA256 checksums and validation
|
||||||
|
- **CLI mode**: Non-interactive command-line interface for automation
|
||||||
|
|
||||||
### 📂 Extraction
|
### 📂 Extraction
|
||||||
|
|
||||||
- **Auto-detection**: Automatically detects archive type by magic bytes
|
- **Auto-detection**: Automatically detects archive type by magic bytes
|
||||||
|
- **RAR support**: Extract RAR archives (v4 and v5)
|
||||||
|
- **Remote fetching**: Download and extract archives from URLs
|
||||||
- **Safe extraction**: Optional overwrite protection
|
- **Safe extraction**: Optional overwrite protection
|
||||||
- **Permission preservation**: Keep original file permissions
|
- **Permission preservation**: Keep original file permissions
|
||||||
- **Progress tracking**: Real-time extraction feedback
|
- **Progress tracking**: Real-time extraction feedback
|
||||||
@@ -23,7 +28,7 @@ Zipprine is a modern TUI application for managing archives with support for mult
|
|||||||
- **Detailed statistics**: File count, sizes, compression ratios
|
- **Detailed statistics**: File count, sizes, compression ratios
|
||||||
- **File listing**: View contents without extraction
|
- **File listing**: View contents without extraction
|
||||||
- **Checksum verification**: SHA256 integrity checks
|
- **Checksum verification**: SHA256 integrity checks
|
||||||
- **Format detection**: Magic byte analysis
|
- **Format detection**: Magic byte analysis (including RAR)
|
||||||
|
|
||||||
### 📚 Batch Operations
|
### 📚 Batch Operations
|
||||||
|
|
||||||
@@ -45,6 +50,13 @@ Zipprine is a modern TUI application for managing archives with support for mult
|
|||||||
- **Preserve contents**: Maintains file structure and permissions
|
- **Preserve contents**: Maintains file structure and permissions
|
||||||
- **Automatic extraction**: Seamless conversion process
|
- **Automatic extraction**: Seamless conversion process
|
||||||
|
|
||||||
|
### 🌐 Remote Archive Fetching
|
||||||
|
|
||||||
|
- **URL download**: Fetch archives from HTTP/HTTPS URLs
|
||||||
|
- **Auto-extract**: Automatically extract downloaded archives
|
||||||
|
- **Progress tracking**: Real-time download progress
|
||||||
|
- **Format detection**: Supports all archive formats via URL
|
||||||
|
|
||||||
## 🚀 Installation
|
## 🚀 Installation
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -61,11 +73,17 @@ make build
|
|||||||
|
|
||||||
## 📖 Usage
|
## 📖 Usage
|
||||||
|
|
||||||
Just run `zipprine` and follow the interactive menu:
|
### Interactive Mode (TUI)
|
||||||
|
|
||||||
|
Just run `zipprine` without arguments to launch the interactive menu:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
zipprine
|
||||||
|
```
|
||||||
|
|
||||||
**Compress** - Choose files/folders, pick a format (ZIP, TAR, TAR.GZ, GZIP), and set your preferences
|
**Compress** - Choose files/folders, pick a format (ZIP, TAR, TAR.GZ, GZIP), and set your preferences
|
||||||
|
|
||||||
**Extract** - Point to an archive and choose where to extract (format is auto-detected)
|
**Extract** - Point to an archive and choose where to extract (format is auto-detected, supports RAR)
|
||||||
|
|
||||||
**Analyze** - View detailed stats about any archive without extracting it
|
**Analyze** - View detailed stats about any archive without extracting it
|
||||||
|
|
||||||
@@ -75,6 +93,53 @@ Just run `zipprine` and follow the interactive menu:
|
|||||||
|
|
||||||
**Convert** - Change archive formats while preserving structure
|
**Convert** - Change archive formats while preserving structure
|
||||||
|
|
||||||
|
### Command-Line Mode (CLI)
|
||||||
|
|
||||||
|
For automation and scripting, use CLI flags:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Compress a directory
|
||||||
|
zipprine --compress /path/to/source --output archive.zip --type zip
|
||||||
|
|
||||||
|
# Extract an archive (auto-detects format)
|
||||||
|
zipprine --extract archive.tar.gz --output /path/to/dest
|
||||||
|
|
||||||
|
# Extract a RAR archive
|
||||||
|
zipprine --extract archive.rar --output /path/to/dest
|
||||||
|
|
||||||
|
# Analyze an archive
|
||||||
|
zipprine --analyze archive.zip
|
||||||
|
|
||||||
|
# Download and extract from URL
|
||||||
|
zipprine --url https://example.com/archive.zip --output /path/to/dest
|
||||||
|
|
||||||
|
# Compress with exclusions
|
||||||
|
zipprine --compress /project --output project.tar.gz --type tar.gz --exclude '*.log,*.tmp'
|
||||||
|
|
||||||
|
# Show version
|
||||||
|
zipprine --version
|
||||||
|
|
||||||
|
# Show help
|
||||||
|
zipprine --help
|
||||||
|
```
|
||||||
|
|
||||||
|
#### CLI Options
|
||||||
|
|
||||||
|
- `--compress <path>` - Compress files/folders at the specified path
|
||||||
|
- `--extract <path>` - Extract archive at the specified path
|
||||||
|
- `--analyze <path>` - Analyze archive at the specified path
|
||||||
|
- `--output <path>` - Output path for compression or extraction
|
||||||
|
- `--type <type>` - Archive type: zip, tar, tar.gz, gzip, rar (default: zip)
|
||||||
|
- `--level <1-9>` - Compression level: 1=fast, 6=balanced, 9=best (default: 6)
|
||||||
|
- `--overwrite` - Overwrite existing files during extraction
|
||||||
|
- `--preserve-perms` - Preserve file permissions (default: true)
|
||||||
|
- `--exclude <patterns>` - Comma-separated patterns to exclude
|
||||||
|
- `--include <patterns>` - Comma-separated patterns to include
|
||||||
|
- `--verify` - Verify archive integrity after compression
|
||||||
|
- `--url <url>` - Download and extract archive from remote URL
|
||||||
|
- `--version` - Show version information
|
||||||
|
- `--help` - Show help message
|
||||||
|
|
||||||
## 🔨 Building
|
## 🔨 Building
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -123,11 +188,31 @@ make bench
|
|||||||
- `src/*,docs/*` - Only src and docs folders
|
- `src/*,docs/*` - Only src and docs folders
|
||||||
- `*.md,*.txt` - Only markdown and text files
|
- `*.md,*.txt` - Only markdown and text files
|
||||||
|
|
||||||
|
## 📚 Supported Formats
|
||||||
|
|
||||||
|
### Compression (Create Archives)
|
||||||
|
|
||||||
|
- **ZIP** - Universal format, works everywhere
|
||||||
|
- **TAR** - Unix standard, no compression
|
||||||
|
- **TAR.GZ** - Compressed TAR, best for Linux
|
||||||
|
- **GZIP** - Single file compression
|
||||||
|
|
||||||
|
### Extraction (Read Archives)
|
||||||
|
|
||||||
|
- **ZIP** - Full support
|
||||||
|
- **TAR** - Full support
|
||||||
|
- **TAR.GZ** - Full support
|
||||||
|
- **GZIP** - Full support
|
||||||
|
- **RAR** - Extraction only (RAR v4 and v5)
|
||||||
|
|
||||||
|
**Note:** RAR compression is not supported due to proprietary format restrictions. Use ZIP or TAR.GZ for creating archives.
|
||||||
|
|
||||||
## 🛠️ Technologies
|
## 🛠️ Technologies
|
||||||
|
|
||||||
- **[Charm Bracelet Huh](https://github.com/charmbracelet/huh)** - Beautiful TUI forms
|
- **[Charm Bracelet Huh](https://github.com/charmbracelet/huh)** - Beautiful TUI forms
|
||||||
- **[Lipgloss](https://github.com/charmbracelet/lipgloss)** - Styling and colors
|
- **[Lipgloss](https://github.com/charmbracelet/lipgloss)** - Styling and colors
|
||||||
- **Go standard library** - Archive formats
|
- **[rardecode](https://github.com/nwaples/rardecode)** - RAR extraction support
|
||||||
|
- **Go standard library** - Archive formats and HTTP client
|
||||||
|
|
||||||
## 📝 License
|
## 📝 License
|
||||||
|
|
||||||
|
|||||||
1
go.mod
1
go.mod
@@ -5,6 +5,7 @@ go 1.25.4
|
|||||||
require (
|
require (
|
||||||
github.com/charmbracelet/huh v0.8.0
|
github.com/charmbracelet/huh v0.8.0
|
||||||
github.com/charmbracelet/lipgloss v1.1.0
|
github.com/charmbracelet/lipgloss v1.1.0
|
||||||
|
github.com/nwaples/rardecode v1.1.3
|
||||||
)
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
|
|||||||
2
go.sum
2
go.sum
@@ -58,6 +58,8 @@ github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELU
|
|||||||
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
|
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
|
||||||
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
|
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
|
||||||
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
|
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
|
||||||
|
github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9lEc=
|
||||||
|
github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
|
||||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||||
|
|||||||
@@ -80,7 +80,6 @@ func TestCompareIdenticalArchives(t *testing.T) {
|
|||||||
}
|
}
|
||||||
defer os.RemoveAll(tmpDir)
|
defer os.RemoveAll(tmpDir)
|
||||||
|
|
||||||
// Create source directory
|
|
||||||
sourceDir := filepath.Join(tmpDir, "source")
|
sourceDir := filepath.Join(tmpDir, "source")
|
||||||
os.Mkdir(sourceDir, 0755)
|
os.Mkdir(sourceDir, 0755)
|
||||||
os.WriteFile(filepath.Join(sourceDir, "file1.txt"), []byte("content"), 0644)
|
os.WriteFile(filepath.Join(sourceDir, "file1.txt"), []byte("content"), 0644)
|
||||||
|
|||||||
@@ -21,12 +21,10 @@ func TestCreateZip(t *testing.T) {
|
|||||||
os.WriteFile(filepath.Join(sourceDir, "file1.txt"), []byte("content1"), 0644)
|
os.WriteFile(filepath.Join(sourceDir, "file1.txt"), []byte("content1"), 0644)
|
||||||
os.WriteFile(filepath.Join(sourceDir, "file2.txt"), []byte("content2"), 0644)
|
os.WriteFile(filepath.Join(sourceDir, "file2.txt"), []byte("content2"), 0644)
|
||||||
|
|
||||||
// Create subdirectory
|
|
||||||
subDir := filepath.Join(sourceDir, "subdir")
|
subDir := filepath.Join(sourceDir, "subdir")
|
||||||
os.Mkdir(subDir, 0755)
|
os.Mkdir(subDir, 0755)
|
||||||
os.WriteFile(filepath.Join(subDir, "file3.txt"), []byte("content3"), 0644)
|
os.WriteFile(filepath.Join(subDir, "file3.txt"), []byte("content3"), 0644)
|
||||||
|
|
||||||
// Create ZIP
|
|
||||||
zipPath := filepath.Join(tmpDir, "test.zip")
|
zipPath := filepath.Join(tmpDir, "test.zip")
|
||||||
config := &models.CompressConfig{
|
config := &models.CompressConfig{
|
||||||
SourcePath: sourceDir,
|
SourcePath: sourceDir,
|
||||||
@@ -59,11 +57,9 @@ func TestCreateZipWithCompressionLevels(t *testing.T) {
|
|||||||
}
|
}
|
||||||
defer os.RemoveAll(tmpDir)
|
defer os.RemoveAll(tmpDir)
|
||||||
|
|
||||||
// Create test file with compressible content
|
|
||||||
sourceDir := filepath.Join(tmpDir, "source")
|
sourceDir := filepath.Join(tmpDir, "source")
|
||||||
os.Mkdir(sourceDir, 0755)
|
os.Mkdir(sourceDir, 0755)
|
||||||
|
|
||||||
// Create a file with repetitive content (compresses well)
|
|
||||||
content := make([]byte, 10000)
|
content := make([]byte, 10000)
|
||||||
for i := range content {
|
for i := range content {
|
||||||
content[i] = byte(i % 10)
|
content[i] = byte(i % 10)
|
||||||
@@ -402,11 +398,9 @@ func TestZipEmptyDirectory(t *testing.T) {
|
|||||||
}
|
}
|
||||||
defer os.RemoveAll(tmpDir)
|
defer os.RemoveAll(tmpDir)
|
||||||
|
|
||||||
// Create empty directory
|
|
||||||
sourceDir := filepath.Join(tmpDir, "empty")
|
sourceDir := filepath.Join(tmpDir, "empty")
|
||||||
os.Mkdir(sourceDir, 0755)
|
os.Mkdir(sourceDir, 0755)
|
||||||
|
|
||||||
// Create ZIP
|
|
||||||
zipPath := filepath.Join(tmpDir, "empty.zip")
|
zipPath := filepath.Join(tmpDir, "empty.zip")
|
||||||
config := &models.CompressConfig{
|
config := &models.CompressConfig{
|
||||||
SourcePath: sourceDir,
|
SourcePath: sourceDir,
|
||||||
@@ -420,7 +414,6 @@ func TestZipEmptyDirectory(t *testing.T) {
|
|||||||
t.Fatalf("createZip failed: %v", err)
|
t.Fatalf("createZip failed: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Verify ZIP was created
|
|
||||||
if _, err := os.Stat(zipPath); os.IsNotExist(err) {
|
if _, err := os.Stat(zipPath); os.IsNotExist(err) {
|
||||||
t.Error("ZIP file was not created")
|
t.Error("ZIP file was not created")
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -12,7 +12,6 @@ import (
|
|||||||
"zipprine/internal/version"
|
"zipprine/internal/version"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Run executes the CLI mode
|
|
||||||
func Run() bool {
|
func Run() bool {
|
||||||
// Define flags
|
// Define flags
|
||||||
compress := flag.String("compress", "", "Compress files/folders (source path)")
|
compress := flag.String("compress", "", "Compress files/folders (source path)")
|
||||||
@@ -32,24 +31,19 @@ func Run() bool {
|
|||||||
|
|
||||||
flag.Parse()
|
flag.Parse()
|
||||||
|
|
||||||
// Show version
|
|
||||||
if *showVersion {
|
if *showVersion {
|
||||||
fmt.Println(version.FullVersion())
|
fmt.Println(version.FullVersion())
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
// Show help
|
|
||||||
if *help {
|
if *help {
|
||||||
printHelp()
|
printHelp()
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if any CLI flags were provided
|
|
||||||
if flag.NFlag() == 0 {
|
if flag.NFlag() == 0 {
|
||||||
return false // No flags, use interactive mode
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
// Handle remote URL fetching
|
|
||||||
if *remoteURL != "" {
|
if *remoteURL != "" {
|
||||||
if *output == "" {
|
if *output == "" {
|
||||||
fmt.Println("❌ Error: --output is required when using --url")
|
fmt.Println("❌ Error: --output is required when using --url")
|
||||||
@@ -68,7 +62,6 @@ func Run() bool {
|
|||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
// Handle compression
|
|
||||||
if *compress != "" {
|
if *compress != "" {
|
||||||
if *output == "" {
|
if *output == "" {
|
||||||
fmt.Println("❌ Error: --output is required for compression")
|
fmt.Println("❌ Error: --output is required for compression")
|
||||||
@@ -105,7 +98,6 @@ func Run() bool {
|
|||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
// Handle extraction
|
|
||||||
if *extract != "" {
|
if *extract != "" {
|
||||||
if *output == "" {
|
if *output == "" {
|
||||||
fmt.Println("❌ Error: --output is required for extraction")
|
fmt.Println("❌ Error: --output is required for extraction")
|
||||||
@@ -172,7 +164,6 @@ func Run() bool {
|
|||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
// If we get here, no valid operation was specified
|
|
||||||
fmt.Println("❌ Error: No valid operation specified. Use --help for usage information.")
|
fmt.Println("❌ Error: No valid operation specified. Use --help for usage information.")
|
||||||
os.Exit(1)
|
os.Exit(1)
|
||||||
return true
|
return true
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ import (
|
|||||||
|
|
||||||
// FetchAndExtract downloads an archive from a URL and extracts it to the destination path
|
// FetchAndExtract downloads an archive from a URL and extracts it to the destination path
|
||||||
func FetchAndExtract(archiveURL, destPath string, overwriteAll, preservePerms bool) error {
|
func FetchAndExtract(archiveURL, destPath string, overwriteAll, preservePerms bool) error {
|
||||||
// Validate URL
|
|
||||||
parsedURL, err := url.Parse(archiveURL)
|
parsedURL, err := url.Parse(archiveURL)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("invalid URL: %w", err)
|
return fmt.Errorf("invalid URL: %w", err)
|
||||||
@@ -25,13 +25,11 @@ func FetchAndExtract(archiveURL, destPath string, overwriteAll, preservePerms bo
|
|||||||
return fmt.Errorf("only HTTP and HTTPS URLs are supported")
|
return fmt.Errorf("only HTTP and HTTPS URLs are supported")
|
||||||
}
|
}
|
||||||
|
|
||||||
// Extract filename from URL
|
|
||||||
filename := filepath.Base(parsedURL.Path)
|
filename := filepath.Base(parsedURL.Path)
|
||||||
if filename == "" || filename == "." || filename == "/" {
|
if filename == "" || filename == "." || filename == "/" {
|
||||||
filename = "archive.tmp"
|
filename = "archive.tmp"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create temporary directory
|
|
||||||
tempDir, err := os.MkdirTemp("", "zipprine-*")
|
tempDir, err := os.MkdirTemp("", "zipprine-*")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("failed to create temp directory: %w", err)
|
return fmt.Errorf("failed to create temp directory: %w", err)
|
||||||
@@ -40,7 +38,6 @@ func FetchAndExtract(archiveURL, destPath string, overwriteAll, preservePerms bo
|
|||||||
|
|
||||||
tempFile := filepath.Join(tempDir, filename)
|
tempFile := filepath.Join(tempDir, filename)
|
||||||
|
|
||||||
// Download the file
|
|
||||||
fmt.Printf("📥 Downloading from %s...\n", archiveURL)
|
fmt.Printf("📥 Downloading from %s...\n", archiveURL)
|
||||||
if err := downloadFile(tempFile, archiveURL); err != nil {
|
if err := downloadFile(tempFile, archiveURL); err != nil {
|
||||||
return fmt.Errorf("failed to download file: %w", err)
|
return fmt.Errorf("failed to download file: %w", err)
|
||||||
@@ -48,7 +45,6 @@ func FetchAndExtract(archiveURL, destPath string, overwriteAll, preservePerms bo
|
|||||||
|
|
||||||
fmt.Printf("✅ Download complete: %s\n", tempFile)
|
fmt.Printf("✅ Download complete: %s\n", tempFile)
|
||||||
|
|
||||||
// Detect archive type
|
|
||||||
archiveType, err := archiver.DetectArchiveType(tempFile)
|
archiveType, err := archiver.DetectArchiveType(tempFile)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("failed to detect archive type: %w", err)
|
return fmt.Errorf("failed to detect archive type: %w", err)
|
||||||
@@ -60,7 +56,6 @@ func FetchAndExtract(archiveURL, destPath string, overwriteAll, preservePerms bo
|
|||||||
|
|
||||||
fmt.Printf("📦 Detected archive type: %s\n", archiveType)
|
fmt.Printf("📦 Detected archive type: %s\n", archiveType)
|
||||||
|
|
||||||
// Extract the archive
|
|
||||||
fmt.Printf("📂 Extracting to %s...\n", destPath)
|
fmt.Printf("📂 Extracting to %s...\n", destPath)
|
||||||
extractConfig := &models.ExtractConfig{
|
extractConfig := &models.ExtractConfig{
|
||||||
ArchivePath: tempFile,
|
ArchivePath: tempFile,
|
||||||
@@ -80,14 +75,12 @@ func FetchAndExtract(archiveURL, destPath string, overwriteAll, preservePerms bo
|
|||||||
|
|
||||||
// downloadFile downloads a file from a URL to a local path with progress indication
|
// downloadFile downloads a file from a URL to a local path with progress indication
|
||||||
func downloadFile(filepath, url string) error {
|
func downloadFile(filepath, url string) error {
|
||||||
// Create the file
|
|
||||||
out, err := os.Create(filepath)
|
out, err := os.Create(filepath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
defer out.Close()
|
defer out.Close()
|
||||||
|
|
||||||
// Get the data
|
|
||||||
resp, err := http.Get(url)
|
resp, err := http.Get(url)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -99,10 +92,8 @@ func downloadFile(filepath, url string) error {
|
|||||||
return fmt.Errorf("bad status: %s", resp.Status)
|
return fmt.Errorf("bad status: %s", resp.Status)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get content length for progress
|
|
||||||
contentLength := resp.ContentLength
|
contentLength := resp.ContentLength
|
||||||
|
|
||||||
// Create progress reader
|
|
||||||
var reader io.Reader = resp.Body
|
var reader io.Reader = resp.Body
|
||||||
if contentLength > 0 {
|
if contentLength > 0 {
|
||||||
reader = &progressReader{
|
reader = &progressReader{
|
||||||
@@ -119,7 +110,7 @@ func downloadFile(filepath, url string) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Println() // New line after progress
|
fmt.Println()
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ func TestArchiveTypeConstants(t *testing.T) {
|
|||||||
{"TARGZ type", TARGZ, "TAR.GZ"},
|
{"TARGZ type", TARGZ, "TAR.GZ"},
|
||||||
{"TAR type", TAR, "TAR"},
|
{"TAR type", TAR, "TAR"},
|
||||||
{"GZIP type", GZIP, "GZIP"},
|
{"GZIP type", GZIP, "GZIP"},
|
||||||
|
{"RAR type", RAR, "RAR"},
|
||||||
{"AUTO type", AUTO, "AUTO"},
|
{"AUTO type", AUTO, "AUTO"},
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -90,4 +91,4 @@ func TestArchiveInfo(t *testing.T) {
|
|||||||
if len(info.Files) != 1 {
|
if len(info.Files) != 1 {
|
||||||
t.Errorf("Files length = %d; want %d", len(info.Files), 1)
|
t.Errorf("Files length = %d; want %d", len(info.Files), 1)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -12,7 +12,6 @@ func getPathCompletions(input string) []string {
|
|||||||
input = "."
|
input = "."
|
||||||
}
|
}
|
||||||
|
|
||||||
// Expand home directory
|
|
||||||
if strings.HasPrefix(input, "~") {
|
if strings.HasPrefix(input, "~") {
|
||||||
home, err := os.UserHomeDir()
|
home, err := os.UserHomeDir()
|
||||||
if err == nil {
|
if err == nil {
|
||||||
@@ -20,20 +19,16 @@ func getPathCompletions(input string) []string {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get the directory and file pattern
|
|
||||||
dir := filepath.Dir(input)
|
dir := filepath.Dir(input)
|
||||||
pattern := filepath.Base(input)
|
pattern := filepath.Base(input)
|
||||||
|
|
||||||
// If input ends with /, we want to list that directory
|
|
||||||
if strings.HasSuffix(input, string(filepath.Separator)) {
|
if strings.HasSuffix(input, string(filepath.Separator)) {
|
||||||
dir = input
|
dir = input
|
||||||
pattern = ""
|
pattern = ""
|
||||||
}
|
}
|
||||||
|
|
||||||
// Read directory
|
|
||||||
entries, err := os.ReadDir(dir)
|
entries, err := os.ReadDir(dir)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
// If can't read, try current directory
|
|
||||||
entries, err = os.ReadDir(".")
|
entries, err = os.ReadDir(".")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return []string{}
|
return []string{}
|
||||||
@@ -63,7 +58,6 @@ func getPathCompletions(input string) []string {
|
|||||||
completions = append(completions, fullPath)
|
completions = append(completions, fullPath)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Limit to 15 suggestions
|
|
||||||
if len(completions) > 15 {
|
if len(completions) > 15 {
|
||||||
completions = completions[:15]
|
completions = completions[:15]
|
||||||
}
|
}
|
||||||
@@ -85,13 +79,11 @@ func getArchiveCompletions(input string) []string {
|
|||||||
archiveCompletions := []string{}
|
archiveCompletions := []string{}
|
||||||
|
|
||||||
for _, path := range allCompletions {
|
for _, path := range allCompletions {
|
||||||
// Keep directories
|
|
||||||
if strings.HasSuffix(path, string(filepath.Separator)) {
|
if strings.HasSuffix(path, string(filepath.Separator)) {
|
||||||
archiveCompletions = append(archiveCompletions, path)
|
archiveCompletions = append(archiveCompletions, path)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if file has archive extension
|
|
||||||
ext := filepath.Ext(path)
|
ext := filepath.Ext(path)
|
||||||
if archiveExts[ext] {
|
if archiveExts[ext] {
|
||||||
archiveCompletions = append(archiveCompletions, path)
|
archiveCompletions = append(archiveCompletions, path)
|
||||||
|
|||||||
@@ -228,7 +228,6 @@ func RunBatchExtractFlow() error {
|
|||||||
fmt.Println(InfoStyle.Render(fmt.Sprintf("📂 Batch extracting %d archives...", len(configs))))
|
fmt.Println(InfoStyle.Render(fmt.Sprintf("📂 Batch extracting %d archives...", len(configs))))
|
||||||
fmt.Println()
|
fmt.Println()
|
||||||
|
|
||||||
// Create batch config
|
|
||||||
batchConfig := &archiver.BatchExtractConfig{
|
batchConfig := &archiver.BatchExtractConfig{
|
||||||
Configs: configs,
|
Configs: configs,
|
||||||
Parallel: parallel,
|
Parallel: parallel,
|
||||||
@@ -246,7 +245,6 @@ func RunBatchExtractFlow() error {
|
|||||||
|
|
||||||
errors := archiver.BatchExtract(batchConfig)
|
errors := archiver.BatchExtract(batchConfig)
|
||||||
|
|
||||||
// Count successes
|
|
||||||
successCount := 0
|
successCount := 0
|
||||||
for _, err := range errors {
|
for _, err := range errors {
|
||||||
if err == nil {
|
if err == nil {
|
||||||
|
|||||||
@@ -21,7 +21,6 @@ func RunCompressFlow() error {
|
|||||||
var verify bool
|
var verify bool
|
||||||
var compressionLevel string
|
var compressionLevel string
|
||||||
|
|
||||||
// Get current working directory
|
|
||||||
cwd, _ := os.Getwd()
|
cwd, _ := os.Getwd()
|
||||||
|
|
||||||
form := huh.NewForm(
|
form := huh.NewForm(
|
||||||
@@ -124,14 +123,11 @@ func RunCompressFlow() error {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Auto-generate output path if not provided
|
|
||||||
if outputPath == "" {
|
if outputPath == "" {
|
||||||
sourceName := filepath.Base(sourcePath)
|
sourceName := filepath.Base(sourcePath)
|
||||||
|
|
||||||
// Remove trailing slashes
|
|
||||||
sourceName = strings.TrimSuffix(sourceName, string(filepath.Separator))
|
sourceName = strings.TrimSuffix(sourceName, string(filepath.Separator))
|
||||||
|
|
||||||
// Determine file extension based on archive type
|
|
||||||
var extension string
|
var extension string
|
||||||
switch models.ArchiveType(archiveTypeStr) {
|
switch models.ArchiveType(archiveTypeStr) {
|
||||||
case models.ZIP:
|
case models.ZIP:
|
||||||
@@ -146,7 +142,6 @@ func RunCompressFlow() error {
|
|||||||
extension = ".zip"
|
extension = ".zip"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create output path in current working directory
|
|
||||||
outputPath = filepath.Join(cwd, sourceName+extension)
|
outputPath = filepath.Join(cwd, sourceName+extension)
|
||||||
|
|
||||||
fmt.Println(InfoStyle.Render(fmt.Sprintf("📝 Auto-generated output: %s", outputPath)))
|
fmt.Println(InfoStyle.Render(fmt.Sprintf("📝 Auto-generated output: %s", outputPath)))
|
||||||
|
|||||||
75
internal/ui/remote.go
Normal file
75
internal/ui/remote.go
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
package ui
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"zipprine/internal/fetcher"
|
||||||
|
|
||||||
|
"github.com/charmbracelet/huh"
|
||||||
|
)
|
||||||
|
|
||||||
|
func RunRemoteFetchFlow() error {
|
||||||
|
var url, destPath string
|
||||||
|
var overwrite, preservePerms bool
|
||||||
|
|
||||||
|
form := huh.NewForm(
|
||||||
|
huh.NewGroup(
|
||||||
|
huh.NewInput().
|
||||||
|
Title("🌐 Remote Archive URL").
|
||||||
|
Description("HTTP/HTTPS URL to download archive from").
|
||||||
|
Placeholder("https://example.com/archive.zip").
|
||||||
|
Value(&url).
|
||||||
|
Validate(func(s string) error {
|
||||||
|
if s == "" {
|
||||||
|
return fmt.Errorf("URL cannot be empty")
|
||||||
|
}
|
||||||
|
if !fetcher.IsValidArchiveURL(s) {
|
||||||
|
return fmt.Errorf("URL does not appear to point to a supported archive format")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}),
|
||||||
|
|
||||||
|
huh.NewInput().
|
||||||
|
Title("📂 Destination Path").
|
||||||
|
Description("Where to extract the downloaded archive - Tab for completions").
|
||||||
|
Placeholder("/path/to/destination").
|
||||||
|
Value(&destPath).
|
||||||
|
Validate(func(s string) error {
|
||||||
|
if s == "" {
|
||||||
|
return fmt.Errorf("destination path cannot be empty")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}).
|
||||||
|
Suggestions(getDirCompletions("")),
|
||||||
|
),
|
||||||
|
|
||||||
|
huh.NewGroup(
|
||||||
|
huh.NewConfirm().
|
||||||
|
Title("⚠️ Overwrite Existing Files").
|
||||||
|
Description("Replace files if they already exist?").
|
||||||
|
Value(&overwrite).
|
||||||
|
Affirmative("Yes, overwrite").
|
||||||
|
Negative("No, skip"),
|
||||||
|
|
||||||
|
huh.NewConfirm().
|
||||||
|
Title("🔒 Preserve Permissions").
|
||||||
|
Description("Keep original file permissions?").
|
||||||
|
Value(&preservePerms).
|
||||||
|
Affirmative("Yes").
|
||||||
|
Negative("No"),
|
||||||
|
),
|
||||||
|
).WithTheme(huh.ThemeCatppuccin())
|
||||||
|
|
||||||
|
if err := form.Run(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println()
|
||||||
|
fmt.Println(InfoStyle.Render("🌐 Fetching remote archive..."))
|
||||||
|
|
||||||
|
if err := fetcher.FetchAndExtract(url, destPath, overwrite, preservePerms); err != nil {
|
||||||
|
return fmt.Errorf("failed to fetch and extract: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
17
internal/version/version.go
Normal file
17
internal/version/version.go
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
package version
|
||||||
|
|
||||||
|
import "fmt"
|
||||||
|
|
||||||
|
// Semantic version components (Major.Minor.Patch).
const (
	Major = 1
	Minor = 0
	Patch = 3
)

// Version returns the bare semantic version string, e.g. "1.0.3".
func Version() string {
	// fmt.Sprint inserts no space between adjacent operands when one of
	// them is a string, so this yields exactly "Major.Minor.Patch".
	return fmt.Sprint(Major, ".", Minor, ".", Patch)
}

// FullVersion returns the version string prefixed with the application
// name, e.g. "Zipprine v1.0.3".
func FullVersion() string {
	return "Zipprine v" + Version()
}
|
||||||
87
internal/version/version_test.go
Normal file
87
internal/version/version_test.go
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
package version
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestVersion(t *testing.T) {
|
||||||
|
v := Version()
|
||||||
|
|
||||||
|
if v == "" {
|
||||||
|
t.Error("Version() returned empty string")
|
||||||
|
}
|
||||||
|
|
||||||
|
parts := strings.Split(v, ".")
|
||||||
|
if len(parts) != 3 {
|
||||||
|
t.Errorf("Version() = %q; expected format X.Y.Z", v)
|
||||||
|
}
|
||||||
|
expected := "1.0.3"
|
||||||
|
if v != expected {
|
||||||
|
t.Errorf("Version() = %q; want %q", v, expected)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestFullVersion(t *testing.T) {
|
||||||
|
fv := FullVersion()
|
||||||
|
|
||||||
|
if fv == "" {
|
||||||
|
t.Error("FullVersion() returned empty string")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !strings.Contains(fv, "Zipprine") {
|
||||||
|
t.Errorf("FullVersion() = %q; expected to contain 'Zipprine'", fv)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !strings.Contains(fv, Version()) {
|
||||||
|
t.Errorf("FullVersion() = %q; expected to contain version %q", fv, Version())
|
||||||
|
}
|
||||||
|
|
||||||
|
expected := "Zipprine v1.0.3"
|
||||||
|
if fv != expected {
|
||||||
|
t.Errorf("FullVersion() = %q; want %q", fv, expected)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestVersionConstants(t *testing.T) {
|
||||||
|
if Major != 1 {
|
||||||
|
t.Errorf("Major = %d; want 1", Major)
|
||||||
|
}
|
||||||
|
if Minor != 0 {
|
||||||
|
t.Errorf("Minor = %d; want 0", Minor)
|
||||||
|
}
|
||||||
|
if Patch != 3 {
|
||||||
|
t.Errorf("Patch = %d; want 3", Patch)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestVersionFormat(t *testing.T) {
|
||||||
|
v := Version()
|
||||||
|
|
||||||
|
if strings.Contains(v, " ") {
|
||||||
|
t.Errorf("Version() contains spaces: %q", v)
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.HasPrefix(v, "v") {
|
||||||
|
t.Errorf("Version() should not have 'v' prefix: %q", v)
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, c := range v {
|
||||||
|
if c != '.' && (c < '0' || c > '9') {
|
||||||
|
t.Errorf("Version() contains non-numeric character at position %d: %q", i, v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestFullVersionFormat(t *testing.T) {
|
||||||
|
fv := FullVersion()
|
||||||
|
|
||||||
|
expectedPrefix := "Zipprine v"
|
||||||
|
if !strings.HasPrefix(fv, expectedPrefix) {
|
||||||
|
t.Errorf("FullVersion() should start with %q, got %q", expectedPrefix, fv)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !strings.HasSuffix(fv, Version()) {
|
||||||
|
t.Errorf("FullVersion() should end with version %q, got %q", Version(), fv)
|
||||||
|
}
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user