add migration tools
This commit is contained in:
parent
80f14e4fe1
commit
dfd777bec5
461
README.md
461
README.md
@ -1,3 +1,462 @@
|
||||
# 🍥 Nigiri
|
||||
|
||||
Delicious, hand-crafted in-memory database with JSON persistence.
|
||||
Delicious, hand-crafted in-memory database with JSON persistence.
|
||||
|
||||
## Features
|
||||
|
||||
- **Type-safe** operations with Go generics
|
||||
- **Schema validation** with constraints and relationships
|
||||
- **Automatic indexing** with custom index builders
|
||||
- **Built-in migrations** with CLI commands
|
||||
- **Relationship validation** between entities
|
||||
- **Atomic operations** with backup/restore
|
||||
- **Thread-safe** concurrent access
|
||||
- **Zero external dependencies**
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
go get github.com/yourusername/nigiri
|
||||
```
|
||||
|
||||
## Quick Start
|
||||
|
||||
### 1. Define Your Models
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import "your-app/nigiri"
|
||||
|
||||
type User struct {
|
||||
ID int `json:"id"`
|
||||
Name string `json:"name" db:"required,unique"`
|
||||
Email string `json:"email" db:"unique"`
|
||||
Age int `json:"age"`
|
||||
Posts []*Post `json:"posts,omitempty"`
|
||||
}
|
||||
|
||||
type Post struct {
|
||||
ID int `json:"id"`
|
||||
Title string `json:"title" db:"required"`
|
||||
Content string `json:"content"`
|
||||
AuthorID int `json:"author_id" db:"fkey:user"`
|
||||
Author *User `json:"author,omitempty"`
|
||||
}
|
||||
|
||||
func (u *User) Validate() error {
|
||||
if u.Age < 0 || u.Age > 150 {
|
||||
return fmt.Errorf("invalid age: %d", u.Age)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
```
|
||||
|
||||
### 2. Set Up Your Database
|
||||
|
||||
```go
|
||||
func main() {
|
||||
// Create collection (database)
|
||||
db := nigiri.NewCollection("./data")
|
||||
|
||||
// Create and register stores (tables)
|
||||
userStore := nigiri.NewBaseStore[User]()
|
||||
postStore := nigiri.NewBaseStore[Post]()
|
||||
|
||||
db.Add("users", userStore)
|
||||
db.Add("posts", postStore)
|
||||
|
||||
// Load existing data
|
||||
if err := db.Load(); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// Your app logic here...
|
||||
createUser(db)
|
||||
|
||||
// Save all changes
|
||||
if err := db.Save(); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 3. Basic CRUD Operations
|
||||
|
||||
```go
|
||||
func createUser(db *nigiri.Collection) {
|
||||
users := nigiri.Get[User](db, "users")
|
||||
|
||||
// Create new user
|
||||
user := &User{
|
||||
Name: "John Doe",
|
||||
Email: "john@example.com",
|
||||
Age: 30,
|
||||
}
|
||||
|
||||
id, err := users.Create(user)
|
||||
if err != nil {
|
||||
log.Printf("Error creating user: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Printf("Created user with ID: %d\n", id)
|
||||
|
||||
// Find user
|
||||
foundUser, exists := users.Find(id)
|
||||
if exists {
|
||||
fmt.Printf("Found: %s\n", foundUser.Name)
|
||||
}
|
||||
|
||||
// Update user
|
||||
foundUser.Age = 31
|
||||
if err := users.Update(id, foundUser); err != nil {
|
||||
log.Printf("Error updating user: %v", err)
|
||||
}
|
||||
|
||||
// List all users
|
||||
allUsers := users.GetAll()
|
||||
for id, user := range allUsers {
|
||||
fmt.Printf("User %d: %s\n", id, user.Name)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Schema Constraints
|
||||
|
||||
Use `db` tags to define constraints:
|
||||
|
||||
```go
|
||||
type Product struct {
|
||||
ID int `json:"id"`
|
||||
SKU string `json:"sku" db:"unique,required"`
|
||||
Name string `json:"name" db:"required"`
|
||||
Price float64 `json:"price"`
|
||||
CategoryID int `json:"category_id" db:"fkey:category"`
|
||||
Description string `json:"description" db:"index"`
|
||||
}
|
||||
```
|
||||
|
||||
### Available Constraints
|
||||
|
||||
- `required` - Field cannot be empty/zero value
|
||||
- `unique` - Field value must be unique across all records
|
||||
- `index` - Create an index for fast lookups
|
||||
- `index:custom_name` - Create named index
|
||||
- `fkey:target` - Foreign key reference to another store
|
||||
|
||||
## Indexing System
|
||||
|
||||
### Automatic Indices
|
||||
|
||||
Indices are automatically created for fields with `unique` and `index` constraints:
|
||||
|
||||
```go
|
||||
// This creates an automatic index
|
||||
type User struct {
|
||||
Email string `json:"email" db:"unique"` // Creates email_idx
|
||||
Name string `json:"name" db:"index"` // Creates name_idx
|
||||
}
|
||||
|
||||
// Use the index for fast lookups
|
||||
user, found := users.LookupByIndex("email_idx", "john@example.com")
|
||||
```
|
||||
|
||||
### Custom Indices
|
||||
|
||||
Register custom indices for complex queries:
|
||||
|
||||
```go
|
||||
func setupIndices(users *nigiri.BaseStore[User]) {
|
||||
// Group users by age
|
||||
users.RegisterIndex("by_age", nigiri.BuildIntGroupIndex(func(u *User) int {
|
||||
return u.Age
|
||||
}))
|
||||
|
||||
// Case-insensitive name lookup
|
||||
users.RegisterIndex("name_lower", nigiri.BuildCaseInsensitiveLookupIndex(func(u *User) string {
|
||||
return u.Name
|
||||
}))
|
||||
|
||||
// Sorted by creation date
|
||||
users.RegisterIndex("by_date", nigiri.BuildSortedListIndex(func(a, b *User) bool {
|
||||
return a.ID < b.ID // Assuming ID represents creation order
|
||||
}))
|
||||
}
|
||||
|
||||
// Use custom indices
|
||||
thirtyYearOlds := users.GroupByIndex("by_age", 30)
|
||||
user, found := users.LookupByIndex("name_lower", "john doe")
|
||||
sortedUsers := users.AllSorted("by_date")
|
||||
```
|
||||
|
||||
## Relationships
|
||||
|
||||
Nigiri automatically detects and validates relationships:
|
||||
|
||||
```go
|
||||
type User struct {
|
||||
ID int `json:"id"`
|
||||
Posts []*Post `json:"posts"` // One-to-many: detected automatically
|
||||
}
|
||||
|
||||
type Post struct {
|
||||
ID int `json:"id"`
|
||||
AuthorID int `json:"author_id"`
|
||||
Author *User `json:"author"` // Many-to-one: detected automatically
|
||||
}
|
||||
```
|
||||
|
||||
**Relationship Types:**
|
||||
- `*EntityType` - Many-to-one relationship
|
||||
- `[]*EntityType` - One-to-many relationship
|
||||
|
||||
Foreign key validation ensures referenced entities exist.
|
||||
|
||||
## Migrations
|
||||
|
||||
Nigiri includes a powerful migration system for schema evolution.
|
||||
|
||||
### CLI Integration
|
||||
|
||||
```go
|
||||
func main() {
|
||||
db := nigiri.NewCollection("./data")
|
||||
// ... setup stores ...
|
||||
|
||||
// CLI migration support
|
||||
if len(os.Args) > 1 && os.Args[1] == "migrate" {
|
||||
cli := db.CreateMigrationCLI()
|
||||
if err := cli.Run(os.Args[2:]); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Normal app logic...
|
||||
}
|
||||
```
|
||||
|
||||
### Built-in Migration Commands
|
||||
|
||||
```bash
|
||||
# Rename a field
|
||||
your-app migrate users.json 'rename Name to FullName'
|
||||
|
||||
# Add new field with default value
|
||||
your-app migrate users.json 'add Email string to user'
|
||||
|
||||
# Remove deprecated field
|
||||
your-app migrate users.json 'remove LegacyField from user'
|
||||
|
||||
# Change field type
|
||||
your-app migrate users.json 'change Age to int'
|
||||
```
|
||||
|
||||
### Migration Scripts
|
||||
|
||||
Create `migrations/001_user_schema.txt`:
|
||||
```
|
||||
# User schema updates
|
||||
users.json: rename Name to FullName
|
||||
users.json: add Email string to user
|
||||
users.json: add CreatedAt time to user
|
||||
posts.json: rename AuthorName to Author
|
||||
posts.json: add Status string to post
|
||||
```
|
||||
|
||||
Run the script:
|
||||
```bash
|
||||
your-app migrate migrations/001_user_schema.txt
|
||||
```
|
||||
|
||||
### Custom Migration Commands
|
||||
|
||||
Register domain-specific migration commands:
|
||||
|
||||
```go
|
||||
func setupMigrations(db *nigiri.Collection) {
|
||||
// Custom slugify command
|
||||
slugPattern := regexp.MustCompile(`^slugify\s+(\w+)\s+from\s+(\w+)$`)
|
||||
db.RegisterMigrationCommand("slugify", slugPattern, func(items []map[string]any, cmd *nigiri.MigrationCommand) error {
|
||||
for _, item := range items {
|
||||
if sourceVal, exists := item[cmd.To]; exists {
|
||||
if sourceStr, ok := sourceVal.(string); ok {
|
||||
slug := strings.ToLower(strings.ReplaceAll(sourceStr, " ", "-"))
|
||||
item[cmd.Field] = slug
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// Usage: your-app migrate posts.json 'slugify slug from Title'
|
||||
```
|
||||
|
||||
## Advanced Features
|
||||
|
||||
### Custom Validation
|
||||
|
||||
Implement the `Validatable` interface:
|
||||
|
||||
```go
|
||||
func (u *User) Validate() error {
|
||||
if u.Age < 13 {
|
||||
return fmt.Errorf("user must be at least 13 years old")
|
||||
}
|
||||
if !strings.Contains(u.Email, "@") {
|
||||
return fmt.Errorf("invalid email format")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
```
|
||||
|
||||
### Filtering and Querying
|
||||
|
||||
```go
|
||||
// Filter with custom function
|
||||
activeUsers := users.FilterByIndex("by_date", func(u *User) bool {
|
||||
return u.Active && u.Age >= 18
|
||||
})
|
||||
|
||||
// Complex queries with multiple indices
|
||||
recentPosts := posts.GroupByIndex("by_author", userID)
|
||||
popularPosts := posts.FilterByIndex("by_popularity", func(p *Post) bool {
|
||||
return p.Views > 1000
|
||||
})
|
||||
```
|
||||
|
||||
### Unsafe Operations
|
||||
|
||||
For performance-critical bulk operations:
|
||||
|
||||
```go
|
||||
// Disable validation for bulk inserts
|
||||
for i, user := range users {
|
||||
users.AddUnsafe(i, user)
|
||||
}
|
||||
|
||||
// Rebuild indices once after bulk operations
|
||||
users.RebuildIndices()
|
||||
```
|
||||
|
||||
### Singleton Pattern
|
||||
|
||||
Create singleton stores for configuration:
|
||||
|
||||
```go
|
||||
var GetConfig = nigiri.NewSingleton(func() *ConfigStore {
|
||||
return nigiri.NewBaseStore[Config]()
|
||||
})
|
||||
|
||||
// Usage
|
||||
config := GetConfig()
|
||||
```
|
||||
|
||||
## File Structure
|
||||
|
||||
```
|
||||
your-app/
|
||||
├── data/
|
||||
│ ├── users.json
|
||||
│ ├── posts.json
|
||||
│ └── categories.json
|
||||
├── migrations/
|
||||
│ ├── 001_initial_schema.txt
|
||||
│ └── 002_add_timestamps.txt
|
||||
└── main.go
|
||||
```
|
||||
|
||||
## JSON Format
|
||||
|
||||
Data is stored as JSON arrays with consistent formatting:
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"id": 1,
|
||||
"name": "John Doe",
|
||||
"email": "john@example.com",
|
||||
"age": 30
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"name": "Jane Smith",
|
||||
"email": "jane@example.com",
|
||||
"age": 25
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
```go
|
||||
// Constraint violations
|
||||
user := &User{Email: "duplicate@example.com"}
|
||||
if err := users.Add(1, user); err != nil {
|
||||
if strings.Contains(err.Error(), "already exists") {
|
||||
// Handle duplicate email
|
||||
}
|
||||
}
|
||||
|
||||
// Validation errors
|
||||
user := &User{Age: -5}
|
||||
if err := users.Add(2, user); err != nil {
|
||||
// Handle validation failure
|
||||
}
|
||||
|
||||
// Relationship violations
|
||||
post := &Post{AuthorID: 999} // Non-existent user
|
||||
if err := posts.Add(1, post); err != nil {
|
||||
if strings.Contains(err.Error(), "foreign key violation") {
|
||||
// Handle missing reference
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Performance Tips
|
||||
|
||||
1. **Use indices** for frequently queried fields
|
||||
2. **Batch operations** with unsafe methods + rebuild indices
|
||||
3. **Lazy load relationships** to avoid circular references
|
||||
4. **Pre-allocate maps** for large datasets
|
||||
5. **Use filtered indices** to avoid full scans
|
||||
|
||||
## Thread Safety
|
||||
|
||||
All operations are thread-safe with read-write mutexes:
|
||||
|
||||
```go
|
||||
// Safe concurrent access
|
||||
go func() {
|
||||
users.Create(&User{Name: "User 1"})
|
||||
}()
|
||||
|
||||
go func() {
|
||||
users.Create(&User{Name: "User 2"})
|
||||
}()
|
||||
```
|
||||
|
||||
## Migration Safety
|
||||
|
||||
- **Automatic backups** (.backup files) before each migration
|
||||
- **Atomic operations** with temporary files
|
||||
- **Validation** ensures migrated data loads correctly
|
||||
- **Rollback support** by restoring from backups
|
||||
|
||||
## Contributing
|
||||
|
||||
1. Fork the repository
|
||||
2. Create a feature branch
|
||||
3. Add tests for new functionality
|
||||
4. Ensure all tests pass
|
||||
5. Submit a pull request
|
||||
|
||||
## License
|
||||
|
||||
MIT License - see LICENSE file for details.
|
||||
|
||||
---
|
||||
|
||||
**Nigiri** - Simple, type-safe JSON persistence for Go applications. 🍣
|
@ -4,6 +4,7 @@ import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"sync"
|
||||
)
|
||||
|
||||
@ -16,15 +17,17 @@ type StoreManager interface {
|
||||
}
|
||||
|
||||
type Collection struct {
|
||||
baseDir string
|
||||
stores map[string]StoreManager
|
||||
mu sync.RWMutex
|
||||
baseDir string
|
||||
stores map[string]StoreManager
|
||||
migrator *Migrator
|
||||
mu sync.RWMutex
|
||||
}
|
||||
|
||||
func NewCollection(baseDir string) *Collection {
|
||||
return &Collection{
|
||||
baseDir: baseDir,
|
||||
stores: make(map[string]StoreManager),
|
||||
baseDir: baseDir,
|
||||
stores: make(map[string]StoreManager),
|
||||
migrator: nil, // Lazy initialized
|
||||
}
|
||||
}
|
||||
|
||||
@ -109,3 +112,31 @@ func (c *Collection) EntityExists(entityName string, id int) bool {
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Migration methods
|
||||
// GetMigrator returns the collection's Migrator, creating it lazily on
// first use.
//
// NOTE(review): the lazy init is not synchronized — concurrent first
// callers could race on c.migrator. Confirm callers either hold c.mu or
// only touch this during single-threaded setup.
func (c *Collection) GetMigrator() *Migrator {
	if c.migrator == nil {
		c.migrator = NewMigrator()
	}
	return c.migrator
}
|
||||
|
||||
// RegisterMigrationCommand registers a custom migration command on the
// collection's migrator. pattern is matched against the whole command
// string and its capture groups populate the MigrationCommand handed to
// handler.
func (c *Collection) RegisterMigrationCommand(name string, pattern *regexp.Regexp, handler MigrationHandler) {
	c.GetMigrator().RegisterCommand(name, pattern, handler)
}
|
||||
|
||||
// MigrateStore applies a single migration command to the named store's
// JSON file (<baseDir>/<storeName>.json). The migration runs under the
// collection's write lock so no store operation interleaves with the
// on-disk rewrite.
func (c *Collection) MigrateStore(storeName, command string) error {
	c.mu.Lock()
	defer c.mu.Unlock()

	path := filepath.Join(c.baseDir, storeName+".json")
	return c.GetMigrator().MigrateFile(path, command)
}
|
||||
|
||||
// RunMigrationScript executes every command in scriptFile against files
// under the collection's base directory.
//
// NOTE(review): unlike MigrateStore this does not take c.mu — confirm
// scripts only run while the collection is otherwise idle.
func (c *Collection) RunMigrationScript(scriptFile string) error {
	return c.GetMigrator().RunScript(c.baseDir, scriptFile)
}
|
||||
|
||||
// CreateMigrationCLI returns a CLI wrapper that lets a host application
// forward command-line arguments to the migration system (see
// MigrationCLI.Run).
func (c *Collection) CreateMigrationCLI() *MigrationCLI {
	return NewMigrationCLI(c)
}
|
||||
|
301
migrate.go
Normal file
301
migrate.go
Normal file
@ -0,0 +1,301 @@
|
||||
package nigiri
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// MigrationCommand is the parsed form of a single migration command
// string (e.g. "rename Name to FullName"). Which fields are populated
// depends on Action; see Migrator.ParseCommand.
type MigrationCommand struct {
	Action string // command name: "rename", "add", "remove", "change", or a custom name
	Field  string // field the command operates on
	To     string // target field (rename) / second capture group for custom commands
	Type   string // field type for add/change: "string", "int", "float", "bool", "time"
	Store  string // store/entity name, where the grammar includes one (add/remove)
}

// MigrationHandler mutates the decoded JSON items in place according to
// the parsed command. Returning an error aborts the migration.
type MigrationHandler func(data []map[string]any, cmd *MigrationCommand) error

// Migrator parses migration command strings and applies the matching
// handler to JSON data. Both maps are keyed by command name and are
// always written together via RegisterCommand.
type Migrator struct {
	handlers map[string]MigrationHandler
	patterns map[string]*regexp.Regexp
}
|
||||
|
||||
// NewMigrator creates a Migrator with the built-in commands registered:
//
//	rename <field> to <field>
//	add <field> <type> to <store>
//	remove <field> from <store>
//	change <field> to <type>
func NewMigrator() *Migrator {
	m := &Migrator{
		handlers: make(map[string]MigrationHandler),
		patterns: make(map[string]*regexp.Regexp),
	}

	// Register built-in commands
	m.RegisterCommand("rename", regexp.MustCompile(`^rename\s+(\w+)\s+to\s+(\w+)$`), m.handleRename)
	m.RegisterCommand("add", regexp.MustCompile(`^add\s+(\w+)\s+(\w+)\s+to\s+(\w+)$`), m.handleAdd)
	m.RegisterCommand("remove", regexp.MustCompile(`^remove\s+(\w+)\s+from\s+(\w+)$`), m.handleRemove)
	m.RegisterCommand("change", regexp.MustCompile(`^change\s+(\w+)\s+to\s+(\w+)$`), m.handleChange)

	return m
}
|
||||
|
||||
// RegisterCommand installs (or replaces) a named command. pattern is
// matched against the whole trimmed command string; its capture groups
// populate the MigrationCommand passed to handler.
func (m *Migrator) RegisterCommand(name string, pattern *regexp.Regexp, handler MigrationHandler) {
	m.patterns[name] = pattern
	m.handlers[name] = handler
}
|
||||
|
||||
func (m *Migrator) ParseCommand(input string) (*MigrationCommand, string, error) {
|
||||
input = strings.TrimSpace(input)
|
||||
|
||||
for name, pattern := range m.patterns {
|
||||
if matches := pattern.FindStringSubmatch(input); matches != nil {
|
||||
cmd := &MigrationCommand{Action: name}
|
||||
|
||||
switch name {
|
||||
case "rename":
|
||||
cmd.Field, cmd.To = matches[1], matches[2]
|
||||
case "add":
|
||||
cmd.Field, cmd.Type, cmd.Store = matches[1], matches[2], matches[3]
|
||||
case "remove":
|
||||
cmd.Field, cmd.Store = matches[1], matches[2]
|
||||
case "change":
|
||||
cmd.Field, cmd.Type = matches[1], matches[2]
|
||||
default:
|
||||
// Allow custom commands to handle their own parsing
|
||||
cmd.Field = matches[1]
|
||||
if len(matches) > 2 {
|
||||
cmd.To = matches[2]
|
||||
}
|
||||
if len(matches) > 3 {
|
||||
cmd.Type = matches[3]
|
||||
}
|
||||
if len(matches) > 4 {
|
||||
cmd.Store = matches[4]
|
||||
}
|
||||
}
|
||||
|
||||
return cmd, name, nil
|
||||
}
|
||||
}
|
||||
|
||||
return nil, "", fmt.Errorf("unknown command: %s", input)
|
||||
}
|
||||
|
||||
// ApplyCommand parses cmdStr, decodes data as a JSON array of objects,
// runs the matching handler over the decoded items, and returns the
// re-encoded (tab-indented) JSON. The input byte slice is not modified.
func (m *Migrator) ApplyCommand(data []byte, cmdStr string) ([]byte, error) {
	cmd, name, err := m.ParseCommand(cmdStr)
	if err != nil {
		return nil, err
	}

	var items []map[string]any
	if err := json.Unmarshal(data, &items); err != nil {
		return nil, fmt.Errorf("invalid JSON: %w", err)
	}

	// Lookup cannot miss: ParseCommand only returns names that
	// RegisterCommand stored in both maps.
	handler := m.handlers[name]
	if err := handler(items, cmd); err != nil {
		return nil, err
	}

	return json.MarshalIndent(items, "", "\t")
}
|
||||
|
||||
func (m *Migrator) MigrateFile(filename, command string) error {
|
||||
data, err := os.ReadFile(filename)
|
||||
if err != nil {
|
||||
return fmt.Errorf("read file: %w", err)
|
||||
}
|
||||
|
||||
result, err := m.ApplyCommand(data, command)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Backup and write
|
||||
backupPath := filename + ".backup"
|
||||
if err := os.WriteFile(backupPath, data, 0644); err != nil {
|
||||
return fmt.Errorf("create backup: %w", err)
|
||||
}
|
||||
|
||||
if err := os.WriteFile(filename, result, 0644); err != nil {
|
||||
return fmt.Errorf("write result: %w", err)
|
||||
}
|
||||
|
||||
fmt.Printf("✓ Applied: %s\n", command)
|
||||
fmt.Printf("✓ Backup: %s\n", backupPath)
|
||||
return nil
|
||||
}
|
||||
|
||||
// RunScript executes a migration script line by line. Each non-empty,
// non-comment line has the form
//
//	file.json: command
//
// and is applied to filepath.Join(dataDir, file.json) via MigrateFile.
// Blank lines and lines starting with "#" are skipped. Execution stops
// at the first failing line; earlier lines stay applied (each touched
// file keeps its own .backup for manual rollback).
func (m *Migrator) RunScript(dataDir, scriptFile string) error {
	file, err := os.Open(scriptFile)
	if err != nil {
		return fmt.Errorf("open script: %w", err)
	}
	defer file.Close()

	scanner := bufio.NewScanner(file)
	lineNum := 0

	for scanner.Scan() {
		lineNum++
		line := strings.TrimSpace(scanner.Text())

		if line == "" || strings.HasPrefix(line, "#") {
			continue
		}

		// Split only on the first ':' so commands may contain colons.
		parts := strings.SplitN(line, ":", 2)
		if len(parts) != 2 {
			return fmt.Errorf("line %d: invalid format, use 'file.json: command'", lineNum)
		}

		filename := strings.TrimSpace(parts[0])
		command := strings.TrimSpace(parts[1])
		fullPath := filepath.Join(dataDir, filename)

		fmt.Printf("Line %d: %s -> %s\n", lineNum, filename, command)

		if err := m.MigrateFile(fullPath, command); err != nil {
			return fmt.Errorf("line %d: %w", lineNum, err)
		}
	}

	// Surface any scanner error (e.g. over-long line) after the loop.
	return scanner.Err()
}
|
||||
|
||||
// Built-in handlers
|
||||
func (m *Migrator) handleRename(items []map[string]any, cmd *MigrationCommand) error {
|
||||
for _, item := range items {
|
||||
if val, exists := item[cmd.Field]; exists {
|
||||
item[cmd.To] = val
|
||||
delete(item, cmd.Field)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *Migrator) handleAdd(items []map[string]any, cmd *MigrationCommand) error {
|
||||
defaultVal := getDefaultValue(cmd.Type)
|
||||
for _, item := range items {
|
||||
if _, exists := item[cmd.Field]; !exists {
|
||||
item[cmd.Field] = defaultVal
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *Migrator) handleRemove(items []map[string]any, cmd *MigrationCommand) error {
|
||||
for _, item := range items {
|
||||
delete(item, cmd.Field)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// handleChange converts each item's cmd.Field value to cmd.Type using
// convertType. Items missing the field are skipped; the first conversion
// failure aborts the migration (earlier items in the slice have already
// been mutated, but ApplyCommand discards the slice on error).
func (m *Migrator) handleChange(items []map[string]any, cmd *MigrationCommand) error {
	for _, item := range items {
		if val, exists := item[cmd.Field]; exists {
			converted, err := convertType(val, cmd.Type)
			if err != nil {
				return fmt.Errorf("type conversion failed: %w", err)
			}
			item[cmd.Field] = converted
		}
	}
	return nil
}
|
||||
|
||||
func getDefaultValue(fieldType string) any {
|
||||
switch fieldType {
|
||||
case "string":
|
||||
return ""
|
||||
case "int":
|
||||
return 0
|
||||
case "bool":
|
||||
return false
|
||||
case "time":
|
||||
return time.Now().Format(time.RFC3339)
|
||||
case "float":
|
||||
return 0.0
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// convertType coerces val to the named migration type. Strings are
// parsed with strconv; numeric kinds convert between int and float64
// (the type JSON decoding produces for numbers). Values of any other
// runtime type — and unrecognized target types — pass through unchanged
// rather than being rejected.
func convertType(val any, targetType string) (any, error) {
	switch targetType {
	case "string":
		return fmt.Sprintf("%v", val), nil
	case "int":
		switch v := val.(type) {
		case string:
			return strconv.Atoi(v)
		case float64:
			return int(v), nil
		}
	case "float":
		switch v := val.(type) {
		case string:
			return strconv.ParseFloat(v, 64)
		case int:
			return float64(v), nil
		}
	case "bool":
		if s, isStr := val.(string); isStr {
			return strconv.ParseBool(s)
		}
	}
	return val, nil
}
|
||||
|
||||
// CLI wrapper for host applications
|
||||
// MigrationCLI adapts a Collection's migration support to command-line
// arguments, so host applications can forward os.Args to Run.
type MigrationCLI struct {
	collection *Collection
}

// NewMigrationCLI wraps collection for command-line driven migrations.
func NewMigrationCLI(collection *Collection) *MigrationCLI {
	return &MigrationCLI{collection: collection}
}
|
||||
|
||||
func (cli *MigrationCLI) Run(args []string) error {
|
||||
if len(args) < 1 {
|
||||
return fmt.Errorf("usage: migrate <file.json> 'command' OR migrate <script.txt>")
|
||||
}
|
||||
|
||||
if strings.HasSuffix(args[0], ".txt") {
|
||||
// Script mode
|
||||
return cli.collection.RunMigrationScript(args[0])
|
||||
} else {
|
||||
// Single command mode
|
||||
if len(args) < 2 {
|
||||
return fmt.Errorf("usage: migrate <file.json> 'command'")
|
||||
}
|
||||
|
||||
// Extract store name from filename
|
||||
filename := args[0]
|
||||
storeName := strings.TrimSuffix(filepath.Base(filename), ".json")
|
||||
command := args[1]
|
||||
|
||||
return cli.collection.MigrateStore(storeName, command)
|
||||
}
|
||||
}
|
||||
|
||||
func (cli *MigrationCLI) PrintUsage() {
|
||||
fmt.Println("Migration Commands:")
|
||||
fmt.Println(" rename oldfield to newfield")
|
||||
fmt.Println(" add fieldname type to store")
|
||||
fmt.Println(" remove fieldname from store")
|
||||
fmt.Println(" change fieldname to type")
|
||||
fmt.Println()
|
||||
fmt.Println("Types: string, int, float, bool, time")
|
||||
fmt.Println()
|
||||
fmt.Println("Usage:")
|
||||
fmt.Println(" migrate users.json 'rename Name to FullName'")
|
||||
fmt.Println(" migrate migrations/001_schema.txt")
|
||||
}
|
@ -130,3 +130,42 @@ func (bs *BaseStore[T]) SaveData(dataPath string) error {
|
||||
fmt.Printf("Saved %d items to %s\n", len(bs.items), dataPath)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Migration support methods
|
||||
// ValidateAfterMigration verifies that a migrated JSON file can still be
// decoded into this store's entity type by loading it into a throwaway
// store. The receiver's own data is never touched.
func (bs *BaseStore[T]) ValidateAfterMigration(filename string) error {
	// Try to load the migrated file to ensure it's valid
	tempStore := NewBaseStore[T]()
	if err := tempStore.LoadFromJSON(filename); err != nil {
		return fmt.Errorf("migration validation failed: %w", err)
	}
	return nil
}
|
||||
|
||||
func (bs *BaseStore[T]) RestoreFromBackup(filename string) error {
|
||||
backupPath := filename + ".backup"
|
||||
if _, err := os.Stat(backupPath); os.IsNotExist(err) {
|
||||
return fmt.Errorf("backup file does not exist: %s", backupPath)
|
||||
}
|
||||
|
||||
if err := os.Rename(backupPath, filename); err != nil {
|
||||
return fmt.Errorf("failed to restore from backup: %w", err)
|
||||
}
|
||||
|
||||
fmt.Printf("✓ Restored from backup: %s\n", backupPath)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (bs *BaseStore[T]) CreateMigrationCheckpoint(filename string) error {
|
||||
checkpointPath := filename + ".checkpoint"
|
||||
data, err := os.ReadFile(filename)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read file for checkpoint: %w", err)
|
||||
}
|
||||
|
||||
if err := os.WriteFile(checkpointPath, data, 0644); err != nil {
|
||||
return fmt.Errorf("failed to create checkpoint: %w", err)
|
||||
}
|
||||
|
||||
fmt.Printf("✓ Created checkpoint: %s\n", checkpointPath)
|
||||
return nil
|
||||
}
|
||||
|
@ -95,7 +95,7 @@ func ParseSchema[T any]() *SchemaInfo {
|
||||
|
||||
func detectRelationship(fieldName string, fieldType reflect.Type) *FieldConstraint {
|
||||
switch fieldType.Kind() {
|
||||
case reflect.Ptr:
|
||||
case reflect.Pointer:
|
||||
// *EntityType = many-to-one
|
||||
elemType := fieldType.Elem()
|
||||
if isEntityType(elemType) {
|
||||
@ -111,7 +111,7 @@ func detectRelationship(fieldName string, fieldType reflect.Type) *FieldConstrai
|
||||
case reflect.Slice:
|
||||
// []*EntityType = one-to-many
|
||||
elemType := fieldType.Elem()
|
||||
if elemType.Kind() == reflect.Ptr {
|
||||
if elemType.Kind() == reflect.Pointer {
|
||||
ptrTargetType := elemType.Elem()
|
||||
if isEntityType(ptrTargetType) {
|
||||
return &FieldConstraint{
|
||||
|
73
store.go
73
store.go
@ -3,6 +3,7 @@ package nigiri
|
||||
import (
|
||||
"fmt"
|
||||
"maps"
|
||||
"os"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strings"
|
||||
@ -549,3 +550,75 @@ func NewSingleton[S any](initFunc func() *S) func() *S {
|
||||
return store
|
||||
}
|
||||
}
|
||||
|
||||
func (bs *BaseStore[T]) ApplyMigration(migrationFunc func(map[int]*T) error) error {
|
||||
bs.mu.Lock()
|
||||
defer bs.mu.Unlock()
|
||||
|
||||
// Create a copy for safe migration
|
||||
itemsCopy := make(map[int]*T, len(bs.items))
|
||||
maps.Copy(itemsCopy, bs.items)
|
||||
|
||||
if err := migrationFunc(itemsCopy); err != nil {
|
||||
return fmt.Errorf("migration failed: %w", err)
|
||||
}
|
||||
|
||||
// If migration succeeded, update the store
|
||||
bs.items = itemsCopy
|
||||
bs.rebuildIndicesUnsafe()
|
||||
return nil
|
||||
}
|
||||
|
||||
// MigrateFromJSON applies migrationCommands in sequence to the JSON file
// and, on success, reloads the store from the migrated file.
//
// Safety sequence: all commands are applied in memory first; the
// original bytes are saved to <filename>.backup; the migrated bytes go
// to a temporary <filename>.migrated file which must load cleanly into
// this store's type before it replaces the original via rename. On
// validation failure the temp file is removed and both the original
// file and the in-memory store are untouched.
func (bs *BaseStore[T]) MigrateFromJSON(filename string, migrationCommands []string, migrator *Migrator) error {
	// Read original data
	data, err := os.ReadFile(filename)
	if err != nil {
		return fmt.Errorf("failed to read file: %w", err)
	}

	// Apply each migration command, feeding each result into the next.
	result := data
	for _, command := range migrationCommands {
		result, err = migrator.ApplyCommand(result, command)
		if err != nil {
			return fmt.Errorf("command '%s' failed: %w", command, err)
		}
	}

	// Create backup
	backupPath := filename + ".backup"
	if err := os.WriteFile(backupPath, data, 0644); err != nil {
		return fmt.Errorf("failed to create backup: %w", err)
	}

	// Write migrated data
	tempFile := filename + ".migrated"
	if err := os.WriteFile(tempFile, result, 0644); err != nil {
		return fmt.Errorf("failed to write migrated data: %w", err)
	}

	// Validate migrated data can be loaded
	if err := bs.ValidateAfterMigration(tempFile); err != nil {
		os.Remove(tempFile)
		return err
	}

	// Replace original file
	if err := os.Rename(tempFile, filename); err != nil {
		return fmt.Errorf("failed to replace original file: %w", err)
	}

	// Reload the store with migrated data
	return bs.LoadFromJSON(filename)
}
|
||||
|
||||
func (bs *BaseStore[T]) BatchMigrate(filename string, commands []string) error {
|
||||
if len(commands) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Use the collection's migrator if available
|
||||
// This would typically be called through Collection.MigrateStore
|
||||
migrator := NewMigrator()
|
||||
return bs.MigrateFromJSON(filename, commands, migrator)
|
||||
}
|
||||
|
Loading…
x
Reference in New Issue
Block a user