From 80f14e4fe19cceac8cc8e0e123729034b8e74c26 Mon Sep 17 00:00:00 2001 From: Sky Johnson Date: Fri, 15 Aug 2025 18:35:39 -0500 Subject: [PATCH] add relationships and simplify the interface --- collection.go | 172 +++++++++++------------------------------- constraints.go | 119 ----------------------------- persistence.go | 7 +- schema.go | 199 +++++++++++++++++++++++++++++++++++++++++++++++++ store.go | 194 +++++++++++++++++++++++++---------------------- 5 files changed, 350 insertions(+), 341 deletions(-) delete mode 100644 constraints.go create mode 100644 schema.go diff --git a/collection.go b/collection.go index da8e3b2..0417669 100644 --- a/collection.go +++ b/collection.go @@ -7,21 +7,20 @@ import ( "sync" ) -// StoreManager interface for stores that can load/save type StoreManager interface { LoadData(path string) error SaveData(path string) error Clear() + EntityExists(id int) bool + SetValidator(validator any) } -// Collection manages multiple stores in a directory type Collection struct { baseDir string stores map[string]StoreManager mu sync.RWMutex } -// NewCollection creates a new collection in the specified directory func NewCollection(baseDir string) *Collection { return &Collection{ baseDir: baseDir, @@ -29,102 +28,69 @@ func NewCollection(baseDir string) *Collection { } } -// Register adds a store to the collection with a given name -func (c *Collection) Register(name string, store StoreManager) { +// Add registers store and auto-loads if file exists +func (c *Collection) Add(name string, store StoreManager) error { c.mu.Lock() defer c.mu.Unlock() + c.stores[name] = store -} + store.SetValidator(c) -// Unregister removes a store from the collection -func (c *Collection) Unregister(name string) { - c.mu.Lock() - defer c.mu.Unlock() - delete(c.stores, name) -} - -// GetStore retrieves a registered store by name -func (c *Collection) GetStore(name string) (StoreManager, bool) { - c.mu.RLock() - defer c.mu.RUnlock() - store, exists := c.stores[name] - return store, exists -} - -// LoadAll loads all registered stores from their JSON files -func (c *Collection) LoadAll() error { - c.mu.RLock() - defer c.mu.RUnlock() - - if err := os.MkdirAll(c.baseDir, 0755); err != nil { - return fmt.Errorf("failed to create base directory: %w", err) + // Auto-load if file exists + path := filepath.Join(c.baseDir, name+".json") + if _, err := os.Stat(path); err == nil { + return store.LoadData(path) } + return nil +} - var firstError error - for name, store := range c.stores { - path := filepath.Join(c.baseDir, name+".json") - if err := store.LoadData(path); err != nil && firstError == nil { - firstError = fmt.Errorf("failed to load %s: %w", name, err) +// Get returns typed store +func Get[T any](c *Collection, name string) *BaseStore[T] { + c.mu.RLock() + defer c.mu.RUnlock() + + if store, exists := c.stores[name]; exists { + if typed, ok := store.(*BaseStore[T]); ok { + return typed } } - - return firstError + return nil } -// SaveAll saves all registered stores to their JSON files -func (c *Collection) SaveAll() error { +// Load loads all stores +func (c *Collection) Load() error { c.mu.RLock() defer c.mu.RUnlock() - if err := os.MkdirAll(c.baseDir, 0755); err != nil { - return fmt.Errorf("failed to create base directory: %w", err) - } + os.MkdirAll(c.baseDir, 0755) - var firstError error for name, store := range c.stores { path := filepath.Join(c.baseDir, name+".json") - if err := store.SaveData(path); err != nil && firstError == nil { - firstError = fmt.Errorf("failed to save %s: %w", name, 
err) + if err := store.LoadData(path); err != nil { + return fmt.Errorf("load %s: %w", name, err) } } - - return firstError + return nil } -// LoadStore loads a specific store by name -func (c *Collection) LoadStore(name string) error { +// Save saves all stores +func (c *Collection) Save() error { c.mu.RLock() defer c.mu.RUnlock() - store, exists := c.stores[name] - if !exists { - return fmt.Errorf("store %s not registered", name) - } + os.MkdirAll(c.baseDir, 0755) - path := filepath.Join(c.baseDir, name+".json") - return store.LoadData(path) + for name, store := range c.stores { + path := filepath.Join(c.baseDir, name+".json") + if err := store.SaveData(path); err != nil { + return fmt.Errorf("save %s: %w", name, err) + } + } + return nil } -// SaveStore saves a specific store by name -func (c *Collection) SaveStore(name string) error { - c.mu.RLock() - defer c.mu.RUnlock() - - store, exists := c.stores[name] - if !exists { - return fmt.Errorf("store %s not registered", name) - } - - if err := os.MkdirAll(c.baseDir, 0755); err != nil { - return fmt.Errorf("failed to create base directory: %w", err) - } - - path := filepath.Join(c.baseDir, name+".json") - return store.SaveData(path) -} - -// ClearAll clears all registered stores -func (c *Collection) ClearAll() { +// Clear clears all stores +func (c *Collection) Clear() { c.mu.RLock() defer c.mu.RUnlock() @@ -133,63 +99,13 @@ func (c *Collection) ClearAll() { } } -// ClearStore clears a specific store by name -func (c *Collection) ClearStore(name string) error { +// EntityExists implements relationship validation +func (c *Collection) EntityExists(entityName string, id int) bool { c.mu.RLock() defer c.mu.RUnlock() - store, exists := c.stores[name] - if !exists { - return fmt.Errorf("store %s not registered", name) + if store, exists := c.stores[entityName]; exists { + return store.EntityExists(id) } - - store.Clear() - return nil -} - -// ListStores returns names of all registered stores -func (c *Collection) ListStores() []string { - c.mu.RLock() - defer c.mu.RUnlock() - - names := make([]string, 0, len(c.stores)) - for name := range c.stores { - names = append(names, name) - } - return names -} - -// GetPath returns the file path for a store name -func (c *Collection) GetPath(name string) string { - return filepath.Join(c.baseDir, name+".json") -} - -// StoreExists checks if a store file exists on disk -func (c *Collection) StoreExists(name string) bool { - path := c.GetPath(name) - _, err := os.Stat(path) - return err == nil -} - -// RemoveStoreFile removes the JSON file for a store -func (c *Collection) RemoveStoreFile(name string) error { - path := c.GetPath(name) - err := os.Remove(path) - if os.IsNotExist(err) { - return nil // Already doesn't exist - } - return err -} - -// Helper function to create typed store accessor -func GetTypedStore[T any](collection *Collection, name string) (*BaseStore[T], bool) { - store, exists := collection.GetStore(name) - if !exists { - return nil, false - } - - if typedStore, ok := store.(*BaseStore[T]); ok { - return typedStore, true - } - return nil, false + return false } diff --git a/constraints.go b/constraints.go deleted file mode 100644 index f761f8a..0000000 --- a/constraints.go +++ /dev/null @@ -1,119 +0,0 @@ -package nigiri - -import ( - "reflect" - "strings" -) - -// Constraint types -type ConstraintType string - -const ( - ConstraintUnique ConstraintType = "unique" - ConstraintForeign ConstraintType = "fkey" - ConstraintRequired ConstraintType = "required" - ConstraintIndex 
ConstraintType = "index" -) - -type FieldConstraint struct { - Type ConstraintType - Field string - Target string // for foreign keys: "table.field" - IndexName string // for custom index names -} - -type SchemaInfo struct { - Fields map[string]reflect.Type - Constraints map[string][]FieldConstraint - Indices map[string]string // field -> index name -} - -// ParseSchema extracts constraints from struct tags -func ParseSchema[T any]() *SchemaInfo { - var zero T - t := reflect.TypeOf(zero) - if t.Kind() == reflect.Ptr { - t = t.Elem() - } - - schema := &SchemaInfo{ - Fields: make(map[string]reflect.Type), - Constraints: make(map[string][]FieldConstraint), - Indices: make(map[string]string), - } - - for i := 0; i < t.NumField(); i++ { - field := t.Field(i) - fieldName := field.Name - schema.Fields[fieldName] = field.Type - - dbTag := field.Tag.Get("db") - if dbTag == "" { - continue - } - - constraints := parseDBTag(fieldName, dbTag) - if len(constraints) > 0 { - schema.Constraints[fieldName] = constraints - } - - // Auto-create indices for unique and indexed fields - for _, constraint := range constraints { - if constraint.Type == ConstraintUnique || constraint.Type == ConstraintIndex { - indexName := constraint.IndexName - if indexName == "" { - indexName = fieldName + "_idx" - } - schema.Indices[fieldName] = indexName - } - } - } - - return schema -} - -func parseDBTag(fieldName, tag string) []FieldConstraint { - var constraints []FieldConstraint - parts := strings.Split(tag, ",") - - for _, part := range parts { - part = strings.TrimSpace(part) - if part == "" { - continue - } - - switch { - case part == "unique": - constraints = append(constraints, FieldConstraint{ - Type: ConstraintUnique, - Field: fieldName, - }) - case part == "required": - constraints = append(constraints, FieldConstraint{ - Type: ConstraintRequired, - Field: fieldName, - }) - case part == "index": - constraints = append(constraints, FieldConstraint{ - Type: ConstraintIndex, - Field: fieldName, - }) - case strings.HasPrefix(part, "index:"): - indexName := strings.TrimPrefix(part, "index:") - constraints = append(constraints, FieldConstraint{ - Type: ConstraintIndex, - Field: fieldName, - IndexName: indexName, - }) - case strings.HasPrefix(part, "fkey:"): - target := strings.TrimPrefix(part, "fkey:") - constraints = append(constraints, FieldConstraint{ - Type: ConstraintForeign, - Field: fieldName, - Target: target, - }) - } - } - - return constraints -} diff --git a/persistence.go b/persistence.go index e1adf09..306f84b 100644 --- a/persistence.go +++ b/persistence.go @@ -9,7 +9,6 @@ import ( "sort" ) -// LoadFromJSON loads items from JSON using reflection func (bs *BaseStore[T]) LoadFromJSON(filename string) error { bs.mu.Lock() defer bs.mu.Unlock() @@ -68,7 +67,6 @@ func (bs *BaseStore[T]) LoadFromJSON(filename string) error { return nil } -// SaveToJSON saves items to JSON atomically with consistent ID ordering func (bs *BaseStore[T]) SaveToJSON(filename string) error { bs.mu.RLock() defer bs.mu.RUnlock() @@ -105,7 +103,6 @@ func (bs *BaseStore[T]) SaveToJSON(filename string) error { return nil } -// LoadData loads from JSON file or starts empty func (bs *BaseStore[T]) LoadData(dataPath string) error { if err := bs.LoadFromJSON(dataPath); err != nil { if os.IsNotExist(err) { @@ -116,13 +113,11 @@ func (bs *BaseStore[T]) LoadData(dataPath string) error { } fmt.Printf("Loaded %d items from %s\n", len(bs.items), dataPath) - bs.RebuildIndices() // Rebuild indices after loading + bs.RebuildIndices() return nil } -// 
SaveData saves to JSON file func (bs *BaseStore[T]) SaveData(dataPath string) error { - // Ensure directory exists dataDir := filepath.Dir(dataPath) if err := os.MkdirAll(dataDir, 0755); err != nil { return fmt.Errorf("failed to create data directory: %w", err) diff --git a/schema.go b/schema.go new file mode 100644 index 0000000..097f54f --- /dev/null +++ b/schema.go @@ -0,0 +1,199 @@ +package nigiri + +import ( + "reflect" + "strings" +) + +type ConstraintType string + +const ( + ConstraintUnique ConstraintType = "unique" + ConstraintForeign ConstraintType = "fkey" + ConstraintRequired ConstraintType = "required" + ConstraintIndex ConstraintType = "index" + ConstraintOneToOne ConstraintType = "one_to_one" + ConstraintOneToMany ConstraintType = "one_to_many" + ConstraintManyToOne ConstraintType = "many_to_one" + ConstraintManyToMany ConstraintType = "many_to_many" +) + +type RelationshipType string + +const ( + RelationshipOneToOne RelationshipType = "one_to_one" + RelationshipOneToMany RelationshipType = "one_to_many" + RelationshipManyToOne RelationshipType = "many_to_one" + RelationshipManyToMany RelationshipType = "many_to_many" +) + +type FieldConstraint struct { + Type ConstraintType + Field string + Target string + IndexName string + Relationship RelationshipType + TargetType reflect.Type +} + +type SchemaInfo struct { + Fields map[string]reflect.Type + Constraints map[string][]FieldConstraint + Indices map[string]string + Relationships map[string]FieldConstraint +} + +func ParseSchema[T any]() *SchemaInfo { + var zero T + t := reflect.TypeOf(zero) + if t.Kind() == reflect.Ptr { + t = t.Elem() + } + + schema := &SchemaInfo{ + Fields: make(map[string]reflect.Type), + Constraints: make(map[string][]FieldConstraint), + Indices: make(map[string]string), + Relationships: make(map[string]FieldConstraint), + } + + for i := 0; i < t.NumField(); i++ { + field := t.Field(i) + fieldName := field.Name + fieldType := field.Type + schema.Fields[fieldName] = fieldType + + // Check for relationship patterns in field type + if relationship := detectRelationship(fieldName, fieldType); relationship != nil { + schema.Relationships[fieldName] = *relationship + schema.Constraints[fieldName] = append(schema.Constraints[fieldName], *relationship) + } + + // Parse explicit db tags + dbTag := field.Tag.Get("db") + if dbTag != "" { + constraints := parseDBTag(fieldName, dbTag) + if len(constraints) > 0 { + schema.Constraints[fieldName] = append(schema.Constraints[fieldName], constraints...) 
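+				// Illustrative: a field tagged `db:"unique,index:email_idx"` lands here as a
+				// ConstraintUnique plus a ConstraintIndex with IndexName "email_idx"
+				// (the tag value is a made-up example; see parseDBTag below for the tag grammar).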
+ } + } + + // Auto-create indices for unique and indexed fields + for _, constraint := range schema.Constraints[fieldName] { + if constraint.Type == ConstraintUnique || constraint.Type == ConstraintIndex { + indexName := constraint.IndexName + if indexName == "" { + indexName = fieldName + "_idx" + } + schema.Indices[fieldName] = indexName + } + } + } + + return schema +} + +func detectRelationship(fieldName string, fieldType reflect.Type) *FieldConstraint { + switch fieldType.Kind() { + case reflect.Ptr: + // *EntityType = many-to-one + elemType := fieldType.Elem() + if isEntityType(elemType) { + return &FieldConstraint{ + Type: ConstraintManyToOne, + Field: fieldName, + Relationship: RelationshipManyToOne, + TargetType: elemType, + Target: getEntityName(elemType), + } + } + + case reflect.Slice: + // []*EntityType = one-to-many + elemType := fieldType.Elem() + if elemType.Kind() == reflect.Ptr { + ptrTargetType := elemType.Elem() + if isEntityType(ptrTargetType) { + return &FieldConstraint{ + Type: ConstraintOneToMany, + Field: fieldName, + Relationship: RelationshipOneToMany, + TargetType: ptrTargetType, + Target: getEntityName(ptrTargetType), + } + } + } + } + + return nil +} + +func isEntityType(t reflect.Type) bool { + if t.Kind() != reflect.Struct { + return false + } + + // Check if it has an ID field + for i := 0; i < t.NumField(); i++ { + field := t.Field(i) + if field.Name == "ID" && field.Type.Kind() == reflect.Int { + return true + } + } + + return false +} + +func getEntityName(t reflect.Type) string { + name := t.Name() + if name == "" { + name = t.String() + } + return strings.ToLower(name) +} + +func parseDBTag(fieldName, tag string) []FieldConstraint { + var constraints []FieldConstraint + parts := strings.Split(tag, ",") + + for _, part := range parts { + part = strings.TrimSpace(part) + if part == "" { + continue + } + + switch { + case part == "unique": + constraints = append(constraints, FieldConstraint{ + Type: ConstraintUnique, + Field: fieldName, + }) + case part == "required": + constraints = append(constraints, FieldConstraint{ + Type: ConstraintRequired, + Field: fieldName, + }) + case part == "index": + constraints = append(constraints, FieldConstraint{ + Type: ConstraintIndex, + Field: fieldName, + }) + case strings.HasPrefix(part, "index:"): + indexName := strings.TrimPrefix(part, "index:") + constraints = append(constraints, FieldConstraint{ + Type: ConstraintIndex, + Field: fieldName, + IndexName: indexName, + }) + case strings.HasPrefix(part, "fkey:"): + target := strings.TrimPrefix(part, "fkey:") + constraints = append(constraints, FieldConstraint{ + Type: ConstraintForeign, + Field: fieldName, + Target: target, + }) + } + } + + return constraints +} diff --git a/store.go b/store.go index dab3e8f..d808432 100644 --- a/store.go +++ b/store.go @@ -9,15 +9,12 @@ import ( "sync" ) -// Validatable interface for entities that can validate themselves type Validatable interface { Validate() error } -// IndexBuilder function type for building custom indices type IndexBuilder[T any] func(allItems map[int]*T) any -// BaseStore provides generic storage with index management and constraints type BaseStore[T any] struct { items map[int]*T maxID int @@ -26,10 +23,10 @@ type BaseStore[T any] struct { indices map[string]any indexBuilders map[string]IndexBuilder[T] schema *SchemaInfo - uniqueIndices map[string]map[any]int // field -> value -> id + uniqueIndices map[string]map[any]int + validator any } -// NewBaseStore creates a new base store for type T with schema 
parsing func NewBaseStore[T any]() *BaseStore[T] { var zero T schema := ParseSchema[T]() @@ -44,7 +41,6 @@ func NewBaseStore[T any]() *BaseStore[T] { uniqueIndices: make(map[string]map[any]int), } - // Initialize unique indices for fieldName, constraints := range schema.Constraints { for _, constraint := range constraints { if constraint.Type == ConstraintUnique { @@ -53,9 +49,7 @@ func NewBaseStore[T any]() *BaseStore[T] { } } - // Auto-register indices for indexed fields store.registerSchemaIndices() - return store } @@ -65,7 +59,19 @@ func (bs *BaseStore[T]) registerSchemaIndices() { } } -// ValidateConstraints checks all constraints for an item +func (bs *BaseStore[T]) SetValidator(validator any) { + bs.mu.Lock() + defer bs.mu.Unlock() + bs.validator = validator +} + +func (bs *BaseStore[T]) EntityExists(id int) bool { + bs.mu.RLock() + defer bs.mu.RUnlock() + _, exists := bs.items[id] + return exists +} + func (bs *BaseStore[T]) ValidateConstraints(id int, item *T) error { itemValue := reflect.ValueOf(item).Elem() @@ -89,7 +95,61 @@ func (bs *BaseStore[T]) ValidateConstraints(id int, item *T) error { } } } + return nil +} +func (bs *BaseStore[T]) validateRelationships(item *T) error { + if bs.validator == nil { + return nil + } + + validator, ok := bs.validator.(interface { + EntityExists(entityName string, id int) bool + }) + if !ok { + return nil + } + + itemValue := reflect.ValueOf(item).Elem() + for fieldName, constraint := range bs.schema.Relationships { + fieldValue := itemValue.FieldByName(fieldName) + if !fieldValue.IsValid() { + continue + } + + switch constraint.Relationship { + case RelationshipManyToOne: + if !fieldValue.IsNil() { + targetItem := fieldValue.Elem() + idField := targetItem.FieldByName("ID") + if idField.IsValid() { + targetID := int(idField.Int()) + if !validator.EntityExists(constraint.Target, targetID) { + return fmt.Errorf("foreign key violation: %s references non-existent %s.%d", + fieldName, constraint.Target, targetID) + } + } + } + + case RelationshipOneToMany: + if !fieldValue.IsNil() { + for i := 0; i < fieldValue.Len(); i++ { + elem := fieldValue.Index(i) + if !elem.IsNil() { + targetItem := elem.Elem() + idField := targetItem.FieldByName("ID") + if idField.IsValid() { + targetID := int(idField.Int()) + if !validator.EntityExists(constraint.Target, targetID) { + return fmt.Errorf("foreign key violation: %s[%d] references non-existent %s.%d", + fieldName, i, constraint.Target, targetID) + } + } + } + } + } + } + } return nil } @@ -115,14 +175,12 @@ func (bs *BaseStore[T]) updateUniqueIndices(id int, item *T, add bool) { } } -// RegisterIndex registers an index builder function func (bs *BaseStore[T]) RegisterIndex(name string, builder IndexBuilder[T]) { bs.mu.Lock() defer bs.mu.Unlock() bs.indexBuilders[name] = builder } -// GetIndex retrieves a named index func (bs *BaseStore[T]) GetIndex(name string) (any, bool) { bs.mu.RLock() defer bs.mu.RUnlock() @@ -130,7 +188,6 @@ func (bs *BaseStore[T]) GetIndex(name string) (any, bool) { return index, exists } -// RebuildIndices rebuilds all registered indices func (bs *BaseStore[T]) RebuildIndices() { bs.mu.Lock() defer bs.mu.Unlock() @@ -146,69 +203,71 @@ func (bs *BaseStore[T]) rebuildIndicesUnsafe() { } } -// AddWithValidation adds item with constraint validation and index rebuild -func (bs *BaseStore[T]) AddWithValidation(id int, item *T) error { +// Main CRUD operations with validation +func (bs *BaseStore[T]) Add(id int, item *T) error { bs.mu.Lock() defer bs.mu.Unlock() - // Validate constraints if 
err := bs.ValidateConstraints(id, item); err != nil { return err } - // Custom validation + if err := bs.validateRelationships(item); err != nil { + return err + } + if validatable, ok := any(item).(Validatable); ok { if err := validatable.Validate(); err != nil { return err } } - // Update unique indices bs.updateUniqueIndices(id, item, true) - bs.items[id] = item if id > bs.maxID { bs.maxID = id } - bs.rebuildIndicesUnsafe() return nil } -// AddWithRebuild adds item with validation and index rebuild -func (bs *BaseStore[T]) AddWithRebuild(id int, item *T) error { - return bs.AddWithValidation(id, item) +func (bs *BaseStore[T]) Update(id int, item *T) error { + return bs.Add(id, item) } -// RemoveWithValidation removes item and updates constraints -func (bs *BaseStore[T]) RemoveWithValidation(id int) { +func (bs *BaseStore[T]) Remove(id int) { bs.mu.Lock() defer bs.mu.Unlock() if item, exists := bs.items[id]; exists { bs.updateUniqueIndices(id, item, false) } - delete(bs.items, id) bs.rebuildIndicesUnsafe() } -// RemoveWithRebuild removes item and rebuilds indices -func (bs *BaseStore[T]) RemoveWithRebuild(id int) { - bs.RemoveWithValidation(id) +func (bs *BaseStore[T]) Create(item *T) (int, error) { + id := bs.GetNextID() + err := bs.Add(id, item) + return id, err } -// UpdateWithValidation updates item with validation and index rebuild -func (bs *BaseStore[T]) UpdateWithValidation(id int, item *T) error { - return bs.AddWithValidation(id, item) +// Unsafe operations for performance +func (bs *BaseStore[T]) AddUnsafe(id int, item *T) { + bs.mu.Lock() + defer bs.mu.Unlock() + bs.items[id] = item + if id > bs.maxID { + bs.maxID = id + } } -// UpdateWithRebuild updates item with validation and index rebuild -func (bs *BaseStore[T]) UpdateWithRebuild(id int, item *T) error { - return bs.AddWithValidation(id, item) +func (bs *BaseStore[T]) RemoveUnsafe(id int) { + bs.mu.Lock() + defer bs.mu.Unlock() + delete(bs.items, id) } -// Find retrieves an item by ID func (bs *BaseStore[T]) Find(id int) (*T, bool) { bs.mu.RLock() defer bs.mu.RUnlock() @@ -216,7 +275,18 @@ func (bs *BaseStore[T]) Find(id int) (*T, bool) { return item, exists } -// AllSorted returns all items using named sorted index +func (bs *BaseStore[T]) GetByID(id int) (*T, bool) { + return bs.Find(id) +} + +func (bs *BaseStore[T]) GetAll() map[int]*T { + bs.mu.RLock() + defer bs.mu.RUnlock() + result := make(map[int]*T, len(bs.items)) + maps.Copy(result, bs.items) + return result +} + func (bs *BaseStore[T]) AllSorted(indexName string) []*T { bs.mu.RLock() defer bs.mu.RUnlock() @@ -233,7 +303,6 @@ func (bs *BaseStore[T]) AllSorted(indexName string) []*T { } } - // Fallback: return all items by ID order ids := make([]int, 0, len(bs.items)) for id := range bs.items { ids = append(ids, id) @@ -247,7 +316,6 @@ func (bs *BaseStore[T]) AllSorted(indexName string) []*T { return result } -// LookupByIndex finds single item using string lookup index func (bs *BaseStore[T]) LookupByIndex(indexName, key string) (*T, bool) { bs.mu.RLock() defer bs.mu.RUnlock() @@ -264,7 +332,6 @@ func (bs *BaseStore[T]) LookupByIndex(indexName, key string) (*T, bool) { return nil, false } -// GroupByIndex returns items grouped by key func (bs *BaseStore[T]) GroupByIndex(indexName string, key any) []*T { bs.mu.RLock() defer bs.mu.RUnlock() @@ -300,7 +367,6 @@ func (bs *BaseStore[T]) GroupByIndex(indexName string, key any) []*T { return []*T{} } -// FilterByIndex returns items matching filter criteria func (bs *BaseStore[T]) FilterByIndex(indexName string, 
filterFunc func(*T) bool) []*T { bs.mu.RLock() defer bs.mu.RUnlock() @@ -329,7 +395,6 @@ func (bs *BaseStore[T]) FilterByIndex(indexName string, filterFunc func(*T) bool return result } -// GetNextID returns the next available ID atomically func (bs *BaseStore[T]) GetNextID() int { bs.mu.Lock() defer bs.mu.Unlock() @@ -337,45 +402,12 @@ func (bs *BaseStore[T]) GetNextID() int { return bs.maxID } -// GetByID retrieves an item by ID -func (bs *BaseStore[T]) GetByID(id int) (*T, bool) { - return bs.Find(id) -} - -// Add adds an item to the store (no validation) -func (bs *BaseStore[T]) Add(id int, item *T) { - bs.mu.Lock() - defer bs.mu.Unlock() - bs.items[id] = item - if id > bs.maxID { - bs.maxID = id - } -} - -// Remove removes an item from the store (no validation) -func (bs *BaseStore[T]) Remove(id int) { - bs.mu.Lock() - defer bs.mu.Unlock() - delete(bs.items, id) -} - -// GetAll returns all items -func (bs *BaseStore[T]) GetAll() map[int]*T { - bs.mu.RLock() - defer bs.mu.RUnlock() - result := make(map[int]*T, len(bs.items)) - maps.Copy(result, bs.items) - return result -} - -// Clear removes all items func (bs *BaseStore[T]) Clear() { bs.mu.Lock() defer bs.mu.Unlock() bs.items = make(map[int]*T) bs.maxID = 0 - // Clear unique indices for fieldName := range bs.uniqueIndices { bs.uniqueIndices[fieldName] = make(map[any]int) } @@ -384,8 +416,6 @@ func (bs *BaseStore[T]) Clear() { } // Index Builder Functions - -// BuildFieldLookupIndex creates index for any field by name func BuildFieldLookupIndex[T any](fieldName string) IndexBuilder[T] { return func(allItems map[int]*T) any { index := make(map[string]int) @@ -403,7 +433,6 @@ func BuildFieldLookupIndex[T any](fieldName string) IndexBuilder[T] { } } -// BuildStringLookupIndex creates string-to-ID mapping func BuildStringLookupIndex[T any](keyFunc func(*T) string) IndexBuilder[T] { return func(allItems map[int]*T) any { index := make(map[string]int) @@ -415,7 +444,6 @@ func BuildStringLookupIndex[T any](keyFunc func(*T) string) IndexBuilder[T] { } } -// BuildCaseInsensitiveLookupIndex creates lowercase string-to-ID mapping func BuildCaseInsensitiveLookupIndex[T any](keyFunc func(*T) string) IndexBuilder[T] { return func(allItems map[int]*T) any { index := make(map[string]int) @@ -427,7 +455,6 @@ func BuildCaseInsensitiveLookupIndex[T any](keyFunc func(*T) string) IndexBuilde } } -// BuildIntGroupIndex creates int-to-[]ID mapping func BuildIntGroupIndex[T any](keyFunc func(*T) int) IndexBuilder[T] { return func(allItems map[int]*T) any { index := make(map[int][]int) @@ -436,7 +463,6 @@ func BuildIntGroupIndex[T any](keyFunc func(*T) int) IndexBuilder[T] { index[key] = append(index[key], id) } - // Sort each group by ID for key := range index { sort.Ints(index[key]) } @@ -445,7 +471,6 @@ func BuildIntGroupIndex[T any](keyFunc func(*T) int) IndexBuilder[T] { } } -// BuildStringGroupIndex creates string-to-[]ID mapping func BuildStringGroupIndex[T any](keyFunc func(*T) string) IndexBuilder[T] { return func(allItems map[int]*T) any { index := make(map[string][]int) @@ -454,7 +479,6 @@ func BuildStringGroupIndex[T any](keyFunc func(*T) string) IndexBuilder[T] { index[key] = append(index[key], id) } - // Sort each group by ID for key := range index { sort.Ints(index[key]) } @@ -463,7 +487,6 @@ func BuildStringGroupIndex[T any](keyFunc func(*T) string) IndexBuilder[T] { } } -// BuildSortedListIndex creates sorted []ID list func BuildSortedListIndex[T any](sortFunc func(*T, *T) bool) IndexBuilder[T] { return func(allItems map[int]*T) any { ids := 
make([]int, 0, len(allItems)) @@ -479,7 +502,6 @@ func BuildSortedListIndex[T any](sortFunc func(*T, *T) bool) IndexBuilder[T] { } } -// BuildFilteredIntGroupIndex creates int-to-[]ID mapping for items passing filter func BuildFilteredIntGroupIndex[T any](filterFunc func(*T) bool, keyFunc func(*T) int) IndexBuilder[T] { return func(allItems map[int]*T) any { index := make(map[int][]int) @@ -490,7 +512,6 @@ func BuildFilteredIntGroupIndex[T any](filterFunc func(*T) bool, keyFunc func(*T } } - // Sort each group by ID for key := range index { sort.Ints(index[key]) } @@ -499,7 +520,6 @@ func BuildFilteredIntGroupIndex[T any](filterFunc func(*T) bool, keyFunc func(*T } } -// BuildFilteredStringGroupIndex creates string-to-[]ID mapping for items passing filter func BuildFilteredStringGroupIndex[T any](filterFunc func(*T) bool, keyFunc func(*T) string) IndexBuilder[T] { return func(allItems map[int]*T) any { index := make(map[string][]int) @@ -510,7 +530,6 @@ func BuildFilteredStringGroupIndex[T any](filterFunc func(*T) bool, keyFunc func } } - // Sort each group by ID for key := range index { sort.Ints(index[key]) } @@ -519,7 +538,6 @@ func BuildFilteredStringGroupIndex[T any](filterFunc func(*T) bool, keyFunc func } } -// NewSingleton creates singleton store pattern with sync.Once func NewSingleton[S any](initFunc func() *S) func() *S { var store *S var once sync.Once
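// A minimal end-to-end sketch of the reworked interface. Author and Post are
// hypothetical types, not part of this patch; note that store names must match
// the lowercased struct names so relationship validation can resolve them:
//
//	type Author struct {
//		ID   int
//		Name string `db:"unique"`
//	}
//
//	type Post struct {
//		ID     int
//		Title  string  `db:"index"`
//		Author *Author // pointer field => many-to-one, validated against the "author" store
//	}
//
//	c := NewCollection("data")
//	_ = c.Add("author", NewBaseStore[Author]())
//	_ = c.Add("post", NewBaseStore[Post]())
//
//	authors := Get[Author](c, "author")
//	aid, _ := authors.Create(&Author{Name: "sky"})
//
//	a, _ := authors.Find(aid)
//	posts := Get[Post](c, "post")
//	_, err := posts.Create(&Post{Title: "hello", Author: a}) // foreign key error if the author does not exist
//
//	_ = c.Save()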