move opcodes to common, remove cps, begin new packet parser
parent adde1e9b8e
commit 62149041fc
@@ -1,6 +1,6 @@
package opcodes

-// Protocol opcodes for UDP packet headers
+// Protocol
const (
	OpSessionRequest  = 0x01 // Initial connection request from client
	OpSessionResponse = 0x02 // Server response to session request
@@ -17,7 +17,7 @@ const (
	OpOutOfSession = 0x1D // Packet received outside valid session
)

-// Login server application opcodes
+// Login server
const (
	// Core login operations
	OpLoginRequestMsg = 0x2000 // Initial login request from client
@@ -75,7 +75,7 @@ const (
	OpUpdateInventoryMsg = 0x2071 // Character inventory update
)

-// Game server application opcodes
+// Game server
const (
	// Server initialization and zone management
	OpESInitMsg = 0x0010 // Server initialization message
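For orientation, the opcodes above are ordinary untyped constants, so callers compare them against whatever integer width the wire format uses. A hypothetical logging helper (illustrative only, not part of this commit) could read:

package example

import "eq2emu/internal/common/opcodes"

// opcodeName maps a few of the application opcodes shown in the diff above
// to readable names for logging. Illustrative sketch only.
func opcodeName(op uint16) string {
	switch op {
	case opcodes.OpLoginRequestMsg:
		return "LoginRequestMsg"
	case opcodes.OpUpdateInventoryMsg:
		return "UpdateInventoryMsg"
	case opcodes.OpESInitMsg:
		return "ESInitMsg"
	default:
		return "unknown"
	}
}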
internal/common/types.go (new file, 52 lines)
@@ -0,0 +1,52 @@
package common

type EQ2DataType int

const (
	TypeInt8 EQ2DataType = iota
	TypeInt16
	TypeInt32
	TypeInt64
	TypeSInt8
	TypeSInt16
	TypeSInt32
	TypeSInt64
	TypeChar
	TypeFloat
	TypeDouble
	TypeString8
	TypeString16
	TypeString32
	TypeColor
	TypeEquipment
	TypeItem
	TypeArray
)

// Core EQ2 structures
type EQ2Color struct {
	Red   uint8
	Green uint8
	Blue  uint8
}

type EQ2EquipmentItem struct {
	Type      uint16
	Color     EQ2Color
	Highlight EQ2Color
}

type EQ2String8 struct {
	Size uint8
	Data string
}

type EQ2String16 struct {
	Size uint16
	Data string
}

type EQ2String32 struct {
	Size uint32
	Data string
}
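These shared types are plain value structs with no methods. Purely as an illustration (not part of this commit), composing them in a hypothetical caller might look like:

package example

import "eq2emu/internal/common"

// exampleValues builds an equipment entry and a length-prefixed string
// from the new shared types. Illustrative only.
func exampleValues() (common.EQ2EquipmentItem, common.EQ2String16) {
	item := common.EQ2EquipmentItem{
		Type:      1234,
		Color:     common.EQ2Color{Red: 255, Green: 128, Blue: 0},
		Highlight: common.EQ2Color{Red: 32, Green: 32, Blue: 32},
	}
	name := common.EQ2String16{Size: uint16(len("TestChar")), Data: "TestChar"}
	return item, name
}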
@@ -1,216 +0,0 @@
package cps

import (
	"fmt"
	"sort"
)

// Compiler compiles packet definitions into optimized structures
type Compiler struct {
	packets map[string]*PacketDef
}

// NewCompiler creates a compiler with parsed packet definitions
func NewCompiler(packets map[string]*PacketDef) *Compiler {
	return &Compiler{packets: packets}
}

// Compile compiles a packet structure for a specific client version
func (c *Compiler) Compile(packetName string, clientVersion int) (*CompiledStruct, error) {
	packet, exists := c.packets[packetName]
	if !exists {
		return nil, fmt.Errorf("packet %s not found", packetName)
	}

	// Find best matching version (highest <= clientVersion)
	var bestVersion int = -1
	for version := range packet.Versions {
		if version <= clientVersion && version > bestVersion {
			bestVersion = version
		}
	}

	if bestVersion == -1 {
		return nil, fmt.Errorf("no compatible version found for %s (client version %d)", packetName, clientVersion)
	}

	// Build cascaded field list
	fields, fieldOrder, err := c.buildCascadedFields(packet, bestVersion)
	if err != nil {
		return nil, err
	}

	// Resolve positions and create final structure
	compiledFields, totalSize := c.resolvePositions(fields, fieldOrder)

	return &CompiledStruct{
		Name:    packetName,
		Version: bestVersion,
		Fields:  compiledFields,
		Size:    totalSize,
	}, nil
}

// buildCascadedFields builds the final field list by cascading from v1 to target version
func (c *Compiler) buildCascadedFields(packet *PacketDef, targetVersion int) (map[string]*FieldDef, []string, error) {
	result := make(map[string]*FieldDef)
	var allFieldOrder []string

	// Get all versions up to target, sorted
	var versions []int
	for version := range packet.Versions {
		if version <= targetVersion {
			versions = append(versions, version)
		}
	}
	sort.Ints(versions)

	// Apply each version in order
	for _, version := range versions {
		versionDef := packet.Versions[version]

		// Add new fields in their original order
		for _, fieldName := range versionDef.FieldOrder {
			fieldDef := versionDef.Fields[fieldName]
			if fieldDef.Remove {
				delete(result, fieldName)
				// Remove from order list
				for i, name := range allFieldOrder {
					if name == fieldName {
						allFieldOrder = append(allFieldOrder[:i], allFieldOrder[i+1:]...)
						break
					}
				}
			} else {
				// Create copy to avoid modifying original
				newField := *fieldDef

				// If field exists, preserve position if not explicitly set
				if existing, exists := result[fieldName]; exists {
					if newField.Position == -1 && existing.Position >= 0 {
						newField.Position = existing.Position
					}
					if newField.After == "" && existing.After != "" {
						newField.After = existing.After
					}
					if newField.Before == "" && existing.Before != "" {
						newField.Before = existing.Before
					}
				} else {
					// New field - add to order
					allFieldOrder = append(allFieldOrder, fieldName)
				}
				result[fieldName] = &newField
			}
		}
	}

	return result, allFieldOrder, nil
}

// resolvePositions resolves field positions and creates ordered field list
func (c *Compiler) resolvePositions(fields map[string]*FieldDef, originalOrder []string) ([]*CompiledField, int) {
	var compiledFields []*CompiledField
	var fieldOrder []string

	// Handle explicitly positioned fields first
	positioned := make(map[int]string)
	var maxPos int

	for name, field := range fields {
		if field.Position >= 0 {
			positioned[field.Position] = name
			if field.Position > maxPos {
				maxPos = field.Position
			}
		}
	}

	// Build initial order from positioned fields
	for i := 0; i <= maxPos; i++ {
		if name, exists := positioned[i]; exists {
			fieldOrder = append(fieldOrder, name)
		}
	}

	// Handle @after/@before positioning
	for _, name := range originalOrder {
		field := fields[name]
		if field.Position >= 0 {
			continue // Already handled
		}

		if field.After != "" {
			// Insert after referenced field
			for i, existing := range fieldOrder {
				if existing == field.After {
					fieldOrder = append(fieldOrder[:i+1], append([]string{name}, fieldOrder[i+1:]...)...)
					break
				}
			}
		} else if field.Before != "" {
			// Insert before referenced field
			for i, existing := range fieldOrder {
				if existing == field.Before {
					fieldOrder = append(fieldOrder[:i], append([]string{name}, fieldOrder[i:]...)...)
					break
				}
			}
		} else {
			// Append in original order
			fieldOrder = append(fieldOrder, name)
		}
	}

	// Create compiled fields with calculated offsets
	offset := 0
	dynamic := false

	for _, name := range fieldOrder {
		field := fields[name]

		compiledField := &CompiledField{
			Name:    name,
			Type:    field.Type,
			Size:    field.Size,
			Offset:  offset,
			Dynamic: field.Size == -1,
		}

		compiledFields = append(compiledFields, compiledField)

		// Calculate offset for next field
		if field.Size == -1 {
			dynamic = true
		} else if !dynamic {
			fieldSize := c.getTypeSize(field.Type)
			if field.Size > 0 {
				fieldSize *= field.Size
			}
			offset += fieldSize
		}
	}

	totalSize := offset
	if dynamic {
		totalSize = -1
	}

	return compiledFields, totalSize
}

// getTypeSize returns the byte size of a data type
func (c *Compiler) getTypeSize(t EQ2Type) int {
	switch t {
	case TypeInt8:
		return 1
	case TypeInt16:
		return 2
	case TypeInt32, TypeFloat32, TypeColor:
		return 4
	case TypeInt64:
		return 8
	default:
		return -1 // Variable size
	}
}
@@ -1,474 +0,0 @@
package cps

import (
	"testing"
)

const testCSS = `
CreateCharacter {
	.v1 {
		account_id: int32;
		server_id: int32;
		name: string16;
		race: int8;
		gender: int8;
		deity: int8;
		class: int8;
		level: int8;
		starting_zone: int8;
		unknown1: int8[2];
		race_file: string16;
		skin_color: float[3];
		eye_color: float[3];
		hair_color1: float[3];
		hair_color2: float[3];
		hair_highlight: float[3];
		unknown2: int8[26];
		hair_file: string16;
		hair_type_color: float[3];
		hair_type_highlight_color: float[3];
		face_file: string16;
		hair_face_color: float[3];
		hair_face_highlight_color: float[3];
		chest_file: string16;
		shirt_color: float[3];
		unknown_chest_color: float[3];
		legs_file: string16;
		pants_color: float[3];
		unknown_legs_color: float[3];
		unknown9: float[3];
		eyes2: float[3];
		ears: float[3];
		eye_brows: float[3];
		cheeks: float[3];
		lips: float[3];
		chin: float[3];
		nose: float[3];
		body_size: float;
		body_age: float;
	}

	.v373 {
		unknown0: int32 @0; /* Insert at beginning */
	}

	.v562 {
		unknown0: int8; /* Override type from v373 */
		unknown1: int32 @1; /* New field after unknown0 */
		unknown3: int8 @3; /* Insert at position 3 */

		/* Override color types */
		skin_color: color;
		skin_color2: color @after(skin_color);
		eye_color: color;
		hair_color1: color;
		hair_color2: color;
		hair_highlight: color;
		hair_type_color: color;
		hair_type_highlight_color: color;
		hair_face_color: color;
		hair_face_highlight_color: color;
		shirt_color: color;
		unknown_chest_color: color;
		pants_color: color;
		unknown_legs_color: color;
		unknown9: color;

		/* Add wing support */
		wing_file: string16 @after(hair_face_highlight_color);
		wing_color1: color;
		wing_color2: color;

		/* Add SOGA system */
		soga_version: int8;
		soga_race_file: string16;
		soga_skin_color: color;
		soga_eye_color: color;
		soga_hair_color1: color;
		soga_hair_color2: color;
		soga_hair_highlight: color;
		soga_unknown11: int8[26];
		soga_hair_file: string16;
		soga_hair_type_color: color;
		soga_hair_type_highlight_color: color;
		soga_face_file: string16;
		soga_hair_face_color: color;
		soga_hair_face_highlight_color: color;
		soga_wing_file: string16;
		soga_wing_color1: color;
		soga_wing_color2: color;
		soga_chest_file: string16;
		soga_shirt_color: color;
		soga_unknown_chest_color: color;
		soga_legs_file: string16;
		soga_pants_color: color;
		soga_unknown_legs_color: color;
		soga_unknown12: color;
		soga_eyes2: float[3];
		soga_ears: float[3];
		soga_eye_brows: float[3];
		soga_cheeks: float[3];
		soga_lips: float[3];
		soga_chin: float[3];
		soga_nose: float[3];
		soga_body_size: float;
		soga_body_age: float;
	}

	.v57080 {
		unknown10: int16 @after(starting_zone);
		unknown_skin_color2: color @after(eye_color);
		/* Remove skin_color2 positioning */
		skin_color2: none;
	}
}

BadLanguageFilter {
	.v1 {
		num_words: int16;
		words_array: string16[];
	}
}`

func TestParser(t *testing.T) {
	parser := NewParser(testCSS)
	packets, err := parser.Parse()
	if err != nil {
		t.Fatalf("Parse failed: %v", err)
	}

	// Verify CreateCharacter packet exists
	createChar, exists := packets["CreateCharacter"]
	if !exists {
		t.Fatal("CreateCharacter packet not found")
	}

	// Verify versions
	expectedVersions := []int{1, 373, 562, 57080}
	for _, version := range expectedVersions {
		if _, exists := createChar.Versions[version]; !exists {
			t.Errorf("Version %d not found", version)
		}
	}

	// Verify v1 has expected fields
	v1 := createChar.Versions[1]
	expectedV1Fields := []string{"account_id", "name", "race", "skin_color"}
	for _, fieldName := range expectedV1Fields {
		if _, exists := v1.Fields[fieldName]; !exists {
			t.Errorf("v1 missing field: %s", fieldName)
		}
	}

	// Verify field types
	if v1.Fields["account_id"].Type != TypeInt32 {
		t.Error("account_id should be int32")
	}
	if v1.Fields["name"].Type != TypeString16 {
		t.Error("name should be string16")
	}
	if v1.Fields["skin_color"].Type != TypeFloat32 || v1.Fields["skin_color"].Size != 3 {
		t.Error("skin_color should be float[3]")
	}

	// Verify v373 override
	v373 := createChar.Versions[373]
	if v373.Fields["unknown0"].Type != TypeInt32 || v373.Fields["unknown0"].Position != 0 {
		t.Error("v373 unknown0 should be int32 @0")
	}

	// Verify v562 type override
	v562 := createChar.Versions[562]
	if v562.Fields["unknown0"].Type != TypeInt8 {
		t.Error("v562 should override unknown0 to int8")
	}
	if v562.Fields["skin_color"].Type != TypeColor {
		t.Error("v562 should override skin_color to color")
	}

	// Verify positioning
	if v562.Fields["unknown1"].Position != 1 {
		t.Error("unknown1 should have position 1")
	}
	if v562.Fields["skin_color2"].After != "skin_color" {
		t.Error("skin_color2 should be after skin_color")
	}

	// Verify removal
	v57080 := createChar.Versions[57080]
	if !v57080.Fields["skin_color2"].Remove {
		t.Error("skin_color2 should be removed in v57080")
	}
}

func TestCompiler(t *testing.T) {
	manager := MustLoadCSS(testCSS)

	// Test v1 compilation
	packet, err := manager.GetPacket("CreateCharacter", 1)
	if err != nil {
		t.Fatalf("Failed to compile v1: %v", err)
	}

	if packet.Name != "CreateCharacter" || packet.Version != 1 {
		t.Error("Incorrect packet name or version")
	}

	// Verify field order and types
	if len(packet.Fields) == 0 {
		t.Fatal("No fields in compiled packet")
	}

	// First field should be account_id
	if packet.Fields[0].Name != "account_id" || packet.Fields[0].Type != TypeInt32 {
		t.Error("First field should be account_id:int32")
	}

	// Test v373 compilation (should inherit v1 + unknown0 at beginning)
	packet373, err := manager.GetPacket("CreateCharacter", 373)
	if err != nil {
		t.Fatalf("Failed to compile v373: %v", err)
	}

	// Should have more fields than v1
	if len(packet373.Fields) <= len(packet.Fields) {
		t.Error("v373 should have more fields than v1")
	}

	// First field should be unknown0
	if packet373.Fields[0].Name != "unknown0" || packet373.Fields[0].Type != TypeInt32 {
		t.Error("v373 first field should be unknown0:int32")
	}

	// Test v562 compilation (type overrides)
	packet562, err := manager.GetPacket("CreateCharacter", 562)
	if err != nil {
		t.Fatalf("Failed to compile v562: %v", err)
	}

	// Find skin_color field and verify it's now color type
	var skinColorField *CompiledField
	for _, field := range packet562.Fields {
		if field.Name == "skin_color" {
			skinColorField = field
			break
		}
	}
	if skinColorField == nil || skinColorField.Type != TypeColor {
		t.Error("v562 skin_color should be color type")
	}

	// Verify unknown0 type override
	if packet562.Fields[0].Type != TypeInt8 {
		t.Error("v562 unknown0 should be int8 (overridden from int32)")
	}

	// Test field removal in v57080
	packet57080, err := manager.GetPacket("CreateCharacter", 57080)
	if err != nil {
		t.Fatalf("Failed to compile v57080: %v", err)
	}

	// skin_color2 should not exist
	for _, field := range packet57080.Fields {
		if field.Name == "skin_color2" {
			t.Error("skin_color2 should be removed in v57080")
		}
	}
}

func TestVersionSelection(t *testing.T) {
	manager := MustLoadCSS(testCSS)

	// Test exact version match
	packet, err := manager.GetPacket("CreateCharacter", 562)
	if err != nil {
		t.Fatalf("Failed to get v562: %v", err)
	}
	if packet.Version != 562 {
		t.Error("Should get exact version match")
	}

	// Test version fallback (should get highest <= requested)
	packet, err = manager.GetPacket("CreateCharacter", 500)
	if err != nil {
		t.Fatalf("Failed to get v500 fallback: %v", err)
	}
	if packet.Version != 373 {
		t.Error("Should fallback to v373 for client v500")
	}

	// Test future version (should get highest available)
	packet, err = manager.GetPacket("CreateCharacter", 99999)
	if err != nil {
		t.Fatalf("Failed to get future version: %v", err)
	}
	if packet.Version != 57080 {
		t.Error("Should get highest version for future client")
	}

	// Test too old client
	_, err = manager.GetPacket("CreateCharacter", 0)
	if err == nil {
		t.Error("Should fail for client version 0")
	}
}

func TestSerialization(t *testing.T) {
	manager := MustLoadCSS(testCSS)
	serializer := NewSerializer(manager)

	// Test v1 serialization
	data := map[string]any{
		"account_id": int32(12345),
		"server_id":  int32(1),
		"name":       "TestChar",
		"race":       int8(1),
		"gender":     int8(1),
		"deity":      int8(0),
		"class":      int8(1),
		"level":      int8(1),
		"skin_color": []float32{1.0, 0.8, 0.6},
	}

	bytes, err := serializer.Serialize("CreateCharacter", 1, data)
	if err != nil {
		t.Fatalf("Serialization failed: %v", err)
	}

	if len(bytes) == 0 {
		t.Fatal("Serialized data is empty")
	}

	// Test deserialization
	parsed, err := serializer.Deserialize("CreateCharacter", 1, bytes)
	if err != nil {
		t.Fatalf("Deserialization failed: %v", err)
	}

	// Verify key fields
	if parsed["account_id"].(int32) != 12345 {
		t.Error("account_id not preserved")
	}
	if parsed["name"].(string) != "TestChar" {
		t.Error("name not preserved")
	}
	if parsed["race"].(int8) != 1 {
		t.Error("race not preserved")
	}

	// Verify float array
	skinColor := parsed["skin_color"].([]float32)
	if len(skinColor) != 3 || skinColor[0] != 1.0 {
		t.Error("skin_color array not preserved")
	}
}

func TestColorSerialization(t *testing.T) {
	manager := MustLoadCSS(testCSS)
	serializer := NewSerializer(manager)

	// Test v562 with color types
	data := map[string]any{
		"account_id":  int32(12345),
		"name":        "TestChar",
		"skin_color":  uint32(0xFF0000FF), // Red color
		"eye_color":   uint32(0x0000FFFF), // Blue color
		"wing_color1": uint32(0x00FF00FF), // Green color
	}

	bytes, err := serializer.Serialize("CreateCharacter", 562, data)
	if err != nil {
		t.Fatalf("v562 serialization failed: %v", err)
	}

	parsed, err := serializer.Deserialize("CreateCharacter", 562, bytes)
	if err != nil {
		t.Fatalf("v562 deserialization failed: %v", err)
	}

	// Verify colors preserved
	if parsed["skin_color"].(uint32) != 0xFF0000FF {
		t.Error("skin_color not preserved")
	}
	if parsed["eye_color"].(uint32) != 0x0000FFFF {
		t.Error("eye_color not preserved")
	}
}

func TestBadLanguageFilter(t *testing.T) {
	manager := MustLoadCSS(testCSS)

	packet, err := manager.GetPacket("BadLanguageFilter", 1)
	if err != nil {
		t.Fatalf("Failed to get BadLanguageFilter: %v", err)
	}

	if len(packet.Fields) != 2 {
		t.Error("BadLanguageFilter should have 2 fields")
	}

	// Verify dynamic array
	wordsField := packet.Fields[1]
	if wordsField.Name != "words_array" || !wordsField.Dynamic {
		t.Error("words_array should be dynamic")
	}
}

func TestCaching(t *testing.T) {
	manager := MustLoadCSS(testCSS)

	// Get same packet twice
	packet1, err := manager.GetPacket("CreateCharacter", 562)
	if err != nil {
		t.Fatalf("First get failed: %v", err)
	}

	packet2, err := manager.GetPacket("CreateCharacter", 562)
	if err != nil {
		t.Fatalf("Second get failed: %v", err)
	}

	// Should be same pointer (cached)
	if packet1 != packet2 {
		t.Error("Packets should be cached and identical")
	}

	// Clear cache and verify new instance
	manager.ClearCache()
	packet3, err := manager.GetPacket("CreateCharacter", 562)
	if err != nil {
		t.Fatalf("Third get failed: %v", err)
	}

	if packet1 == packet3 {
		t.Error("After cache clear, should get new instance")
	}
}

func TestErrorCases(t *testing.T) {
	// Test invalid CSS
	invalidCSS := `CreateCharacter { .v1 { invalid syntax } }`
	manager := NewManager()
	err := manager.LoadCSS(invalidCSS)
	if err == nil {
		t.Error("Should fail on invalid CSS")
	}

	// Test missing packet
	manager = MustLoadCSS(testCSS)
	_, err = manager.GetPacket("NonExistent", 1)
	if err == nil {
		t.Error("Should fail on missing packet")
	}

	// Test serialization with wrong data types
	serializer := NewSerializer(manager)
	badData := map[string]any{
		"account_id": "not_a_number", // Wrong type
	}
	_, err = serializer.Serialize("CreateCharacter", 1, badData)
	if err == nil {
		t.Error("Should fail on wrong data type")
	}
}
@@ -1,105 +0,0 @@
package cps

import (
	"fmt"
	"sync"
)

// Manager provides runtime packet management with caching
type Manager struct {
	compiler *Compiler
	cache    sync.Map // map[string]*CompiledStruct
}

// NewManager creates a packet manager
func NewManager() *Manager {
	return &Manager{}
}

// LoadCSS loads and parses CSS packet definitions
func (m *Manager) LoadCSS(content string) error {
	parser := NewParser(content)
	packets, err := parser.Parse()
	if err != nil {
		return err
	}

	m.compiler = NewCompiler(packets)
	return nil
}

// GetPacket returns a compiled packet structure (cached)
func (m *Manager) GetPacket(name string, version int) (*CompiledStruct, error) {
	key := fmt.Sprintf("%s_%d", name, version)

	if cached, ok := m.cache.Load(key); ok {
		return cached.(*CompiledStruct), nil
	}

	compiled, err := m.compiler.Compile(name, version)
	if err != nil {
		return nil, err
	}

	m.cache.Store(key, compiled)
	return compiled, nil
}

// ListPackets returns names of all available packets
func (m *Manager) ListPackets() []string {
	if m.compiler == nil {
		return nil
	}

	var names []string
	for name := range m.compiler.packets {
		names = append(names, name)
	}
	return names
}

// GetVersions returns all available versions for a packet
func (m *Manager) GetVersions(packetName string) []int {
	if m.compiler == nil {
		return nil
	}

	packet, exists := m.compiler.packets[packetName]
	if !exists {
		return nil
	}

	var versions []int
	for version := range packet.Versions {
		versions = append(versions, version)
	}
	return versions
}

// ClearCache clears the compiled packet cache
func (m *Manager) ClearCache() {
	m.cache = sync.Map{}
}

// Convenience functions

// MustLoadCSS loads CSS definitions and panics on error
func MustLoadCSS(content string) *Manager {
	manager := NewManager()
	if err := manager.LoadCSS(content); err != nil {
		panic(err)
	}
	return manager
}

// QuickSerialize is a convenience function for one-off serialization
func QuickSerialize(manager *Manager, packetName string, version int, data map[string]any) ([]byte, error) {
	serializer := NewSerializer(manager)
	return serializer.Serialize(packetName, version, data)
}

// QuickDeserialize is a convenience function for one-off deserialization
func QuickDeserialize(manager *Manager, packetName string, version int, data []byte) (map[string]any, error) {
	serializer := NewSerializer(manager)
	return serializer.Deserialize(packetName, version, data)
}
@@ -1,320 +0,0 @@
package cps

import (
	"fmt"
	"regexp"
	"strconv"
	"strings"
)

// Parser handles CSS-like packet definition parsing
type Parser struct {
	content string
}

// NewParser creates a parser for CSS-like packet definitions
func NewParser(content string) *Parser {
	return &Parser{content: content}
}

// Parse parses the CSS content and returns packet definitions
func (p *Parser) Parse() (map[string]*PacketDef, error) {
	packets := make(map[string]*PacketDef)

	// Remove comments and normalize
	cleaned := p.removeComments(p.content)

	// Parse packets using lexer approach
	i := 0
	for i < len(cleaned) {
		// Skip whitespace
		for i < len(cleaned) && (cleaned[i] == ' ' || cleaned[i] == '\t' || cleaned[i] == '\n' || cleaned[i] == '\r') {
			i++
		}
		if i >= len(cleaned) {
			break
		}

		// Look for packet name (word followed by {)
		nameStart := i
		for i < len(cleaned) && ((cleaned[i] >= 'A' && cleaned[i] <= 'Z') || (cleaned[i] >= 'a' && cleaned[i] <= 'z') || (cleaned[i] >= '0' && cleaned[i] <= '9') || cleaned[i] == '_') {
			i++
		}
		if i == nameStart {
			i++
			continue
		}

		packetName := cleaned[nameStart:i]

		// Skip whitespace to find opening brace
		for i < len(cleaned) && (cleaned[i] == ' ' || cleaned[i] == '\t' || cleaned[i] == '\n' || cleaned[i] == '\r') {
			i++
		}
		if i >= len(cleaned) || cleaned[i] != '{' {
			i++
			continue
		}

		// Find matching closing brace
		braceCount := 1
		contentStart := i + 1
		i++

		for i < len(cleaned) && braceCount > 0 {
			if cleaned[i] == '{' {
				braceCount++
			} else if cleaned[i] == '}' {
				braceCount--
			}
			if braceCount > 0 {
				i++
			}
		}

		if braceCount != 0 {
			return nil, fmt.Errorf("unmatched brace for packet %s", packetName)
		}

		// Extract packet body
		packetBody := cleaned[contentStart:i]

		packet, err := p.parsePacket(packetName, packetBody)
		if err != nil {
			return nil, fmt.Errorf("parsing packet %s: %w", packetName, err)
		}

		packets[packetName] = packet
		i++ // Skip closing brace
	}

	return packets, nil
}

// removeComments strips CSS-style comments
func (p *Parser) removeComments(content string) string {
	commentRegex := regexp.MustCompile(`/\*.*?\*/`)
	return commentRegex.ReplaceAllString(content, "")
}

// parsePacket parses a single packet definition
func (p *Parser) parsePacket(name, body string) (*PacketDef, error) {
	packet := &PacketDef{
		Name:     name,
		Versions: make(map[int]*VersionDef),
	}

	i := 0
	for i < len(body) {
		// Skip whitespace
		for i < len(body) && (body[i] == ' ' || body[i] == '\t' || body[i] == '\n' || body[i] == '\r') {
			i++
		}
		if i >= len(body) {
			break
		}

		// Look for .v pattern
		if i+2 < len(body) && body[i] == '.' && body[i+1] == 'v' {
			i += 2

			// Parse version number
			versionStart := i
			for i < len(body) && body[i] >= '0' && body[i] <= '9' {
				i++
			}
			if i == versionStart {
				return nil, fmt.Errorf("invalid version number after .v")
			}

			versionNum, err := strconv.Atoi(body[versionStart:i])
			if err != nil {
				return nil, fmt.Errorf("invalid version number: %s", body[versionStart:i])
			}

			// Skip whitespace to find opening brace
			for i < len(body) && (body[i] == ' ' || body[i] == '\t' || body[i] == '\n' || body[i] == '\r') {
				i++
			}
			if i >= len(body) || body[i] != '{' {
				return nil, fmt.Errorf("expected opening brace for version %d", versionNum)
			}

			// Find matching closing brace
			braceCount := 1
			contentStart := i + 1
			i++

			for i < len(body) && braceCount > 0 {
				if body[i] == '{' {
					braceCount++
				} else if body[i] == '}' {
					braceCount--
				}
				if braceCount > 0 {
					i++
				}
			}

			if braceCount != 0 {
				return nil, fmt.Errorf("unmatched brace for version %d", versionNum)
			}

			// Extract content between braces
			versionBody := body[contentStart:i]

			fields, fieldOrder, err := p.parseFields(versionBody)
			if err != nil {
				return nil, fmt.Errorf("parsing version %d: %w", versionNum, err)
			}

			packet.Versions[versionNum] = &VersionDef{
				Version:    versionNum,
				Fields:     fields,
				FieldOrder: fieldOrder,
			}

			i++ // Skip closing brace
		} else {
			i++
		}
	}

	return packet, nil
}

// findMatchingBrace is no longer needed with lexer approach

// parseFields parses field definitions within a version block
func (p *Parser) parseFields(fieldsBody string) (map[string]*FieldDef, []string, error) {
	fields := make(map[string]*FieldDef)
	var fieldOrder []string

	// Split by semicolon and parse each field
	fieldLines := strings.Split(fieldsBody, ";")

	for _, line := range fieldLines {
		line = strings.TrimSpace(line)
		if line == "" {
			continue
		}

		field, err := p.parseField(line)
		if err != nil {
			return nil, nil, err
		}

		if field != nil {
			fields[field.Name] = field
			fieldOrder = append(fieldOrder, field.Name)
		}
	}

	return fields, fieldOrder, nil
}

// parseField parses a single field definition
func (p *Parser) parseField(line string) (*FieldDef, error) {
	// Handle field_name: type @position syntax
	parts := strings.Split(line, ":")
	if len(parts) != 2 {
		return nil, fmt.Errorf("invalid field syntax: %s", line)
	}

	name := strings.TrimSpace(parts[0])
	typePart := strings.TrimSpace(parts[1])

	field := &FieldDef{
		Name:     name,
		Position: -1,
	}

	// Handle special "none" type for removal
	if typePart == "none" {
		field.Remove = true
		return field, nil
	}

	// Parse position modifiers (@0, @after(field), etc.)
	if strings.Contains(typePart, "@") {
		parts := strings.Split(typePart, "@")
		typePart = strings.TrimSpace(parts[0])
		positionPart := strings.TrimSpace(parts[1])

		if err := p.parsePosition(field, positionPart); err != nil {
			return nil, err
		}
	}

	// Parse type and array syntax
	if err := p.parseType(field, typePart); err != nil {
		return nil, err
	}

	return field, nil
}

// parsePosition parses position modifiers like @0, @after(field)
func (p *Parser) parsePosition(field *FieldDef, positionPart string) error {
	// Try to parse as numeric position
	if pos, err := strconv.Atoi(positionPart); err == nil {
		field.Position = pos
		return nil
	}

	if strings.HasPrefix(positionPart, "after(") && strings.HasSuffix(positionPart, ")") {
		refField := positionPart[6 : len(positionPart)-1]
		field.After = refField
		return nil
	}

	if strings.HasPrefix(positionPart, "before(") && strings.HasSuffix(positionPart, ")") {
		refField := positionPart[7 : len(positionPart)-1]
		field.Before = refField
		return nil
	}

	return fmt.Errorf("invalid position syntax: %s", positionPart)
}

// parseType parses type definitions including arrays
func (p *Parser) parseType(field *FieldDef, typePart string) error {
	// Handle array syntax type[size] or type[]
	if strings.Contains(typePart, "[") {
		arrayRegex := regexp.MustCompile(`(\w+)\[(\d*)\]`)
		matches := arrayRegex.FindStringSubmatch(typePart)
		if len(matches) != 3 {
			return fmt.Errorf("invalid array syntax: %s", typePart)
		}

		baseType := matches[1]
		sizeStr := matches[2]

		eq2Type, exists := typeMap[baseType]
		if !exists {
			return fmt.Errorf("unknown type: %s", baseType)
		}

		field.Type = eq2Type

		if sizeStr == "" {
			field.Size = -1 // Dynamic array
		} else {
			size, err := strconv.Atoi(sizeStr)
			if err != nil {
				return fmt.Errorf("invalid array size: %s", sizeStr)
			}
			field.Size = size
		}
	} else {
		// Scalar type
		eq2Type, exists := typeMap[typePart]
		if !exists {
			return fmt.Errorf("unknown type: %s", typePart)
		}
		field.Type = eq2Type
		field.Size = 0
	}

	return nil
}
@@ -1,285 +0,0 @@
package cps

import (
	"encoding/binary"
	"fmt"
	"io"
	"math"
)

// Serializer handles packet serialization/deserialization
type Serializer struct {
	manager *Manager
}

// NewSerializer creates a packet serializer
func NewSerializer(manager *Manager) *Serializer {
	return &Serializer{manager: manager}
}

// Serialize converts packet data to bytes
func (s *Serializer) Serialize(packetName string, version int, data map[string]any) ([]byte, error) {
	packet, err := s.manager.GetPacket(packetName, version)
	if err != nil {
		return nil, err
	}

	var buf []byte

	for _, field := range packet.Fields {
		value, exists := data[field.Name]
		if !exists {
			value = s.getZeroValue(field)
		}

		fieldBytes, err := s.serializeField(field, value)
		if err != nil {
			return nil, fmt.Errorf("serializing field %s: %w", field.Name, err)
		}

		buf = append(buf, fieldBytes...)
	}

	return buf, nil
}

// Deserialize parses bytes into packet data
func (s *Serializer) Deserialize(packetName string, version int, data []byte) (map[string]any, error) {
	packet, err := s.manager.GetPacket(packetName, version)
	if err != nil {
		return nil, err
	}

	result := make(map[string]any)
	offset := 0

	for _, field := range packet.Fields {
		value, consumed, err := s.deserializeField(field, data[offset:])
		if err != nil {
			return nil, fmt.Errorf("deserializing field %s: %w", field.Name, err)
		}

		result[field.Name] = value
		offset += consumed
	}

	return result, nil
}

// serializeField serializes a single field value
func (s *Serializer) serializeField(field *CompiledField, value any) ([]byte, error) {
	switch field.Type {
	case TypeInt8:
		v, ok := value.(int8)
		if !ok {
			return nil, fmt.Errorf("expected int8, got %T", value)
		}
		return []byte{byte(v)}, nil

	case TypeInt16:
		v, ok := value.(int16)
		if !ok {
			return nil, fmt.Errorf("expected int16, got %T", value)
		}
		buf := make([]byte, 2)
		binary.LittleEndian.PutUint16(buf, uint16(v))
		return buf, nil

	case TypeInt32:
		v, ok := value.(int32)
		if !ok {
			return nil, fmt.Errorf("expected int32, got %T", value)
		}
		buf := make([]byte, 4)
		binary.LittleEndian.PutUint32(buf, uint32(v))
		return buf, nil

	case TypeInt64:
		v, ok := value.(int64)
		if !ok {
			return nil, fmt.Errorf("expected int64, got %T", value)
		}
		buf := make([]byte, 8)
		binary.LittleEndian.PutUint64(buf, uint64(v))
		return buf, nil

	case TypeString8:
		v, ok := value.(string)
		if !ok {
			return nil, fmt.Errorf("expected string, got %T", value)
		}
		buf := make([]byte, 1+len(v))
		buf[0] = byte(len(v))
		copy(buf[1:], v)
		return buf, nil

	case TypeString16:
		v, ok := value.(string)
		if !ok {
			return nil, fmt.Errorf("expected string, got %T", value)
		}
		buf := make([]byte, 2+len(v))
		binary.LittleEndian.PutUint16(buf[0:2], uint16(len(v)))
		copy(buf[2:], v)
		return buf, nil

	case TypeString32:
		v, ok := value.(string)
		if !ok {
			return nil, fmt.Errorf("expected string, got %T", value)
		}
		buf := make([]byte, 4+len(v))
		binary.LittleEndian.PutUint32(buf[0:4], uint32(len(v)))
		copy(buf[4:], v)
		return buf, nil

	case TypeColor:
		v, ok := value.(uint32)
		if !ok {
			return nil, fmt.Errorf("expected uint32, got %T", value)
		}
		buf := make([]byte, 4)
		binary.LittleEndian.PutUint32(buf, v)
		return buf, nil

	case TypeFloat32:
		if field.Size > 0 {
			// Array of floats
			v, ok := value.([]float32)
			if !ok {
				return nil, fmt.Errorf("expected []float32, got %T", value)
			}
			buf := make([]byte, len(v)*4)
			for i, f := range v {
				binary.LittleEndian.PutUint32(buf[i*4:], math.Float32bits(f))
			}
			return buf, nil
		} else {
			// Single float
			v, ok := value.(float32)
			if !ok {
				return nil, fmt.Errorf("expected float32, got %T", value)
			}
			buf := make([]byte, 4)
			binary.LittleEndian.PutUint32(buf, math.Float32bits(v))
			return buf, nil
		}

	default:
		return nil, fmt.Errorf("unsupported field type: %v", field.Type)
	}
}

// deserializeField deserializes a single field value
func (s *Serializer) deserializeField(field *CompiledField, data []byte) (any, int, error) {
	switch field.Type {
	case TypeInt8:
		if len(data) < 1 {
			return nil, 0, io.ErrUnexpectedEOF
		}
		return int8(data[0]), 1, nil

	case TypeInt16:
		if len(data) < 2 {
			return nil, 0, io.ErrUnexpectedEOF
		}
		return int16(binary.LittleEndian.Uint16(data[0:2])), 2, nil

	case TypeInt32:
		if len(data) < 4 {
			return nil, 0, io.ErrUnexpectedEOF
		}
		return int32(binary.LittleEndian.Uint32(data[0:4])), 4, nil

	case TypeInt64:
		if len(data) < 8 {
			return nil, 0, io.ErrUnexpectedEOF
		}
		return int64(binary.LittleEndian.Uint64(data[0:8])), 8, nil

	case TypeString8:
		if len(data) < 1 {
			return nil, 0, io.ErrUnexpectedEOF
		}
		strlen := int(data[0])
		if len(data) < 1+strlen {
			return nil, 0, io.ErrUnexpectedEOF
		}
		return string(data[1 : 1+strlen]), 1 + strlen, nil

	case TypeString16:
		if len(data) < 2 {
			return nil, 0, io.ErrUnexpectedEOF
		}
		strlen := int(binary.LittleEndian.Uint16(data[0:2]))
		if len(data) < 2+strlen {
			return nil, 0, io.ErrUnexpectedEOF
		}
		return string(data[2 : 2+strlen]), 2 + strlen, nil

	case TypeString32:
		if len(data) < 4 {
			return nil, 0, io.ErrUnexpectedEOF
		}
		strlen := int(binary.LittleEndian.Uint32(data[0:4]))
		if len(data) < 4+strlen {
			return nil, 0, io.ErrUnexpectedEOF
		}
		return string(data[4 : 4+strlen]), 4 + strlen, nil

	case TypeColor:
		if len(data) < 4 {
			return nil, 0, io.ErrUnexpectedEOF
		}
		return binary.LittleEndian.Uint32(data[0:4]), 4, nil

	case TypeFloat32:
		if field.Size > 0 {
			// Array of floats
			if len(data) < field.Size*4 {
				return nil, 0, io.ErrUnexpectedEOF
			}
			floats := make([]float32, field.Size)
			for i := 0; i < field.Size; i++ {
				bits := binary.LittleEndian.Uint32(data[i*4:])
				floats[i] = math.Float32frombits(bits)
			}
			return floats, field.Size * 4, nil
		} else {
			// Single float
			if len(data) < 4 {
				return nil, 0, io.ErrUnexpectedEOF
			}
			bits := binary.LittleEndian.Uint32(data[0:4])
			return math.Float32frombits(bits), 4, nil
		}

	default:
		return nil, 0, fmt.Errorf("unsupported field type: %v", field.Type)
	}
}

// getZeroValue returns appropriate zero value for field type
func (s *Serializer) getZeroValue(field *CompiledField) any {
	switch field.Type {
	case TypeInt8:
		return int8(0)
	case TypeInt16:
		return int16(0)
	case TypeInt32:
		return int32(0)
	case TypeInt64:
		return int64(0)
	case TypeString8, TypeString16, TypeString32:
		return ""
	case TypeColor:
		return uint32(0)
	case TypeFloat32:
		if field.Size > 0 {
			return make([]float32, field.Size)
		}
		return float32(0)
	default:
		return nil
	}
}
@@ -1,69 +0,0 @@
package cps

// EQ2Type represents supported data types in EQ2 protocol
type EQ2Type int

const (
	TypeInt8 EQ2Type = iota
	TypeInt16
	TypeInt32
	TypeInt64
	TypeFloat32
	TypeString8
	TypeString16
	TypeString32
	TypeColor
)

var typeMap = map[string]EQ2Type{
	"int8":     TypeInt8,
	"int16":    TypeInt16,
	"int32":    TypeInt32,
	"int64":    TypeInt64,
	"float":    TypeFloat32,
	"string8":  TypeString8,
	"string16": TypeString16,
	"string32": TypeString32,
	"color":    TypeColor,
}

// FieldDef represents a field definition with position and type info
type FieldDef struct {
	Name     string
	Type     EQ2Type
	Size     int // Array size, 0 for scalar, -1 for dynamic
	Position int // Explicit position, -1 for append
	After    string
	Before   string
	Remove   bool // true if field should be removed in this version
}

// VersionDef holds field definitions for a specific version
type VersionDef struct {
	Version    int
	Fields     map[string]*FieldDef
	FieldOrder []string // Preserve parsing order
}

// PacketDef represents a complete packet with all versions
type PacketDef struct {
	Name     string
	Versions map[int]*VersionDef
}

// CompiledField represents a field in the final compiled structure
type CompiledField struct {
	Name    string
	Type    EQ2Type
	Size    int
	Offset  int // Byte offset for fixed-size packets
	Dynamic bool
}

// CompiledStruct represents a compiled packet structure for a specific version
type CompiledStruct struct {
	Name    string
	Version int
	Fields  []*CompiledField
	Size    int // Total size if fixed, -1 if dynamic
}
internal/packets/parser/parser.go (new file, 1102 lines)
File diff suppressed because it is too large
@@ -3,7 +3,7 @@ package udp
import (
	"crypto/rand"
	"encoding/binary"
-	"eq2emu/internal/opcodes"
+	"eq2emu/internal/common/opcodes"
	"errors"
	"net"
	"sync"

@@ -3,7 +3,7 @@ package udp
import (
	"bytes"
	"encoding/binary"
-	"eq2emu/internal/opcodes"
+	"eq2emu/internal/common/opcodes"
	"errors"
	"fmt"
)

@@ -2,7 +2,7 @@ package udp

import (
	"encoding/binary"
-	"eq2emu/internal/opcodes"
+	"eq2emu/internal/common/opcodes"
	"errors"
	"fmt"
	"sort"

@@ -1,7 +1,7 @@
package udp

import (
-	"eq2emu/internal/opcodes"
+	"eq2emu/internal/common/opcodes"
	"fmt"
	"net"
	"sync"
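With the import path updated, the udp package keeps referencing the same identifiers under the new location. A minimal, hypothetical helper (illustrative only, the function name is not from this commit) shows the usage:

package udp

import "eq2emu/internal/common/opcodes"

// isSessionOpcode reports whether a header opcode byte is one of the
// session-level protocol opcodes. Illustrative sketch only.
func isSessionOpcode(op byte) bool {
	switch op {
	case opcodes.OpSessionRequest, opcodes.OpSessionResponse, opcodes.OpOutOfSession:
		return true
	}
	return false
}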