restructure

Robert Janetzko 2022-04-14 15:39:18 +00:00
parent 597f26b4dd
commit a9f3597edf
17 changed files with 13272 additions and 26103 deletions

2
analyze/.gitignore vendored Normal file

@@ -0,0 +1,2 @@
*.json
analyze


@@ -1,22 +1,9 @@
package main
import (
"encoding/json"
"encoding/xml"
"flag"
"fmt"
"io"
"io/ioutil"
"log"
"os"
"path/filepath"
"sort"
"strconv"
"strings"
"text/template"
"github.com/iancoleman/strcase"
"github.com/robertjanetzko/LegendsBrowser2/backend/util"
"github.com/robertjanetzko/LegendsBrowser2/analyze/df"
)
func main() {
@@ -25,541 +12,10 @@ func main() {
flag.Parse()
if len(*a) > 0 {
Analyze(*a)
df.Analyze(*a)
}
if *g {
fmt.Println("Generating")
Generate()
df.Generate()
}
}
func Analyze(filex string) {
fmt.Println("Search...", filex)
files, err := filepath.Glob(filex + "/*.xml")
if err != nil {
log.Fatal(err)
}
fmt.Println(files)
a := NewAnalyzeData()
for _, file := range files {
analyze(file, a)
}
file, _ := json.MarshalIndent(a, "", " ")
_ = ioutil.WriteFile("analyze.json", file, 0644)
createMetadata(a)
}
func Generate() {
data, err := ioutil.ReadFile("analyze.json")
if err != nil {
return
}
a := NewAnalyzeData()
json.Unmarshal(data, a)
createMetadata(a)
}
type FieldData struct {
IsString bool
Multiple bool
Base bool
Plus bool
}
func NewFieldData() *FieldData {
return &FieldData{}
}
type AnalyzeData struct {
// Types map[string]bool
Fields map[string]*FieldData
}
func NewAnalyzeData() *AnalyzeData {
return &AnalyzeData{
// Types: make(map[string]bool, 0),
Fields: make(map[string]*FieldData, 0),
}
}
func (a *AnalyzeData) GetField(s string) *FieldData {
if f, ok := a.Fields[s]; ok {
return f
} else {
f := &FieldData{}
a.Fields[s] = f
return f
}
}
func analyze(file string, a *AnalyzeData) error {
xmlFile, err := os.Open(file)
if err != nil {
fmt.Println(err)
}
plus := strings.HasSuffix(file, "_plus.xml")
fmt.Println("Successfully Opened", file)
defer xmlFile.Close()
converter := util.NewConvertReader(xmlFile)
return analyzeElement(xml.NewDecoder(converter), a, make([]string, 0), plus)
}
const PATH_SEPARATOR = "|"
func analyzeElement(d *xml.Decoder, a *AnalyzeData, path []string, plus bool) error {
if len(path) > 1 {
s := strings.Join(path, PATH_SEPARATOR)
fd := NewFieldData()
a.Fields[s] = fd
if plus {
fd.Plus = true
} else {
fd.Base = true
}
}
var (
data []byte
)
value := true
fields := make(map[string]bool)
Loop:
for {
tok, err := d.Token()
if err == io.EOF {
break Loop
} else if err != nil {
return err
}
switch t := tok.(type) {
case xml.StartElement:
value = false
// a.Types[strings.Join(path, PATH_SEPARATOR)] = true
newPath := append(path, t.Name.Local)
if _, ok := fields[t.Name.Local]; ok {
a.Fields[strings.Join(newPath, PATH_SEPARATOR)].Multiple = true
}
fields[t.Name.Local] = true
analyzeElement(d, a, newPath, plus)
case xml.CharData:
data = append(data, t...)
case xml.EndElement:
if value {
if _, err := strconv.Atoi(string(data)); err != nil {
a.Fields[strings.Join(path, PATH_SEPARATOR)].IsString = true
}
}
if t.Name.Local == "type" {
path[len(path)-2] = path[len(path)-2] + "+" + strcase.ToCamel(string(data))
}
return nil
}
}
return nil
}
var allowedTyped = map[string]bool{
"df_world|historical_events|historical_event": true,
"df_world|historical_event_collections|historical_event_collection": true,
}
func filterSubtypes(data *map[string]*FieldData) []string {
filtered := make(map[string]*FieldData)
for k, v := range *data {
path := strings.Split(k, PATH_SEPARATOR)
for index, seg := range path {
if strings.Contains(seg, "+") {
base := seg[:strings.Index(seg, "+")]
basePath := strings.Join(append(path[:index], base), PATH_SEPARATOR)
if allowedTyped[basePath] {
path[index] = seg
}
}
}
filtered[strings.Join(path, PATH_SEPARATOR)] = v
}
*data = filtered
list := util.Keys(filtered)
sort.Strings(list)
return list
}
func getSubtypes(objectTypes []string, k string) *[]string {
subtypes := make(map[string]bool)
for _, t := range objectTypes {
if strings.HasPrefix(t, k+"+") && !strings.Contains(t[len(k):], PATH_SEPARATOR) {
subtypes[t[strings.LastIndex(t, "+")+1:]] = true
}
}
keys := util.Keys(subtypes)
sort.Strings(keys)
if len(keys) > 0 {
return &keys
}
return nil
}
func getSubtypeOf(k string) *string {
if strings.Contains(k, PATH_SEPARATOR) {
last := k[strings.LastIndex(k, PATH_SEPARATOR)+1:]
if strings.Contains(last, "+") {
base := strcase.ToCamel(last[:strings.Index(last, "+")])
return &base
}
}
return nil
}
func createMetadata(a *AnalyzeData) {
// ts := filterSubtypes(a.Types)
fs := filterSubtypes(&a.Fields)
var objectTypes []string
for k := range a.Fields {
path := strings.Split(k, PATH_SEPARATOR)
if len(path) >= 2 {
objectTypes = append(objectTypes, strings.Join(path[:len(path)-1], PATH_SEPARATOR))
}
}
// for _, s := range fs {
// fmt.Println(s)
// }
objects := make(map[string]Object, 0)
for _, k := range objectTypes {
if ok, _ := isArray(k, fs); !ok {
n := k
if strings.Contains(k, PATH_SEPARATOR) {
n = k[strings.LastIndex(k, PATH_SEPARATOR)+1:]
}
if n == "" {
continue
}
objFields := make(map[string]Field, 0)
for _, f := range fs {
if strings.HasPrefix(f, k+PATH_SEPARATOR) {
fn := f[len(k)+1:]
if !strings.Contains(fn, PATH_SEPARATOR) {
legend := ""
if a.Fields[f].Base && a.Fields[f].Plus {
legend = "both"
} else if a.Fields[f].Base {
legend = "base"
} else if a.Fields[f].Plus {
legend = "plus"
}
field := Field{
Name: strcase.ToCamel(fn),
Type: "int",
Multiple: a.Fields[f].Multiple,
Legend: legend,
}
if ok, elements := isArray(f, fs); ok {
el := elements[strings.LastIndex(elements, PATH_SEPARATOR)+1:]
if _, ok := a.Fields[elements+PATH_SEPARATOR+"id"]; ok {
field.Type = "map"
} else {
field.Type = "array"
}
field.ElementType = &(el)
} else if ok, _ := isObject(f, fs); ok {
field.Type = "object"
} else if a.Fields[f].IsString {
field.Type = "string"
}
objFields[fn] = field
}
}
}
objects[n] = Object{
Name: strcase.ToCamel(n),
Id: a.Fields[k+PATH_SEPARATOR+"id"] != nil,
Named: a.Fields[k+PATH_SEPARATOR+"name"] != nil,
Typed: a.Fields[k+PATH_SEPARATOR+"type"] != nil,
SubTypes: getSubtypes(objectTypes, k),
SubTypeOf: getSubtypeOf(k),
Fields: objFields,
}
}
}
file, _ := json.MarshalIndent(objects, "", " ")
_ = ioutil.WriteFile("model.json", file, 0644)
f, err := os.Create("df/model.go")
defer f.Close()
err = packageTemplate.Execute(f, struct {
Objects map[string]Object
}{
Objects: objects,
})
if err != nil {
fmt.Println(err)
}
}
func isArray(typ string, types []string) (bool, string) {
fc := 0
elements := ""
if !strings.Contains(typ, PATH_SEPARATOR) || strings.Contains(typ[strings.LastIndex(typ, PATH_SEPARATOR):], "+") {
return false, ""
}
for _, t := range types {
if !strings.HasPrefix(t, typ+PATH_SEPARATOR) {
continue
}
if strings.Contains(t[len(typ)+1:], PATH_SEPARATOR) {
continue
}
fc++
elements = t
}
return fc == 1, elements
}
func isObject(typ string, types []string) (bool, string) {
fc := 0
for _, t := range types {
if !strings.HasPrefix(t, typ+PATH_SEPARATOR) {
continue
}
fc++
}
return fc > 0, typ
}
type Object struct {
Name string `json:"name"`
Id bool `json:"id,omitempty"`
Named bool `json:"named,omitempty"`
Typed bool `json:"typed,omitempty"`
SubTypes *[]string `json:"subtypes,omitempty"`
SubTypeOf *string `json:"subtypeof,omitempty"`
Fields map[string]Field `json:"fields"`
}
type Field struct {
Name string `json:"name"`
Type string `json:"type"`
Multiple bool `json:"multiple,omitempty"`
ElementType *string `json:"elements,omitempty"`
Legend string `json:"legend"`
}
func (f Field) TypeLine(objects map[string]Object) string {
n := f.Name
if n == "Id" || n == "Name" {
n = n + "_"
}
m := ""
if f.Multiple {
m = "[]"
}
t := f.Type
if f.Type == "array" {
t = "[]*" + objects[*f.ElementType].Name
}
if f.Type == "map" {
t = "map[int]*" + objects[*f.ElementType].Name
}
if f.Type == "object" {
t = "*" + f.Name
}
j := fmt.Sprintf("`json:\"%s\" legend:\"%s\"`", strcase.ToLowerCamel(f.Name), f.Legend)
return fmt.Sprintf("%s %s%s %s", n, m, t, j)
}
func (f Field) StartAction() string {
n := f.Name
if n == "Id" || n == "Name" {
n = n + "_"
}
if f.Type == "object" {
p := fmt.Sprintf("v, _ := parse%s(d, &t)", f.Name)
if !f.Multiple {
return fmt.Sprintf("%s\nobj.%s = v", p, n)
} else {
return fmt.Sprintf("%s\nobj.%s = append(obj.%s, v)", p, n, n)
}
}
if f.Type == "array" || f.Type == "map" {
el := strcase.ToCamel(*f.ElementType)
gen := fmt.Sprintf("parse%s", el)
if f.Type == "array" {
return fmt.Sprintf("parseArray(d, &obj.%s, %s)", f.Name, gen)
}
if f.Type == "map" {
return fmt.Sprintf("obj.%s = make(map[int]*%s)\nparseMap(d, &obj.%s, %s)", f.Name, el, f.Name, gen)
}
}
if f.Type == "int" || f.Type == "string" {
return "data = nil"
}
return ""
}
func (f Field) EndAction() string {
n := f.Name
if n == "Id" || n == "Name" {
n = n + "_"
}
if !f.Multiple {
if f.Type == "int" {
return fmt.Sprintf("obj.%s = n(data)", n)
} else if f.Type == "string" {
return fmt.Sprintf("obj.%s = string(data)", n)
}
} else {
if f.Type == "int" {
return fmt.Sprintf("obj.%s = append(obj.%s, n(data))", n, n)
} else if f.Type == "string" {
return fmt.Sprintf("obj.%s = append(obj.%s, string(data))", n, n)
}
}
return ""
}
var packageTemplate = template.Must(template.New("").Parse(`// Code generated by go generate; DO NOT EDIT.
package df
import (
"encoding/xml"
"strconv"
"github.com/iancoleman/strcase"
)
{{- range $name, $obj := .Objects }}
type {{ $obj.Name }} struct {
{{- range $fname, $field := $obj.Fields }}
{{- if not (and (eq $fname "type") (not (not $obj.SubTypes))) }}
{{ $field.TypeLine $.Objects }}
{{- end }}
{{- end }}
{{- if not (not $obj.SubTypes) }}
Details any
{{- end }}
}
{{- if $obj.Id }}
func (x *{{ $obj.Name }}) Id() int { return x.Id_ }
{{- end }}
{{- if $obj.Named }}
func (x *{{ $obj.Name }}) Name() string { return x.Name_ }
{{- end }}
{{- end }}
// Parser
func n(d []byte) int {
v, _ := strconv.Atoi(string(d))
return v
}
{{- range $name, $obj := .Objects }}
func parse{{ $obj.Name }}(d *xml.Decoder, start *xml.StartElement) (*{{ $obj.Name }}, error) {
var (
obj = {{ $obj.Name }}{}
data []byte
)
for {
tok, err := d.Token()
if err != nil {
return nil, err
}
switch t := tok.(type) {
case xml.StartElement:
switch t.Name.Local {
{{- range $fname, $field := $obj.Fields }}
case "{{ $fname }}":
{{ $field.StartAction }}
{{- end }}
default:
// fmt.Println("unknown field", t.Name.Local)
d.Skip()
}
case xml.CharData:
data = append(data, t...)
case xml.EndElement:
if t.Name.Local == start.Name.Local {
return &obj, nil
}
switch t.Name.Local {
{{- range $fname, $field := $obj.Fields }}
case "{{ $fname }}":
{{- if and (eq $fname "type") (not (not $obj.SubTypes)) }}
var err error
switch strcase.ToCamel(string(data)) {
{{- range $sub := $obj.SubTypes }}
case "{{ $sub }}":
obj.Details, err = parse{{ $obj.Name }}{{ $sub }}(d, start)
{{- end }}
default:
d.Skip()
}
if err != nil {
return nil, err
}
return &obj, nil
{{- else }}
{{ $field.EndAction }}
{{- end }}
{{- end }}
default:
// fmt.Println("unknown field", t.Name.Local)
}
}
}
}
{{- end }}
`))
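After this change the analyzer's main package only parses its flags and delegates to the new df package. A minimal sketch of that entry point, assuming the elided flag definitions use the names -a and -g (the flag names and the added error handling are illustrative, not part of the commit):

package main

import (
    "flag"
    "fmt"

    "github.com/robertjanetzko/LegendsBrowser2/analyze/df"
)

func main() {
    // Hypothetical flag names; the real definitions are elided in the hunk above.
    a := flag.String("a", "", "directory of legends XML exports to analyze")
    g := flag.Bool("g", false, "generate model code from analyze.json")
    flag.Parse()

    if len(*a) > 0 {
        if err := df.Analyze(*a); err != nil {
            fmt.Println(err)
        }
    }
    if *g {
        fmt.Println("Generating")
        if err := df.Generate(); err != nil {
            fmt.Println(err)
        }
    }
}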

235
analyze/df/analyze.go Normal file

@@ -0,0 +1,235 @@
package df
import (
"bytes"
"encoding/json"
"fmt"
"go/format"
"io/ioutil"
"os"
"path/filepath"
"sort"
"strings"
"github.com/iancoleman/strcase"
"github.com/robertjanetzko/LegendsBrowser2/backend/util"
)
func Analyze(filex string) error {
fmt.Println("Search...", filex)
files, err := filepath.Glob(filex + "/*.xml")
if err != nil {
return err
}
fmt.Println(files)
a := NewAnalyzeData()
for _, file := range files {
analyze(file, a)
}
return a.Save()
}
func Generate() error {
a, err := LoadAnalyzeData()
if err != nil {
return err
}
return createMetadata(a)
}
var allowedTyped = map[string]bool{
"df_world|historical_events|historical_event": true,
"df_world|historical_event_collections|historical_event_collection": true,
}
func filterSubtypes(data *map[string]*FieldData) []string {
filtered := make(map[string]*FieldData)
for k, v := range *data {
path := strings.Split(k, PATH_SEPARATOR)
for index, seg := range path {
if strings.Contains(seg, "+") {
base := seg[:strings.Index(seg, "+")]
basePath := strings.Join(append(path[:index], base), PATH_SEPARATOR)
if allowedTyped[basePath] {
path[index] = seg
}
}
}
filtered[strings.Join(path, PATH_SEPARATOR)] = v
}
*data = filtered
list := util.Keys(filtered)
sort.Strings(list)
return list
}
func getSubtypes(objectTypes []string, k string) *[]string {
subtypes := make(map[string]bool)
for _, t := range objectTypes {
if strings.HasPrefix(t, k+"+") && !strings.Contains(t[len(k):], PATH_SEPARATOR) {
subtypes[t[strings.LastIndex(t, "+")+1:]] = true
}
}
keys := util.Keys(subtypes)
sort.Strings(keys)
if len(keys) > 0 {
return &keys
}
return nil
}
func getSubtypeOf(k string) *string {
if strings.Contains(k, PATH_SEPARATOR) {
last := k[strings.LastIndex(k, PATH_SEPARATOR)+1:]
if strings.Contains(last, "+") {
base := strcase.ToCamel(last[:strings.Index(last, "+")])
return &base
}
}
return nil
}
func createMetadata(a *AnalyzeData) error {
fs := filterSubtypes(&a.Fields)
var objectTypes []string
for k := range a.Fields {
path := strings.Split(k, PATH_SEPARATOR)
if len(path) >= 2 {
objectTypes = append(objectTypes, strings.Join(path[:len(path)-1], PATH_SEPARATOR))
}
}
objects := make(map[string]Object, 0)
for _, k := range objectTypes {
if ok, _ := isArray(k, fs); !ok {
n := k
if strings.Contains(k, PATH_SEPARATOR) {
n = k[strings.LastIndex(k, PATH_SEPARATOR)+1:]
}
if n == "" {
continue
}
objFields := make(map[string]Field, 0)
for _, f := range fs {
if strings.HasPrefix(f, k+PATH_SEPARATOR) {
fn := f[len(k)+1:]
if !strings.Contains(fn, PATH_SEPARATOR) {
legend := ""
if a.Fields[f].Base && a.Fields[f].Plus {
legend = "both"
} else if a.Fields[f].Base {
legend = "base"
} else if a.Fields[f].Plus {
legend = "plus"
}
field := Field{
Name: strcase.ToCamel(fn),
Type: "int",
Multiple: a.Fields[f].Multiple,
Legend: legend,
}
if ok, elements := isArray(f, fs); ok {
el := elements[strings.LastIndex(elements, PATH_SEPARATOR)+1:]
if _, ok := a.Fields[elements+PATH_SEPARATOR+"id"]; ok {
field.Type = "map"
} else {
field.Type = "array"
}
field.ElementType = &(el)
} else if ok, _ := isObject(f, fs); ok {
field.Type = "object"
} else if a.Fields[f].IsString {
field.Type = "string"
}
objFields[fn] = field
}
}
}
objects[n] = Object{
Name: strcase.ToCamel(n),
Id: a.Fields[k+PATH_SEPARATOR+"id"] != nil,
Named: a.Fields[k+PATH_SEPARATOR+"name"] != nil,
Typed: a.Fields[k+PATH_SEPARATOR+"type"] != nil,
SubTypes: getSubtypes(objectTypes, k),
SubTypeOf: getSubtypeOf(k),
Fields: objFields,
}
}
}
return generateCode(&objects)
}
func generateCode(objects *map[string]Object) error {
file, _ := json.MarshalIndent(objects, "", " ")
_ = ioutil.WriteFile("model.json", file, 0644)
f, err := os.Create("../backend/model/model.go")
if err != nil {
return err
}
defer f.Close()
var buf bytes.Buffer
err = packageTemplate.Execute(&buf, struct {
Objects *map[string]Object
}{
Objects: objects,
})
if err != nil {
return err
}
p, err := format.Source(buf.Bytes())
if err != nil {
return err
}
_, err = f.Write(p)
return err
}
func isArray(typ string, types []string) (bool, string) {
fc := 0
elements := ""
if !strings.Contains(typ, PATH_SEPARATOR) || strings.Contains(typ[strings.LastIndex(typ, PATH_SEPARATOR):], "+") {
return false, ""
}
for _, t := range types {
if !strings.HasPrefix(t, typ+PATH_SEPARATOR) {
continue
}
if strings.Contains(t[len(typ)+1:], PATH_SEPARATOR) {
continue
}
fc++
elements = t
}
return fc == 1, elements
}
func isObject(typ string, types []string) (bool, string) {
fc := 0
for _, t := range types {
if !strings.HasPrefix(t, typ+PATH_SEPARATOR) {
continue
}
fc++
}
return fc > 0, typ
}
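A minimal sketch of how these helpers classify the collected keys; it could sit beside analyze.go as a test, with illustrative paths:

package df

import "testing"

func TestKeyClassification(t *testing.T) {
    keys := []string{
        "df_world|sites",
        "df_world|sites|site",
        "df_world|sites|site|id",
        "df_world|sites|site|name",
    }
    // "df_world|sites" has exactly one kind of direct child, so it is an array wrapper.
    if ok, el := isArray("df_world|sites", keys); !ok || el != "df_world|sites|site" {
        t.Fatalf("isArray = %v, %q", ok, el)
    }
    // "df_world|sites|site" has several direct children, so it counts as an object.
    if ok, _ := isObject("df_world|sites|site", keys); !ok {
        t.Fatal("expected an object")
    }
}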

208
analyze/df/generate.go Normal file

@@ -0,0 +1,208 @@
package df
import (
"fmt"
"text/template"
"github.com/iancoleman/strcase"
)
type Object struct {
Name string `json:"name"`
Id bool `json:"id,omitempty"`
Named bool `json:"named,omitempty"`
Typed bool `json:"typed,omitempty"`
SubTypes *[]string `json:"subtypes,omitempty"`
SubTypeOf *string `json:"subtypeof,omitempty"`
Fields map[string]Field `json:"fields"`
}
type Field struct {
Name string `json:"name"`
Type string `json:"type"`
Multiple bool `json:"multiple,omitempty"`
ElementType *string `json:"elements,omitempty"`
Legend string `json:"legend"`
}
func (f Field) TypeLine(objects map[string]Object) string {
n := f.Name
if n == "Id" || n == "Name" {
n = n + "_"
}
m := ""
if f.Multiple {
m = "[]"
}
t := f.Type
if f.Type == "array" {
t = "[]*" + objects[*f.ElementType].Name
}
if f.Type == "map" {
t = "map[int]*" + objects[*f.ElementType].Name
}
if f.Type == "object" {
t = "*" + f.Name
}
j := fmt.Sprintf("`json:\"%s\" legend:\"%s\"`", strcase.ToLowerCamel(f.Name), f.Legend)
return fmt.Sprintf("%s %s%s %s", n, m, t, j)
}
func (f Field) StartAction() string {
n := f.Name
if n == "Id" || n == "Name" {
n = n + "_"
}
if f.Type == "object" {
p := fmt.Sprintf("v, _ := parse%s(d, &t)", f.Name)
if !f.Multiple {
return fmt.Sprintf("%s\nobj.%s = v", p, n)
} else {
return fmt.Sprintf("%s\nobj.%s = append(obj.%s, v)", p, n, n)
}
}
if f.Type == "array" || f.Type == "map" {
el := strcase.ToCamel(*f.ElementType)
gen := fmt.Sprintf("parse%s", el)
if f.Type == "array" {
return fmt.Sprintf("parseArray(d, &obj.%s, %s)", f.Name, gen)
}
if f.Type == "map" {
return fmt.Sprintf("obj.%s = make(map[int]*%s)\nparseMap(d, &obj.%s, %s)", f.Name, el, f.Name, gen)
}
}
if f.Type == "int" || f.Type == "string" {
return "data = nil"
}
return ""
}
func (f Field) EndAction() string {
n := f.Name
if n == "Id" || n == "Name" {
n = n + "_"
}
if !f.Multiple {
if f.Type == "int" {
return fmt.Sprintf("obj.%s = n(data)", n)
} else if f.Type == "string" {
return fmt.Sprintf("obj.%s = string(data)", n)
}
} else {
if f.Type == "int" {
return fmt.Sprintf("obj.%s = append(obj.%s, n(data))", n, n)
} else if f.Type == "string" {
return fmt.Sprintf("obj.%s = append(obj.%s, string(data))", n, n)
}
}
return ""
}
var packageTemplate = template.Must(template.New("").Parse(`// Code generated by legendsbrowser; DO NOT EDIT.
package model
import (
"encoding/xml"
"strconv"
"github.com/iancoleman/strcase"
)
{{- range $name, $obj := .Objects }}
type {{ $obj.Name }} struct {
{{- range $fname, $field := $obj.Fields }}
{{- if not (and (eq $fname "type") (not (not $obj.SubTypes))) }}
{{ $field.TypeLine $.Objects }}
{{- end }}
{{- end }}
{{- if not (not $obj.SubTypes) }}
Details any
{{- end }}
}
{{- if $obj.Id }}
func (x *{{ $obj.Name }}) Id() int { return x.Id_ }
{{- end }}
{{- if $obj.Named }}
func (x *{{ $obj.Name }}) Name() string { return x.Name_ }
{{- end }}
{{- end }}
// Parser
func n(d []byte) int {
v, _ := strconv.Atoi(string(d))
return v
}
{{- range $name, $obj := .Objects }}
func parse{{ $obj.Name }}(d *xml.Decoder, start *xml.StartElement) (*{{ $obj.Name }}, error) {
var (
obj = {{ $obj.Name }}{}
data []byte
)
for {
tok, err := d.Token()
if err != nil {
return nil, err
}
switch t := tok.(type) {
case xml.StartElement:
switch t.Name.Local {
{{- range $fname, $field := $obj.Fields }}
case "{{ $fname }}":
{{ $field.StartAction }}
{{- end }}
default:
// fmt.Println("unknown field", t.Name.Local)
d.Skip()
}
case xml.CharData:
data = append(data, t...)
case xml.EndElement:
if t.Name.Local == start.Name.Local {
return &obj, nil
}
switch t.Name.Local {
{{- range $fname, $field := $obj.Fields }}
case "{{ $fname }}":
{{- if and (eq $fname "type") (not (not $obj.SubTypes)) }}
var err error
switch strcase.ToCamel(string(data)) {
{{- range $sub := $obj.SubTypes }}
case "{{ $sub }}":
obj.Details, err = parse{{ $obj.Name }}{{ $sub }}(d, start)
{{- end }}
default:
d.Skip()
}
if err != nil {
return nil, err
}
return &obj, nil
{{- else }}
{{ $field.EndAction }}
{{- end }}
{{- end }}
default:
// fmt.Println("unknown field", t.Name.Local)
}
}
}
}
{{- end }}
`))
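A hypothetical excerpt of the model package this template produces (object and field names are illustrative; the legend tag records whether a field was seen in the base export, the _plus export, or both):

// Code generated by legendsbrowser; DO NOT EDIT.
package model

type Structure struct {
    Id_   int    `json:"id" legend:"base"`
    Name_ string `json:"name" legend:"both"`
}

func (x *Structure) Id() int      { return x.Id_ }
func (x *Structure) Name() string { return x.Name_ }

type Site struct {
    Coords     string             `json:"coords" legend:"base"`
    Id_        int                `json:"id" legend:"both"`
    Name_      string             `json:"name" legend:"both"`
    Structures map[int]*Structure `json:"structures" legend:"base"`
}

func (x *Site) Id() int      { return x.Id_ }
func (x *Site) Name() string { return x.Name_ }

Each object also gets a parse function (parseSite, parseStructure, ...) following the decoder loop in the template above.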

143
analyze/df/structure.go Normal file

@@ -0,0 +1,143 @@
package df
import (
"encoding/json"
"encoding/xml"
"fmt"
"io"
"io/ioutil"
"os"
"strconv"
"strings"
"github.com/iancoleman/strcase"
"github.com/robertjanetzko/LegendsBrowser2/backend/util"
)
type FieldData struct {
IsString bool
Multiple bool
Base bool
Plus bool
}
func NewFieldData() *FieldData {
return &FieldData{}
}
type AnalyzeData struct {
Fields map[string]*FieldData
}
func NewAnalyzeData() *AnalyzeData {
return &AnalyzeData{
Fields: make(map[string]*FieldData, 0),
}
}
func (a *AnalyzeData) Save() error {
file, err := json.MarshalIndent(a, "", " ")
if err != nil {
return err
}
return ioutil.WriteFile("analyze.json", file, 0644)
}
func LoadAnalyzeData() (*AnalyzeData, error) {
data, err := ioutil.ReadFile("analyze.json")
if err != nil {
return nil, err
}
a := NewAnalyzeData()
json.Unmarshal(data, a)
return a, nil
}
func (a *AnalyzeData) GetField(s string) *FieldData {
if f, ok := a.Fields[s]; ok {
return f
} else {
f := &FieldData{}
a.Fields[s] = f
return f
}
}
func analyze(file string, a *AnalyzeData) error {
xmlFile, err := os.Open(file)
if err != nil {
fmt.Println(err)
}
plus := strings.HasSuffix(file, "_plus.xml")
fmt.Println("Successfully Opened", file)
defer xmlFile.Close()
converter := util.NewConvertReader(xmlFile)
return analyzeElement(xml.NewDecoder(converter), a, make([]string, 0), plus)
}
const PATH_SEPARATOR = "|"
func analyzeElement(d *xml.Decoder, a *AnalyzeData, path []string, plus bool) error {
if len(path) > 1 {
s := strings.Join(path, PATH_SEPARATOR)
fd := NewFieldData()
a.Fields[s] = fd
if plus {
fd.Plus = true
} else {
fd.Base = true
}
}
var (
data []byte
)
value := true
fields := make(map[string]bool)
Loop:
for {
tok, err := d.Token()
if err == io.EOF {
break Loop
} else if err != nil {
return err
}
switch t := tok.(type) {
case xml.StartElement:
value = false
newPath := append(path, t.Name.Local)
if _, ok := fields[t.Name.Local]; ok {
a.Fields[strings.Join(newPath, PATH_SEPARATOR)].Multiple = true
}
fields[t.Name.Local] = true
analyzeElement(d, a, newPath, plus)
case xml.CharData:
data = append(data, t...)
case xml.EndElement:
if value {
if _, err := strconv.Atoi(string(data)); err != nil {
a.Fields[strings.Join(path, PATH_SEPARATOR)].IsString = true
}
}
if t.Name.Local == "type" {
path[len(path)-2] = path[len(path)-2] + "+" + strcase.ToCamel(string(data))
}
return nil
}
}
return nil
}
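The keys built by analyzeElement join the element path with "|", and when a type element closes its CamelCased value is appended to the enclosing segment after a "+", so typed events are recorded separately. A hypothetical helper that dumps the collected keys through the exported API above (the example paths in the comments are illustrative):

package main

import (
    "fmt"
    "sort"

    "github.com/robertjanetzko/LegendsBrowser2/analyze/df"
)

func main() {
    // LoadAnalyzeData reads analyze.json from the working directory.
    a, err := df.LoadAnalyzeData()
    if err != nil {
        fmt.Println(err)
        return
    }
    var keys []string
    for k := range a.Fields {
        keys = append(keys, k)
    }
    sort.Strings(keys)
    for _, k := range keys {
        // e.g. df_world|sites|site|name
        //      df_world|historical_events|historical_event+ChangeHfState|hfid
        fmt.Println(k)
    }
}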


@@ -3,3 +3,5 @@ module github.com/robertjanetzko/LegendsBrowser2/analyze
go 1.18
require github.com/iancoleman/strcase v0.2.0
require github.com/robertjanetzko/LegendsBrowser2/backend v0.0.0-20220414135947-77b720f8d215
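The analyze module now pulls the backend module in by pseudo-version. When both modules are edited side by side in this repository, a replace directive in analyze/go.mod is one way to point that requirement at the local checkout; this is a hypothetical addition, not part of the commit:

replace github.com/robertjanetzko/LegendsBrowser2/backend => ../backend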


@@ -0,0 +1,4 @@
github.com/iancoleman/strcase v0.2.0 h1:05I4QRnGpI0m37iZQRuskXh+w77mr6Z41lwQzuHLwW0=
github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
github.com/robertjanetzko/LegendsBrowser2/backend v0.0.0-20220414135947-77b720f8d215 h1:i23QGS93i7zoDyarU2dQLYDYxSo8MkatzhISsRQuljM=
github.com/robertjanetzko/LegendsBrowser2/backend v0.0.0-20220414135947-77b720f8d215/go.mod h1:b6NU94RVWS7nz92lGiifeWN1cLV2EeNSzx0Oop+Ma54=

File diff suppressed because it is too large


@@ -3,8 +3,6 @@ package main
import (
"flag"
"fmt"
"legendsbrowser/df"
"legendsbrowser/server"
"net/http"
_ "net/http/pprof"
"os"
@@ -12,9 +10,11 @@ import (
"github.com/gorilla/mux"
"github.com/pkg/profile"
"github.com/robertjanetzko/LegendsBrowser2/backend/model"
"github.com/robertjanetzko/LegendsBrowser2/backend/server"
)
var world *df.DfWorld
var world *model.DfWorld
func main() {
f := flag.String("f", "", "open a file")
@@ -26,7 +26,7 @@ func main() {
http.ListenAndServe(":8081", nil)
}()
w, err := df.Parse(*f)
w, err := model.Parse(*f)
if err != nil {
fmt.Println(err)
os.Exit(1)


@@ -1,88 +0,0 @@
package model
import (
"fmt"
"legendsbrowser/util"
"sort"
"strconv"
"strings"
"github.com/iancoleman/strcase"
)
func ListOtherElements[T Others](name string, items *[]T) {
fmt.Println()
fmt.Println(name)
fmt.Println()
m := make(map[string]map[string]bool)
cantInt := make(map[string]bool)
isObj := make(map[string]bool)
isMultiple := make(map[string]bool)
for _, item := range *items {
found := make(map[string]bool)
for _, el := range item.Others() {
t := name // item.Type()
if !m[el.XMLName.Local][t] {
if m[el.XMLName.Local] == nil {
m[el.XMLName.Local] = map[string]bool{}
}
m[el.XMLName.Local][t] = true
}
_, err := strconv.Atoi(el.Value)
if err != nil {
cantInt[el.XMLName.Local] = true
}
if strings.Contains(el.Value, "<") {
isObj[el.XMLName.Local] = true
}
if found[el.XMLName.Local] {
isMultiple[el.XMLName.Local] = true
}
found[el.XMLName.Local] = true
}
}
ks := util.Keys(m)
sort.Strings(ks)
for _, k := range ks {
events := util.Keys(m[k])
sort.Strings(events)
// fmt.Println(strconv.FormatBool(cantInt[k]) + " - " + k + ": " + strings.Join(events, ", "))
var mult string
if isMultiple[k] {
mult = "[]"
} else {
mult = ""
}
if isObj[k] {
fmt.Printf("// %s object\n", k)
} else if cantInt[k] {
fmt.Printf("%s *%sstring `xml:\"%s\" json:\"%s,omitempty\"`\n", strcase.ToCamel(k), mult, k, strcase.ToLowerCamel(k))
} else {
var types []string
if util.ContainsAny(k, "entity_id", "enid", "civ_id", "entity_1", "entity_2") {
types = append(types, "entity")
}
if util.ContainsAny(k, "site_id") {
types = append(types, "site")
}
if util.ContainsAny(k, "structure_id") {
types = append(types, "structure")
}
if util.ContainsAny(k, "hfid", "hist_figure_id", "hist_fig_id") {
types = append(types, "hf")
}
if util.ContainsAny(k, "wcid", "wc_id") {
types = append(types, "wc")
}
if util.ContainsAny(k, "artifact_id") {
types = append(types, "artifact")
}
typestr := strings.Join(types, ",")
if typestr != "" {
typestr = fmt.Sprintf(" legend:\"%s\"", typestr)
}
fmt.Printf("%s *%sint `xml:\"%s\" json:\"%s,omitempty\"%s`\n", strcase.ToCamel(k), mult, k, strcase.ToLowerCamel(k), typestr)
}
}
}


@@ -1,242 +0,0 @@
package model
type HistoricalEvent struct {
Id_ int `xml:"id" json:"id"`
Year int `xml:"year" json:"year"`
Seconds int `xml:"seconds72" json:"seconds72"`
TypedObject
ASupportMercEnid *int `xml:"a_support_merc_enid" json:"aSupportMercEnid,omitempty" legend:"entity"`
AccountShift *int `xml:"account_shift" json:"accountShift,omitempty"`
AcquirerEnid *int `xml:"acquirer_enid" json:"acquirerEnid,omitempty" legend:"entity"`
AcquirerHfid *int `xml:"acquirer_hfid" json:"acquirerHfid,omitempty" legend:"hf"`
Action *string `xml:"action" json:"action,omitempty"`
ActorHfid *int `xml:"actor_hfid" json:"actorHfid,omitempty" legend:"hf"`
AgreementId *int `xml:"agreement_id" json:"agreementId,omitempty"`
Allotment *int `xml:"allotment" json:"allotment,omitempty"`
AllotmentIndex *int `xml:"allotment_index" json:"allotmentIndex,omitempty"`
AllyDefenseBonus *int `xml:"ally_defense_bonus" json:"allyDefenseBonus,omitempty"`
AppointerHfid *int `xml:"appointer_hfid" json:"appointerHfid,omitempty" legend:"hf"`
ArrestingEnid *int `xml:"arresting_enid" json:"arrestingEnid,omitempty" legend:"entity"`
ArtifactId *int `xml:"artifact_id" json:"artifactId,omitempty" legend:"artifact"`
AttackerCivId *int `xml:"attacker_civ_id" json:"attackerCivId,omitempty" legend:"entity"`
AttackerGeneralHfid *int `xml:"attacker_general_hfid" json:"attackerGeneralHfid,omitempty" legend:"hf"`
AttackerHfid *int `xml:"attacker_hfid" json:"attackerHfid,omitempty" legend:"hf"`
AttackerMercEnid *int `xml:"attacker_merc_enid" json:"attackerMercEnid,omitempty" legend:"entity"`
BodyState *string `xml:"body_state" json:"bodyState,omitempty"`
BuilderHfid *int `xml:"builder_hfid" json:"builderHfid,omitempty" legend:"hf"`
BuildingProfileId *int `xml:"building_profile_id" json:"buildingProfileId,omitempty"`
Cause *string `xml:"cause" json:"cause,omitempty"`
ChangeeHfid *int `xml:"changee_hfid" json:"changeeHfid,omitempty" legend:"hf"`
ChangerHfid *int `xml:"changer_hfid" json:"changerHfid,omitempty" legend:"hf"`
Circumstance *string `xml:"circumstance" json:"circumstance,omitempty"`
CircumstanceId *int `xml:"circumstance_id" json:"circumstanceId,omitempty"`
CivEntityId *int `xml:"civ_entity_id" json:"civEntityId,omitempty" legend:"entity"`
CivId *int `xml:"civ_id" json:"civId,omitempty" legend:"entity"`
Claim *string `xml:"claim" json:"claim,omitempty"`
CoconspiratorBonus *int `xml:"coconspirator_bonus" json:"coconspiratorBonus,omitempty"`
CompetitorHfid *[]int `xml:"competitor_hfid" json:"competitorHfid,omitempty" legend:"hf"`
ConfessedAfterApbArrestEnid *int `xml:"confessed_after_apb_arrest_enid" json:"confessedAfterApbArrestEnid,omitempty" legend:"entity"`
ConspiratorHfid *[]int `xml:"conspirator_hfid" json:"conspiratorHfid,omitempty" legend:"hf"`
ContactHfid *int `xml:"contact_hfid" json:"contactHfid,omitempty" legend:"hf"`
ConvictIsContact *string `xml:"convict_is_contact" json:"convictIsContact,omitempty"`
ConvictedHfid *int `xml:"convicted_hfid" json:"convictedHfid,omitempty" legend:"hf"`
ConvicterEnid *int `xml:"convicter_enid" json:"convicterEnid,omitempty" legend:"entity"`
Coords *string `xml:"coords" json:"coords,omitempty"`
CorruptConvicterHfid *int `xml:"corrupt_convicter_hfid" json:"corruptConvicterHfid,omitempty" legend:"hf"`
CorruptorHfid *int `xml:"corruptor_hfid" json:"corruptorHfid,omitempty" legend:"hf"`
CorruptorIdentity *int `xml:"corruptor_identity" json:"corruptorIdentity,omitempty"`
CorruptorSeenAs *string `xml:"corruptor_seen_as" json:"corruptorSeenAs,omitempty"`
CreatorHfid *int `xml:"creator_hfid" json:"creatorHfid,omitempty" legend:"hf"`
Crime *string `xml:"crime" json:"crime,omitempty"`
DSupportMercEnid *int `xml:"d_support_merc_enid" json:"dSupportMercEnid,omitempty" legend:"entity"`
DeathPenalty *string `xml:"death_penalty" json:"deathPenalty,omitempty"`
DefenderCivId *int `xml:"defender_civ_id" json:"defenderCivId,omitempty" legend:"entity"`
DefenderGeneralHfid *int `xml:"defender_general_hfid" json:"defenderGeneralHfid,omitempty" legend:"hf"`
DefenderMercEnid *int `xml:"defender_merc_enid" json:"defenderMercEnid,omitempty" legend:"entity"`
Delegated *string `xml:"delegated" json:"delegated,omitempty"`
DestEntityId *int `xml:"dest_entity_id" json:"destEntityId,omitempty" legend:"entity"`
DestSiteId *int `xml:"dest_site_id" json:"destSiteId,omitempty" legend:"site"`
DestStructureId *int `xml:"dest_structure_id" json:"destStructureId,omitempty" legend:"structure"`
DestroyedStructureId *int `xml:"destroyed_structure_id" json:"destroyedStructureId,omitempty" legend:"structure"`
DestroyerEnid *int `xml:"destroyer_enid" json:"destroyerEnid,omitempty" legend:"entity"`
Detected *string `xml:"detected" json:"detected,omitempty"`
DidNotRevealAllInInterrogation *string `xml:"did_not_reveal_all_in_interrogation" json:"didNotRevealAllInInterrogation,omitempty"`
Dispute *string `xml:"dispute" json:"dispute,omitempty"`
DoerHfid *int `xml:"doer_hfid" json:"doerHfid,omitempty" legend:"hf"`
Entity1 *int `xml:"entity_1" json:"entity1,omitempty" legend:"entity"`
Entity2 *int `xml:"entity_2" json:"entity2,omitempty" legend:"entity"`
EntityId *int `xml:"entity_id" json:"entityId,omitempty" legend:"entity"`
EntityId1 *int `xml:"entity_id_1" json:"entityId1,omitempty" legend:"entity"`
EntityId2 *int `xml:"entity_id_2" json:"entityId2,omitempty" legend:"entity"`
Exiled *string `xml:"exiled" json:"exiled,omitempty"`
ExpelledCreature *[]int `xml:"expelled_creature" json:"expelledCreature,omitempty"`
ExpelledHfid *[]int `xml:"expelled_hfid" json:"expelledHfid,omitempty" legend:"hf"`
ExpelledNumber *[]int `xml:"expelled_number" json:"expelledNumber,omitempty"`
ExpelledPopId *[]int `xml:"expelled_pop_id" json:"expelledPopId,omitempty"`
FailedJudgmentTest *string `xml:"failed_judgment_test" json:"failedJudgmentTest,omitempty"`
FeatureLayerId *int `xml:"feature_layer_id" json:"featureLayerId,omitempty"`
First *string `xml:"first" json:"first,omitempty"`
FooledHfid *int `xml:"fooled_hfid" json:"fooledHfid,omitempty" legend:"hf"`
FormId *int `xml:"form_id" json:"formId,omitempty"`
FramerHfid *int `xml:"framer_hfid" json:"framerHfid,omitempty" legend:"hf"`
FromOriginal *string `xml:"from_original" json:"fromOriginal,omitempty"`
GamblerHfid *int `xml:"gambler_hfid" json:"gamblerHfid,omitempty" legend:"hf"`
GiverEntityId *int `xml:"giver_entity_id" json:"giverEntityId,omitempty" legend:"entity"`
GiverHistFigureId *int `xml:"giver_hist_figure_id" json:"giverHistFigureId,omitempty" legend:"hf"`
Group1Hfid *int `xml:"group_1_hfid" json:"group1Hfid,omitempty" legend:"hf"`
Group2Hfid *[]int `xml:"group_2_hfid" json:"group2Hfid,omitempty" legend:"hf"`
GroupHfid *[]int `xml:"group_hfid" json:"groupHfid,omitempty" legend:"hf"`
HeldFirmInInterrogation *string `xml:"held_firm_in_interrogation" json:"heldFirmInInterrogation,omitempty"`
HfRep1Of2 *string `xml:"hf_rep_1_of_2" json:"hfRep1Of2,omitempty"`
HfRep2Of1 *string `xml:"hf_rep_2_of_1" json:"hfRep2Of1,omitempty"`
Hfid *[]int `xml:"hfid" json:"hfid,omitempty" legend:"hf"`
Hfid1 *int `xml:"hfid1" json:"hfid1,omitempty" legend:"hf"`
Hfid2 *int `xml:"hfid2" json:"hfid2,omitempty" legend:"hf"`
HfidTarget *int `xml:"hfid_target" json:"hfidTarget,omitempty" legend:"hf"`
HistFigId *int `xml:"hist_fig_id" json:"histFigId,omitempty" legend:"hf"`
HistFigureId *int `xml:"hist_figure_id" json:"histFigureId,omitempty" legend:"hf"`
HonorId *int `xml:"honor_id" json:"honorId,omitempty"`
IdentityId *int `xml:"identity_id" json:"identityId,omitempty" legend:"entity"`
IdentityId1 *int `xml:"identity_id1" json:"identityId1,omitempty" legend:"entity"`
IdentityId2 *int `xml:"identity_id2" json:"identityId2,omitempty" legend:"entity"`
ImplicatedHfid *[]int `xml:"implicated_hfid" json:"implicatedHfid,omitempty" legend:"hf"`
Inherited *string `xml:"inherited" json:"inherited,omitempty"`
InitiatingEnid *int `xml:"initiating_enid" json:"initiatingEnid,omitempty" legend:"entity"`
InstigatorHfid *int `xml:"instigator_hfid" json:"instigatorHfid,omitempty" legend:"hf"`
Interaction *string `xml:"interaction" json:"interaction,omitempty"`
InterrogatorHfid *int `xml:"interrogator_hfid" json:"interrogatorHfid,omitempty" legend:"hf"`
JoinEntityId *int `xml:"join_entity_id" json:"joinEntityId,omitempty" legend:"entity"`
JoinedEntityId *int `xml:"joined_entity_id" json:"joinedEntityId,omitempty" legend:"entity"`
JoinerEntityId *int `xml:"joiner_entity_id" json:"joinerEntityId,omitempty" legend:"entity"`
JoiningEnid *[]int `xml:"joining_enid" json:"joiningEnid,omitempty" legend:"entity"`
Knowledge *string `xml:"knowledge" json:"knowledge,omitempty"`
LastOwnerHfid *int `xml:"last_owner_hfid" json:"lastOwnerHfid,omitempty" legend:"hf"`
LeaderHfid *int `xml:"leader_hfid" json:"leaderHfid,omitempty" legend:"hf"`
Link *string `xml:"link" json:"link,omitempty"`
LureHfid *int `xml:"lure_hfid" json:"lureHfid,omitempty" legend:"hf"`
MasterWcid *int `xml:"master_wcid" json:"masterWcid,omitempty" legend:"wc"`
Method *string `xml:"method" json:"method,omitempty"`
Modification *string `xml:"modification" json:"modification,omitempty"`
ModifierHfid *int `xml:"modifier_hfid" json:"modifierHfid,omitempty" legend:"hf"`
Mood *string `xml:"mood" json:"mood,omitempty"`
NameOnly *string `xml:"name_only" json:"nameOnly,omitempty"`
NewAbId *int `xml:"new_ab_id" json:"newAbId,omitempty"`
NewAccount *int `xml:"new_account" json:"newAccount,omitempty"`
NewCaste *string `xml:"new_caste" json:"newCaste,omitempty"`
NewEquipmentLevel *int `xml:"new_equipment_level" json:"newEquipmentLevel,omitempty"`
NewLeaderHfid *int `xml:"new_leader_hfid" json:"newLeaderHfid,omitempty" legend:"hf"`
NewRace *string `xml:"new_race" json:"newRace,omitempty"`
NewSiteCivId *int `xml:"new_site_civ_id" json:"newSiteCivId,omitempty" legend:"entity"`
OccasionId *int `xml:"occasion_id" json:"occasionId,omitempty"`
OldAbId *int `xml:"old_ab_id" json:"oldAbId,omitempty"`
OldAccount *int `xml:"old_account" json:"oldAccount,omitempty"`
OldCaste *string `xml:"old_caste" json:"oldCaste,omitempty"`
OldRace *string `xml:"old_race" json:"oldRace,omitempty"`
OverthrownHfid *int `xml:"overthrown_hfid" json:"overthrownHfid,omitempty" legend:"hf"`
PartialIncorporation *string `xml:"partial_incorporation" json:"partialIncorporation,omitempty"`
PersecutorEnid *int `xml:"persecutor_enid" json:"persecutorEnid,omitempty" legend:"entity"`
PersecutorHfid *int `xml:"persecutor_hfid" json:"persecutorHfid,omitempty" legend:"hf"`
PlotterHfid *int `xml:"plotter_hfid" json:"plotterHfid,omitempty" legend:"hf"`
PopFlid *int `xml:"pop_flid" json:"popFlid,omitempty"`
PopNumberMoved *int `xml:"pop_number_moved" json:"popNumberMoved,omitempty"`
PopRace *int `xml:"pop_race" json:"popRace,omitempty"`
PopSrid *int `xml:"pop_srid" json:"popSrid,omitempty"`
PosTakerHfid *int `xml:"pos_taker_hfid" json:"posTakerHfid,omitempty" legend:"hf"`
PositionId *int `xml:"position_id" json:"positionId,omitempty"`
PositionProfileId *int `xml:"position_profile_id" json:"positionProfileId,omitempty"`
PrisonMonths *int `xml:"prison_months" json:"prisonMonths,omitempty"`
ProductionZoneId *int `xml:"production_zone_id" json:"productionZoneId,omitempty"`
PromiseToHfid *int `xml:"promise_to_hfid" json:"promiseToHfid,omitempty" legend:"hf"`
PropertyConfiscatedFromHfid *[]int `xml:"property_confiscated_from_hfid" json:"propertyConfiscatedFromHfid,omitempty" legend:"hf"`
PurchasedUnowned *string `xml:"purchased_unowned" json:"purchasedUnowned,omitempty"`
Quality *int `xml:"quality" json:"quality,omitempty"`
Reason *string `xml:"reason" json:"reason,omitempty"`
ReasonId *int `xml:"reason_id" json:"reasonId,omitempty"`
RebuiltRuined *string `xml:"rebuilt_ruined" json:"rebuiltRuined,omitempty"`
ReceiverEntityId *int `xml:"receiver_entity_id" json:"receiverEntityId,omitempty" legend:"entity"`
ReceiverHistFigureId *int `xml:"receiver_hist_figure_id" json:"receiverHistFigureId,omitempty" legend:"hf"`
Relationship *string `xml:"relationship" json:"relationship,omitempty"`
RelevantEntityId *int `xml:"relevant_entity_id" json:"relevantEntityId,omitempty" legend:"entity"`
RelevantIdForMethod *int `xml:"relevant_id_for_method" json:"relevantIdForMethod,omitempty"`
RelevantPositionProfileId *int `xml:"relevant_position_profile_id" json:"relevantPositionProfileId,omitempty"`
ReligionId *int `xml:"religion_id" json:"religionId,omitempty"`
ResidentCivId *int `xml:"resident_civ_id" json:"residentCivId,omitempty" legend:"entity"`
Return *string `xml:"return" json:"return,omitempty"`
ScheduleId *int `xml:"schedule_id" json:"scheduleId,omitempty"`
SecretGoal *string `xml:"secret_goal" json:"secretGoal,omitempty"`
SeekerHfid *int `xml:"seeker_hfid" json:"seekerHfid,omitempty" legend:"hf"`
ShrineAmountDestroyed *int `xml:"shrine_amount_destroyed" json:"shrineAmountDestroyed,omitempty"`
SiteCivId *int `xml:"site_civ_id" json:"siteCivId,omitempty" legend:"entity"`
SiteEntityId *int `xml:"site_entity_id" json:"siteEntityId,omitempty" legend:"entity"`
SiteHfid *int `xml:"site_hfid" json:"siteHfid,omitempty" legend:"hf"`
SiteId *int `xml:"site_id" json:"siteId,omitempty" legend:"site"`
SiteId1 *int `xml:"site_id1" json:"siteId1,omitempty" legend:"site"`
SiteId2 *int `xml:"site_id2" json:"siteId2,omitempty" legend:"site"`
SiteId_1 *int `xml:"site_id_1" json:"siteId_1,omitempty" legend:"site"`
SiteId_2 *int `xml:"site_id_2" json:"siteId_2,omitempty" legend:"site"`
SitePropertyId *int `xml:"site_property_id" json:"sitePropertyId,omitempty"`
Situation *string `xml:"situation" json:"situation,omitempty"`
SlayerCaste *string `xml:"slayer_caste" json:"slayerCaste,omitempty"`
SlayerHfid *int `xml:"slayer_hfid" json:"slayerHfid,omitempty" legend:"hf"`
SlayerItemId *int `xml:"slayer_item_id" json:"slayerItemId,omitempty"`
SlayerRace *string `xml:"slayer_race" json:"slayerRace,omitempty"`
SlayerShooterItemId *int `xml:"slayer_shooter_item_id" json:"slayerShooterItemId,omitempty"`
SnatcherHfid *int `xml:"snatcher_hfid" json:"snatcherHfid,omitempty" legend:"hf"`
SourceEntityId *int `xml:"source_entity_id" json:"sourceEntityId,omitempty" legend:"entity"`
SourceSiteId *int `xml:"source_site_id" json:"sourceSiteId,omitempty" legend:"site"`
SourceStructureId *int `xml:"source_structure_id" json:"sourceStructureId,omitempty" legend:"structure"`
SpeakerHfid *int `xml:"speaker_hfid" json:"speakerHfid,omitempty" legend:"hf"`
State *string `xml:"state" json:"state,omitempty"`
StructureId *int `xml:"structure_id" json:"structureId,omitempty" legend:"structure"`
StudentHfid *int `xml:"student_hfid" json:"studentHfid,omitempty" legend:"hf"`
SubregionId *int `xml:"subregion_id" json:"subregionId,omitempty"`
Subtype *string `xml:"subtype" json:"subtype,omitempty"`
Successful *string `xml:"successful" json:"successful,omitempty"`
SurveiledContact *string `xml:"surveiled_contact" json:"surveiledContact,omitempty"`
SurveiledConvicted *string `xml:"surveiled_convicted" json:"surveiledConvicted,omitempty"`
TargetEnid *int `xml:"target_enid" json:"targetEnid,omitempty" legend:"entity"`
TargetHfid *int `xml:"target_hfid" json:"targetHfid,omitempty" legend:"hf"`
TargetIdentity *int `xml:"target_identity" json:"targetIdentity,omitempty"`
TargetSeenAs *string `xml:"target_seen_as" json:"targetSeenAs,omitempty"`
TeacherHfid *int `xml:"teacher_hfid" json:"teacherHfid,omitempty" legend:"hf"`
TopFacet *string `xml:"top_facet" json:"topFacet,omitempty"`
TopFacetModifier *int `xml:"top_facet_modifier" json:"topFacetModifier,omitempty"`
TopFacetRating *int `xml:"top_facet_rating" json:"topFacetRating,omitempty"`
TopRelationshipFactor *string `xml:"top_relationship_factor" json:"topRelationshipFactor,omitempty"`
TopRelationshipModifier *int `xml:"top_relationship_modifier" json:"topRelationshipModifier,omitempty"`
TopRelationshipRating *int `xml:"top_relationship_rating" json:"topRelationshipRating,omitempty"`
TopValue *string `xml:"top_value" json:"topValue,omitempty"`
TopValueModifier *int `xml:"top_value_modifier" json:"topValueModifier,omitempty"`
TopValueRating *int `xml:"top_value_rating" json:"topValueRating,omitempty"`
Topic *string `xml:"topic" json:"topic,omitempty"`
TraderEntityId *int `xml:"trader_entity_id" json:"traderEntityId,omitempty" legend:"entity"`
TraderHfid *int `xml:"trader_hfid" json:"traderHfid,omitempty" legend:"hf"`
TricksterHfid *int `xml:"trickster_hfid" json:"tricksterHfid,omitempty" legend:"hf"`
UnitId *int `xml:"unit_id" json:"unitId,omitempty"`
UnitType *string `xml:"unit_type" json:"unitType,omitempty"`
WantedAndRecognized *string `xml:"wanted_and_recognized" json:"wantedAndRecognized,omitempty"`
WcId *int `xml:"wc_id" json:"wcId,omitempty" legend:"wc"`
Wcid *int `xml:"wcid" json:"wcid,omitempty" legend:"wc"`
WinnerHfid *int `xml:"winner_hfid" json:"winnerHfid,omitempty" legend:"hf"`
WoundeeHfid *int `xml:"woundee_hfid" json:"woundeeHfid,omitempty" legend:"hf"`
WounderHfid *int `xml:"wounder_hfid" json:"wounderHfid,omitempty" legend:"hf"`
WrongfulConviction *string `xml:"wrongful_conviction" json:"wrongfulConviction,omitempty"`
//OtherElements
}
func (r *HistoricalEvent) Id() int { return r.Id_ }
func (r *HistoricalEvent) Name() string { return r.Type() }
type EventObject struct {
Events []*HistoricalEvent `json:"events"`
}
func (r *EventObject) GetEvents() []*HistoricalEvent { return r.Events }
func (r *EventObject) SetEvents(events []*HistoricalEvent) { r.Events = events }
type HasEvents interface {
GetEvents() []*HistoricalEvent
SetEvents([]*HistoricalEvent)
}

File diff suppressed because it is too large


@@ -1,16 +1,13 @@
package df
package model
import (
"encoding/xml"
"fmt"
"legendsbrowser/util"
"os"
"github.com/robertjanetzko/LegendsBrowser2/backend/util"
)
// type DfWorld struct{}
// func parseDfWorld(d *xml.Decoder, start *xml.StartElement) (*DfWorld, error) { return nil, nil }
func (e *HistoricalEvent) Name() string { return "" }
func (e *HistoricalEventCollection) Name() string { return "" }
@@ -38,20 +35,6 @@ func Parse(file string) (*DfWorld, error) {
}
}
}
// return nil, errors.New("Fehler!")
}
type Identifiable interface {
Id() int
}
type Parsable interface {
Parse(d *xml.Decoder, start *xml.StartElement) error
}
type IdentifiableParsable interface {
Identifiable
Parsable
}
func parseArray[T any](d *xml.Decoder, dest *[]T, creator func(*xml.Decoder, *xml.StartElement) (T, error)) {


@@ -1,293 +0,0 @@
package model
import (
"encoding/xml"
"errors"
"fmt"
"legendsbrowser/util"
"os"
"reflect"
"strings"
)
type World struct {
XMLName xml.Name `xml:"df_world"`
Name string `xml:"name"`
AltName string `xml:"altname"`
OtherElements
RegionMap map[int]*Region `xml:"regions>region"`
UndergroundRegionMap map[int]*UndergroundRegion `xml:"underground_regions>underground_region"`
LandmassMap map[int]*Landmass `xml:"landmasses>landmass"`
SiteMap map[int]*Site `xml:"sites>site"`
WorldConstructionMap map[int]*WorldConstruction `xml:"world_constructions>world_construction"`
ArtifactMap map[int]*Artifact `xml:"artifacts>artifact"`
HistoricalFigureMap map[int]*HistoricalFigure `xml:"historical_figures>historical_figure"`
HistoricalEventMap map[int]*HistoricalEvent `xml:"historical_events>historical_event"`
HistoricalEventCollectionMap map[int]*HistoricalEventCollection `xml:"historical_event_collections>historical_event_collection"`
HistoricalEraMap map[int]*HistoricalEra `xml:"historical_eras>historical_era"`
EntityMap map[int]*Entity `xml:"entities>entity"`
DanceFormMap map[int]*DanceForm `xml:"dance_forms>dance_form"`
MusicalFormMap map[int]*MusicalForm `xml:"musical_forms>musical_form"`
PoeticFormMap map[int]*PoeticForm `xml:"poetic_forms>poetic_form"`
WrittenContentMap map[int]*WrittenContent `xml:"written_contents>written_content"`
}
func (w *World) Load(file string) {
xmlFile, err := os.Open(file)
if err != nil {
fmt.Println(err)
}
fmt.Println("Successfully Opened users.xml")
defer xmlFile.Close()
converter := util.NewConvertReader(xmlFile)
// byteValue, _ := io.ReadAll(converter)
// fmt.Println(len(byteValue))
fillTypes(reflect.TypeOf(w))
fmt.Println(types["Region"])
d := xml.NewDecoder(converter)
parseObject(d, nil, reflect.ValueOf(w))
// err = xml.Unmarshal(byteValue, w)
// if err != nil {
// fmt.Println(err)
// }
fmt.Println("World loaded")
}
var types = make(map[string]map[string]reflect.StructField)
func fillTypes(t reflect.Type) {
if t.Kind() == reflect.Pointer {
t = t.Elem()
}
if t.Kind() != reflect.Struct {
return
}
fmt.Println(t.Name())
if _, ok := types[t.Name()]; ok {
return
}
info := make(map[string]reflect.StructField)
DeepFields(t, &info, make([]int, 0))
types[t.Name()] = info
}
func DeepFields(t reflect.Type, info *map[string]reflect.StructField, index []int) {
for i := 0; i < t.NumField(); i++ {
f := t.Field(i)
f.Index = append(index, f.Index[0])
if xml, ok := f.Tag.Lookup("xml"); ok {
if p := strings.Index(xml, ">"); p >= 0 {
(*info)[xml[0:p]] = f
} else {
for _, s := range strings.Split(xml, "|") {
(*info)[s] = f
}
}
if f.Type.Kind() == reflect.Map || f.Type.Kind() == reflect.Slice {
fillTypes(f.Type.Elem())
}
fmt.Println(i, f)
}
if f.Type.Kind() == reflect.Struct && f.Anonymous {
DeepFields(f.Type, info, f.Index)
}
}
}
func parseObject(d *xml.Decoder, start *xml.StartElement, val reflect.Value) error {
if start == nil {
for {
tok, err := d.Token()
if err != nil {
return err
}
if t, ok := tok.(xml.StartElement); ok {
start = &t
break
}
}
}
if val.Kind() == reflect.Pointer {
val = val.Elem()
}
typ, ok := types[val.Type().Name()]
if !ok {
d.Skip()
return nil
}
Loop:
for {
tok, err := d.Token()
if err != nil {
return err
}
switch t := tok.(type) {
case xml.StartElement:
if ty, ok := typ[t.Name.Local]; ok {
if ty.Type.Kind() == reflect.Map {
fmt.Println(" ", t.Name.Local, val.Type().Name(), ty)
f := val.Field(ty.Index[0])
if f.IsNil() {
f.Set(reflect.MakeMapWithSize(ty.Type, 0))
}
parseMap(d, ty, f)
}
} else {
d.Skip()
}
// parseObject(d, &t, val)
case xml.EndElement:
break Loop
}
}
return nil
}
func parseMap(d *xml.Decoder, field reflect.StructField, dest reflect.Value) error {
x, ok := field.Tag.Lookup("xml")
if !ok {
return errors.New("no xml tag")
}
elementName := strings.Split(x, ">")[1]
var lastStart *xml.StartElement
var id int
Loop:
for {
tok, err := d.Token()
if err != nil {
return err
}
switch t := tok.(type) {
case xml.StartElement:
if t.Name.Local == elementName {
lastStart = &t
id = -1
} else if t.Name.Local == "id" {
if id != -1 {
return errors.New("ID at invalid place")
}
d.DecodeElement(&id, &t)
obj := dest.MapIndex(reflect.ValueOf(id))
if !obj.IsValid() {
obj = reflect.New(field.Type.Elem().Elem())
dest.SetMapIndex(reflect.ValueOf(id), obj)
obj.Elem().FieldByIndex(types[obj.Type().Elem().Name()]["id"].Index).SetInt(int64(id))
}
d.DecodeElement(obj.Interface(), lastStart)
} else {
fmt.Println("SKIP", elementName, t.Name.Local)
d.Skip()
}
case xml.EndElement:
if t.Name.Local != elementName {
break Loop
}
}
}
return nil
}
func (w *World) Process() {
// w.RegionMap = make(map[int]*Region)
// mapObjects(&w.Regions, &w.RegionMap)
// w.UndergroundRegionMap = make(map[int]*UndergroundRegion)
// mapObjects(&w.UndergroundRegions, &w.UndergroundRegionMap)
// w.LandmassMap = make(map[int]*Landmass)
// mapObjects(&w.Landmasses, &w.LandmassMap)
// w.SiteMap = make(map[int]*Site)
// mapObjects(&w.Sites, &w.SiteMap)
// w.WorldConstructionMap = make(map[int]*WorldConstruction)
// mapObjects(&w.WorldConstructions, &w.WorldConstructionMap)
// w.ArtifactMap = make(map[int]*Artifact)
// mapObjects(&w.Artifacts, &w.ArtifactMap)
// w.HistoricalFigureMap = make(map[int]*HistoricalFigure)
// mapObjects(&w.HistoricalFigures, &w.HistoricalFigureMap)
// w.HistoricalEventMap = make(map[int]*HistoricalEvent)
// mapObjects(&w.HistoricalEvents, &w.HistoricalEventMap)
// w.HistoricalEventCollectionMap = make(map[int]*HistoricalEventCollection)
// mapObjects(&w.HistoricalEventCollections, &w.HistoricalEventCollectionMap)
// w.EntityMap = make(map[int]*Entity)
// mapObjects(&w.Entities, &w.EntityMap)
w.processEvents()
}
func (w *World) processEvents() {
legendFields := make(map[string][]int)
t := reflect.TypeOf(HistoricalEvent{})
for i := 0; i < t.NumField(); i++ {
f := t.Field(i)
l, ok := f.Tag.Lookup("legend")
if ok {
legendFields[l] = append(legendFields[l], i)
}
}
// for eventIndex := 0; eventIndex < len(w.HistoricalEvents); eventIndex++ {
// e := w.HistoricalEvents[eventIndex]
// v := reflect.ValueOf(*e)
// processEvent(e, &v, legendFields["entity"], &w.EntityMap)
// processEvent(e, &v, legendFields["site"], &w.SiteMap)
// processEvent(e, &v, legendFields["hf"], &w.HistoricalFigureMap)
// processEvent(e, &v, legendFields["artifact"], &w.ArtifactMap)
// // processEvent(e, &v, legendFields["wc"], &w.WorldConstructionMap)
// // processEvent(e, &v, legendFields["structure"], &w.St)
// }
}
func processEvent[T HasEvents](event *HistoricalEvent, v *reflect.Value, fields []int, objectMap *map[int]T) {
for _, i := range fields {
val := v.Field(i)
if !val.IsZero() {
switch val.Elem().Kind() {
case reflect.Slice:
ids := val.Interface().(*[]int)
for _, id := range *ids {
if x, ok := (*objectMap)[id]; ok {
x.SetEvents(append(x.GetEvents(), event))
}
}
case reflect.Int:
id := int(val.Elem().Int())
if x, ok := (*objectMap)[id]; ok {
x.SetEvents(append(x.GetEvents(), event))
}
default:
fmt.Println("unknown", val.Elem().Kind())
}
}
}
}
func mapObjects[T Identifiable](objects *[]T, objectMap *map[int]T) {
for i, obj := range *objects {
(*objectMap)[obj.Id()] = (*objects)[i]
}
}


@@ -3,11 +3,11 @@ package server
import (
"encoding/json"
"fmt"
"legendsbrowser/model"
"net/http"
"strconv"
"github.com/gorilla/mux"
"github.com/robertjanetzko/LegendsBrowser2/backend/model"
)
type Info struct {

File diff suppressed because it is too large


@@ -1,7 +0,0 @@
//go:build ignore
package main
// func main() {
// generate.Generate()
// }