code gen parsers
parent 32f4b0537f · commit 687d9451f7
[ignore file — filename not preserved in this capture; likely .gitignore]

@@ -3,3 +3,4 @@
 legendsbrowser
 *.pprof
 *.png
+/*.json
[analyze package source — filename not preserved in this capture]

@@ -19,59 +19,83 @@ import (
 )

 func Analyze(filex string) {
-    files, err := filepath.Glob("*.xml")
+    fmt.Println("Search...", filex)
+    files, err := filepath.Glob(filex + "/*.xml")
     if err != nil {
         log.Fatal(err)
     }
     fmt.Println(files)

-    files = []string{filex}

     a := NewAnalyzeData()

     for _, file := range files {
+        analyze(file, a)
+    }
+
+    file, _ := json.MarshalIndent(a, "", " ")
+    _ = ioutil.WriteFile("analyze.json", file, 0644)
+
+    createMetadata(a)
+}
+
+func Generate() {
+    data, err := ioutil.ReadFile("analyze.json")
+    if err != nil {
+        return
+    }
+
+    a := NewAnalyzeData()
+    json.Unmarshal(data, a)
+    createMetadata(a)
+}
+
+type AnalyzeData struct {
+    Types    map[string]bool
+    Fields   map[string]bool
+    IsString map[string]bool
+    Multiple map[string]bool
+    Base     map[string]bool
+    Plus     map[string]bool
+}
+
+func NewAnalyzeData() *AnalyzeData {
+    return &AnalyzeData{
+        Types:    make(map[string]bool, 0),
+        Fields:   make(map[string]bool, 0),
+        IsString: make(map[string]bool, 0),
+        Multiple: make(map[string]bool, 0),
+        Base:     make(map[string]bool, 0),
+        Plus:     make(map[string]bool, 0),
+    }
+}
+
+func analyze(file string, a *AnalyzeData) error {
     xmlFile, err := os.Open(file)
     if err != nil {
         fmt.Println(err)
     }

+    plus := strings.HasSuffix(file, "_plus.xml")
+
     fmt.Println("Successfully Opened", file)
     defer xmlFile.Close()

     converter := util.NewConvertReader(xmlFile)
-    analyze(converter, a)
+    return analyzeElement(xml.NewDecoder(converter), a, make([]string, 0), plus)
+}
+
+const PATH_SEPARATOR = "|"
+
+func analyzeElement(d *xml.Decoder, a *AnalyzeData, path []string, plus bool) error {
+    if len(path) > 1 {
+        s := strings.Join(path, PATH_SEPARATOR)
+        a.Fields[s] = true
+        if plus {
+            a.Plus[s] = true
+        } else {
+            a.Base[s] = true
         }

-    createMetadata(a)
-}
-
-type analyzeData struct {
-    path     []string
-    types    *map[string]bool
-    fields   *map[string]bool
-    isString *map[string]bool
-    multiple *map[string]bool
-}
-
-func NewAnalyzeData() analyzeData {
-    path := make([]string, 0)
-    types := make(map[string]bool, 0)
-    fields := make(map[string]bool, 0)
-    isString := make(map[string]bool, 0)
-    multiple := make(map[string]bool, 0)
-
-    return analyzeData{
-        path:     path,
-        types:    &types,
-        fields:   &fields,
-        isString: &isString,
-        multiple: &multiple,
-    }
-}
-
-func analyzeElement(d *xml.Decoder, a analyzeData) error {
-    if len(a.path) > 1 {
-        (*a.fields)[strings.Join(a.path, ">")] = true
     }

 var (
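Note (illustration, not part of the commit): after this hunk, analyzeElement records one boolean per element path into AnalyzeData.Fields, joining path segments with PATH_SEPARATOR and tagging each key as Base or Plus depending on whether the source file ends in _plus.xml. A minimal standalone sketch of that path bookkeeping, using a made-up document and names:

package main

import (
    "encoding/xml"
    "fmt"
    "strings"
)

const pathSeparator = "|"

// record mirrors the path bookkeeping of analyzeElement: every element
// deeper than the root contributes one "|"-joined path key.
func record(d *xml.Decoder, path []string, fields map[string]bool) {
    for {
        tok, err := d.Token()
        if err != nil {
            return
        }
        switch t := tok.(type) {
        case xml.StartElement:
            p := append(append([]string{}, path...), t.Name.Local)
            if len(p) > 1 {
                fields[strings.Join(p, pathSeparator)] = true
            }
            record(d, p, fields)
        case xml.EndElement:
            return
        }
    }
}

func main() {
    // Made-up snippet in the shape of a legends export.
    doc := `<df_world><sites><site><id>1</id><name>Examplefort</name></site></sites></df_world>`
    fields := map[string]bool{}
    record(xml.NewDecoder(strings.NewReader(doc)), nil, fields)
    for k := range fields {
        fmt.Println(k) // e.g. df_world|sites|site|name
    }
}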
@@ -93,17 +117,16 @@ Loop:
        case xml.StartElement:
            value = false

-           (*a.types)[strings.Join(a.path, ">")] = true
+           a.Types[strings.Join(path, PATH_SEPARATOR)] = true

-           a2 := a
-           a2.path = append(a.path, t.Name.Local)
+           newPath := append(path, t.Name.Local)

            if _, ok := fields[t.Name.Local]; ok {
-               (*a.multiple)[strings.Join(a2.path, ">")] = true
+               a.Multiple[strings.Join(newPath, PATH_SEPARATOR)] = true
            }
            fields[t.Name.Local] = true

-           analyzeElement(d, a2)
+           analyzeElement(d, a, newPath, plus)

        case xml.CharData:
            data = append(data, t...)
@@ -111,14 +134,13 @@ Loop:
        case xml.EndElement:
            if value {
                if _, err := strconv.Atoi(string(data)); err != nil {
-                   (*a.isString)[strings.Join(a.path, ">")] = true
+                   a.IsString[strings.Join(path, PATH_SEPARATOR)] = true
                }
            }

-           if t.Name.Local == "type" {
-               a.path[len(a.path)-2] = a.path[len(a.path)-2] + strcase.ToCamel(string(data))
-               fmt.Println(a.path)
-           }
+           // if t.Name.Local == "type" {
+           // 	path[len(path)-2] = path[len(path)-2] + "+" + strcase.ToCamel(string(data))
+           // }

            return nil
        }
@@ -126,25 +148,49 @@ Loop:
    return nil
 }

-func analyze(r io.Reader, a analyzeData) error {
-    d := xml.NewDecoder(r)
-    return analyzeElement(d, a)
+func filterSubtypes(data map[string]bool) []string {
+    allowed := map[string]bool{
+        "df_world|historical_events|historical_event":                       true,
+        "df_world|historical_event_collections|historical_event_collection": true,
+    }
+
+    filtered := make(map[string]bool)
+    for k, v := range data {
+        if !v {
+            continue
+        }
+        path := strings.Split(k, PATH_SEPARATOR)
+        for index, seg := range path {
+            if strings.Contains(seg, "+") {
+                base := seg[:strings.Index(seg, "+")]
+                basePath := strings.Join(append(path[:index], base), PATH_SEPARATOR)
+                if allowed[basePath] {
+                    path[index] = seg
+                }
+            }
+        }
+        filtered[strings.Join(path, PATH_SEPARATOR)] = true
+    }
+    list := util.Keys(filtered)
+    sort.Strings(list)
+    return list
 }

-func createMetadata(a analyzeData) {
-    ts := util.Keys(*a.types)
-    sort.Strings(ts)
-
-    fs := util.Keys(*a.fields)
-    sort.Strings(fs)
+func createMetadata(a *AnalyzeData) {
+    ts := filterSubtypes(a.Types)
+    fs := filterSubtypes(a.Fields)
+
+    // for _, s := range fs {
+    // 	fmt.Println(s)
+    // }

     objects := make(map[string]Object, 0)

     for _, k := range ts {
         if ok, _ := isArray(k, fs); !ok {
             n := k
-            if strings.Contains(k, ">") {
-                n = k[strings.LastIndex(k, ">")+1:]
+            if strings.Contains(k, PATH_SEPARATOR) {
+                n = k[strings.LastIndex(k, PATH_SEPARATOR)+1:]
             }

             if n == "" {
@@ -153,48 +199,49 @@ func createMetadata(a analyzeData) {

             objFields := make(map[string]Field, 0)

-            fmt.Println("\n", n)
             for _, f := range fs {
-                if strings.HasPrefix(f, k+">") {
+                if strings.HasPrefix(f, k+PATH_SEPARATOR) {
                     fn := f[len(k)+1:]
-                    if !strings.Contains(fn, ">") {
-                        fmt.Println(" ", fn)
+                    if !strings.Contains(fn, PATH_SEPARATOR) {
+                        legend := ""
+                        if a.Base[f] && a.Plus[f] {
+                            legend = "both"
+                        } else if a.Base[f] {
+                            legend = "base"
+                        } else if a.Plus[f] {
+                            legend = "plus"
+                        }

-                        if ok, elements := isArray(f, fs); ok {
-                            el := elements[strings.LastIndex(elements, ">")+1:]
-                            objFields[fn] = Field{
-                                Name:        strcase.ToCamel(fn),
-                                Type:        "array",
-                                ElementType: &(el),
-                            }
-                        } else if ok, _ := isObject(f, fs); ok {
-                            objFields[fn] = Field{
-                                Name:     strcase.ToCamel(fn),
-                                Type:     "object",
-                                Multiple: (*a.multiple)[f],
-                            }
-                        } else if (*a.isString)[f] {
-                            objFields[fn] = Field{
-                                Name:     strcase.ToCamel(fn),
-                                Type:     "string",
-                                Multiple: (*a.multiple)[f],
-                            }
-                        } else {
-                            objFields[fn] = Field{
+                        field := Field{
                             Name:     strcase.ToCamel(fn),
                             Type:     "int",
-                            Multiple: (*a.multiple)[f],
+                            Multiple: a.Multiple[f],
+                            Legend:   legend,
                         }
+
+                        if ok, elements := isArray(f, fs); ok {
+                            el := elements[strings.LastIndex(elements, PATH_SEPARATOR)+1:]
+                            fmt.Println(f + PATH_SEPARATOR + elements + PATH_SEPARATOR + "id")
+                            if a.Fields[elements+PATH_SEPARATOR+"id"] {
+                                field.Type = "map"
+                            } else {
+                                field.Type = "array"
+                            }
+                            field.ElementType = &(el)
+                        } else if ok, _ := isObject(f, fs); ok {
+                            field.Type = "object"
+                        } else if a.IsString[f] {
+                            field.Type = "string"
+                        }
+
+                        objFields[fn] = field
                     }
                 }
             }

             objects[n] = Object{
                 Name:   strcase.ToCamel(n),
-                Id:     (*a.fields)[k+">id"],
-                Named:  (*a.fields)[k+">name"],
-                Typed:  (*a.fields)[k+">type"],
+                Id:     a.Fields[k+PATH_SEPARATOR+"id"],
+                Named:  a.Fields[k+PATH_SEPARATOR+"name"],
+                Typed:  a.Fields[k+PATH_SEPARATOR+"type"],
                 Fields: objFields,
             }
         }
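Note (illustration, not part of the commit): the rewritten createMetadata above picks a field's container type with a small heuristic — a repeated child element becomes map[int]*T when the analyzer recorded an id under it (a.Fields[elements+PATH_SEPARATOR+"id"]), otherwise a plain slice. A standalone sketch of that check with hypothetical analyzer keys:

package main

import (
    "fmt"
    "strings"
)

const pathSeparator = "|"

// chooseType mirrors the map/array decision: elements that carry an id
// become maps keyed by that id, everything else stays a slice.
func chooseType(fields map[string]bool, elementPath string) string {
    if fields[elementPath+pathSeparator+"id"] {
        return "map"
    }
    return "array"
}

func main() {
    // Hypothetical analyzer output, not taken from a real legends export.
    fields := map[string]bool{
        "df_world|sites|site":        true,
        "df_world|sites|site|id":     true,
        "df_world|rivers|river":      true, // no id recorded
        "df_world|rivers|river|name": true,
    }
    for _, el := range []string{"df_world|sites|site", "df_world|rivers|river"} {
        name := el[strings.LastIndex(el, pathSeparator)+1:]
        fmt.Printf("%-6s -> %s\n", name, chooseType(fields, el)) // site -> map, river -> array
    }
}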
@@ -203,7 +250,7 @@ func createMetadata(a analyzeData) {
     file, _ := json.MarshalIndent(objects, "", " ")
     _ = ioutil.WriteFile("model.json", file, 0644)

-    f, err := os.Create("contributors.go")
+    f, err := os.Create("df/model.go")
     defer f.Close()

     err = packageTemplate.Execute(f, struct {
@@ -221,11 +268,11 @@ func isArray(typ string, types []string) (bool, string) {
     elements := ""

     for _, t := range types {
-        if !strings.HasPrefix(t, typ+">") {
+        if !strings.HasPrefix(t, typ+PATH_SEPARATOR) {
             continue
         }
         f := t[len(typ)+1:]
-        if strings.Contains(f, ">") {
+        if strings.Contains(f, PATH_SEPARATOR) {
             continue
         }
         fc++
@@ -238,7 +285,7 @@ func isObject(typ string, types []string) (bool, string) {
     fc := 0

     for _, t := range types {
-        if !strings.HasPrefix(t, typ+">") {
+        if !strings.HasPrefix(t, typ+PATH_SEPARATOR) {
             continue
         }
         fc++
@@ -259,6 +306,7 @@ type Field struct {
     Type        string  `json:"type"`
     Multiple    bool    `json:"multiple,omitempty"`
     ElementType *string `json:"elements,omitempty"`
+    Legend      string  `json:"legend"`
 }

 func (f Field) TypeLine(objects map[string]Object) string {
@@ -274,17 +322,85 @@ func (f Field) TypeLine(objects map[string]Object) string {
     }
     t := f.Type
     if f.Type == "array" {
+        t = "[]*" + objects[*f.ElementType].Name
+    }
+    if f.Type == "map" {
         t = "map[int]*" + objects[*f.ElementType].Name
     }
     if f.Type == "object" {
         t = f.Name
     }
-    j := "`json:\"" + strcase.ToLowerCamel(f.Name) + "\"`"
+    j := fmt.Sprintf("`json:\"%s\" legend:\"%s\"`", strcase.ToLowerCamel(f.Name), f.Legend)
     return fmt.Sprintf("%s %s%s %s", n, m, t, j)
 }

+func (f Field) StartAction() string {
+    n := f.Name
+
+    if n == "Id" || n == "Name" {
+        n = n + "_"
+    }
+
+    if f.Type == "object" {
+        p := fmt.Sprintf("v := %s{}\nv.Parse(d, &t)", f.Name)
+        if !f.Multiple {
+            return fmt.Sprintf("%s\nobj.%s = v", p, n)
+        } else {
+            return fmt.Sprintf("%s\nobj.%s = append(obj.%s, v)", p, n, n)
+        }
+    }
+
+    if f.Type == "array" || f.Type == "map" {
+        el := strcase.ToCamel(*f.ElementType)
+        gen := fmt.Sprintf("New%s", el)
+
+        if f.Type == "array" {
+            return fmt.Sprintf("parseArray(d, &obj.%s, %s)", f.Name, gen)
+        }
+
+        if f.Type == "map" {
+            return fmt.Sprintf("obj.%s = make(map[int]*%s)\nparseMap(d, &obj.%s, %s)", f.Name, el, f.Name, gen)
+        }
+    }
+
+    if f.Type == "int" || f.Type == "string" {
+        return "data = nil"
+    }
+
+    return ""
+}
+
+func (f Field) EndAction() string {
+    n := f.Name
+
+    if n == "Id" || n == "Name" {
+        n = n + "_"
+    }
+
+    if !f.Multiple {
+        if f.Type == "int" {
+            return fmt.Sprintf("obj.%s = n(data)", n)
+        } else if f.Type == "string" {
+            return fmt.Sprintf("obj.%s = string(data)", n)
+        }
+    } else {
+        if f.Type == "int" {
+            return fmt.Sprintf("obj.%s = append(obj.%s, n(data))", n, n)
+        } else if f.Type == "string" {
+            return fmt.Sprintf("obj.%s = append(obj.%s, string(data))", n, n)
+        }
+    }
+
+    return ""
+}
+
 var packageTemplate = template.Must(template.New("").Parse(`// Code generated by go generate; DO NOT EDIT.
-package generate
+package df

+import (
+    "encoding/xml"
+    "strconv"
+)
+
 {{- range $name, $obj := .Objects }}
 type {{ $obj.Name }} struct {
@@ -293,6 +409,7 @@ type {{ $obj.Name }} struct {
 {{- end }}
 }

+func New{{ $obj.Name }}() *{{ $obj.Name }} { return &{{ $obj.Name }}{} }
 {{- if $obj.Id }}
 func (x *{{ $obj.Name }}) Id() int { return x.Id_ }
 {{- end }}
@@ -300,5 +417,57 @@ func (x *{{ $obj.Name }}) Id() int { return x.Id_ }
 func (x *{{ $obj.Name }}) Name() string { return x.Name_ }
 {{- end }}

+{{- end }}
+
+// Parser
+
+func n(d []byte) int {
+    v, _ := strconv.Atoi(string(d))
+    return v
+}
+
+{{- range $name, $obj := .Objects }}
+func (obj *{{ $obj.Name }}) Parse(d *xml.Decoder, start *xml.StartElement) error {
+    var data []byte
+
+    for {
+        tok, err := d.Token()
+        if err != nil {
+            return err
+        }
+        switch t := tok.(type) {
+        case xml.StartElement:
+            switch t.Name.Local {
+            {{- range $fname, $field := $obj.Fields }}
+            case "{{ $fname }}":
+                {{ $field.StartAction }}
+            {{- end }}
+            default:
+                // fmt.Println("unknown field", t.Name.Local)
+                d.Skip()
+            }
+
+        case xml.CharData:
+            data = append(data, t...)
+
+        case xml.EndElement:
+            if t.Name.Local == start.Name.Local {
+                return nil
+            }
+
+            switch t.Name.Local {
+            {{- range $fname, $field := $obj.Fields }}
+            case "{{ $fname }}":
+                {{ $field.EndAction }}
+            {{- end }}
+            default:
+                // fmt.Println("unknown field", t.Name.Local)
+            }
+        }
+    }
+}
+
 {{- end }}
 `))
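Note (illustration, not part of the commit): the output of packageTemplate is easiest to read next to a concrete expansion. The sketch below is hand-written, not generated; it shows what the template plus the StartAction/EndAction snippets would plausibly emit for a hypothetical object with an int id and a string name — the element and field names are invented.

package main

import (
    "encoding/xml"
    "fmt"
    "strconv"
    "strings"
)

// n mirrors the helper the template emits.
func n(d []byte) int {
    v, _ := strconv.Atoi(string(d))
    return v
}

// Site is the kind of struct the template would emit for a "site" object
// whose analyzed fields are an int id and a string name (legends assumed).
type Site struct {
    Id_   int    `json:"id" legend:"both"`
    Name_ string `json:"name" legend:"both"`
}

func NewSite() *Site         { return &Site{} }
func (x *Site) Id() int      { return x.Id_ }
func (x *Site) Name() string { return x.Name_ }

func (obj *Site) Parse(d *xml.Decoder, start *xml.StartElement) error {
    var data []byte
    for {
        tok, err := d.Token()
        if err != nil {
            return err
        }
        switch t := tok.(type) {
        case xml.StartElement:
            switch t.Name.Local {
            case "id", "name": // StartAction for int/string fields
                data = nil
            default:
                d.Skip()
            }
        case xml.CharData:
            data = append(data, t...)
        case xml.EndElement:
            if t.Name.Local == start.Name.Local {
                return nil
            }
            switch t.Name.Local { // EndAction per field
            case "id":
                obj.Id_ = n(data)
            case "name":
                obj.Name_ = string(data)
            }
        }
    }
}

func main() {
    doc := `<site><id>7</id><name>Examplefort</name></site>`
    d := xml.NewDecoder(strings.NewReader(doc))
    for {
        tok, err := d.Token()
        if err != nil {
            return
        }
        if t, ok := tok.(xml.StartElement); ok && t.Name.Local == "site" {
            s := NewSite()
            if err := s.Parse(d, &t); err != nil {
                fmt.Println(err)
            }
            fmt.Println(s.Id(), s.Name()) // 7 Examplefort
            return
        }
    }
}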
contributors.go · 12496 · file diff suppressed because it is too large
[new file in package df — filename not preserved in this capture]

@@ -0,0 +1,90 @@
+package df
+
+import (
+    "encoding/xml"
+    "fmt"
+    "legendsbrowser/util"
+    "os"
+)
+
+// type DfWorld struct{}
+
+// func (x *DfWorld) Parse(d *xml.Decoder, start *xml.StartElement) {}
+
+func Parse(file string) (*DfWorld, error) {
+    xmlFile, err := os.Open(file)
+    if err != nil {
+        fmt.Println(err)
+    }
+
+    fmt.Println("Successfully Opened", file)
+    defer xmlFile.Close()
+
+    converter := util.NewConvertReader(xmlFile)
+    d := xml.NewDecoder(converter)
+
+    for {
+        tok, err := d.Token()
+        if err != nil {
+            return nil, err
+        }
+        switch t := tok.(type) {
+        case xml.StartElement:
+            if t.Name.Local == "df_world" {
+                w := DfWorld{}
+                w.Parse(d, &t)
+                return &w, nil
+            }
+        }
+    }
+    // return nil, errors.New("Fehler!")
+}
+
+type Identifiable interface {
+    Id() int
+}
+
+type Parsable interface {
+    Parse(d *xml.Decoder, start *xml.StartElement) error
+}
+
+type IdentifiableParsable interface {
+    Identifiable
+    Parsable
+}
+
+func parseArray[T Parsable](d *xml.Decoder, dest *[]T, creator func() T) {
+    for {
+        tok, err := d.Token()
+        if err != nil {
+            return // nil, err
+        }
+        switch t := tok.(type) {
+        case xml.StartElement:
+            x := creator()
+            x.Parse(d, &t)
+            *dest = append(*dest, x)
+
+        case xml.EndElement:
+            return
+        }
+    }
+}
+
+func parseMap[T IdentifiableParsable](d *xml.Decoder, dest *map[int]T, creator func() T) {
+    for {
+        tok, err := d.Token()
+        if err != nil {
+            return // nil, err
+        }
+        switch t := tok.(type) {
+        case xml.StartElement:
+            x := creator()
+            x.Parse(d, &t)
+            (*dest)[x.Id()] = x
+
+        case xml.EndElement:
+            return
+        }
+    }
+}
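Note (illustration, not part of the commit): the generated Parse methods are expected to hand repeated child elements to these generic helpers; parseMap keys the destination map by each element's Id(), which is why its constraint is IdentifiableParsable rather than plain Parsable. The sketch below copies parseMap from the file above and drives it with a made-up Thing type so the call shape can be run standalone (type parameters need Go 1.18+):

package main

import (
    "encoding/xml"
    "fmt"
    "strconv"
    "strings"
)

// Trimmed copies of the interfaces and parseMap from the new df file.
type Identifiable interface{ Id() int }
type Parsable interface {
    Parse(d *xml.Decoder, start *xml.StartElement) error
}
type IdentifiableParsable interface {
    Identifiable
    Parsable
}

func parseMap[T IdentifiableParsable](d *xml.Decoder, dest *map[int]T, creator func() T) {
    for {
        tok, err := d.Token()
        if err != nil {
            return
        }
        switch t := tok.(type) {
        case xml.StartElement:
            x := creator()
            x.Parse(d, &t)
            (*dest)[x.Id()] = x
        case xml.EndElement:
            return
        }
    }
}

// Thing is a hypothetical element type with only an <id> child.
type Thing struct{ Id_ int }

func NewThing() *Thing   { return &Thing{} }
func (x *Thing) Id() int { return x.Id_ }
func (x *Thing) Parse(d *xml.Decoder, start *xml.StartElement) error {
    var data []byte
    for {
        tok, err := d.Token()
        if err != nil {
            return err
        }
        switch t := tok.(type) {
        case xml.StartElement:
            data = nil
        case xml.CharData:
            data = append(data, t...)
        case xml.EndElement:
            if t.Name.Local == start.Name.Local {
                return nil
            }
            if t.Name.Local == "id" {
                x.Id_, _ = strconv.Atoi(string(data))
            }
        }
    }
}

func main() {
    doc := `<things><thing><id>3</id></thing><thing><id>9</id></thing></things>`
    d := xml.NewDecoder(strings.NewReader(doc))
    for {
        tok, err := d.Token()
        if err != nil {
            return
        }
        // Advance to the container, then let parseMap consume its children.
        if t, ok := tok.(xml.StartElement); ok && t.Name.Local == "things" {
            m := make(map[int]*Thing)
            parseMap(d, &m, NewThing)
            fmt.Println(len(m), m[3] != nil, m[9] != nil) // 2 true true
            return
        }
    }
}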
main.go · 76
@@ -4,45 +4,57 @@ import (
     "flag"
     "fmt"
     "legendsbrowser/analyze"
-    "legendsbrowser/model"
+    "legendsbrowser/df"
     "legendsbrowser/server"
     "net/http"
     _ "net/http/pprof"
-    "os"
     "runtime"

     "github.com/gorilla/mux"
     "github.com/pkg/profile"
 )

-var world model.World
+var world *df.DfWorld

 func main() {
-    var a string
-    flag.StringVar(&a, "a", "", "analyze a file")
-    flag.Parse()
-
-    if len(a) == 0 {
-        fmt.Println("Usage: defaults.go -a")
-        flag.PrintDefaults()
-        os.Exit(1)
-    } else {
-        analyze.Analyze(a)
-        os.Exit(1)
-    }
-
     defer profile.Start(profile.MemProfile).Stop()
     go func() {
         http.ListenAndServe(":8081", nil)
     }()

+    a := flag.String("a", "", "analyze a file")
+    g := flag.Bool("g", false, "generate model")
+    f := flag.String("f", "", "open a file")
+    flag.Parse()
+
+    if len(*a) > 0 {
+        analyze.Analyze(*a)
+    }
+    if *g {
+        fmt.Println("Generating")
+        analyze.Generate()
+    }
+
+    if len(*f) > 0 {
+        w, err := df.Parse(*f)
+        if err != nil {
+            fmt.Println(err)
+        }
+
+        // file, _ := json.MarshalIndent(w, "", " ")
+        // _ = ioutil.WriteFile("world.json", file, 0644)
+
+        world = w
+    }
+
     fmt.Println("Hallo Welt!")

     // world.Load("region1-00152-01-01-legends_plus.xml")
-    world.Load("region2-00195-01-01-legends.xml")
+    // world.Load("region2-00195-01-01-legends.xml")
     // world.Load("Agora-00033-01-01-legends_plus.xml")
     runtime.GC()
-    world.Process()
+    // world.Process()

     // model.ListOtherElements("world", &[]*model.World{&world})
     // model.ListOtherElements("region", &world.Regions)
@@ -63,21 +75,21 @@ func main() {

     router := mux.NewRouter().StrictSlash(true)

-    server.RegisterResource(router, "region", world.RegionMap)
-    server.RegisterResource(router, "undergroundRegion", world.UndergroundRegionMap)
-    server.RegisterResource(router, "landmass", world.LandmassMap)
-    server.RegisterResource(router, "site", world.SiteMap)
-    server.RegisterResource(router, "worldConstruction", world.WorldConstructionMap)
-    server.RegisterResource(router, "artifact", world.ArtifactMap)
-    server.RegisterResource(router, "hf", world.HistoricalFigureMap)
-    server.RegisterResource(router, "collection", world.HistoricalEventCollectionMap)
-    server.RegisterResource(router, "entity", world.EntityMap)
-    server.RegisterResource(router, "event", world.HistoricalEventMap)
-    server.RegisterResource(router, "era", world.HistoricalEraMap)
-    server.RegisterResource(router, "danceForm", world.DanceFormMap)
-    server.RegisterResource(router, "musicalForm", world.MusicalFormMap)
-    server.RegisterResource(router, "poeticForm", world.PoeticFormMap)
-    server.RegisterResource(router, "written", world.WrittenContentMap)
+    // server.RegisterResource(router, "region", world.RegionMap)
+    // server.RegisterResource(router, "undergroundRegion", world.UndergroundRegionMap)
+    // server.RegisterResource(router, "landmass", world.LandmassMap)
+    // server.RegisterResource(router, "site", world.SiteMap)
+    // server.RegisterResource(router, "worldConstruction", world.WorldConstructionMap)
+    // server.RegisterResource(router, "artifact", world.ArtifactMap)
+    // server.RegisterResource(router, "hf", world.HistoricalFigureMap)
+    // server.RegisterResource(router, "collection", world.HistoricalEventCollectionMap)
+    // server.RegisterResource(router, "entity", world.EntityMap)
+    // server.RegisterResource(router, "event", world.HistoricalEventMap)
+    // server.RegisterResource(router, "era", world.HistoricalEraMap)
+    // server.RegisterResource(router, "danceForm", world.DanceFormMap)
+    // server.RegisterResource(router, "musicalForm", world.MusicalFormMap)
+    // server.RegisterResource(router, "poeticForm", world.PoeticFormMap)
+    // server.RegisterResource(router, "written", world.WrittenContentMap)

     spa := server.SpaHandler{StaticPath: "frontend/dist/legendsbrowser", IndexPath: "index.html"}
     router.PathPrefix("/").Handler(spa)
model.json · 2633 · file diff suppressed because it is too large