codegen parser
parent 5274a114bf
commit 06a89e467b
@@ -56,6 +56,10 @@ type FieldData struct {
     Plus bool
 }
 
+func NewFieldData() *FieldData {
+    return &FieldData{}
+}
+
 type AnalyzeData struct {
     // Types map[string]bool
     Fields map[string]*FieldData
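These hunks consolidate the analyzer's separate per-path bool maps (Base, Plus, Multiple, IsString) into one FieldData record per XML path. A minimal sketch of the two types as they stand after this commit, inferred from the usages in the hunks below (only fields visible in this diff are shown; anything beyond that is an assumption):

// FieldData collects everything the analyzer learns about one "|"-joined XML path.
type FieldData struct {
    Base     bool // seen in the legends.xml export
    Plus     bool // seen in the legends_plus.xml export
    Multiple bool // the element occurs more than once under its parent
    IsString bool // at least one value failed strconv.Atoi, so it cannot be an int
}

// AnalyzeData now keys one *FieldData per path instead of several bool maps.
type AnalyzeData struct {
    Fields map[string]*FieldData
}

func NewFieldData() *FieldData { return &FieldData{} }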
@@ -99,11 +103,12 @@ const PATH_SEPARATOR = "|"
 func analyzeElement(d *xml.Decoder, a *AnalyzeData, path []string, plus bool) error {
     if len(path) > 1 {
         s := strings.Join(path, PATH_SEPARATOR)
-        a.Fields[s] = true
+        fd := NewFieldData()
+        a.Fields[s] = fd
         if plus {
-            a.Plus[s] = true
+            fd.Plus = true
         } else {
-            a.Base[s] = true
+            fd.Base = true
         }
     }
 
@@ -126,12 +131,12 @@ Loop:
         case xml.StartElement:
             value = false
 
-            a.Types[strings.Join(path, PATH_SEPARATOR)] = true
+            // a.Types[strings.Join(path, PATH_SEPARATOR)] = true
 
             newPath := append(path, t.Name.Local)
 
             if _, ok := fields[t.Name.Local]; ok {
-                a.Multiple[strings.Join(newPath, PATH_SEPARATOR)] = true
+                a.Fields[strings.Join(newPath, PATH_SEPARATOR)].Multiple = true
             }
             fields[t.Name.Local] = true
 
@@ -143,7 +148,7 @@ Loop:
         case xml.EndElement:
             if value {
                 if _, err := strconv.Atoi(string(data)); err != nil {
-                    a.IsString[strings.Join(path, PATH_SEPARATOR)] = true
+                    a.Fields[strings.Join(path, PATH_SEPARATOR)].IsString = true
                 }
             }
 
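Field keys are the element names joined with PATH_SEPARATOR, and a field is downgraded from int to string as soon as one observed value fails strconv.Atoi. A tiny self-contained illustration of both conventions (the sample path and values are made up):

package main

import (
    "fmt"
    "strconv"
    "strings"
)

const PATH_SEPARATOR = "|"

func main() {
    path := []string{"df_world", "historical_events", "historical_event", "hfid"}
    key := strings.Join(path, PATH_SEPARATOR)
    fmt.Println(key) // df_world|historical_events|historical_event|hfid

    isString := false
    for _, v := range []string{"1523", "not a number"} {
        if _, err := strconv.Atoi(v); err != nil {
            isString = true // mirrors a.Fields[key].IsString = true
        }
    }
    fmt.Println(key, "is string:", isString)
}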
@@ -157,37 +162,74 @@ Loop:
     return nil
 }
 
-func filterSubtypes(data map[string]bool) []string {
-    allowed := map[string]bool{
-        "df_world|historical_events|historical_event": true,
-        "df_world|historical_event_collections|historical_event_collection": true,
-    }
+var allowedTyped = map[string]bool{
+    "df_world|historical_events|historical_event": true,
+    "df_world|historical_event_collections|historical_event_collection": true,
+}
 
-    filtered := make(map[string]bool)
-    for k, v := range data {
-        if !v {
-            continue
-        }
+func filterSubtypes(data *map[string]*FieldData) []string {
+    filtered := make(map[string]*FieldData)
+    for k, v := range *data {
         path := strings.Split(k, PATH_SEPARATOR)
         for index, seg := range path {
            if strings.Contains(seg, "+") {
                base := seg[:strings.Index(seg, "+")]
                basePath := strings.Join(append(path[:index], base), PATH_SEPARATOR)
-               if allowed[basePath] {
+               if allowedTyped[basePath] {
                    path[index] = seg
                }
            }
        }
-        filtered[strings.Join(path, PATH_SEPARATOR)] = true
+        filtered[strings.Join(path, PATH_SEPARATOR)] = v
     }
+    *data = filtered
     list := util.Keys(filtered)
     sort.Strings(list)
     return list
 }
 
+func getSubtypes(objectTypes []string, k string) *[]string {
+    subtypes := make(map[string]bool)
+
+    for _, t := range objectTypes {
+        if strings.HasPrefix(t, k+"+") && !strings.Contains(t[len(k):], PATH_SEPARATOR) {
+            subtypes[t[strings.LastIndex(t, "+")+1:]] = true
+        }
+    }
+
+    keys := util.Keys(subtypes)
+    sort.Strings(keys)
+
+    if len(keys) > 0 {
+        return &keys
+    }
+
+    return nil
+}
+
+func getSubtypeOf(k string) *string {
+    if strings.Contains(k, PATH_SEPARATOR) {
+        last := k[strings.LastIndex(k, PATH_SEPARATOR)+1:]
+        if strings.Contains(last, "+") {
+            base := strcase.ToCamel(last[:strings.Index(last, "+")])
+            return &base
+        }
+    }
+    return nil
+}
+
 func createMetadata(a *AnalyzeData) {
-    ts := filterSubtypes(a.Types)
-    fs := filterSubtypes(a.Fields)
+
+    // ts := filterSubtypes(a.Types)
+    fs := filterSubtypes(&a.Fields)
+
+    var objectTypes []string
+    for k := range a.Fields {
+        path := strings.Split(k, PATH_SEPARATOR)
+        if len(path) >= 2 {
+            objectTypes = append(objectTypes, strings.Join(path[:len(path)-1], PATH_SEPARATOR))
+        }
+    }
 
     // for _, s := range fs {
     //     fmt.Println(s)
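A path segment of the form base+subtype (for example historical_event+hf_died) is how the analyzer marks subtypes of the two allowedTyped paths, and the new getSubtypes and getSubtypeOf read that convention back out. A self-contained sketch of the same logic, with the pointer return and util.Keys simplified away; the sample paths are illustrative, not taken from real data:

package main

import (
    "fmt"
    "sort"
    "strings"

    "github.com/iancoleman/strcase"
)

const PATH_SEPARATOR = "|"

// getSubtypes lists the "+" suffixes found directly under k.
func getSubtypes(objectTypes []string, k string) []string {
    seen := map[string]bool{}
    for _, t := range objectTypes {
        if strings.HasPrefix(t, k+"+") && !strings.Contains(t[len(k):], PATH_SEPARATOR) {
            seen[t[strings.LastIndex(t, "+")+1:]] = true
        }
    }
    var keys []string
    for s := range seen {
        keys = append(keys, s)
    }
    sort.Strings(keys)
    return keys
}

// getSubtypeOf returns the camel-cased base name when the last path segment
// carries a "+subtype" suffix, and nil otherwise.
func getSubtypeOf(k string) *string {
    last := k[strings.LastIndex(k, PATH_SEPARATOR)+1:]
    if strings.Contains(last, "+") {
        base := strcase.ToCamel(last[:strings.Index(last, "+")])
        return &base
    }
    return nil
}

func main() {
    types := []string{
        "df_world|historical_events|historical_event",
        "df_world|historical_events|historical_event+hf_died",
        "df_world|historical_events|historical_event+change_hf_state",
    }
    fmt.Println(getSubtypes(types, "df_world|historical_events|historical_event"))
    // [change_hf_state hf_died]
    if s := getSubtypeOf(types[1]); s != nil {
        fmt.Println(*s) // HistoricalEvent
    }
}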
@@ -195,7 +237,7 @@ func createMetadata(a *AnalyzeData) {
 
     objects := make(map[string]Object, 0)
 
-    for _, k := range ts {
+    for _, k := range objectTypes {
         if ok, _ := isArray(k, fs); !ok {
             n := k
             if strings.Contains(k, PATH_SEPARATOR) {
@@ -213,24 +255,23 @@ func createMetadata(a *AnalyzeData) {
                 fn := f[len(k)+1:]
                 if !strings.Contains(fn, PATH_SEPARATOR) {
                     legend := ""
-                    if a.Base[f] && a.Plus[f] {
+                    if a.Fields[f].Base && a.Fields[f].Plus {
                         legend = "both"
-                    } else if a.Base[f] {
+                    } else if a.Fields[f].Base {
                         legend = "base"
-                    } else if a.Plus[f] {
+                    } else if a.Fields[f].Plus {
                         legend = "plus"
                     }
 
                     field := Field{
                         Name: strcase.ToCamel(fn),
                         Type: "int",
-                        Multiple: a.Multiple[f],
+                        Multiple: a.Fields[f].Multiple,
                         Legend: legend,
                     }
                     if ok, elements := isArray(f, fs); ok {
                         el := elements[strings.LastIndex(elements, PATH_SEPARATOR)+1:]
-                        fmt.Println(f + PATH_SEPARATOR + elements + PATH_SEPARATOR + "id")
-                        if a.Fields[elements+PATH_SEPARATOR+"id"] {
+                        if _, ok := a.Fields[elements+PATH_SEPARATOR+"id"]; ok {
                             field.Type = "map"
                         } else {
                             field.Type = "array"
@@ -238,7 +279,7 @@ func createMetadata(a *AnalyzeData) {
                         field.ElementType = &(el)
                     } else if ok, _ := isObject(f, fs); ok {
                         field.Type = "object"
-                    } else if a.IsString[f] {
+                    } else if a.Fields[f].IsString {
                         field.Type = "string"
                     }
                     objFields[fn] = field
@@ -247,11 +288,13 @@ func createMetadata(a *AnalyzeData) {
             }
 
             objects[n] = Object{
-                Name: strcase.ToCamel(n),
-                Id: a.Fields[k+PATH_SEPARATOR+"id"],
-                Named: a.Fields[k+PATH_SEPARATOR+"name"],
-                Typed: a.Fields[k+PATH_SEPARATOR+"type"],
-                Fields: objFields,
+                Name: strcase.ToCamel(n),
+                Id: a.Fields[k+PATH_SEPARATOR+"id"] != nil,
+                Named: a.Fields[k+PATH_SEPARATOR+"name"] != nil,
+                Typed: a.Fields[k+PATH_SEPARATOR+"type"] != nil,
+                SubTypes: getSubtypes(objectTypes, k),
+                SubTypeOf: getSubtypeOf(k),
+                Fields: objFields,
             }
         }
     }
@@ -276,12 +319,15 @@ func isArray(typ string, types []string) (bool, string) {
     fc := 0
     elements := ""
 
+    if !strings.Contains(typ, PATH_SEPARATOR) || strings.Contains(typ[strings.LastIndex(typ, PATH_SEPARATOR):], "+") {
+        return false, ""
+    }
+
     for _, t := range types {
         if !strings.HasPrefix(t, typ+PATH_SEPARATOR) {
             continue
         }
-        f := t[len(typ)+1:]
-        if strings.Contains(f, PATH_SEPARATOR) {
+        if strings.Contains(t[len(typ)+1:], PATH_SEPARATOR) {
            continue
        }
        fc++
@@ -303,11 +349,13 @@ func isObject(typ string, types []string) (bool, string) {
 }
 
 type Object struct {
-    Name string `json:"name"`
-    Id bool `json:"id,omitempty"`
-    Named bool `json:"named,omitempty"`
-    Typed bool `json:"typed,omitempty"`
-    Fields map[string]Field `json:"fields"`
+    Name string `json:"name"`
+    Id bool `json:"id,omitempty"`
+    Named bool `json:"named,omitempty"`
+    Typed bool `json:"typed,omitempty"`
+    SubTypes *[]string `json:"subtypes,omitempty"`
+    SubTypeOf *string `json:"subtypeof,omitempty"`
+    Fields map[string]Field `json:"fields"`
 }
 
 type Field struct {
@@ -337,7 +385,7 @@ func (f Field) TypeLine(objects map[string]Object) string {
         t = "map[int]*" + objects[*f.ElementType].Name
     }
     if f.Type == "object" {
-        t = f.Name
+        t = "*" + f.Name
     }
     j := fmt.Sprintf("`json:\"%s\" legend:\"%s\"`", strcase.ToLowerCamel(f.Name), f.Legend)
     return fmt.Sprintf("%s %s%s %s", n, m, t, j)
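With the TypeLine change above, object-valued fields in the generated model become pointers. A rough illustration of the kind of struct-field line TypeLine emits, using a made-up field name (the real format string is "%s %s%s %s": name, optional []/map prefix, type, then the json/legend tag):

package main

import (
    "fmt"

    "github.com/iancoleman/strcase"
)

func main() {
    // Hypothetical field: "site_id", an int that only appears in the base export.
    name := strcase.ToCamel("site_id") // SiteId
    tag := fmt.Sprintf("`json:%q legend:%q`", strcase.ToLowerCamel("site_id"), "base")
    fmt.Printf("%s %s %s\n", name, "int", tag)
    // Output: SiteId int `json:"siteId" legend:"base"`
}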
@@ -351,7 +399,7 @@ func (f Field) StartAction() string {
     }
 
     if f.Type == "object" {
-        p := fmt.Sprintf("v := %s{}\nv.Parse(d, &t)", f.Name)
+        p := fmt.Sprintf("v, _ := parse%s(d, &t)", f.Name)
         if !f.Multiple {
             return fmt.Sprintf("%s\nobj.%s = v", p, n)
         } else {
@@ -361,7 +409,7 @@ func (f Field) StartAction() string {
 
     if f.Type == "array" || f.Type == "map" {
         el := strcase.ToCamel(*f.ElementType)
-        gen := fmt.Sprintf("New%s", el)
+        gen := fmt.Sprintf("parse%s", el)
 
         if f.Type == "array" {
             return fmt.Sprintf("parseArray(d, &obj.%s, %s)", f.Name, gen)
@@ -409,25 +457,27 @@ package df
 import (
     "encoding/xml"
     "strconv"
+    "github.com/iancoleman/strcase"
 )
 
 {{- range $name, $obj := .Objects }}
 type {{ $obj.Name }} struct {
     {{- range $fname, $field := $obj.Fields }}
+    {{- if not (and (eq $fname "type") (not (not $obj.SubTypes))) }}
     {{ $field.TypeLine $.Objects }}
+    {{- end }}
     {{- end }}
+    {{- if not (not $obj.SubTypes) }}
+    Details any
+    {{- end }}
 }
 
 func New{{ $obj.Name }}() *{{ $obj.Name }} { return &{{ $obj.Name }}{} }
 {{- if $obj.Id }}
 func (x *{{ $obj.Name }}) Id() int { return x.Id_ }
 {{- end }}
 {{- if $obj.Named }}
 func (x *{{ $obj.Name }}) Name() string { return x.Name_ }
 {{- end }}
 
 
 
 {{- end }}
 
 // Parser
@@ -438,13 +488,15 @@ func n(d []byte) int {
 }
 
 {{- range $name, $obj := .Objects }}
-func (obj *{{ $obj.Name }}) Parse(d *xml.Decoder, start *xml.StartElement) error {
-    var data []byte
-
+func parse{{ $obj.Name }}(d *xml.Decoder, start *xml.StartElement) (*{{ $obj.Name }}, error) {
+    var (
+        obj = {{ $obj.Name }}{}
+        data []byte
+    )
     for {
         tok, err := d.Token()
         if err != nil {
-            return err
+            return nil, err
         }
         switch t := tok.(type) {
         case xml.StartElement:
@@ -463,13 +515,29 @@ func (obj *{{ $obj.Name }}) Parse(d *xml.Decoder, start *xml.StartElement) error
 
         case xml.EndElement:
             if t.Name.Local == start.Name.Local {
-                return nil
+                return &obj, nil
             }
 
             switch t.Name.Local {
             {{- range $fname, $field := $obj.Fields }}
             case "{{ $fname }}":
+                {{- if and (eq $fname "type") (not (not $obj.SubTypes)) }}
+                var err error
+                switch strcase.ToCamel(string(data)) {
+                {{- range $sub := $obj.SubTypes }}
+                case "{{ $sub }}":
+                    obj.Details, err = parse{{ $obj.Name }}{{ $sub }}(d, start)
+                {{- end }}
+                default:
+                    d.Skip()
+                }
+                if err != nil {
+                    return nil, err
+                }
+                return &obj, nil
+                {{- else }}
                 {{ $field.EndAction }}
+                {{- end }}
             {{- end }}
             default:
                 // fmt.Println("unknown field", t.Name.Local)
@@ -477,6 +545,5 @@ func (obj *{{ $obj.Name }}) Parse(d *xml.Decoder, start *xml.StartElement) error
             }
         }
     }
-
 {{- end }}
 `))
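Taken together, the template now emits a free parseX function per object instead of a Parse method, and objects with subtypes get a Details field: when the type element is read, the parser switches on strcase.ToCamel of its text and hands the decoder to a per-subtype parser. The sketch below shows roughly what such generated code could look like for a hypothetical object with one subtype; the field set, the n helper body, and the CharData handling are assumptions, and the real output lives in df/model.go:

// Hand-written illustration of plausible generated output, not copied from df/model.go.
package sketch

import (
    "encoding/xml"
    "strconv"

    "github.com/iancoleman/strcase"
)

// n is the byte-slice-to-int helper the template relies on (assumed body).
func n(d []byte) int {
    v, _ := strconv.Atoi(string(d))
    return v
}

// Hypothetical subtype object and parser, standing in for generated code.
type HistoricalEventHfDied struct {
    Hfid int `json:"hfid" legend:"both"`
}

func parseHistoricalEventHfDied(d *xml.Decoder, start *xml.StartElement) (*HistoricalEventHfDied, error) {
    // Same token loop as below, omitted for brevity.
    return &HistoricalEventHfDied{}, nil
}

type HistoricalEvent struct {
    Id_     int `json:"id" legend:"both"`
    Year    int `json:"year" legend:"both"`
    Details any
}

func (x *HistoricalEvent) Id() int { return x.Id_ }

func parseHistoricalEvent(d *xml.Decoder, start *xml.StartElement) (*HistoricalEvent, error) {
    var (
        obj  = HistoricalEvent{}
        data []byte
    )
    for {
        tok, err := d.Token()
        if err != nil {
            return nil, err
        }
        switch t := tok.(type) {
        case xml.CharData:
            data = append(data[:0], t...) // collect element text (assumption)
        case xml.EndElement:
            if t.Name.Local == start.Name.Local {
                return &obj, nil
            }
            switch t.Name.Local {
            case "id":
                obj.Id_ = n(data)
            case "year":
                obj.Year = n(data)
            case "type":
                var err error
                switch strcase.ToCamel(string(data)) {
                case "HfDied":
                    obj.Details, err = parseHistoricalEventHfDied(d, start)
                default:
                    d.Skip()
                }
                if err != nil {
                    return nil, err
                }
                return &obj, nil
            }
        }
    }
}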
df/model.go: 11037 lines changed (diff suppressed because it is too large)

df/parse.go: 19 lines changed
@@ -9,7 +9,10 @@ import (
 
 // type DfWorld struct{}
 
-// func (x *DfWorld) Parse(d *xml.Decoder, start *xml.StartElement) {}
+// func parseDfWorld(d *xml.Decoder, start *xml.StartElement) (*DfWorld, error) { return nil, nil }
+
+func (e *HistoricalEvent) Name() string { return "" }
+func (e *HistoricalEventCollection) Name() string { return "" }
 
 func Parse(file string) (*DfWorld, error) {
     xmlFile, err := os.Open(file)
@@ -31,9 +34,7 @@ func Parse(file string) (*DfWorld, error) {
         switch t := tok.(type) {
         case xml.StartElement:
             if t.Name.Local == "df_world" {
-                w := DfWorld{}
-                w.Parse(d, &t)
-                return &w, nil
+                return parseDfWorld(d, &t)
             }
         }
     }
@@ -53,7 +54,7 @@ type IdentifiableParsable interface {
     Parsable
 }
 
-func parseArray[T Parsable](d *xml.Decoder, dest *[]T, creator func() T) {
+func parseArray[T any](d *xml.Decoder, dest *[]T, creator func(*xml.Decoder, *xml.StartElement) (T, error)) {
     for {
         tok, err := d.Token()
         if err != nil {
@@ -61,8 +62,7 @@ func parseArray[T Parsable](d *xml.Decoder, dest *[]T, creator func() T) {
         }
         switch t := tok.(type) {
         case xml.StartElement:
-            x := creator()
-            x.Parse(d, &t)
+            x, _ := creator(d, &t)
             *dest = append(*dest, x)
 
         case xml.EndElement:
@@ -71,7 +71,7 @@ func parseArray[T Parsable](d *xml.Decoder, dest *[]T, creator func() T) {
     }
 }
 
-func parseMap[T IdentifiableParsable](d *xml.Decoder, dest *map[int]T, creator func() T) {
+func parseMap[T Identifiable](d *xml.Decoder, dest *map[int]T, creator func(*xml.Decoder, *xml.StartElement) (T, error)) {
     for {
         tok, err := d.Token()
         if err != nil {
@@ -79,8 +79,7 @@ func parseMap[T IdentifiableParsable](d *xml.Decoder, dest *map[int]T, creator func() T) {
         }
         switch t := tok.(type) {
         case xml.StartElement:
-            x := creator()
-            x.Parse(d, &t)
+            x, _ := creator(d, &t)
             (*dest)[x.Id()] = x
 
         case xml.EndElement:
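parseArray and parseMap now take the generated parseX functions directly, as a creator of the form func(*xml.Decoder, *xml.StartElement) (T, error), instead of a zero-value constructor plus a Parse method. A compact, self-contained sketch of both helpers as the diff implies them; the Identifiable constraint and the error/end-of-element handling are filled in by assumption:

package sketch

import "encoding/xml"

// Identifiable is assumed to be what is left of IdentifiableParsable once
// Parsable is no longer needed.
type Identifiable interface {
    Id() int
}

// parseArray appends one element per child start element, using the
// generated parseX function passed in as creator.
func parseArray[T any](d *xml.Decoder, dest *[]T, creator func(*xml.Decoder, *xml.StartElement) (T, error)) {
    for {
        tok, err := d.Token()
        if err != nil {
            return
        }
        switch t := tok.(type) {
        case xml.StartElement:
            x, _ := creator(d, &t)
            *dest = append(*dest, x)
        case xml.EndElement:
            return
        }
    }
}

// parseMap does the same but keys the result by each element's id.
func parseMap[T Identifiable](d *xml.Decoder, dest *map[int]T, creator func(*xml.Decoder, *xml.StartElement) (T, error)) {
    for {
        tok, err := d.Token()
        if err != nil {
            return
        }
        switch t := tok.(type) {
        case xml.StartElement:
            x, _ := creator(d, &t)
            (*dest)[x.Id()] = x
        case xml.EndElement:
            return
        }
    }
}

A generated StartAction then calls them as, for example, parseArray(d, &obj.HistoricalEvents, parseHistoricalEvent), matching the format strings in the generator above (the field and parser names here are illustrative).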
main.go: 109 lines changed

@@ -8,6 +8,7 @@ import (
     "legendsbrowser/server"
     "net/http"
+    _ "net/http/pprof"
     "os"
     "runtime"
 
     "github.com/gorilla/mux"
@@ -17,12 +18,6 @@
 var world *df.DfWorld
 
 func main() {
-
-    defer profile.Start(profile.MemProfile).Stop()
-    go func() {
-        http.ListenAndServe(":8081", nil)
-    }()
-
     a := flag.String("a", "", "analyze a file")
     g := flag.Bool("g", false, "generate model")
     f := flag.String("f", "", "open a file")
@@ -37,63 +32,63 @@ func main() {
     }
 
     if len(*f) > 0 {
+        defer profile.Start(profile.MemProfile).Stop()
+        go func() {
+            http.ListenAndServe(":8081", nil)
+        }()
+
         w, err := df.Parse(*f)
         if err != nil {
             fmt.Println(err)
             os.Exit(1)
         }
 
         // file, _ := json.MarshalIndent(w, "", " ")
         // _ = ioutil.WriteFile("world.json", file, 0644)
 
         world = w
+
+        fmt.Println("Hallo Welt!")
+        runtime.GC()
+        // world.Process()
+
+        // model.ListOtherElements("world", &[]*model.World{&world})
+        // model.ListOtherElements("region", &world.Regions)
+        // model.ListOtherElements("underground regions", &world.UndergroundRegions)
+        // model.ListOtherElements("landmasses", &world.Landmasses)
+        // model.ListOtherElements("sites", &world.Sites)
+        // model.ListOtherElements("world constructions", &world.WorldConstructions)
+        // model.ListOtherElements("artifacts", &world.Artifacts)
+        // model.ListOtherElements("entities", &world.Entities)
+        // model.ListOtherElements("hf", &world.HistoricalFigures)
+        // model.ListOtherElements("events", &world.HistoricalEvents)
+        // model.ListOtherElements("collections", &world.HistoricalEventCollections)
+        // model.ListOtherElements("era", &world.HistoricalEras)
+        // model.ListOtherElements("danceForm", &world.DanceForms)
+        // model.ListOtherElements("musicalForm", &world.MusicalForms)
+        // model.ListOtherElements("poeticForm", &world.PoeticForms)
+        // model.ListOtherElements("written", &world.WrittenContents)
+
+        router := mux.NewRouter().StrictSlash(true)
+
+        server.RegisterResource(router, "region", world.Regions)
+        // server.RegisterResource(router, "undergroundRegion", world.UndergroundRegions)
+        server.RegisterResource(router, "landmass", world.Landmasses)
+        server.RegisterResource(router, "site", world.Sites)
+        server.RegisterResource(router, "worldConstruction", world.WorldConstructions)
+        server.RegisterResource(router, "artifact", world.Artifacts)
+        server.RegisterResource(router, "hf", world.HistoricalFigures)
+        server.RegisterResource(router, "collection", world.HistoricalEventCollections)
+        server.RegisterResource(router, "entity", world.Entities)
+        server.RegisterResource(router, "event", world.HistoricalEvents)
+        // server.RegisterResource(router, "era", world.HistoricalEras)
+        server.RegisterResource(router, "danceForm", world.DanceForms)
+        server.RegisterResource(router, "musicalForm", world.MusicalForms)
+        server.RegisterResource(router, "poeticForm", world.PoeticForms)
+        // server.RegisterResource(router, "written", world.WrittenContents)
+
+        spa := server.SpaHandler{StaticPath: "frontend/dist/legendsbrowser", IndexPath: "index.html"}
+        router.PathPrefix("/").Handler(spa)
+
+        fmt.Println("Serving at :8080")
+        http.ListenAndServe(":8080", router)
     }
-
-    fmt.Println("Hallo Welt!")
-
-    // world.Load("region1-00152-01-01-legends_plus.xml")
-    // world.Load("region2-00195-01-01-legends.xml")
-    // world.Load("Agora-00033-01-01-legends_plus.xml")
-    runtime.GC()
-    // world.Process()
-
-    // model.ListOtherElements("world", &[]*model.World{&world})
-    // model.ListOtherElements("region", &world.Regions)
-    // model.ListOtherElements("underground regions", &world.UndergroundRegions)
-    // model.ListOtherElements("landmasses", &world.Landmasses)
-    // model.ListOtherElements("sites", &world.Sites)
-    // model.ListOtherElements("world constructions", &world.WorldConstructions)
-    // model.ListOtherElements("artifacts", &world.Artifacts)
-    // model.ListOtherElements("entities", &world.Entities)
-    // model.ListOtherElements("hf", &world.HistoricalFigures)
-    // model.ListOtherElements("events", &world.HistoricalEvents)
-    // model.ListOtherElements("collections", &world.HistoricalEventCollections)
-    // model.ListOtherElements("era", &world.HistoricalEras)
-    // model.ListOtherElements("danceForm", &world.DanceForms)
-    // model.ListOtherElements("musicalForm", &world.MusicalForms)
-    // model.ListOtherElements("poeticForm", &world.PoeticForms)
-    // model.ListOtherElements("written", &world.WrittenContents)
-
-    router := mux.NewRouter().StrictSlash(true)
-
-    // server.RegisterResource(router, "region", world.RegionMap)
-    // server.RegisterResource(router, "undergroundRegion", world.UndergroundRegionMap)
-    // server.RegisterResource(router, "landmass", world.LandmassMap)
-    // server.RegisterResource(router, "site", world.SiteMap)
-    // server.RegisterResource(router, "worldConstruction", world.WorldConstructionMap)
-    // server.RegisterResource(router, "artifact", world.ArtifactMap)
-    // server.RegisterResource(router, "hf", world.HistoricalFigureMap)
-    // server.RegisterResource(router, "collection", world.HistoricalEventCollectionMap)
-    // server.RegisterResource(router, "entity", world.EntityMap)
-    // server.RegisterResource(router, "event", world.HistoricalEventMap)
-    // server.RegisterResource(router, "era", world.HistoricalEraMap)
-    // server.RegisterResource(router, "danceForm", world.DanceFormMap)
-    // server.RegisterResource(router, "musicalForm", world.MusicalFormMap)
-    // server.RegisterResource(router, "poeticForm", world.PoeticFormMap)
-    // server.RegisterResource(router, "written", world.WrittenContentMap)
-
-    spa := server.SpaHandler{StaticPath: "frontend/dist/legendsbrowser", IndexPath: "index.html"}
-    router.PathPrefix("/").Handler(spa)
-
-    fmt.Println("Serving at :8080")
-    http.ListenAndServe(":8080", router)
 }