author     Ondrej Fabry <ofabry@cisco.com>    2020-07-17 10:36:28 +0200
committer  Ondrej Fabry <ofabry@cisco.com>    2020-07-17 11:43:41 +0200
commit     d1f24d37bd447b64e402298bb8eb2479681facf9
tree       a3fc21ba730a91d8a402c7a5bf9c614e3677c4fc /binapigen
parent     1548c7e12531e3d055567d761c580a1c7ff0ac40
Improve binapi generator
- simplified Size/Marshal/Unmarshal methods
- replace struc in unions with custom marshal/unmarshal
- fix imports in generated files
- fix mock adapter
- generate RPC service using the low-level stream API (dump calls are generated with either a control ping or a dedicated stream message)
- move examples/binapi to binapi and generate all APIs for the latest release
- add binapigen.Plugin for developing custom generator plugins (see the sketch below)
- optionally generate HTTP handlers (REST API) for RPC services
- add govpp program for browsing VPP API
Change-Id: I092e9ed2b0c17972b3476463c3d4b14dd76ed42b
Signed-off-by: Ondrej Fabry <ofabry@cisco.com>
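
The plugin mechanism mentioned above is what the bundled generators in this change use themselves: gen_rest.go and gen_rpc.go below register via RegisterPlugin("rest", GenerateREST) and RegisterPlugin("rpc", GenerateRPC). A minimal sketch of a custom plugin following the same shape, assuming the Generator/File/GenFile API shown in the diff; the "docs" plugin name and the "_docs.ba.go" suffix are invented for illustration and are not part of this change.

package binapigen

import "path"

func init() {
	// Register the plugin under a custom name (hypothetical).
	RegisterPlugin("docs", GenerateDocs)
}

// GenerateDocs emits one extra Go file next to the regular .ba.go output
// for every generated VPP API file.
func GenerateDocs(gen *Generator, file *File) *GenFile {
	filename := path.Join(file.FilenamePrefix, file.Desc.Name+"_docs.ba.go")
	g := gen.NewGenFile(filename, file.GoImportPath)
	g.P("// Code generated by a custom binapigen plugin. DO NOT EDIT.")
	g.P()
	g.P("package ", file.PackageName)
	return g
}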
Diffstat (limited to 'binapigen')
 binapigen/binapigen.go                          |  572
 binapigen/binapigen_test.go                     |   55
 binapigen/gen_encoding.go                       |  375
 binapigen/gen_helpers.go                        |  348
 binapigen/gen_helpers_test.go                   |  156
 binapigen/gen_rest.go                           |  103
 binapigen/gen_rpc.go                            |  204
 binapigen/generate.go                           | 1518
 binapigen/generate_rpc.go                       |  203
 binapigen/generate_test.go                      |  175
 binapigen/generator.go                          |  425
 binapigen/generator_test.go                     |   76
 binapigen/plugin.go                             |   51
 binapigen/run.go                                |  132
 binapigen/types.go                              |  267
 binapigen/validate.go                           |   67
 binapigen/vppapi.go                             |  211
 binapigen/vppapi/api.go                         |   94
 binapigen/vppapi/api_schema.go                  |   89
 binapigen/vppapi/parse_json.go                  |  210
 binapigen/vppapi/util.go                        |  112
 binapigen/vppapi/vppapi.go (renamed from binapigen/vppapi/parser.go)           |   49
 binapigen/vppapi/vppapi_test.go (renamed from binapigen/vppapi/parser_test.go) |   22
23 files changed, 3014 insertions, 2500 deletions
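
Before the generator sources below, a quick illustration of the first bullet in the commit message: the simplified encoding now emits plain Size/Marshal/Unmarshal methods per message on top of the codec.Buffer helpers, replacing struc. The following is a hedged sketch of roughly what the generated output looks like for a hypothetical message with one u32 and one bool field; the package, type, and field names are invented, and the authoritative templates are in gen_encoding.go below.

package sample

import codec "git.fd.io/govpp.git/codec"

// SampleEnable stands in for a generated message type.
type SampleEnable struct {
	SwIfIndex uint32
	Enable    bool
}

// Size returns the encoded size of the message in bytes.
func (m *SampleEnable) Size() int {
	if m == nil {
		return 0
	}
	var size int
	size += 4 // m.SwIfIndex
	size += 1 // m.Enable
	return size
}

// Marshal encodes the message into b, or into a newly allocated buffer if b is nil.
func (m *SampleEnable) Marshal(b []byte) ([]byte, error) {
	var buf *codec.Buffer
	if b == nil {
		buf = codec.NewBuffer(make([]byte, m.Size()))
	} else {
		buf = codec.NewBuffer(b)
	}
	buf.EncodeUint32(uint32(m.SwIfIndex))
	buf.EncodeBool(m.Enable)
	return buf.Bytes(), nil
}

// Unmarshal decodes the message fields from b.
func (m *SampleEnable) Unmarshal(b []byte) error {
	buf := codec.NewBuffer(b)
	m.SwIfIndex = buf.DecodeUint32()
	m.Enable = buf.DecodeBool()
	return nil
}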
diff --git a/binapigen/binapigen.go b/binapigen/binapigen.go index c5a976b..1b4c7e5 100644 --- a/binapigen/binapigen.go +++ b/binapigen/binapigen.go @@ -17,85 +17,123 @@ package binapigen import ( "fmt" "path" - "sort" "strings" "git.fd.io/govpp.git/binapigen/vppapi" ) +// generatedCodeVersion indicates a version of the generated code. +// It is incremented whenever an incompatibility between the generated code and +// GoVPP api package is introduced; the generated code references +// a constant, api.GoVppAPIPackageIsVersionN (where N is generatedCodeVersion). +const generatedCodeVersion = 2 + +// file options +const ( + optFileVersion = "version" +) + type File struct { - vppapi.File + Desc vppapi.File - Generate bool + Generate bool + FilenamePrefix string + PackageName GoPackageName + GoImportPath GoImportPath - PackageName string - Imports []string + Version string + Imports []string - Enums []*Enum - Unions []*Union - Structs []*Struct - Aliases []*Alias - Messages []*Message + Enums []*Enum + Unions []*Union + Structs []*Struct + Aliases []*Alias - imports map[string]string - refmap map[string]string + Messages []*Message + Service *Service } -func newFile(gen *Generator, apifile *vppapi.File) (*File, error) { +func newFile(gen *Generator, apifile *vppapi.File, packageName GoPackageName, importPath GoImportPath) (*File, error) { file := &File{ - File: *apifile, - PackageName: sanitizedName(apifile.Name), - imports: make(map[string]string), - refmap: make(map[string]string), + Desc: *apifile, + PackageName: packageName, + GoImportPath: importPath, + } + if apifile.Options != nil { + file.Version = apifile.Options[optFileVersion] } - sortFileObjects(&file.File) + file.FilenamePrefix = path.Join(gen.opts.OutputDir, file.Desc.Name) for _, imp := range apifile.Imports { file.Imports = append(file.Imports, normalizeImport(imp)) } - for _, enum := range apifile.EnumTypes { - file.Enums = append(file.Enums, newEnum(gen, file, enum)) + + for _, enumType := range apifile.EnumTypes { + file.Enums = append(file.Enums, newEnum(gen, file, enumType)) } - for _, alias := range apifile.AliasTypes { - file.Aliases = append(file.Aliases, newAlias(gen, file, alias)) + for _, aliasType := range apifile.AliasTypes { + file.Aliases = append(file.Aliases, newAlias(gen, file, aliasType)) } for _, structType := range apifile.StructTypes { file.Structs = append(file.Structs, newStruct(gen, file, structType)) } - for _, union := range apifile.UnionTypes { - file.Unions = append(file.Unions, newUnion(gen, file, union)) + for _, unionType := range apifile.UnionTypes { + file.Unions = append(file.Unions, newUnion(gen, file, unionType)) } + for _, msg := range apifile.Messages { file.Messages = append(file.Messages, newMessage(gen, file, msg)) } + if apifile.Service != nil { + file.Service = newService(gen, file, *apifile.Service) + } + + for _, t := range file.Aliases { + if err := t.resolveDependencies(gen); err != nil { + return nil, err + } + } + for _, t := range file.Structs { + if err := t.resolveDependencies(gen); err != nil { + return nil, err + } + } + for _, t := range file.Unions { + if err := t.resolveDependencies(gen); err != nil { + return nil, err + } + } + for _, m := range file.Messages { + if err := m.resolveDependencies(gen); err != nil { + return nil, err + } + } + if file.Service != nil { + for _, rpc := range file.Service.RPCs { + if err := rpc.resolveMessages(gen); err != nil { + return nil, err + } + } + } return file, nil } -func (file *File) isTypes() bool { - return 
strings.HasSuffix(file.File.Name, "_types") +func (file *File) isTypesFile() bool { + return strings.HasSuffix(file.Desc.Name, "_types") } func (file *File) hasService() bool { return file.Service != nil && len(file.Service.RPCs) > 0 } -func (file *File) addRef(typ string, name string, ref interface{}) { - apiName := toApiType(name) - if _, ok := file.refmap[apiName]; ok { - logf("%s type %v already in refmap", typ, apiName) - return - } - file.refmap[apiName] = name -} - func (file *File) importedFiles(gen *Generator) []*File { var files []*File for _, imp := range file.Imports { impFile, ok := gen.FilesByName[imp] if !ok { - logf("file %s import %s not found API files", file.Name, imp) + logf("file %s import %s not found API files", file.Desc.Name, imp) continue } files = append(files, impFile) @@ -103,115 +141,102 @@ func (file *File) importedFiles(gen *Generator) []*File { return files } -func (file *File) loadTypeImports(gen *Generator, typeFiles []*File) { - if len(typeFiles) == 0 { - return - } - for _, t := range file.Structs { - for _, imp := range typeFiles { - if _, ok := file.imports[t.Name]; ok { - break - } - for _, at := range imp.File.StructTypes { - if at.Name != t.Name { - continue - } - if len(at.Fields) != len(t.Fields) { - continue - } - file.imports[t.Name] = imp.PackageName - } - } - } - for _, t := range file.AliasTypes { - for _, imp := range typeFiles { - if _, ok := file.imports[t.Name]; ok { - break - } - for _, at := range imp.File.AliasTypes { - if at.Name != t.Name { - continue - } - if at.Length != t.Length { - continue - } - if at.Type != t.Type { - continue - } - file.imports[t.Name] = imp.PackageName - } +func (file *File) dependsOnFile(gen *Generator, dep string) bool { + for _, imp := range file.Imports { + if imp == dep { + return true } - } - for _, t := range file.EnumTypes { - for _, imp := range typeFiles { - if _, ok := file.imports[t.Name]; ok { - break - } - for _, at := range imp.File.EnumTypes { - if at.Name != t.Name { - continue - } - if at.Type != t.Type { - continue - } - file.imports[t.Name] = imp.PackageName - } + impFile, ok := gen.FilesByName[imp] + if ok && impFile.dependsOnFile(gen, dep) { + return true } } - for _, t := range file.UnionTypes { - for _, imp := range typeFiles { - if _, ok := file.imports[t.Name]; ok { - break - } - for _, at := range imp.File.UnionTypes { - if at.Name != t.Name { - continue - } - file.imports[t.Name] = imp.PackageName - /*if gen.ImportTypes { - imp.Generate = true - }*/ - } - } + return false +} + +func normalizeImport(imp string) string { + imp = path.Base(imp) + if idx := strings.Index(imp, "."); idx >= 0 { + imp = imp[:idx] } + return imp +} + +const ( + enumFlagSuffix = "_flags" +) + +func isEnumFlag(enum *Enum) bool { + return strings.HasSuffix(enum.Name, enumFlagSuffix) } type Enum struct { vppapi.EnumType - GoName string + GoIdent } func newEnum(gen *Generator, file *File, apitype vppapi.EnumType) *Enum { typ := &Enum{ EnumType: apitype, - GoName: camelCaseName(apitype.Name), + GoIdent: GoIdent{ + GoName: camelCaseName(apitype.Name), + GoImportPath: file.GoImportPath, + }, } - gen.enumsByName[fmt.Sprintf("%s.%s", file.Name, typ.Name)] = typ - file.addRef("enum", typ.Name, typ) + gen.enumsByName[typ.Name] = typ return typ } type Alias struct { vppapi.AliasType - GoName string + GoIdent + + TypeBasic *string + TypeStruct *Struct + TypeUnion *Union } func newAlias(gen *Generator, file *File, apitype vppapi.AliasType) *Alias { typ := &Alias{ AliasType: apitype, - GoName: 
camelCaseName(apitype.Name), + GoIdent: GoIdent{ + GoName: camelCaseName(apitype.Name), + GoImportPath: file.GoImportPath, + }, } - gen.aliasesByName[fmt.Sprintf("%s.%s", file.Name, typ.Name)] = typ - file.addRef("alias", typ.Name, typ) + gen.aliasesByName[typ.Name] = typ return typ } +func (a *Alias) resolveDependencies(gen *Generator) error { + if err := a.resolveType(gen); err != nil { + return fmt.Errorf("unable to resolve field: %w", err) + } + return nil +} + +func (a *Alias) resolveType(gen *Generator) error { + if _, ok := BaseTypesGo[a.Type]; ok { + return nil + } + typ := fromApiType(a.Type) + if t, ok := gen.structsByName[typ]; ok { + a.TypeStruct = t + return nil + } + if t, ok := gen.unionsByName[typ]; ok { + a.TypeUnion = t + return nil + } + return fmt.Errorf("unknown type: %q", a.Type) +} + type Struct struct { vppapi.StructType - GoName string + GoIdent Fields []*Field } @@ -219,22 +244,33 @@ type Struct struct { func newStruct(gen *Generator, file *File, apitype vppapi.StructType) *Struct { typ := &Struct{ StructType: apitype, - GoName: camelCaseName(apitype.Name), + GoIdent: GoIdent{ + GoName: camelCaseName(apitype.Name), + GoImportPath: file.GoImportPath, + }, } + gen.structsByName[typ.Name] = typ for _, fieldType := range apitype.Fields { field := newField(gen, file, fieldType) field.ParentStruct = typ typ.Fields = append(typ.Fields, field) } - gen.structsByName[fmt.Sprintf("%s.%s", file.Name, typ.Name)] = typ - file.addRef("struct", typ.Name, typ) return typ } +func (m *Struct) resolveDependencies(gen *Generator) (err error) { + for _, field := range m.Fields { + if err := field.resolveDependencies(gen); err != nil { + return fmt.Errorf("unable to resolve for struct %s: %w", m.Name, err) + } + } + return nil +} + type Union struct { vppapi.UnionType - GoName string + GoIdent Fields []*Field } @@ -242,32 +278,96 @@ type Union struct { func newUnion(gen *Generator, file *File, apitype vppapi.UnionType) *Union { typ := &Union{ UnionType: apitype, - GoName: camelCaseName(apitype.Name), + GoIdent: GoIdent{ + GoName: camelCaseName(apitype.Name), + GoImportPath: file.GoImportPath, + }, } - gen.unionsByName[fmt.Sprintf("%s.%s", file.Name, typ.Name)] = typ + gen.unionsByName[typ.Name] = typ for _, fieldType := range apitype.Fields { field := newField(gen, file, fieldType) field.ParentUnion = typ typ.Fields = append(typ.Fields, field) } - file.addRef("union", typ.Name, typ) return typ } +func (m *Union) resolveDependencies(gen *Generator) (err error) { + for _, field := range m.Fields { + if err := field.resolveDependencies(gen); err != nil { + return err + } + } + return nil +} + +// msgType determines message header fields +type msgType int + +const ( + msgTypeBase msgType = iota // msg_id + msgTypeRequest // msg_id, client_index, context + msgTypeReply // msg_id, context + msgTypeEvent // msg_id, client_index +) + +func apiMsgType(t msgType) GoIdent { + switch t { + case msgTypeRequest: + return govppApiPkg.Ident("RequestMessage") + case msgTypeReply: + return govppApiPkg.Ident("ReplyMessage") + case msgTypeEvent: + return govppApiPkg.Ident("EventMessage") + default: + return govppApiPkg.Ident("OtherMessage") + } +} + +// message fields +const ( + fieldMsgID = "_vl_msg_id" + fieldClientIndex = "client_index" + fieldContext = "context" + fieldRetval = "retval" +) + +// field options +const ( + optFieldDefault = "default" +) + type Message struct { vppapi.Message - GoName string + CRC string + + GoIdent Fields []*Field + + msgType msgType } func newMessage(gen *Generator, file 
*File, apitype vppapi.Message) *Message { msg := &Message{ Message: apitype, - GoName: camelCaseName(apitype.Name), + CRC: strings.TrimPrefix(apitype.CRC, "0x"), + GoIdent: newGoIdent(file, apitype.Name), } + gen.messagesByName[apitype.Name] = msg + n := 0 for _, fieldType := range apitype.Fields { + // skip internal fields + switch strings.ToLower(fieldType.Name) { + case fieldMsgID: + continue + case fieldClientIndex, fieldContext: + if n == 0 { + continue + } + } + n++ field := newField(gen, file, fieldType) field.ParentMessage = msg msg.Fields = append(msg.Fields, field) @@ -275,21 +375,71 @@ func newMessage(gen *Generator, file *File, apitype vppapi.Message) *Message { return msg } +func (m *Message) resolveDependencies(gen *Generator) (err error) { + if m.msgType, err = getMsgType(m.Message); err != nil { + return err + } + for _, field := range m.Fields { + if err := field.resolveDependencies(gen); err != nil { + return err + } + } + return nil +} + +func getMsgType(m vppapi.Message) (msgType, error) { + if len(m.Fields) == 0 { + return msgType(0), fmt.Errorf("message %s has no fields", m.Name) + } + typ := msgTypeBase + wasClientIndex := false + for i, field := range m.Fields { + if i == 0 { + if field.Name != fieldMsgID { + return msgType(0), fmt.Errorf("message %s is missing ID field", m.Name) + } + } else if i == 1 { + if field.Name == fieldClientIndex { + // "client_index" as the second member, + // this might be an event message or a request + typ = msgTypeEvent + wasClientIndex = true + } else if field.Name == fieldContext { + // reply needs "context" as the second member + typ = msgTypeReply + } + } else if i == 2 { + if wasClientIndex && field.Name == fieldContext { + // request needs "client_index" as the second member + // and "context" as the third member + typ = msgTypeRequest + } + } + } + return typ, nil +} + type Field struct { vppapi.Field GoName string - // Field parent + DefaultValue interface{} + + // Reference to actual type of this field + TypeEnum *Enum + TypeAlias *Alias + TypeStruct *Struct + TypeUnion *Union + + // Parent in which this field is declared ParentMessage *Message ParentStruct *Struct ParentUnion *Union - // Type reference - Enum *Enum - Alias *Alias - Struct *Struct - Union *Union + // Field reference for fields determining size + FieldSizeOf *Field + FieldSizeFrom *Field } func newField(gen *Generator, file *File, apitype vppapi.Field) *Field { @@ -297,64 +447,134 @@ func newField(gen *Generator, file *File, apitype vppapi.Field) *Field { Field: apitype, GoName: camelCaseName(apitype.Name), } + if apitype.Meta != nil { + if val, ok := apitype.Meta[optFieldDefault]; ok { + typ.DefaultValue = val + } + } return typ } -type Service = vppapi.Service -type RPC = vppapi.RPC - -func sortFileObjects(file *vppapi.File) { - // sort imports - sort.SliceStable(file.Imports, func(i, j int) bool { - return file.Imports[i] < file.Imports[j] - }) - // sort enum types - sort.SliceStable(file.EnumTypes, func(i, j int) bool { - return file.EnumTypes[i].Name < file.EnumTypes[j].Name - }) - // sort alias types - sort.Slice(file.AliasTypes, func(i, j int) bool { - return file.AliasTypes[i].Name < file.AliasTypes[j].Name - }) - // sort struct types - sort.SliceStable(file.StructTypes, func(i, j int) bool { - return file.StructTypes[i].Name < file.StructTypes[j].Name - }) - // sort union types - sort.SliceStable(file.UnionTypes, func(i, j int) bool { - return file.UnionTypes[i].Name < file.UnionTypes[j].Name - }) - // sort messages - sort.SliceStable(file.Messages, 
func(i, j int) bool { - return file.Messages[i].Name < file.Messages[j].Name - }) - // sort services - if file.Service != nil { - sort.Slice(file.Service.RPCs, func(i, j int) bool { - // dumps first - if file.Service.RPCs[i].Stream != file.Service.RPCs[j].Stream { - return file.Service.RPCs[i].Stream +func (f *Field) resolveDependencies(gen *Generator) error { + if err := f.resolveType(gen); err != nil { + return fmt.Errorf("unable to resolve field type: %w", err) + } + if err := f.resolveFields(gen); err != nil { + return fmt.Errorf("unable to resolve fields: %w", err) + } + return nil +} + +func (f *Field) resolveType(gen *Generator) error { + if _, ok := BaseTypesGo[f.Type]; ok { + return nil + } + typ := fromApiType(f.Type) + if t, ok := gen.structsByName[typ]; ok { + f.TypeStruct = t + return nil + } + if t, ok := gen.enumsByName[typ]; ok { + f.TypeEnum = t + return nil + } + if t, ok := gen.aliasesByName[typ]; ok { + f.TypeAlias = t + return nil + } + if t, ok := gen.unionsByName[typ]; ok { + f.TypeUnion = t + return nil + } + return fmt.Errorf("unknown type: %q", f.Type) +} + +func (f *Field) resolveFields(gen *Generator) error { + var fields []*Field + if f.ParentMessage != nil { + fields = f.ParentMessage.Fields + } else if f.ParentStruct != nil { + fields = f.ParentStruct.Fields + } + if f.SizeFrom != "" { + for _, field := range fields { + if field.Name == f.SizeFrom { + f.FieldSizeFrom = field + break + } + } + } else { + for _, field := range fields { + if field.SizeFrom == f.Name { + f.FieldSizeOf = field + break } - return file.Service.RPCs[i].RequestMsg < file.Service.RPCs[j].RequestMsg - }) + } } + return nil } -func sanitizedName(name string) string { - switch name { - case "interface": - return "interfaces" - case "map": - return "maps" - default: - return name +type Service struct { + vppapi.Service + + RPCs []*RPC +} + +func newService(gen *Generator, file *File, apitype vppapi.Service) *Service { + svc := &Service{ + Service: apitype, } + for _, rpc := range apitype.RPCs { + svc.RPCs = append(svc.RPCs, newRpc(file, svc, rpc)) + } + return svc } -func normalizeImport(imp string) string { - imp = path.Base(imp) - if idx := strings.Index(imp, "."); idx >= 0 { - imp = imp[:idx] +const ( + serviceNoReply = "null" +) + +type RPC struct { + VPP vppapi.RPC + + GoName string + + Service *Service + + MsgRequest *Message + MsgReply *Message + MsgStream *Message +} + +func newRpc(file *File, service *Service, apitype vppapi.RPC) *RPC { + rpc := &RPC{ + VPP: apitype, + GoName: camelCaseName(apitype.Request), + Service: service, } - return imp + return rpc +} + +func (rpc *RPC) resolveMessages(gen *Generator) error { + msg, ok := gen.messagesByName[rpc.VPP.Request] + if !ok { + return fmt.Errorf("rpc %v: no message for request type %v", rpc.GoName, rpc.VPP.Request) + } + rpc.MsgRequest = msg + + if rpc.VPP.Reply != "" && rpc.VPP.Reply != serviceNoReply { + msg, ok := gen.messagesByName[rpc.VPP.Reply] + if !ok { + return fmt.Errorf("rpc %v: no message for reply type %v", rpc.GoName, rpc.VPP.Reply) + } + rpc.MsgReply = msg + } + if rpc.VPP.StreamMsg != "" { + msg, ok := gen.messagesByName[rpc.VPP.StreamMsg] + if !ok { + return fmt.Errorf("rpc %v: no message for stream type %v", rpc.GoName, rpc.VPP.StreamMsg) + } + rpc.MsgStream = msg + } + return nil } diff --git a/binapigen/binapigen_test.go b/binapigen/binapigen_test.go new file mode 100644 index 0000000..2fbd163 --- /dev/null +++ b/binapigen/binapigen_test.go @@ -0,0 +1,55 @@ +// Copyright (c) 2020 Cisco and/or its affiliates. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package binapigen + +import ( + "testing" + + . "github.com/onsi/gomega" + + "git.fd.io/govpp.git/binapigen/vppapi" +) + +func TestGenerator(t *testing.T) { + tests := []struct { + name string + file *vppapi.File + expectPackage string + }{ + {name: "vpe", file: &vppapi.File{ + Name: "vpe", + Path: "/usr/share/vpp/api/core/vpe.api.json", + CRC: "0x12345678", + }, + expectPackage: "vpe", + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + RegisterTestingT(t) + + apifiles := []*vppapi.File{test.file} + + gen, err := New(Options{ + ImportPrefix: "test", + }, apifiles, nil) + Expect(err).ToNot(HaveOccurred(), "unexpected generator error: %v", err) + + Expect(gen.Files).To(HaveLen(1)) + Expect(gen.Files[0].PackageName).To(BeEquivalentTo(test.expectPackage)) + Expect(gen.Files[0].GoImportPath).To(BeEquivalentTo("test/" + test.expectPackage)) + }) + } +} diff --git a/binapigen/gen_encoding.go b/binapigen/gen_encoding.go new file mode 100644 index 0000000..1cd3eb3 --- /dev/null +++ b/binapigen/gen_encoding.go @@ -0,0 +1,375 @@ +// Copyright (c) 2020 Cisco and/or its affiliates. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package binapigen + +import ( + "fmt" + "strconv" + "strings" + + "github.com/sirupsen/logrus" +) + +func init() { + //RegisterPlugin("encoding", GenerateEncoding) +} + +func generateMessageSize(g *GenFile, name string, fields []*Field) { + g.P("func (m *", name, ") Size() int {") + g.P("if m == nil { return 0 }") + g.P("var size int") + + sizeBaseType := func(typ, name string, length int, sizefrom string) { + switch typ { + case STRING: + if length > 0 { + g.P("size += ", length, " // ", name) + } else { + g.P("size += 4 + len(", name, ")", " // ", name) + } + default: + var size = BaseTypeSizes[typ] + if sizefrom != "" { + g.P("size += ", size, " * len(", name, ")", " // ", name) + } else { + if length > 0 { + g.P("size += ", size, " * ", length, " // ", name) + } else { + g.P("size += ", size, " // ", name) + } + } + } + } + + lvl := 0 + var sizeFields func(fields []*Field, parentName string) + sizeFields = func(fields []*Field, parentName string) { + lvl++ + defer func() { lvl-- }() + + getFieldName := func(name string) string { + return fmt.Sprintf("%s.%s", parentName, name) + } + + for _, field := range fields { + name := getFieldName(field.GoName) + + var sizeFromName string + if field.FieldSizeFrom != nil { + sizeFromName = getFieldName(field.FieldSizeFrom.GoName) + } + + if _, ok := BaseTypesGo[field.Type]; ok { + sizeBaseType(field.Type, name, field.Length, sizeFromName) + continue + } + + if field.Array { + char := fmt.Sprintf("s%d", lvl) + index := fmt.Sprintf("j%d", lvl) + if field.Length > 0 { + g.P("for ", index, " := 0; ", index, " < ", field.Length, "; ", index, "++ {") + } else if field.FieldSizeFrom != nil { + g.P("for ", index, " := 0; ", index, " < len(", name, "); ", index, "++ {") + } + g.P("var ", char, " ", fieldGoType(g, field)) + g.P("_ = ", char) + g.P("if ", index, " < len(", name, ") { ", char, " = ", name, "[", index, "] }") + name = char + } + + switch { + case field.TypeEnum != nil: + enum := field.TypeEnum + if _, ok := BaseTypesGo[enum.Type]; ok { + sizeBaseType(enum.Type, name, 0, "") + } else { + logrus.Panicf("\t// ??? ENUM %s %s\n", name, enum.Type) + } + case field.TypeAlias != nil: + alias := field.TypeAlias + if typ := alias.TypeStruct; typ != nil { + sizeFields(typ.Fields, name) + } else { + sizeBaseType(alias.Type, name, alias.Length, "") + } + case field.TypeStruct != nil: + typ := field.TypeStruct + sizeFields(typ.Fields, name) + case field.TypeUnion != nil: + union := field.TypeUnion + maxSize := getUnionSize(union) + sizeBaseType("u8", name, maxSize, "") + default: + logrus.Panicf("\t// ??? 
buf[pos] = %s (%s)\n", name, field.Type) + } + + if field.Array { + g.P("}") + } + } + } + sizeFields(fields, "m") + + g.P("return size") + g.P("}") +} + +func encodeBaseType(g *GenFile, typ, name string, length int, sizefrom string) { + isArray := length > 0 || sizefrom != "" + if isArray { + switch typ { + case U8: + g.P("buf.EncodeBytes(", name, "[:], ", length, ")") + return + case I8, I16, U16, I32, U32, I64, U64, F64: + gotype := BaseTypesGo[typ] + if length != 0 { + g.P("for i := 0; i < ", length, "; i++ {") + } else if sizefrom != "" { + g.P("for i := 0; i < len(", name, "); i++ {") + } + g.P("var x ", gotype) + g.P("if i < len(", name, ") { x = ", gotype, "(", name, "[i]) }") + name = "x" + } + } + switch typ { + case I8, U8, I16, U16, I32, U32, I64, U64: + typsize := BaseTypeSizes[typ] + g.P("buf.EncodeUint", typsize*8, "(uint", typsize*8, "(", name, "))") + case F64: + g.P("buf.EncodeFloat64(float64(", name, "))") + case BOOL: + g.P("buf.EncodeBool(", name, ")") + case STRING: + g.P("buf.EncodeString(", name, ", ", length, ")") + default: + logrus.Panicf("// ??? %s %s\n", name, typ) + } + if isArray { + switch typ { + case I8, U8, I16, U16, I32, U32, I64, U64, F64: + g.P("}") + } + } +} + +func encodeFields(g *GenFile, fields []*Field, parentName string, lvl int) { + getFieldName := func(name string) string { + return fmt.Sprintf("%s.%s", parentName, name) + } + + for _, field := range fields { + name := getFieldName(field.GoName) + + encodeField(g, field, name, getFieldName, lvl) + } +} + +func encodeField(g *GenFile, field *Field, name string, getFieldName func(name string) string, lvl int) { + if f := field.FieldSizeOf; f != nil { + if _, ok := BaseTypesGo[field.Type]; ok { + encodeBaseType(g, field.Type, fmt.Sprintf("len(%s)", getFieldName(f.GoName)), field.Length, "") + return + } else { + panic(fmt.Sprintf("failed to encode base type of sizefrom field: %s (%s)", field.Name, field.Type)) + } + } + var sizeFromName string + if field.FieldSizeFrom != nil { + sizeFromName = getFieldName(field.FieldSizeFrom.GoName) + } + + if _, ok := BaseTypesGo[field.Type]; ok { + encodeBaseType(g, field.Type, name, field.Length, sizeFromName) + return + } + + if field.Array { + char := fmt.Sprintf("v%d", lvl) + index := fmt.Sprintf("j%d", lvl) + if field.Length > 0 { + g.P("for ", index, " := 0; ", index, " < ", field.Length, "; ", index, "++ {") + } else if field.SizeFrom != "" { + g.P("for ", index, " := 0; ", index, " < len(", name, "); ", index, "++ {") + } + g.P("var ", char, " ", fieldGoType(g, field)) + g.P("if ", index, " < len(", name, ") { ", char, " = ", name, "[", index, "] }") + name = char + } + + switch { + case field.TypeEnum != nil: + encodeBaseType(g, field.TypeEnum.Type, name, 0, "") + case field.TypeAlias != nil: + alias := field.TypeAlias + if typ := alias.TypeStruct; typ != nil { + encodeFields(g, typ.Fields, name, lvl+1) + } else { + encodeBaseType(g, alias.Type, name, alias.Length, "") + } + case field.TypeStruct != nil: + encodeFields(g, field.TypeStruct.Fields, name, lvl+1) + case field.TypeUnion != nil: + g.P("buf.EncodeBytes(", name, ".", fieldUnionData, "[:], 0)") + default: + logrus.Panicf("\t// ??? 
buf[pos] = %s (%s)\n", name, field.Type) + } + + if field.Array { + g.P("}") + } +} + +func generateMessageMarshal(g *GenFile, name string, fields []*Field) { + g.P("func (m *", name, ") Marshal(b []byte) ([]byte, error) {") + g.P("var buf *", govppCodecPkg.Ident("Buffer")) + g.P("if b == nil {") + g.P("buf = ", govppCodecPkg.Ident("NewBuffer"), "(make([]byte, m.Size()))") + g.P("} else {") + g.P("buf = ", govppCodecPkg.Ident("NewBuffer"), "(b)") + g.P("}") + + encodeFields(g, fields, "m", 0) + + g.P("return buf.Bytes(), nil") + g.P("}") +} + +func decodeBaseType(g *GenFile, typ, orig, name string, length int, sizefrom string, alloc bool) { + isArray := length > 0 || sizefrom != "" + if isArray { + switch typ { + case U8: + g.P("copy(", name, "[:], buf.DecodeBytes(", length, "))") + return + case I8, I16, U16, I32, U32, I64, U64, F64: + if alloc { + var size string + switch { + case length > 0: + size = strconv.Itoa(length) + case sizefrom != "": + size = sizefrom + } + if size != "" { + g.P(name, " = make([]", orig, ", ", size, ")") + } + } + g.P("for i := 0; i < len(", name, "); i++ {") + name = fmt.Sprintf("%s[i]", name) + } + } + switch typ { + case I8, U8, I16, U16, I32, U32, I64, U64: + typsize := BaseTypeSizes[typ] + if gotype, ok := BaseTypesGo[typ]; !ok || gotype != orig || strings.HasPrefix(orig, "i") { + g.P(name, " = ", orig, "(buf.DecodeUint", typsize*8, "())") + } else { + g.P(name, " = buf.DecodeUint", typsize*8, "()") + } + case F64: + g.P(name, " = ", orig, "(buf.DecodeFloat64())") + case BOOL: + g.P(name, " = buf.DecodeBool()") + case STRING: + g.P(name, " = buf.DecodeString(", length, ")") + default: + logrus.Panicf("\t// ??? %s %s\n", name, typ) + } + if isArray { + switch typ { + case I8, U8, I16, U16, I32, U32, I64, U64, F64: + g.P("}") + } + } +} + +func generateMessageUnmarshal(g *GenFile, name string, fields []*Field) { + g.P("func (m *", name, ") Unmarshal(b []byte) error {") + + if len(fields) > 0 { + g.P("buf := ", govppCodecPkg.Ident("NewBuffer"), "(b)") + decodeFields(g, fields, "m", 0) + } + + g.P("return nil") + g.P("}") +} + +func decodeFields(g *GenFile, fields []*Field, parentName string, lvl int) { + getFieldName := func(name string) string { + return fmt.Sprintf("%s.%s", parentName, name) + } + + for _, field := range fields { + name := getFieldName(field.GoName) + + decodeField(g, field, name, getFieldName, lvl) + } +} + +func decodeField(g *GenFile, field *Field, name string, getFieldName func(string) string, lvl int) { + var sizeFromName string + if field.FieldSizeFrom != nil { + sizeFromName = getFieldName(field.FieldSizeFrom.GoName) + } + + if _, ok := BaseTypesGo[field.Type]; ok { + decodeBaseType(g, field.Type, fieldGoType(g, field), name, field.Length, sizeFromName, true) + return + } + + if field.Array { + index := fmt.Sprintf("j%d", lvl) + if field.Length > 0 { + g.P("for ", index, " := 0; ", index, " < ", field.Length, ";", index, "++ {") + } else if field.SizeFrom != "" { + g.P(name, " = make(", getFieldType(g, field), ", int(", sizeFromName, "))") + g.P("for ", index, " := 0; ", index, " < len(", name, ");", index, "++ {") + } + name = fmt.Sprintf("%s[%s]", name, index) + } + + if enum := field.TypeEnum; enum != nil { + if _, ok := BaseTypesGo[enum.Type]; ok { + decodeBaseType(g, enum.Type, fieldGoType(g, field), name, 0, "", false) + } else { + logrus.Panicf("\t// ??? 
ENUM %s %s\n", name, enum.Type) + } + } else if alias := field.TypeAlias; alias != nil { + if typ := alias.TypeStruct; typ != nil { + decodeFields(g, typ.Fields, name, lvl+1) + } else { + if alias.Length > 0 { + decodeBaseType(g, alias.Type, BaseTypesGo[alias.Type], name, alias.Length, "", false) + } else { + decodeBaseType(g, alias.Type, fieldGoType(g, field), name, alias.Length, "", false) + } + } + } else if typ := field.TypeStruct; typ != nil { + decodeFields(g, typ.Fields, name, lvl+1) + } else if union := field.TypeUnion; union != nil { + maxSize := getUnionSize(union) + g.P("copy(", name, ".", fieldUnionData, "[:], buf.DecodeBytes(", maxSize, "))") + } else { + logrus.Panicf("\t// ??? %s (%v)\n", field.GoName, field.Type) + } + + if field.Array { + g.P("}") + } +} diff --git a/binapigen/gen_helpers.go b/binapigen/gen_helpers.go new file mode 100644 index 0000000..a22f1c6 --- /dev/null +++ b/binapigen/gen_helpers.go @@ -0,0 +1,348 @@ +// Copyright (c) 2020 Cisco and/or its affiliates. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package binapigen + +func init() { + //RegisterPlugin("convert", GenerateConvert) +} + +// library dependencies +const ( + fmtPkg = GoImportPath("fmt") + netPkg = GoImportPath("net") + stringsPkg = GoImportPath("strings") +) + +func generateIPConversion(g *GenFile, structName string, ipv int) { + // ParseIPXAddress method + g.P("func Parse", structName, "(s string) (", structName, ", error) {") + if ipv == 4 { + g.P(" ip := ", netPkg.Ident("ParseIP"), "(s).To4()") + } else { + g.P(" ip := ", netPkg.Ident("ParseIP"), "(s).To16()") + } + g.P(" if ip == nil {") + g.P(" return ", structName, "{}, ", fmtPkg.Ident("Errorf"), "(\"invalid IP address: %s\", s)") + g.P(" }") + g.P(" var ipaddr ", structName) + if ipv == 4 { + g.P(" copy(ipaddr[:], ip.To4())") + } else { + g.P(" copy(ipaddr[:], ip.To16())") + } + g.P(" return ipaddr, nil") + g.P("}") + g.P() + + // ToIP method + g.P("func (x ", structName, ") ToIP() ", netPkg.Ident("IP"), " {") + if ipv == 4 { + g.P(" return ", netPkg.Ident("IP"), "(x[:]).To4()") + } else { + g.P(" return ", netPkg.Ident("IP"), "(x[:]).To16()") + } + g.P("}") + + // String method + g.P("func (x ", structName, ") String() string {") + g.P(" return x.ToIP().String()") + g.P("}") + + // MarshalText method + g.P("func (x *", structName, ") MarshalText() ([]byte, error) {") + g.P(" return []byte(x.String()), nil") + g.P("}") + + // UnmarshalText method + g.P("func (x *", structName, ") UnmarshalText(text []byte) error {") + g.P(" ipaddr, err := Parse", structName, "(string(text))") + g.P(" if err !=nil {") + g.P(" return err") + g.P(" }") + g.P(" *x = ipaddr") + g.P(" return nil") + g.P("}") + g.P() +} + +func generateAddressConversion(g *GenFile, structName string) { + // ParseAddress method + g.P("func Parse", structName, "(s string) (", structName, ", error) {") + g.P(" ip := ", netPkg.Ident("ParseIP"), "(s)") + g.P(" if ip == nil {") + g.P(" return ", structName, "{}, ", fmtPkg.Ident("Errorf"), "(\"invalid address: %s\", 
s)") + g.P(" }") + g.P(" var addr ", structName) + g.P(" if ip.To4() == nil {") + g.P(" addr.Af = ADDRESS_IP6") + g.P(" var ip6 IP6Address") + g.P(" copy(ip6[:], ip.To16())") + g.P(" addr.Un.SetIP6(ip6)") + g.P(" } else {") + g.P(" addr.Af = ADDRESS_IP4") + g.P(" var ip4 IP4Address") + g.P(" copy(ip4[:], ip.To4())") + g.P(" addr.Un.SetIP4(ip4)") + g.P(" }") + g.P(" return addr, nil") + g.P("}") + + // ToIP method + g.P("func (x ", structName, ") ToIP() ", netPkg.Ident("IP"), " {") + g.P(" if x.Af == ADDRESS_IP6 {") + g.P(" ip6 := x.Un.GetIP6()") + g.P(" return ", netPkg.Ident("IP"), "(ip6[:]).To16()") + g.P(" } else {") + g.P(" ip4 := x.Un.GetIP4()") + g.P(" return ", netPkg.Ident("IP"), "(ip4[:]).To4()") + g.P(" }") + g.P("}") + + // String method + g.P("func (x ", structName, ") String() string {") + g.P(" return x.ToIP().String()") + g.P("}") + + // MarshalText method + g.P("func (x *", structName, ") MarshalText() ([]byte, error) {") + g.P(" return []byte(x.String()), nil") + g.P("}") + + // UnmarshalText method + g.P("func (x *", structName, ") UnmarshalText(text []byte) error {") + g.P(" addr, err := Parse", structName, "(string(text))") + g.P(" if err != nil {") + g.P(" return err") + g.P(" }") + g.P(" *x = addr") + g.P(" return nil") + g.P("}") + g.P() +} + +func generateIPPrefixConversion(g *GenFile, structName string, ipv int) { + // ParsePrefix method + g.P("func Parse", structName, "(s string) (prefix ", structName, ", err error) {") + g.P(" hasPrefix := ", stringsPkg.Ident("Contains"), "(s, \"/\")") + g.P(" if hasPrefix {") + g.P(" ip, network, err := ", netPkg.Ident("ParseCIDR"), "(s)") + g.P(" if err != nil {") + g.P(" return ", structName, "{}, ", fmtPkg.Ident("Errorf"), "(\"invalid IP %s: %s\", s, err)") + g.P(" }") + g.P(" maskSize, _ := network.Mask.Size()") + g.P(" prefix.Len = byte(maskSize)") + if ipv == 4 { + g.P(" prefix.Address, err = ParseIP4Address(ip.String())") + } else { + g.P(" prefix.Address, err = ParseIP6Address(ip.String())") + } + g.P(" if err != nil {") + g.P(" return ", structName, "{}, ", fmtPkg.Ident("Errorf"), "(\"invalid IP %s: %s\", s, err)") + g.P(" }") + g.P(" } else {") + g.P(" ip := ", netPkg.Ident("ParseIP"), "(s)") + g.P(" defaultMaskSize, _ := ", netPkg.Ident("CIDRMask"), "(32, 32).Size()") + g.P(" if ip.To4() == nil {") + g.P(" defaultMaskSize, _ =", netPkg.Ident("CIDRMask"), "(128, 128).Size()") + g.P(" }") + g.P(" prefix.Len = byte(defaultMaskSize)") + if ipv == 4 { + g.P(" prefix.Address, err = ParseIP4Address(ip.String())") + } else { + g.P(" prefix.Address, err = ParseIP6Address(ip.String())") + } + g.P(" if err != nil {") + g.P(" return ", structName, "{}, ", fmtPkg.Ident("Errorf"), "(\"invalid IP %s: %s\", s, err)") + g.P(" }") + g.P(" }") + g.P(" return prefix, nil") + g.P("}") + + // ToIPNet method + g.P("func (x ", structName, ") ToIPNet() *", netPkg.Ident("IPNet"), " {") + if ipv == 4 { + g.P(" mask := ", netPkg.Ident("CIDRMask"), "(int(x.Len), 32)") + } else { + g.P(" mask := ", netPkg.Ident("CIDRMask"), "(int(x.Len), 128)") + } + g.P(" ipnet := &", netPkg.Ident("IPNet"), "{IP: x.Address.ToIP(), Mask: mask}") + g.P(" return ipnet") + g.P("}") + + // String method + g.P("func (x ", structName, ") String() string {") + g.P(" ip := x.Address.String()") + g.P(" return ip + \"/\" + ", strconvPkg.Ident("Itoa"), "(int(x.Len))") + /*if ipv == 4 { + g.P(" mask := ", netPkg.Ident("CIDRMask"), "(int(x.Len), 32)") + } else { + g.P(" mask := ", netPkg.Ident("CIDRMask"), "(int(x.Len), 128)") + } + g.P(" ipnet := &", netPkg.Ident("IPNet"), 
"{IP: x.Address.ToIP(), Mask: mask}") + g.P(" return ipnet.String()")*/ + g.P("}") + + // MarshalText method + g.P("func (x *", structName, ") MarshalText() ([]byte, error) {") + g.P(" return []byte(x.String()), nil") + g.P("}") + + // UnmarshalText method + g.P("func (x *", structName, ") UnmarshalText(text []byte) error {") + g.P(" prefix, err := Parse", structName, "(string(text))") + g.P(" if err != nil {") + g.P(" return err") + g.P(" }") + g.P(" *x = prefix") + g.P(" return nil") + g.P("}") + g.P() +} + +func generatePrefixConversion(g *GenFile, structName string) { + // ParsePrefix method + g.P("func Parse", structName, "(ip string) (prefix ", structName, ", err error) {") + g.P(" hasPrefix := ", stringsPkg.Ident("Contains"), "(ip, \"/\")") + g.P(" if hasPrefix {") + g.P(" netIP, network, err := ", netPkg.Ident("ParseCIDR"), "(ip)") + g.P(" if err != nil {") + g.P(" return Prefix{}, ", fmtPkg.Ident("Errorf"), "(\"invalid IP %s: %s\", ip, err)") + g.P(" }") + g.P(" maskSize, _ := network.Mask.Size()") + g.P(" prefix.Len = byte(maskSize)") + g.P(" prefix.Address, err = ParseAddress(netIP.String())") + g.P(" if err != nil {") + g.P(" return Prefix{}, ", fmtPkg.Ident("Errorf"), "(\"invalid IP %s: %s\", ip, err)") + g.P(" }") + g.P(" } else {") + g.P(" netIP := ", netPkg.Ident("ParseIP"), "(ip)") + g.P(" defaultMaskSize, _ := ", netPkg.Ident("CIDRMask"), "(32, 32).Size()") + g.P(" if netIP.To4() == nil {") + g.P(" defaultMaskSize, _ =", netPkg.Ident("CIDRMask"), "(128, 128).Size()") + g.P(" }") + g.P(" prefix.Len = byte(defaultMaskSize)") + g.P(" prefix.Address, err = ParseAddress(netIP.String())") + g.P(" if err != nil {") + g.P(" return Prefix{}, ", fmtPkg.Ident("Errorf"), "(\"invalid IP %s: %s\", ip, err)") + g.P(" }") + g.P(" }") + g.P(" return prefix, nil") + g.P("}") + + // ToIPNet method + g.P("func (x ", structName, ") ToIPNet() *", netPkg.Ident("IPNet"), " {") + g.P(" var mask ", netPkg.Ident("IPMask")) + g.P(" if x.Address.Af == ADDRESS_IP4 {") + g.P(" mask = ", netPkg.Ident("CIDRMask"), "(int(x.Len), 32)") + g.P(" } else {") + g.P(" mask = ", netPkg.Ident("CIDRMask"), "(int(x.Len), 128)") + g.P(" }") + g.P(" ipnet := &", netPkg.Ident("IPNet"), "{IP: x.Address.ToIP(), Mask: mask}") + g.P(" return ipnet") + g.P("}") + + // String method + g.P("func (x ", structName, ") String() string {") + g.P(" ip := x.Address.String()") + g.P(" return ip + \"/\" + ", strconvPkg.Ident("Itoa"), "(int(x.Len))") + g.P("}") + + // MarshalText method + g.P("func (x *", structName, ") MarshalText() ([]byte, error) {") + g.P(" return []byte(x.String()), nil") + g.P("}") + + // UnmarshalText method + g.P("func (x *", structName, ") UnmarshalText(text []byte) error {") + g.P(" prefix, err := Parse", structName, "(string(text))") + g.P(" if err !=nil {") + g.P(" return err") + g.P(" }") + g.P(" *x = prefix") + g.P(" return nil") + g.P("}") + g.P() +} + +func generateAddressWithPrefixConversion(g *GenFile, structName string) { + // ParseAddressWithPrefix method + g.P("func Parse", structName, "(s string) (", structName, ", error) {") + g.P(" prefix, err := ParsePrefix(s)") + g.P(" if err != nil {") + g.P(" return ", structName, "{}, err") + g.P(" }") + g.P(" return ", structName, "(prefix), nil") + g.P("}") + + // String method + g.P("func (x ", structName, ") String() string {") + g.P(" return Prefix(x).String()") + g.P("}") + + // MarshalText method + g.P("func (x *", structName, ") MarshalText() ([]byte, error) {") + g.P(" return []byte(x.String()), nil") + g.P("}") + + // UnmarshalText method + 
g.P("func (x *", structName, ") UnmarshalText(text []byte) error {") + g.P(" prefix, err := Parse", structName, "(string(text))") + g.P(" if err != nil {") + g.P(" return err") + g.P(" }") + g.P(" *x = prefix") + g.P(" return nil") + g.P("}") + g.P() +} + +func generateMacAddressConversion(g *GenFile, structName string) { + // ParseMAC method + g.P("func Parse", structName, "(s string) (", structName, ", error) {") + g.P(" var macaddr ", structName) + g.P(" mac, err := ", netPkg.Ident("ParseMAC"), "(s)") + g.P(" if err != nil {") + g.P(" return macaddr, err") + g.P(" }") + g.P(" copy(macaddr[:], mac[:])") + g.P(" return macaddr, nil") + g.P("}") + + // ToMAC method + g.P("func (x ", structName, ") ToMAC() ", netPkg.Ident("HardwareAddr"), " {") + g.P(" return ", netPkg.Ident("HardwareAddr"), "(x[:])") + g.P("}") + + // String method + g.P("func (x ", structName, ") String() string {") + g.P(" return x.ToMAC().String()") + g.P("}") + + // MarshalText method + g.P("func (x *", structName, ") MarshalText() ([]byte, error) {") + g.P(" return []byte(x.String()), nil") + g.P("}") + + // UnmarshalText method + g.P("func (x *", structName, ") UnmarshalText(text []byte) error {") + g.P(" mac, err := Parse", structName, "(string(text))") + g.P(" if err != nil {") + g.P(" return err") + g.P(" }") + g.P(" *x = mac") + g.P(" return nil") + g.P("}") + g.P() +} diff --git a/binapigen/gen_helpers_test.go b/binapigen/gen_helpers_test.go new file mode 100644 index 0000000..371fd6c --- /dev/null +++ b/binapigen/gen_helpers_test.go @@ -0,0 +1,156 @@ +// Copyright (c) 2020 Cisco and/or its affiliates. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package binapigen + +import ( + "strings" + "testing" + + . 
"github.com/onsi/gomega" + + "git.fd.io/govpp.git/binapi/ethernet_types" + "git.fd.io/govpp.git/binapi/ip_types" +) + +func TestGeneratedParseAddress(t *testing.T) { + RegisterTestingT(t) + + var data = []struct { + input string + result ip_types.Address + }{ + {"192.168.0.1", ip_types.Address{ + Af: ip_types.ADDRESS_IP4, + Un: ip_types.AddressUnionIP4(ip_types.IP4Address{192, 168, 0, 1}), + }}, + {"aac1:0:ab45::", ip_types.Address{ + Af: ip_types.ADDRESS_IP6, + Un: ip_types.AddressUnionIP6(ip_types.IP6Address{170, 193, 0, 0, 171, 69, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}), + }}, + } + + for _, entry := range data { + t.Run(entry.input, func(t *testing.T) { + parsedAddress, err := ip_types.ParseAddress(entry.input) + Expect(err).ShouldNot(HaveOccurred()) + Expect(parsedAddress).To(Equal(entry.result)) + + originAddress := parsedAddress.String() + Expect(originAddress).To(Equal(entry.input)) + }) + } +} + +func TestGeneratedParseAddressError(t *testing.T) { + RegisterTestingT(t) + + _, err := ip_types.ParseAddress("malformed_ip") + Expect(err).Should(HaveOccurred()) +} + +func TestGeneratedParsePrefix(t *testing.T) { + RegisterTestingT(t) + + var data = []struct { + input string + result ip_types.Prefix + }{ + {"192.168.0.1/24", ip_types.Prefix{ + Address: ip_types.Address{ + Af: ip_types.ADDRESS_IP4, + Un: ip_types.AddressUnionIP4(ip_types.IP4Address{192, 168, 0, 1}), + }, + Len: 24, + }}, + {"192.168.0.1", ip_types.Prefix{ + Address: ip_types.Address{ + Af: ip_types.ADDRESS_IP4, + Un: ip_types.AddressUnionIP4(ip_types.IP4Address{192, 168, 0, 1}), + }, + Len: 32, + }}, + {"aac1:0:ab45::/96", ip_types.Prefix{ + Address: ip_types.Address{ + Af: ip_types.ADDRESS_IP6, + Un: ip_types.AddressUnionIP6(ip_types.IP6Address{170, 193, 0, 0, 171, 69, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}), + }, + Len: 96, + }}, + {"aac1:0:ab45::", ip_types.Prefix{ + Address: ip_types.Address{ + Af: ip_types.ADDRESS_IP6, + Un: ip_types.AddressUnionIP6(ip_types.IP6Address{170, 193, 0, 0, 171, 69, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}), + }, + Len: 128, + }}, + } + + for _, entry := range data { + t.Run(entry.input, func(t *testing.T) { + parsedAddress, err := ip_types.ParsePrefix(entry.input) + Expect(err).ShouldNot(HaveOccurred()) + Expect(parsedAddress).To(Equal(entry.result)) + + // Parsed IP without prefix receives a default one + // so the input data must be adjusted + if entry.result.Address.Af == ip_types.ADDRESS_IP4 && !strings.Contains(entry.input, "/") { + entry.input = entry.input + "/32" + } + if entry.result.Address.Af == ip_types.ADDRESS_IP6 && !strings.Contains(entry.input, "/") { + entry.input = entry.input + "/128" + } + originAddress := parsedAddress.String() + Expect(originAddress).To(Equal(entry.input)) + }) + } +} + +func TestGeneratedParsePrefixError(t *testing.T) { + RegisterTestingT(t) + + _, err := ip_types.ParsePrefix("malformed_ip") + Expect(err).Should(HaveOccurred()) +} + +func TestGeneratedParseMAC(t *testing.T) { + RegisterTestingT(t) + + var data = []struct { + input string + result ethernet_types.MacAddress + }{ + {"b7:b9:bb:a1:5c:af", ethernet_types.MacAddress{183, 185, 187, 161, 92, 175}}, + {"47:4b:c7:3e:06:c8", ethernet_types.MacAddress{71, 75, 199, 62, 6, 200}}, + {"a7:cc:9f:10:18:e3", ethernet_types.MacAddress{167, 204, 159, 16, 24, 227}}, + } + + for _, entry := range data { + t.Run(entry.input, func(t *testing.T) { + parsedMac, err := ethernet_types.ParseMacAddress(entry.input) + Expect(err).ShouldNot(HaveOccurred()) + Expect(parsedMac).To(Equal(entry.result)) + + originAddress := parsedMac.String() + 
Expect(originAddress).To(Equal(entry.input)) + }) + } +} + +func TestGeneratedParseMACError(t *testing.T) { + RegisterTestingT(t) + + _, err := ethernet_types.ParseMacAddress("malformed_mac") + Expect(err).Should(HaveOccurred()) +} diff --git a/binapigen/gen_rest.go b/binapigen/gen_rest.go new file mode 100644 index 0000000..6ddb57a --- /dev/null +++ b/binapigen/gen_rest.go @@ -0,0 +1,103 @@ +// Copyright (c) 2020 Cisco and/or its affiliates. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package binapigen + +import ( + "path" + "strconv" +) + +func init() { + RegisterPlugin("rest", GenerateREST) +} + +// library dependencies +const ( + httpPkg = GoImportPath("net/http") + ioutilPkg = GoImportPath("io/ioutil") + jsonPkg = GoImportPath("encoding/json") +) + +func GenerateREST(gen *Generator, file *File) *GenFile { + if file.Service == nil { + return nil + } + + logf("----------------------------") + logf(" Generate REST - %s", file.Desc.Name) + logf("----------------------------") + + filename := path.Join(file.FilenamePrefix, file.Desc.Name+"_rest.ba.go") + g := gen.NewGenFile(filename, file.GoImportPath) + g.file = file + + // generate file header + g.P("// Code generated by GoVPP's binapi-generator. DO NOT EDIT.") + g.P() + g.P("package ", file.PackageName) + g.P() + + // generate RPC service + if len(file.Service.RPCs) > 0 { + genRESTHandler(g, file.Service) + } + + return g +} + +func genRESTHandler(g *GenFile, svc *Service) { + // generate handler constructor + g.P("func RESTHandler(rpc ", serviceApiName, ") ", httpPkg.Ident("Handler"), " {") + g.P(" mux := ", httpPkg.Ident("NewServeMux"), "()") + + // generate http handlers for rpc + for _, rpc := range svc.RPCs { + if rpc.MsgReply == nil { + continue + } + if rpc.VPP.Stream { + continue // TODO: implement handler for streaming messages + } + g.P("mux.HandleFunc(", strconv.Quote("/"+rpc.VPP.Request), ", func(w ", httpPkg.Ident("ResponseWriter"), ", req *", httpPkg.Ident("Request"), ") {") + g.P("var request = new(", rpc.MsgRequest.GoName, ")") + if len(rpc.MsgRequest.Fields) > 0 { + g.P("b, err := ", ioutilPkg.Ident("ReadAll"), "(req.Body)") + g.P("if err != nil {") + g.P(" ", httpPkg.Ident("Error"), "(w, \"read body failed\", ", httpPkg.Ident("StatusBadRequest"), ")") + g.P(" return") + g.P("}") + g.P("if err := ", jsonPkg.Ident("Unmarshal"), "(b, request); err != nil {") + g.P(" ", httpPkg.Ident("Error"), "(w, \"unmarshal data failed\", ", httpPkg.Ident("StatusBadRequest"), ")") + g.P(" return") + g.P("}") + } + g.P("reply, err := rpc.", rpc.GoName, "(req.Context(), request)") + g.P("if err != nil {") + g.P(" ", httpPkg.Ident("Error"), "(w, \"request failed: \"+err.Error(), ", httpPkg.Ident("StatusInternalServerError"), ")") + g.P(" return") + g.P("}") + g.P("rep, err := ", jsonPkg.Ident("MarshalIndent"), "(reply, \"\", \" \")") + g.P("if err != nil {") + g.P(" ", httpPkg.Ident("Error"), "(w, \"marshal failed: \"+err.Error(), ", httpPkg.Ident("StatusInternalServerError"), ")") + g.P(" return") + g.P("}") + 
g.P("w.Write(rep)") + g.P("})") + } + + g.P("return ", httpPkg.Ident("HandlerFunc"), "(mux.ServeHTTP)") + g.P("}") + g.P() +} diff --git a/binapigen/gen_rpc.go b/binapigen/gen_rpc.go new file mode 100644 index 0000000..ba23f4a --- /dev/null +++ b/binapigen/gen_rpc.go @@ -0,0 +1,204 @@ +// Copyright (c) 2020 Cisco and/or its affiliates. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package binapigen + +import ( + "fmt" + "path" + + "github.com/sirupsen/logrus" +) + +func init() { + RegisterPlugin("rpc", GenerateRPC) +} + +// library dependencies +const ( + contextPkg = GoImportPath("context") + ioPkg = GoImportPath("io") +) + +// generated names +const ( + serviceApiName = "RPCService" // name for the RPC service interface + serviceImplName = "serviceClient" // name for the RPC service implementation + serviceClientName = "ServiceClient" // name for the RPC service client + + // TODO: register service descriptor + //serviceDescType = "ServiceDesc" // name for service descriptor type + //serviceDescName = "_ServiceRPC_serviceDesc" // name for service descriptor var +) + +func GenerateRPC(gen *Generator, file *File) *GenFile { + if file.Service == nil { + return nil + } + + logf("----------------------------") + logf(" Generate RPC - %s", file.Desc.Name) + logf("----------------------------") + + filename := path.Join(file.FilenamePrefix, file.Desc.Name+"_rpc.ba.go") + g := gen.NewGenFile(filename, file.GoImportPath) + g.file = file + + // generate file header + g.P("// Code generated by GoVPP's binapi-generator. 
DO NOT EDIT.") + g.P() + g.P("package ", file.PackageName) + g.P() + + // generate RPC service + if len(file.Service.RPCs) > 0 { + genService(g, file.Service) + } + + return g +} + +func genService(g *GenFile, svc *Service) { + // generate comment + g.P("// ", serviceApiName, " defines RPC service ", g.file.Desc.Name, ".") + + // generate service interface + g.P("type ", serviceApiName, " interface {") + for _, rpc := range svc.RPCs { + g.P(rpcMethodSignature(g, rpc)) + } + g.P("}") + g.P() + + // generate client implementation + g.P("type ", serviceImplName, " struct {") + g.P("conn ", govppApiPkg.Ident("Connection")) + g.P("}") + g.P() + + // generate client constructor + g.P("func New", serviceClientName, "(conn ", govppApiPkg.Ident("Connection"), ") ", serviceApiName, " {") + g.P("return &", serviceImplName, "{conn}") + g.P("}") + g.P() + + msgControlPingReply, ok := g.gen.messagesByName["control_ping_reply"] + if !ok { + logrus.Fatalf("no message for %v", "control_ping_reply") + } + msgControlPing, ok := g.gen.messagesByName["control_ping"] + if !ok { + logrus.Fatalf("no message for %v", "control_ping") + } + + for _, rpc := range svc.RPCs { + logf(" gen RPC: %v (%s)", rpc.GoName, rpc.VPP.Request) + + g.P("func (c *", serviceImplName, ") ", rpcMethodSignature(g, rpc), " {") + if rpc.VPP.Stream { + streamImpl := fmt.Sprintf("%s_%sClient", serviceImplName, rpc.GoName) + streamApi := fmt.Sprintf("%s_%sClient", serviceApiName, rpc.GoName) + + msgDetails := rpc.MsgReply + var msgReply *Message + if rpc.MsgStream != nil { + msgDetails = rpc.MsgStream + msgReply = rpc.MsgReply + } else { + msgDetails = rpc.MsgReply + msgReply = msgControlPingReply + } + + g.P("stream, err := c.conn.NewStream(ctx)") + g.P("if err != nil { return nil, err }") + g.P("x := &", streamImpl, "{stream}") + g.P("if err := x.Stream.SendMsg(in); err != nil {") + g.P(" return nil, err") + g.P("}") + if rpc.MsgStream == nil { + g.P("if err = x.Stream.SendMsg(&", msgControlPing.GoIdent, "{}); err != nil {") + g.P(" return nil, err") + g.P("}") + } + g.P("return x, nil") + g.P("}") + g.P() + g.P("type ", streamApi, " interface {") + g.P(" Recv() (*", msgDetails.GoIdent, ", error)") + g.P(" ", govppApiPkg.Ident("Stream")) + g.P("}") + g.P() + + g.P("type ", streamImpl, " struct {") + g.P(" ", govppApiPkg.Ident("Stream")) + g.P("}") + g.P() + + g.P("func (c *", streamImpl, ") Recv() (*", msgDetails.GoIdent, ", error) {") + g.P(" msg, err := c.Stream.RecvMsg()") + g.P(" if err != nil { return nil, err }") + g.P(" switch m := msg.(type) {") + g.P(" case *", msgDetails.GoIdent, ":") + g.P(" return m, nil") + g.P(" case *", msgReply.GoIdent, ":") + g.P(" return nil, ", ioPkg.Ident("EOF")) + g.P(" default:") + g.P(" return nil, ", fmtPkg.Ident("Errorf"), "(\"unexpected message: %T %v\", m, m)") + g.P("}") + } else if rpc.MsgReply != nil { + g.P("out := new(", rpc.MsgReply.GoIdent, ")") + g.P("err := c.conn.Invoke(ctx, in, out)") + g.P("if err != nil { return nil, err }") + g.P("return out, nil") + } else { + g.P("stream, err := c.conn.NewStream(ctx)") + g.P("if err != nil { return err }") + g.P("err = stream.SendMsg(in)") + g.P("if err != nil { return err }") + g.P("return nil") + } + g.P("}") + g.P() + } + + // TODO: generate service descriptor + /*fmt.Fprintf(w, "var %s = api.%s{\n", serviceDescName, serviceDescType) + fmt.Fprintf(w, "\tServiceName: \"%s\",\n", ctx.moduleName) + fmt.Fprintf(w, "\tHandlerType: (*%s)(nil),\n", serviceApiName) + fmt.Fprintf(w, "\tMethods: []api.MethodDesc{\n") + for _, method := range rpcs { + 
fmt.Fprintf(w, "\t {\n") + fmt.Fprintf(w, "\t MethodName: \"%s\",\n", method.Name) + fmt.Fprintf(w, "\t },\n") + } + fmt.Fprintf(w, "\t},\n") + //fmt.Fprintf(w, "\tCompatibility: %s,\n", messageCrcName) + //fmt.Fprintf(w, "\tMetadata: reflect.TypeOf((*%s)(nil)).Elem().PkgPath(),\n", serviceApiName) + fmt.Fprintf(w, "\tMetadata: \"%s\",\n", ctx.inputFile) + fmt.Fprintln(w, "}")*/ + + g.P() +} + +func rpcMethodSignature(g *GenFile, rpc *RPC) string { + s := rpc.GoName + "(ctx " + g.GoIdent(contextPkg.Ident("Context")) + s += ", in *" + g.GoIdent(rpc.MsgRequest.GoIdent) + ") (" + if rpc.VPP.Stream { + s += serviceApiName + "_" + rpc.GoName + "Client, " + } else if rpc.MsgReply != nil { + s += "*" + g.GoIdent(rpc.MsgReply.GoIdent) + ", " + } + s += "error)" + return s +} diff --git a/binapigen/generate.go b/binapigen/generate.go index d35427f..689463e 100644 --- a/binapigen/generate.go +++ b/binapigen/generate.go @@ -16,1338 +16,468 @@ package binapigen import ( "fmt" - "io" + "path" "sort" + "strconv" "strings" - "git.fd.io/govpp.git/version" - "github.com/sirupsen/logrus" + "git.fd.io/govpp.git/internal/version" ) -// generatedCodeVersion indicates a version of the generated code. -// It is incremented whenever an incompatibility between the generated code and -// GoVPP api package is introduced; the generated code references -// a constant, api.GoVppAPIPackageIsVersionN (where N is generatedCodeVersion). -const generatedCodeVersion = 2 - -// common message fields +// library dependencies const ( - msgIdField = "_vl_msg_id" - clientIndexField = "client_index" - contextField = "context" - retvalField = "retval" -) + strconvPkg = GoImportPath("strconv") -// global API info -const ( - constModuleName = "ModuleName" // module name constant - constAPIVersion = "APIVersion" // API version constant - constVersionCrc = "VersionCrc" // version CRC constant + govppApiPkg = GoImportPath("git.fd.io/govpp.git/api") + govppCodecPkg = GoImportPath("git.fd.io/govpp.git/codec") ) -// generated fiels +// generated names const ( - unionDataField = "XXX_UnionData" // name for the union data field -) - -// MessageType represents the type of a VPP message -type MessageType int + apiName = "APIFile" // API file name + apiVersion = "APIVersion" // API version number + apiCrc = "VersionCrc" // version checksum -const ( - requestMessage MessageType = iota // VPP request message - replyMessage // VPP reply message - eventMessage // VPP event message - otherMessage // other VPP message + fieldUnionData = "XXX_UnionData" // name for the union data field ) -func generateFileBinapi(ctx *GenFile, w io.Writer) { +func GenerateAPI(gen *Generator, file *File) *GenFile { logf("----------------------------") - logf("generating BINAPI file package: %q", ctx.file.PackageName) + logf(" Generate API - %s", file.Desc.Name) logf("----------------------------") - // generate file header - fmt.Fprintln(w, "// Code generated by GoVPP's binapi-generator. DO NOT EDIT.") - fmt.Fprintln(w, "// versions:") - fmt.Fprintf(w, "// binapi-generator: %s\n", version.Version()) - if ctx.IncludeVppVersion { - fmt.Fprintf(w, "// VPP: %s\n", ctx.VPPVersion) + filename := path.Join(file.FilenamePrefix, file.Desc.Name+".ba.go") + g := gen.NewGenFile(filename, file.GoImportPath) + g.file = file + + g.P("// Code generated by GoVPP's binapi-generator. 
DO NOT EDIT.") + if !gen.opts.NoVersionInfo { + g.P("// versions:") + g.P("// binapi-generator: ", version.Version()) + g.P("// VPP: ", g.gen.vppVersion) + g.P("// source: ", g.file.Desc.Path) } - fmt.Fprintf(w, "// source: %s\n", ctx.file.Path) - fmt.Fprintln(w) + g.P() - generatePackageHeader(ctx, w) - generateImports(ctx, w) + genPackageComment(g) + g.P("package ", file.PackageName) + g.P() - generateApiInfo(ctx, w) - generateTypes(ctx, w) - generateMessages(ctx, w) + for _, imp := range g.file.Imports { + genImport(g, imp) + } - generateImportRefs(ctx, w) -} + // generate version assertion + g.P("// This is a compile-time assertion to ensure that this generated file") + g.P("// is compatible with the GoVPP api package it is being compiled against.") + g.P("// A compilation error at this line likely means your copy of the") + g.P("// GoVPP api package needs to be updated.") + g.P("const _ = ", govppApiPkg.Ident("GoVppAPIPackageIsVersion"), generatedCodeVersion) + g.P() -func generatePackageHeader(ctx *GenFile, w io.Writer) { - fmt.Fprintln(w, "/*") - fmt.Fprintf(w, "Package %s contains generated code for VPP API file %s.api (%s).\n", - ctx.file.PackageName, ctx.file.Name, ctx.file.Version()) - fmt.Fprintln(w) - fmt.Fprintln(w, "It consists of:") - printObjNum := func(obj string, num int) { - if num > 0 { - if num > 1 { - if strings.HasSuffix(obj, "s") { - obj += "es" - } else { - obj += "s" - } - } - fmt.Fprintf(w, "\t%3d %s\n", num, obj) - } + if !file.isTypesFile() { + g.P("const (") + g.P(apiName, " = ", strconv.Quote(g.file.Desc.Name)) + g.P(apiVersion, " = ", strconv.Quote(g.file.Version)) + g.P(apiCrc, " = ", g.file.Desc.CRC) + g.P(")") + g.P() } - printObjNum("alias", len(ctx.file.Aliases)) - printObjNum("enum", len(ctx.file.Enums)) - printObjNum("message", len(ctx.file.Messages)) - printObjNum("type", len(ctx.file.Structs)) - printObjNum("union", len(ctx.file.Unions)) - fmt.Fprintln(w, "*/") - fmt.Fprintf(w, "package %s\n", ctx.file.PackageName) - fmt.Fprintln(w) -} -func generateImports(ctx *GenFile, w io.Writer) { - fmt.Fprintln(w, "import (") - fmt.Fprintln(w, ` "bytes"`) - fmt.Fprintln(w, ` "context"`) - fmt.Fprintln(w, ` "encoding/binary"`) - fmt.Fprintln(w, ` "fmt"`) - fmt.Fprintln(w, ` "io"`) - fmt.Fprintln(w, ` "math"`) - fmt.Fprintln(w, ` "net"`) - fmt.Fprintln(w, ` "strconv"`) - fmt.Fprintln(w, ` "strings"`) - fmt.Fprintln(w) - fmt.Fprintf(w, "\tapi \"%s\"\n", "git.fd.io/govpp.git/api") - fmt.Fprintf(w, "\tcodec \"%s\"\n", "git.fd.io/govpp.git/codec") - fmt.Fprintf(w, "\tstruc \"%s\"\n", "github.com/lunixbochs/struc") - imports := listImports(ctx) - if len(imports) > 0 { - fmt.Fprintln(w) - for imp, importPath := range imports { - fmt.Fprintf(w, "\t%s \"%s\"\n", imp, importPath) - } + for _, enum := range g.file.Enums { + genEnum(g, enum) } - fmt.Fprintln(w, ")") - fmt.Fprintln(w) - - fmt.Fprintln(w, "// This is a compile-time assertion to ensure that this generated file") - fmt.Fprintln(w, "// is compatible with the GoVPP api package it is being compiled against.") - fmt.Fprintln(w, "// A compilation error at this line likely means your copy of the") - fmt.Fprintln(w, "// GoVPP api package needs to be updated.") - fmt.Fprintf(w, "const _ = api.GoVppAPIPackageIsVersion%d // please upgrade the GoVPP api package\n", generatedCodeVersion) - fmt.Fprintln(w) -} - -func generateApiInfo(ctx *GenFile, w io.Writer) { - // generate module desc - fmt.Fprintln(w, "const (") - fmt.Fprintf(w, "\t// %s is the name of this module.\n", constModuleName) - fmt.Fprintf(w, "\t%s = 
\"%s\"\n", constModuleName, ctx.file.Name) - - if ctx.IncludeAPIVersion { - fmt.Fprintf(w, "\t// %s is the API version of this module.\n", constAPIVersion) - fmt.Fprintf(w, "\t%s = \"%s\"\n", constAPIVersion, ctx.file.Version()) - fmt.Fprintf(w, "\t// %s is the CRC of this module.\n", constVersionCrc) - fmt.Fprintf(w, "\t%s = %v\n", constVersionCrc, ctx.file.CRC) + for _, alias := range g.file.Aliases { + genAlias(g, alias) } - fmt.Fprintln(w, ")") - fmt.Fprintln(w) -} - -func generateTypes(ctx *GenFile, w io.Writer) { - // generate enums - if len(ctx.file.Enums) > 0 { - for _, enum := range ctx.file.Enums { - if imp, ok := ctx.file.imports[enum.Name]; ok { - if strings.HasSuffix(ctx.file.Name, "_types") { - generateImportedAlias(ctx, w, enum.GoName, imp) - } - continue - } - generateEnum(ctx, w, enum) - } + for _, typ := range g.file.Structs { + genStruct(g, typ) } - - // generate aliases - if len(ctx.file.Aliases) > 0 { - for _, alias := range ctx.file.Aliases { - if imp, ok := ctx.file.imports[alias.Name]; ok { - if strings.HasSuffix(ctx.file.Name, "_types") { - generateImportedAlias(ctx, w, alias.GoName, imp) - } - continue - } - generateAlias(ctx, w, alias) - } + for _, union := range g.file.Unions { + genUnion(g, union) } + genMessages(g) - // generate types - if len(ctx.file.Structs) > 0 { - for _, typ := range ctx.file.Structs { - if imp, ok := ctx.file.imports[typ.Name]; ok { - if strings.HasSuffix(ctx.file.Name, "_types") { - generateImportedAlias(ctx, w, typ.GoName, imp) - } - continue - } - generateStruct(ctx, w, typ) - } - } + return g +} - // generate unions - if len(ctx.file.Unions) > 0 { - for _, union := range ctx.file.Unions { - if imp, ok := ctx.file.imports[union.Name]; ok { - if strings.HasSuffix(ctx.file.Name, "_types") { - generateImportedAlias(ctx, w, union.GoName, imp) +func genPackageComment(g *GenFile) { + apifile := g.file.Desc.Name + ".api" + g.P("// Package ", g.file.PackageName, " contains generated bindings for API file ", apifile, ".") + g.P("//") + g.P("// Contents:") + printObjNum := func(obj string, num int) { + if num > 0 { + if num > 1 { + if strings.HasSuffix(obj, "s") { + obj += "es" + } else { + obj += "s" } - continue } - generateUnion(ctx, w, union) + g.P("// ", fmt.Sprintf("%3d", num), " ", obj) } } + printObjNum("alias", len(g.file.Aliases)) + printObjNum("enum", len(g.file.Enums)) + printObjNum("struct", len(g.file.Structs)) + printObjNum("union", len(g.file.Unions)) + printObjNum("message", len(g.file.Messages)) + g.P("//") } -func generateMessages(ctx *GenFile, w io.Writer) { - if len(ctx.file.Messages) == 0 { +func genImport(g *GenFile, imp string) { + impFile, ok := g.gen.FilesByName[imp] + if !ok { return } - - for _, msg := range ctx.file.Messages { - generateMessage(ctx, w, msg) - } - - // generate message registrations - initFnName := fmt.Sprintf("file_%s_binapi_init", ctx.file.PackageName) - - fmt.Fprintf(w, "func init() { %s() }\n", initFnName) - fmt.Fprintf(w, "func %s() {\n", initFnName) - for _, msg := range ctx.file.Messages { - fmt.Fprintf(w, "\tapi.RegisterMessage((*%s)(nil), \"%s\")\n", - msg.GoName, ctx.file.Name+"."+msg.GoName) - } - fmt.Fprintln(w, "}") - fmt.Fprintln(w) - - // generate list of messages - fmt.Fprintf(w, "// Messages returns list of all messages in this module.\n") - fmt.Fprintln(w, "func AllMessages() []api.Message {") - fmt.Fprintln(w, "\treturn []api.Message{") - for _, msg := range ctx.file.Messages { - fmt.Fprintf(w, "\t(*%s)(nil),\n", msg.GoName) + if impFile.GoImportPath == g.file.GoImportPath { + // 
Skip generating imports for types in the same package + return } - fmt.Fprintln(w, "}") - fmt.Fprintln(w, "}") + // Generate imports for all dependencies, even if not used + g.Import(impFile.GoImportPath) } -func generateImportRefs(ctx *GenFile, w io.Writer) { - fmt.Fprintf(w, "// Reference imports to suppress errors if they are not otherwise used.\n") - fmt.Fprintf(w, "var _ = api.RegisterMessage\n") - fmt.Fprintf(w, "var _ = codec.DecodeString\n") - fmt.Fprintf(w, "var _ = bytes.NewBuffer\n") - fmt.Fprintf(w, "var _ = context.Background\n") - fmt.Fprintf(w, "var _ = io.Copy\n") - fmt.Fprintf(w, "var _ = strconv.Itoa\n") - fmt.Fprintf(w, "var _ = strings.Contains\n") - fmt.Fprintf(w, "var _ = struc.Pack\n") - fmt.Fprintf(w, "var _ = binary.BigEndian\n") - fmt.Fprintf(w, "var _ = math.Float32bits\n") - fmt.Fprintf(w, "var _ = net.ParseIP\n") - fmt.Fprintf(w, "var _ = fmt.Errorf\n") +func genTypeComment(g *GenFile, goName string, vppName string, objKind string) { + g.P("// ", goName, " defines ", objKind, " '", vppName, "'.") } -func generateComment(ctx *GenFile, w io.Writer, goName string, vppName string, objKind string) { - if objKind == "service" { - fmt.Fprintf(w, "// %s represents RPC service API for %s module.\n", goName, ctx.file.Name) - } else { - fmt.Fprintf(w, "// %s represents VPP binary API %s '%s'.\n", goName, objKind, vppName) - } -} +func genEnum(g *GenFile, enum *Enum) { + logf("gen ENUM %s (%s) - %d entries", enum.GoName, enum.Name, len(enum.Entries)) -func generateEnum(ctx *GenFile, w io.Writer, enum *Enum) { - name := enum.GoName - typ := binapiTypes[enum.Type] + genTypeComment(g, enum.GoName, enum.Name, "enum") - logf(" writing ENUM %q (%s) with %d entries", enum.Name, name, len(enum.Entries)) + gotype := BaseTypesGo[enum.Type] - // generate enum comment - generateComment(ctx, w, name, enum.Name, "enum") - - // generate enum definition - fmt.Fprintf(w, "type %s %s\n", name, typ) - fmt.Fprintln(w) + g.P("type ", enum.GoName, " ", gotype) + g.P() // generate enum entries - fmt.Fprintln(w, "const (") + g.P("const (") for _, entry := range enum.Entries { - fmt.Fprintf(w, "\t%s %s = %v\n", entry.Name, name, entry.Value) + g.P(entry.Name, " ", enum.GoName, " = ", entry.Value) } - fmt.Fprintln(w, ")") - fmt.Fprintln(w) + g.P(")") + g.P() // generate enum conversion maps - fmt.Fprintln(w, "var (") - fmt.Fprintf(w, "%s_name = map[%s]string{\n", name, typ) + g.P("var (") + g.P(enum.GoName, "_name = map[", gotype, "]string{") for _, entry := range enum.Entries { - fmt.Fprintf(w, "\t%v: \"%s\",\n", entry.Value, entry.Name) + g.P(entry.Value, ": ", strconv.Quote(entry.Name), ",") } - fmt.Fprintln(w, "}") - fmt.Fprintf(w, "%s_value = map[string]%s{\n", name, typ) + g.P("}") + g.P(enum.GoName, "_value = map[string]", gotype, "{") for _, entry := range enum.Entries { - fmt.Fprintf(w, "\t\"%s\": %v,\n", entry.Name, entry.Value) + g.P(strconv.Quote(entry.Name), ": ", entry.Value, ",") + } + g.P("}") + g.P(")") + g.P() + + if isEnumFlag(enum) { + size := BaseTypeSizes[enum.Type] * 8 + g.P("func (x ", enum.GoName, ") String() string {") + g.P(" s, ok := ", enum.GoName, "_name[", gotype, "(x)]") + g.P(" if ok { return s }") + g.P(" str := func(n ", gotype, ") string {") + g.P(" s, ok := ", enum.GoName, "_name[", gotype, "(n)]") + g.P(" if ok {") + g.P(" return s") + g.P(" }") + g.P(" return \"", enum.GoName, "(\" + ", strconvPkg.Ident("Itoa"), "(int(n)) + \")\"") + g.P(" }") + g.P(" for i := ", gotype, "(0); i <= ", size, "; i++ {") + g.P(" val := ", gotype, "(x)") + g.P(" if val&(1<<i) != 0 
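// For flag enums the String method being emitted here walks the set bits and
// joins the matching entry names with "|". A hedged sketch of the output for a
// hypothetical two-bit flag enum (names and values invented):
//
//	type IfStatusFlags uint32
//
//	const (
//		IF_STATUS_API_FLAG_ADMIN_UP IfStatusFlags = 1
//		IF_STATUS_API_FLAG_LINK_UP  IfStatusFlags = 2
//	)
//
//	// String() on the value 3 then renders as
//	// "IF_STATUS_API_FLAG_ADMIN_UP|IF_STATUS_API_FLAG_LINK_UP".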
{") + g.P(" if s != \"\" {") + g.P(" s += \"|\"") + g.P(" }") + g.P(" s += str(1<<i)") + g.P(" }") + g.P(" }") + g.P(" if s == \"\" {") + g.P(" return str(", gotype, "(x))") + g.P(" }") + g.P(" return s") + g.P("}") + g.P() + } else { + g.P("func (x ", enum.GoName, ") String() string {") + g.P(" s, ok := ", enum.GoName, "_name[", gotype, "(x)]") + g.P(" if ok { return s }") + g.P(" return \"", enum.GoName, "(\" + ", strconvPkg.Ident("Itoa"), "(int(x)) + \")\"") + g.P("}") + g.P() } - fmt.Fprintln(w, "}") - fmt.Fprintln(w, ")") - fmt.Fprintln(w) - - fmt.Fprintf(w, "func (x %s) String() string {\n", name) - fmt.Fprintf(w, "\ts, ok := %s_name[%s(x)]\n", name, typ) - fmt.Fprintf(w, "\tif ok { return s }\n") - fmt.Fprintf(w, "\treturn \"%s(\" + strconv.Itoa(int(x)) + \")\"\n", name) - fmt.Fprintln(w, "}") - fmt.Fprintln(w) } -func generateImportedAlias(ctx *GenFile, w io.Writer, name string, imp string) { - fmt.Fprintf(w, "type %s = %s.%s\n", name, imp, name) - fmt.Fprintln(w) -} - -func generateAlias(ctx *GenFile, w io.Writer, alias *Alias) { - name := alias.GoName - - logf(" writing ALIAS %q (%s), length: %d", alias.Name, name, alias.Length) +func genAlias(g *GenFile, alias *Alias) { + logf("gen ALIAS %s (%s) - type: %s length: %d", alias.GoName, alias.Name, alias.Type, alias.Length) - // generate struct comment - generateComment(ctx, w, name, alias.Name, "alias") - - // generate struct definition - fmt.Fprintf(w, "type %s ", name) + genTypeComment(g, alias.GoName, alias.Name, "alias") + var gotype string + switch { + case alias.TypeStruct != nil: + gotype = g.GoIdent(alias.TypeStruct.GoIdent) + case alias.TypeUnion != nil: + gotype = g.GoIdent(alias.TypeUnion.GoIdent) + default: + gotype = BaseTypesGo[alias.Type] + } if alias.Length > 0 { - fmt.Fprintf(w, "[%d]", alias.Length) + gotype = fmt.Sprintf("[%d]%s", alias.Length, gotype) } - dataType := convertToGoType(ctx.file, alias.Type) - fmt.Fprintf(w, "%s\n", dataType) + g.P("type ", alias.GoName, " ", gotype) + g.P() // generate alias-specific methods switch alias.Name { + case "ip4_address": + generateIPConversion(g, alias.GoName, 4) + case "ip6_address": + generateIPConversion(g, alias.GoName, 16) + case "address_with_prefix": + generateAddressWithPrefixConversion(g, alias.GoName) case "mac_address": - fmt.Fprintln(w) - generateMacAddressConversion(w, name) + generateMacAddressConversion(g, alias.GoName) } - - fmt.Fprintln(w) } -func generateStruct(ctx *GenFile, w io.Writer, typ *Struct) { - name := typ.GoName - - logf(" writing STRUCT %q (%s) with %d fields", typ.Name, name, len(typ.Fields)) +func genStruct(g *GenFile, typ *Struct) { + logf("gen STRUCT %s (%s) - %d fields", typ.GoName, typ.Name, len(typ.Fields)) - // generate struct comment - generateComment(ctx, w, name, typ.Name, "type") + genTypeComment(g, typ.GoName, typ.Name, "type") - // generate struct definition - fmt.Fprintf(w, "type %s struct {\n", name) - - // generate struct fields - for i := range typ.Fields { - // skip internal fields - switch strings.ToLower(typ.Name) { - case msgIdField: - continue + if len(typ.Fields) == 0 { + g.P("type ", typ.GoName, " struct {}") + } else { + g.P("type ", typ.GoName, " struct {") + for i := range typ.Fields { + generateField(g, typ.Fields, i) } - - generateField(ctx, w, typ.Fields, i) + g.P("}") } - - // generate end of the struct - fmt.Fprintln(w, "}") - - // generate name getter - generateTypeNameGetter(w, name, typ.Name) + g.P() // generate type-specific methods switch typ.Name { case "address": - fmt.Fprintln(w) - 
generateIPAddressConversion(w, name) + generateAddressConversion(g, typ.GoName) case "prefix": - fmt.Fprintln(w) - generatePrefixConversion(w, name) + generatePrefixConversion(g, typ.GoName) + case "ip4_prefix": + generateIPPrefixConversion(g, typ.GoName, 4) + case "ip6_prefix": + generateIPPrefixConversion(g, typ.GoName, 6) } - - fmt.Fprintln(w) } -// generateUnionMethods generates methods that implement struc.Custom -// interface to allow having XXX_uniondata field unexported -// TODO: do more testing when unions are actually used in some messages -/*func generateUnionMethods(w io.Writer, structName string) { - // generate struc.Custom implementation for union - fmt.Fprintf(w, ` -func (u *%[1]s) Pack(p []byte, opt *struc.Options) (int, error) { - var b = new(bytes.Buffer) - if err := struc.PackWithOptions(b, u.union_data, opt); err != nil { - return 0, err - } - copy(p, b.Bytes()) - return b.Len(), nil -} -func (u *%[1]s) Unpack(r io.Reader, length int, opt *struc.Options) error { - return struc.UnpackWithOptions(r, u.union_data[:], opt) -} -func (u *%[1]s) Size(opt *struc.Options) int { - return len(u.union_data) -} -func (u *%[1]s) String() string { - return string(u.union_data[:]) -} -`, structName) -}*/ - -/*func generateUnionGetterSetterNew(w io.Writer, structName string, getterField, getterStruct string) { - fmt.Fprintf(w, ` -func %[1]s%[2]s(a %[3]s) (u %[1]s) { - u.Set%[2]s(a) - return -} -func (u *%[1]s) Set%[2]s(a %[3]s) { - copy(u.%[4]s[:], a[:]) -} -func (u *%[1]s) Get%[2]s() (a %[3]s) { - copy(a[:], u.%[4]s[:]) - return -} -`, structName, getterField, getterStruct, unionDataField) -}*/ - -func generateUnion(ctx *GenFile, w io.Writer, union *Union) { - name := union.GoName +func genUnion(g *GenFile, union *Union) { + logf("gen UNION %s (%s) - %d fields", union.GoName, union.Name, len(union.Fields)) - logf(" writing UNION %q (%s) with %d fields", union.Name, name, len(union.Fields)) + genTypeComment(g, union.GoName, union.Name, "union") - // generate struct comment - generateComment(ctx, w, name, union.Name, "union") + g.P("type ", union.GoName, " struct {") - // generate struct definition - fmt.Fprintln(w, "type", name, "struct {") - - // maximum size for union - maxSize := getUnionSize(ctx.file, union) + for _, field := range union.Fields { + g.P("// ", field.GoName, " *", getFieldType(g, field)) + } // generate data field - fmt.Fprintf(w, "\t%s [%d]byte\n", unionDataField, maxSize) + maxSize := getUnionSize(union) + g.P(fieldUnionData, " [", maxSize, "]byte") // generate end of the struct - fmt.Fprintln(w, "}") + g.P("}") + g.P() - // generate name getter - generateTypeNameGetter(w, name, union.Name) - - // generate getters for fields + // generate methods for fields for _, field := range union.Fields { - fieldType := convertToGoType(ctx.file, field.Type) - generateUnionGetterSetter(w, name, field.GoName, fieldType) + genUnionFieldMethods(g, union.GoName, field) } - - // generate union methods - //generateUnionMethods(w, name) - - fmt.Fprintln(w) -} - -func generateUnionGetterSetter(w io.Writer, structName string, getterField, getterStruct string) { - fmt.Fprintf(w, ` -func %[1]s%[2]s(a %[3]s) (u %[1]s) { - u.Set%[2]s(a) - return + g.P() } -func (u *%[1]s) Set%[2]s(a %[3]s) { - var b = new(bytes.Buffer) - if err := struc.Pack(b, &a); err != nil { - return - } - copy(u.%[4]s[:], b.Bytes()) -} -func (u *%[1]s) Get%[2]s() (a %[3]s) { - var b = bytes.NewReader(u.%[4]s[:]) - struc.Unpack(b, &a) - return -} -`, structName, getterField, getterStruct, unionDataField) -} - -func 
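// genUnion above keeps the raw bytes in a single XXX_UnionData array sized to the
// largest member and only lists the members as comments. For the VPP union
// "address_union" (IP4/IP6) the emitted type is expected to look roughly like:
//
//	// AddressUnion defines union 'address_union'.
//	type AddressUnion struct {
//		// IP4 *IP4Address
//		// IP6 *IP6Address
//		XXX_UnionData [16]byte
//	}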
generateMessage(ctx *GenFile, w io.Writer, msg *Message) { - name := msg.GoName - logf(" writing MESSAGE %q (%s) with %d fields", msg.Name, name, len(msg.Fields)) +func genUnionFieldMethods(g *GenFile, structName string, field *Field) { + getterStruct := fieldGoType(g, field) - // generate struct comment - generateComment(ctx, w, name, msg.Name, "message") + // Constructor + g.P("func ", structName, field.GoName, "(a ", getterStruct, ") (u ", structName, ") {") + g.P(" u.Set", field.GoName, "(a)") + g.P(" return") + g.P("}") - // generate struct definition - fmt.Fprintf(w, "type %s struct {", name) + // Setter + g.P("func (u *", structName, ") Set", field.GoName, "(a ", getterStruct, ") {") + g.P(" var buf = ", govppCodecPkg.Ident("NewBuffer"), "(u.", fieldUnionData, "[:])") + encodeField(g, field, "a", func(name string) string { + return "a." + name + }, 0) + g.P("}") - msgType := otherMessage - wasClientIndex := false - - // generate struct fields - n := 0 - for i, field := range msg.Fields { - if i == 1 { - if field.Name == clientIndexField { - // "client_index" as the second member, - // this might be an event message or a request - msgType = eventMessage - wasClientIndex = true - } else if field.Name == contextField { - // reply needs "context" as the second member - msgType = replyMessage - } - } else if i == 2 { - if wasClientIndex && field.Name == contextField { - // request needs "client_index" as the second member - // and "context" as the third member - msgType = requestMessage - } - } - - // skip internal fields - switch strings.ToLower(field.Name) { - case msgIdField: - continue - case clientIndexField, contextField: - if n == 0 { - continue - } - } - n++ - if n == 1 { - fmt.Fprintln(w) - } - - generateField(ctx, w, msg.Fields, i) - } - - // generate end of the struct - fmt.Fprintln(w, "}") - - // generate message methods - generateMessageResetMethod(w, name) - generateMessageNameGetter(w, name, msg.Name) - generateCrcGetter(w, name, msg.CRC) - generateMessageTypeGetter(w, name, msgType) - generateMessageSize(ctx, w, name, msg.Fields) - generateMessageMarshal(ctx, w, name, msg.Fields) - generateMessageUnmarshal(ctx, w, name, msg.Fields) - - fmt.Fprintln(w) + // Getter + g.P("func (u *", structName, ") Get", field.GoName, "() (a ", getterStruct, ") {") + g.P(" var buf = ", govppCodecPkg.Ident("NewBuffer"), "(u.", fieldUnionData, "[:])") + decodeField(g, field, "a", func(name string) string { + return "a." 
+ name + }, 0) + g.P(" return") + g.P("}") + g.P() } -func generateMessageSize(ctx *GenFile, w io.Writer, name string, fields []*Field) { - fmt.Fprintf(w, "func (m *%[1]s) Size() int {\n", name) - - fmt.Fprintf(w, "\tif m == nil { return 0 }\n") - fmt.Fprintf(w, "\tvar size int\n") - - encodeBaseType := func(typ, name string, length int, sizefrom string) bool { - t, ok := BaseTypeNames[typ] - if !ok { - return false - } +func generateField(g *GenFile, fields []*Field, i int) { + field := fields[i] - var s = BaseTypeSizes[t] - switch t { - case STRING: - if length > 0 { - s = length - fmt.Fprintf(w, "\tsize += %d\n", s) - } else { - s = 4 - fmt.Fprintf(w, "\tsize += %d + len(%s)\n", s, name) - } - default: - if sizefrom != "" { - //fmt.Fprintf(w, "\tsize += %d * int(%s)\n", s, sizefrom) - fmt.Fprintf(w, "\tsize += %d * len(%s)\n", s, name) - } else { - if length > 0 { - s = BaseTypeSizes[t] * length - } - fmt.Fprintf(w, "\tsize += %d\n", s) - } - } + logf(" gen FIELD[%d] %s (%s) - type: %q (array: %v/%v)", i, field.GoName, field.Name, field.Type, field.Array, field.Length) - return true + gotype := getFieldType(g, field) + tags := structTags{ + "binapi": fieldTagJSON(field), + "json": fieldTagBinapi(field), } - lvl := 0 - var sizeFields func(fields []*Field, parentName string) - sizeFields = func(fields []*Field, parentName string) { - lvl++ - defer func() { lvl-- }() - - n := 0 - for _, field := range fields { - if field.ParentMessage != nil { - // skip internal fields - switch strings.ToLower(field.Name) { - case msgIdField: - continue - case clientIndexField, contextField: - if n == 0 { - continue - } - } - } - n++ - - fieldName := field.GoName //camelCaseName(strings.TrimPrefix(field.Name, "_")) - name := fmt.Sprintf("%s.%s", parentName, fieldName) - sizeFrom := camelCaseName(strings.TrimPrefix(field.SizeFrom, "_")) - var sizeFromName string - if sizeFrom != "" { - sizeFromName = fmt.Sprintf("%s.%s", parentName, sizeFrom) - } - - fmt.Fprintf(w, "\t// field[%d] %s\n", lvl, name) - - if encodeBaseType(field.Type, name, field.Length, sizeFromName) { - continue - } - - char := fmt.Sprintf("s%d", lvl) - index := fmt.Sprintf("j%d", lvl) - - if field.Array { - if field.Length > 0 { - fmt.Fprintf(w, "\tfor %[2]s := 0; %[2]s < %[1]d; %[2]s ++ {\n", field.Length, index) - } else if field.SizeFrom != "" { - //fmt.Fprintf(w, "\tfor %[1]s := 0; %[1]s < int(%[2]s.%[3]s); %[1]s++ {\n", index, parentName, sizeFrom) - fmt.Fprintf(w, "\tfor %[1]s := 0; %[1]s < len(%[2]s); %[1]s++ {\n", index, name) - } - - fmt.Fprintf(w, "\tvar %[1]s %[2]s\n_ = %[1]s\n", char, convertToGoType(ctx.file, field.Type)) - fmt.Fprintf(w, "\tif %[1]s < len(%[2]s) { %[3]s = %[2]s[%[1]s] }\n", index, name, char) - name = char - } - - if enum := getEnumByRef(ctx.file, field.Type); enum != nil { - if encodeBaseType(enum.Type, name, 0, "") { - } else { - fmt.Fprintf(w, "\t// ??? ENUM %s %s\n", name, enum.Type) - } - } else if alias := getAliasByRef(ctx.file, field.Type); alias != nil { - if encodeBaseType(alias.Type, name, alias.Length, "") { - } else if typ := getTypeByRef(ctx.file, alias.Type); typ != nil { - sizeFields(typ.Fields, name) - } else { - fmt.Fprintf(w, "\t// ??? ALIAS %s %s\n", name, alias.Type) - } - } else if typ := getTypeByRef(ctx.file, field.Type); typ != nil { - sizeFields(typ.Fields, name) - } else if union := getUnionByRef(ctx.file, field.Type); union != nil { - maxSize := getUnionSize(ctx.file, union) - fmt.Fprintf(w, "\tsize += %d\n", maxSize) - } else { - fmt.Fprintf(w, "\t// ??? 
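// The constructor/setter/getter emitted above (backed by codec.NewBuffer over
// XXX_UnionData) give unions simple value semantics. The new TestAddress in
// generate_test.go below exercises exactly this, e.g.:
//
//	addr := ip_types.AddressUnionIP4(ip_types.IP4Address{10, 20, 0, 1})
//	addr.SetIP4(ip_types.IP4Address{192, 168, 1, 1})
//	ip4 := addr.GetIP4() // ip_types.IP4Address{192, 168, 1, 1}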
buf[pos] = (%s)\n", name) - } + g.P(field.GoName, " ", gotype, tags) +} - if field.Array { - fmt.Fprintf(w, "\t}\n") - } - } +func fieldTagBinapi(field *Field) string { + if field.FieldSizeOf != nil { + return "-" } - - sizeFields(fields, "m") - - fmt.Fprintf(w, "return size\n") - - fmt.Fprintf(w, "}\n") + return fmt.Sprintf("%s,omitempty", field.Name) } -func generateMessageMarshal(ctx *GenFile, w io.Writer, name string, fields []*Field) { - fmt.Fprintf(w, "func (m *%[1]s) Marshal(b []byte) ([]byte, error) {\n", name) - - fmt.Fprintf(w, "\to := binary.BigEndian\n") - fmt.Fprintf(w, "\t_ = o\n") - fmt.Fprintf(w, "\tpos := 0\n") - fmt.Fprintf(w, "\t_ = pos\n") - - var buf = new(strings.Builder) - - encodeBaseType := func(typ, name string, length int, sizefrom string) bool { - t, ok := BaseTypeNames[typ] - if !ok { - return false - } - - isArray := length > 0 || sizefrom != "" - - switch t { - case I8, U8, I16, U16, I32, U32, I64, U64, F64: - if isArray { - if length != 0 { - fmt.Fprintf(buf, "\tfor i := 0; i < %d; i++ {\n", length) - } else if sizefrom != "" { - //fmt.Fprintf(buf, "\tfor i := 0; i < int(%s); i++ {\n", sizefrom) - fmt.Fprintf(buf, "\tfor i := 0; i < len(%s); i++ {\n", name) - } - } - } - - switch t { - case I8, U8: - if isArray { - fmt.Fprintf(buf, "\tvar x uint8\n") - fmt.Fprintf(buf, "\tif i < len(%s) { x = uint8(%s[i]) }\n", name, name) - name = "x" - } - fmt.Fprintf(buf, "\tbuf[pos] = uint8(%s)\n", name) - fmt.Fprintf(buf, "\tpos += 1\n") - if isArray { - fmt.Fprintf(buf, "\t}\n") - } - case I16, U16: - if isArray { - fmt.Fprintf(buf, "\tvar x uint16\n") - fmt.Fprintf(buf, "\tif i < len(%s) { x = uint16(%s[i]) }\n", name, name) - name = "x" - } - fmt.Fprintf(buf, "\to.PutUint16(buf[pos:pos+2], uint16(%s))\n", name) - fmt.Fprintf(buf, "\tpos += 2\n") - if isArray { - fmt.Fprintf(buf, "\t}\n") - } - case I32, U32: - if isArray { - fmt.Fprintf(buf, "\tvar x uint32\n") - fmt.Fprintf(buf, "\tif i < len(%s) { x = uint32(%s[i]) }\n", name, name) - name = "x" - } - fmt.Fprintf(buf, "\to.PutUint32(buf[pos:pos+4], uint32(%s))\n", name) - fmt.Fprintf(buf, "\tpos += 4\n") - if isArray { - fmt.Fprintf(buf, "\t}\n") - } - case I64, U64: - if isArray { - fmt.Fprintf(buf, "\tvar x uint64\n") - fmt.Fprintf(buf, "\tif i < len(%s) { x = uint64(%s[i]) }\n", name, name) - name = "x" - } - fmt.Fprintf(buf, "\to.PutUint64(buf[pos:pos+8], uint64(%s))\n", name) - fmt.Fprintf(buf, "\tpos += 8\n") - if isArray { - fmt.Fprintf(buf, "\t}\n") - } - case F64: - if isArray { - fmt.Fprintf(buf, "\tvar x float64\n") - fmt.Fprintf(buf, "\tif i < len(%s) { x = float64(%s[i]) }\n", name, name) - name = "x" - } - fmt.Fprintf(buf, "\to.PutUint64(buf[pos:pos+8], math.Float64bits(float64(%s)))\n", name) - fmt.Fprintf(buf, "\tpos += 8\n") - if isArray { - fmt.Fprintf(buf, "\t}\n") - } - case BOOL: - fmt.Fprintf(buf, "\tif %s { buf[pos] = 1 }\n", name) - fmt.Fprintf(buf, "\tpos += 1\n") - case STRING: - if length != 0 { - fmt.Fprintf(buf, "\tcopy(buf[pos:pos+%d], %s)\n", length, name) - fmt.Fprintf(buf, "\tpos += %d\n", length) - } else { - fmt.Fprintf(buf, "\to.PutUint32(buf[pos:pos+4], uint32(len(%s)))\n", name) - fmt.Fprintf(buf, "\tpos += 4\n") - fmt.Fprintf(buf, "\tcopy(buf[pos:pos+len(%s)], %s[:])\n", name, name) - fmt.Fprintf(buf, "\tpos += len(%s)\n", name) - } - default: - fmt.Fprintf(buf, "\t// ??? 
%s %s\n", name, typ) - return false +func fieldTagJSON(field *Field) string { + typ := fromApiType(field.Type) + if field.Array { + if field.Length > 0 { + typ = fmt.Sprintf("%s[%d]", typ, field.Length) + } else if field.SizeFrom != "" { + typ = fmt.Sprintf("%s[%s]", typ, field.SizeFrom) + } else { + typ = fmt.Sprintf("%s[]", typ) } - return true } - - lvl := 0 - var encodeFields func(fields []*Field, parentName string) - encodeFields = func(fields []*Field, parentName string) { - lvl++ - defer func() { lvl-- }() - - n := 0 - for _, field := range fields { - if field.ParentMessage != nil { - // skip internal fields - switch strings.ToLower(field.Name) { - case msgIdField: - continue - case clientIndexField, contextField: - if n == 0 { - continue - } - } - } - n++ - - getFieldName := func(name string) string { - fieldName := camelCaseName(strings.TrimPrefix(name, "_")) - return fmt.Sprintf("%s.%s", parentName, fieldName) - } - - fieldName := camelCaseName(strings.TrimPrefix(field.Name, "_")) - name := fmt.Sprintf("%s.%s", parentName, fieldName) - sizeFrom := camelCaseName(strings.TrimPrefix(field.SizeFrom, "_")) - var sizeFromName string - if sizeFrom != "" { - sizeFromName = fmt.Sprintf("%s.%s", parentName, sizeFrom) - } - - fmt.Fprintf(buf, "\t// field[%d] %s\n", lvl, name) - - getSizeOfField := func() *Field { - for _, f := range fields { - if f.SizeFrom == field.Name { - return f - } - } - return nil - } - if f := getSizeOfField(); f != nil { - if encodeBaseType(field.Type, fmt.Sprintf("len(%s)", getFieldName(f.Name)), field.Length, "") { - continue - } - panic(fmt.Sprintf("failed to encode base type of sizefrom field: %s", field.Name)) - } - - if encodeBaseType(field.Type, name, field.Length, sizeFromName) { - continue - } - - char := fmt.Sprintf("v%d", lvl) - index := fmt.Sprintf("j%d", lvl) - - if field.Array { - if field.Length > 0 { - fmt.Fprintf(buf, "\tfor %[2]s := 0; %[2]s < %[1]d; %[2]s ++ {\n", field.Length, index) - } else if field.SizeFrom != "" { - //fmt.Fprintf(buf, "\tfor %[1]s := 0; %[1]s < int(%[2]s.%[3]s); %[1]s++ {\n", index, parentName, sizeFrom) - fmt.Fprintf(buf, "\tfor %[1]s := 0; %[1]s < len(%[2]s); %[1]s++ {\n", index, name) - } - - fmt.Fprintf(buf, "\tvar %s %s\n", char, convertToGoType(ctx.file, field.Type)) - fmt.Fprintf(buf, "\tif %[1]s < len(%[2]s) { %[3]s = %[2]s[%[1]s] }\n", index, name, char) - name = char - } - - if enum := getEnumByRef(ctx.file, field.Type); enum != nil { - if encodeBaseType(enum.Type, name, 0, "") { - } else { - fmt.Fprintf(buf, "\t// ??? ENUM %s %s\n", name, enum.Type) - } - } else if alias := getAliasByRef(ctx.file, field.Type); alias != nil { - if encodeBaseType(alias.Type, name, alias.Length, "") { - } else if typ := getTypeByRef(ctx.file, alias.Type); typ != nil { - encodeFields(typ.Fields, name) - } else { - fmt.Fprintf(buf, "\t// ??? ALIAS %s %s\n", name, alias.Type) - } - } else if typ := getTypeByRef(ctx.file, field.Type); typ != nil { - encodeFields(typ.Fields, name) - } else if union := getUnionByRef(ctx.file, field.Type); union != nil { - maxSize := getUnionSize(ctx.file, union) - fmt.Fprintf(buf, "\tcopy(buf[pos:pos+%d], %s.%s[:])\n", maxSize, name, unionDataField) - fmt.Fprintf(buf, "\tpos += %d\n", maxSize) - } else { - fmt.Fprintf(buf, "\t// ??? 
buf[pos] = (%s)\n", name) - } - - if field.Array { - fmt.Fprintf(buf, "\t}\n") - } - } + tag := []string{ + typ, + fmt.Sprintf("name=%s", field.Name), } - - encodeFields(fields, "m") - - fmt.Fprintf(w, "\tvar buf []byte\n") - fmt.Fprintf(w, "\tif b == nil {\n") - fmt.Fprintf(w, "\tbuf = make([]byte, m.Size())\n") - fmt.Fprintf(w, "\t} else {\n") - fmt.Fprintf(w, "\tbuf = b\n") - fmt.Fprintf(w, "\t}\n") - fmt.Fprint(w, buf.String()) - - fmt.Fprintf(w, "return buf, nil\n") - - fmt.Fprintf(w, "}\n") -} - -func generateMessageUnmarshal(ctx *GenFile, w io.Writer, name string, fields []*Field) { - fmt.Fprintf(w, "func (m *%[1]s) Unmarshal(tmp []byte) error {\n", name) - - fmt.Fprintf(w, "\to := binary.BigEndian\n") - fmt.Fprintf(w, "\t_ = o\n") - fmt.Fprintf(w, "\tpos := 0\n") - fmt.Fprintf(w, "\t_ = pos\n") - - decodeBaseType := func(typ, orig, name string, length int, sizefrom string, alloc bool) bool { - t, ok := BaseTypeNames[typ] - if !ok { - return false - } - - isArray := length > 0 || sizefrom != "" - - switch t { - case I8, U8, I16, U16, I32, U32, I64, U64, F64: - if isArray { - if alloc { - if length != 0 { - fmt.Fprintf(w, "\t%s = make([]%s, %d)\n", name, orig, length) - } else if sizefrom != "" { - fmt.Fprintf(w, "\t%s = make([]%s, %s)\n", name, orig, sizefrom) - } - } - fmt.Fprintf(w, "\tfor i := 0; i < len(%s); i++ {\n", name) - } - } - - switch t { - case I8, U8: - if isArray { - fmt.Fprintf(w, "\t%s[i] = %s(tmp[pos])\n", name, convertToGoType(ctx.file, typ)) - } else { - fmt.Fprintf(w, "\t%s = %s(tmp[pos])\n", name, orig) - } - fmt.Fprintf(w, "\tpos += 1\n") - if isArray { - fmt.Fprintf(w, "\t}\n") - } - case I16, U16: - if isArray { - fmt.Fprintf(w, "\t%s[i] = %s(o.Uint16(tmp[pos:pos+2]))\n", name, orig) - } else { - fmt.Fprintf(w, "\t%s = %s(o.Uint16(tmp[pos:pos+2]))\n", name, orig) - } - fmt.Fprintf(w, "\tpos += 2\n") - if isArray { - fmt.Fprintf(w, "\t}\n") - } - case I32, U32: - if isArray { - fmt.Fprintf(w, "\t%s[i] = %s(o.Uint32(tmp[pos:pos+4]))\n", name, orig) - } else { - fmt.Fprintf(w, "\t%s = %s(o.Uint32(tmp[pos:pos+4]))\n", name, orig) - } - fmt.Fprintf(w, "\tpos += 4\n") - if isArray { - fmt.Fprintf(w, "\t}\n") - } - case I64, U64: - if isArray { - fmt.Fprintf(w, "\t%s[i] = %s(o.Uint64(tmp[pos:pos+8]))\n", name, orig) - } else { - fmt.Fprintf(w, "\t%s = %s(o.Uint64(tmp[pos:pos+8]))\n", name, orig) - } - fmt.Fprintf(w, "\tpos += 8\n") - if isArray { - fmt.Fprintf(w, "\t}\n") - } - case F64: - if isArray { - fmt.Fprintf(w, "\t%s[i] = %s(math.Float64frombits(o.Uint64(tmp[pos:pos+8])))\n", name, orig) - } else { - fmt.Fprintf(w, "\t%s = %s(math.Float64frombits(o.Uint64(tmp[pos:pos+8])))\n", name, orig) - } - fmt.Fprintf(w, "\tpos += 8\n") - if isArray { - fmt.Fprintf(w, "\t}\n") - } - case BOOL: - fmt.Fprintf(w, "\t%s = tmp[pos] != 0\n", name) - fmt.Fprintf(w, "\tpos += 1\n") - case STRING: - if length != 0 { - fmt.Fprintf(w, "\t{\n") - fmt.Fprintf(w, "\tnul := bytes.Index(tmp[pos:pos+%d], []byte{0x00})\n", length) - fmt.Fprintf(w, "\t%[1]s = codec.DecodeString(tmp[pos:pos+nul])\n", name) - fmt.Fprintf(w, "\tpos += %d\n", length) - fmt.Fprintf(w, "\t}\n") - } else { - fmt.Fprintf(w, "\t{\n") - fmt.Fprintf(w, "\tsiz := o.Uint32(tmp[pos:pos+4])\n") - fmt.Fprintf(w, "\tpos += 4\n") - fmt.Fprintf(w, "\t%[1]s = codec.DecodeString(tmp[pos:pos+int(siz)])\n", name) - fmt.Fprintf(w, "\tpos += len(%s)\n", name) - fmt.Fprintf(w, "\t}\n") - } - default: - fmt.Fprintf(w, "\t// ??? 
%s %s\n", name, typ) - return false - } - return true + if limit, ok := field.Meta["limit"]; ok && limit.(int) > 0 { + tag = append(tag, fmt.Sprintf("limit=%s", limit)) } - - lvl := 0 - var decodeFields func(fields []*Field, parentName string) - decodeFields = func(fields []*Field, parentName string) { - lvl++ - defer func() { lvl-- }() - - n := 0 - for _, field := range fields { - if field.ParentMessage != nil { - // skip internal fields - switch strings.ToLower(field.Name) { - case msgIdField: - continue - case clientIndexField, contextField: - if n == 0 { - continue - } - } - } - n++ - - fieldName := camelCaseName(strings.TrimPrefix(field.Name, "_")) - name := fmt.Sprintf("%s.%s", parentName, fieldName) - sizeFrom := camelCaseName(strings.TrimPrefix(field.SizeFrom, "_")) - var sizeFromName string - if sizeFrom != "" { - sizeFromName = fmt.Sprintf("%s.%s", parentName, sizeFrom) - } - - fmt.Fprintf(w, "\t// field[%d] %s\n", lvl, name) - - if decodeBaseType(field.Type, convertToGoType(ctx.file, field.Type), name, field.Length, sizeFromName, true) { - continue - } - - //char := fmt.Sprintf("v%d", lvl) - index := fmt.Sprintf("j%d", lvl) - - if field.Array { - if field.Length > 0 { - fmt.Fprintf(w, "\tfor %[2]s := 0; %[2]s < %[1]d; %[2]s ++ {\n", field.Length, index) - } else if field.SizeFrom != "" { - fieldType := getFieldType(ctx, field) - if strings.HasPrefix(fieldType, "[]") { - fmt.Fprintf(w, "\t%s = make(%s, int(%s.%s))\n", name, fieldType, parentName, sizeFrom) - } - fmt.Fprintf(w, "\tfor %[1]s := 0; %[1]s < int(%[2]s.%[3]s); %[1]s++ {\n", index, parentName, sizeFrom) - } - - /*fmt.Fprintf(w, "\tvar %s %s\n", char, convertToGoType(ctx, field.Type)) - fmt.Fprintf(w, "\tif %[1]s < len(%[2]s) { %[3]s = %[2]s[%[1]s] }\n", index, name, char) - name = char*/ - name = fmt.Sprintf("%s[%s]", name, index) - } - - if enum := getEnumByRef(ctx.file, field.Type); enum != nil { - if decodeBaseType(enum.Type, convertToGoType(ctx.file, field.Type), name, 0, "", false) { - } else { - fmt.Fprintf(w, "\t// ??? ENUM %s %s\n", name, enum.Type) - } - } else if alias := getAliasByRef(ctx.file, field.Type); alias != nil { - if decodeBaseType(alias.Type, convertToGoType(ctx.file, field.Type), name, alias.Length, "", false) { - } else if typ := getTypeByRef(ctx.file, alias.Type); typ != nil { - decodeFields(typ.Fields, name) - } else { - fmt.Fprintf(w, "\t// ??? ALIAS %s %s\n", name, alias.Type) - } - } else if typ := getTypeByRef(ctx.file, field.Type); typ != nil { - decodeFields(typ.Fields, name) - } else if union := getUnionByRef(ctx.file, field.Type); union != nil { - maxSize := getUnionSize(ctx.file, union) - fmt.Fprintf(w, "\tcopy(%s.%s[:], tmp[pos:pos+%d])\n", name, unionDataField, maxSize) - fmt.Fprintf(w, "\tpos += %d\n", maxSize) - } else { - fmt.Fprintf(w, "\t// ??? 
buf[pos] = (%s)\n", name) - } - - if field.Array { - fmt.Fprintf(w, "\t}\n") + if def, ok := field.Meta["default"]; ok && def != nil { + actual := fieldActualType(field) + if t, ok := BaseTypesGo[actual]; ok { + switch t { + case I8, I16, I32, I64: + def = int(def.(float64)) + case U8, U16, U32, U64: + def = uint(def.(float64)) + case F64: + def = def.(float64) } } + tag = append(tag, fmt.Sprintf("default=%s", def)) } - - decodeFields(fields, "m") - - fmt.Fprintf(w, "return nil\n") - - fmt.Fprintf(w, "}\n") + return strings.Join(tag, ",") } -func getFieldType(ctx *GenFile, field *Field) string { - //fieldName := strings.TrimPrefix(field.Name, "_") - //fieldName = camelCaseName(fieldName) - //fieldName := field.GoName +type structTags map[string]string - dataType := convertToGoType(ctx.file, field.Type) - fieldType := dataType - - // check if it is array - if field.Length > 0 || field.SizeFrom != "" { - if dataType == "uint8" { - dataType = "byte" - } - if dataType == "string" && field.Array { - fieldType = "string" - dataType = "byte" - } else if _, ok := BaseTypeNames[field.Type]; !ok && field.SizeFrom == "" { - fieldType = fmt.Sprintf("[%d]%s", field.Length, dataType) - } else { - fieldType = "[]" + dataType - } +func (tags structTags) String() string { + if len(tags) == 0 { + return "" } - - return fieldType -} - -func generateField(ctx *GenFile, w io.Writer, fields []*Field, i int) { - field := fields[i] - - //fieldName := strings.TrimPrefix(field.Name, "_") - //fieldName = camelCaseName(fieldName) - fieldName := field.GoName - - dataType := convertToGoType(ctx.file, field.Type) - fieldType := dataType - - // generate length field for strings - if field.Type == "string" && field.Length == 0 { - fmt.Fprintf(w, "\tXXX_%sLen uint32 `struc:\"sizeof=%s\"`\n", fieldName, fieldName) + var keys []string + for k := range tags { + keys = append(keys, k) } - - // check if it is array - if field.Length > 0 || field.SizeFrom != "" { - if dataType == "uint8" { - dataType = "byte" - } - if dataType == "string" && field.Array { - fieldType = "string" - dataType = "byte" - } else if _, ok := BaseTypeNames[field.Type]; !ok && field.SizeFrom == "" { - fieldType = fmt.Sprintf("[%d]%s", field.Length, dataType) - } else { - fieldType = "[]" + dataType - } + sort.Strings(keys) + var ss []string + for _, key := range keys { + tag := tags[key] + ss = append(ss, fmt.Sprintf(`%s:%s`, key, strconv.Quote(tag))) } - fmt.Fprintf(w, "\t%s %s", fieldName, fieldType) - - fieldTags := map[string]string{} + return "`" + strings.Join(ss, " ") + "`" +} - if field.Length > 0 && field.Array { - // fixed size array - fieldTags["struc"] = fmt.Sprintf("[%d]%s", field.Length, dataType) - } else { - for _, f := range fields { - if f.SizeFrom == field.Name { - // variable sized array - //sizeOfName := camelCaseName(f.Name) - fieldTags["struc"] = fmt.Sprintf("sizeof=%s", f.GoName) - } - } +func genMessages(g *GenFile) { + if len(g.file.Messages) == 0 { + return } - if ctx.IncludeBinapiNames { - typ := fromApiType(field.Type) - if field.Array { - if field.Length > 0 { - fieldTags["binapi"] = fmt.Sprintf("%s[%d],name=%s", typ, field.Length, field.Name) - } else if field.SizeFrom != "" { - fieldTags["binapi"] = fmt.Sprintf("%s[%s],name=%s", typ, field.SizeFrom, field.Name) - } - } else { - fieldTags["binapi"] = fmt.Sprintf("%s,name=%s", typ, field.Name) - } - } - if limit, ok := field.Meta["limit"]; ok && limit.(int) > 0 { - fieldTags["binapi"] = fmt.Sprintf("%s,limit=%d", fieldTags["binapi"], limit) - } - if def, ok := 
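// With generateField and the tag helpers above, message fields come out with
// paired binapi/json struct tags, roughly like this (field names invented):
//
//	SwIfIndex uint32 `binapi:"u32,name=sw_if_index" json:"sw_if_index,omitempty"`
//	Tag       string `binapi:"string[64],name=tag" json:"tag,omitempty"`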
field.Meta["default"]; ok && def != nil { - actual := getActualType(ctx.file, fieldType) - if t, ok := binapiTypes[actual]; ok && t != "float64" { - defnum := int(def.(float64)) - fieldTags["binapi"] = fmt.Sprintf("%s,default=%d", fieldTags["binapi"], defnum) - } else { - fieldTags["binapi"] = fmt.Sprintf("%s,default=%v", fieldTags["binapi"], def) - } + for _, msg := range g.file.Messages { + genMessage(g, msg) } - fieldTags["json"] = fmt.Sprintf("%s,omitempty", field.Name) + // generate registrations + initFnName := fmt.Sprintf("file_%s_binapi_init", g.file.PackageName) - if len(fieldTags) > 0 { - fmt.Fprintf(w, "\t`") - var keys []string - for k := range fieldTags { - keys = append(keys, k) - } - sort.Strings(keys) - var n int - for _, tt := range keys { - t, ok := fieldTags[tt] - if !ok { - continue - } - if n > 0 { - fmt.Fprintf(w, " ") - } - n++ - fmt.Fprintf(w, `%s:"%s"`, tt, t) - } - fmt.Fprintf(w, "`") + g.P("func init() { ", initFnName, "() }") + g.P("func ", initFnName, "() {") + for _, msg := range g.file.Messages { + id := fmt.Sprintf("%s_%s", msg.Name, msg.CRC) + g.P(govppApiPkg.Ident("RegisterMessage"), "((*", msg.GoIdent, ")(nil), ", strconv.Quote(id), ")") } + g.P("}") + g.P() - fmt.Fprintln(w) + // generate list of messages + g.P("// Messages returns list of all messages in this module.") + g.P("func AllMessages() []", govppApiPkg.Ident("Message"), " {") + g.P("return []", govppApiPkg.Ident("Message"), "{") + for _, msg := range g.file.Messages { + g.P("(*", msg.GoIdent, ")(nil),") + } + g.P("}") + g.P("}") } -func generateMessageResetMethod(w io.Writer, structName string) { - fmt.Fprintf(w, "func (m *%[1]s) Reset() { *m = %[1]s{} }\n", structName) -} +func genMessage(g *GenFile, msg *Message) { + logf("gen MESSAGE %s (%s) - %d fields", msg.GoName, msg.Name, len(msg.Fields)) -func generateMessageNameGetter(w io.Writer, structName, msgName string) { - fmt.Fprintf(w, "func (*%s) GetMessageName() string { return %q }\n", structName, msgName) -} + genTypeComment(g, msg.GoIdent.GoName, msg.Name, "message") -func generateTypeNameGetter(w io.Writer, structName, msgName string) { - fmt.Fprintf(w, "func (*%s) GetTypeName() string { return %q }\n", structName, msgName) -} - -func generateIPAddressConversion(w io.Writer, structName string) { - f1 := func(ipVer, ipVerExt int) string { - return fmt.Sprintf(`address.Af = ADDRESS_IP%[1]d - var ip%[1]daddr IP%[1]dAddress - copy(ip%[1]daddr[:], netIP.To%[2]d()) - address.Un.SetIP%[1]d(ip%[1]daddr)`, ipVer, ipVerExt) - } - f2 := func(ipVer, ipVerExt int) string { - return fmt.Sprintf("ip%[1]dAddress := a.Un.GetIP%[1]d()\nip = net.IP(ip%[1]dAddress[:]).To%[2]d().String()", - ipVer, ipVerExt) - } - // IP to Address - fmt.Fprintf(w, `func ParseAddress(ip string) (%[1]s, error) { - var address %[1]s - netIP := net.ParseIP(ip) - if netIP == nil { - return address, fmt.Errorf("invalid address: %[2]s", ip) - } - if ip4 := netIP.To4(); ip4 == nil { - %[3]s + // generate message definition + if len(msg.Fields) == 0 { + g.P("type ", msg.GoIdent, " struct {}") } else { - %[4]s - } - return address, nil -} -`, structName, "%s", f1(6, 16), f1(4, 4)) - fmt.Fprintln(w) - - // Address to IP - fmt.Fprintln(w) - fmt.Fprintf(w, `func (a *%[1]s) ToString() string { - var ip string - if a.Af == ADDRESS_IP6 { - %[2]s - } else { - %[3]s + g.P("type ", msg.GoIdent, " struct {") + for i := range msg.Fields { + generateField(g, msg.Fields, i) + } + g.P("}") } - return ip -}`, structName, f2(6, 16), f2(4, 4)) -} + g.P() -func generatePrefixConversion(w io.Writer, 
structName string) { - fErr := func() string { - return fmt.Sprintf(`if err != nil { - return Prefix{}, fmt.Errorf("invalid IP %s: %s", ip, err) - }`, "%s", "%v") - } + generateMessageMethods(g, msg) - // IP to Prefix - fmt.Fprintf(w, `func ParsePrefix(ip string) (prefix %[1]s, err error) { - hasPrefix := strings.Contains(ip, "/") - if hasPrefix { - netIP, network, err := net.ParseCIDR(ip) - %[2]s - maskSize, _ := network.Mask.Size() - prefix.Len = byte(maskSize) - prefix.Address, err = ParseAddress(netIP.String()) - %[2]s - } else { - netIP := net.ParseIP(ip) - defaultMaskSize, _ := net.CIDRMask(32, 32).Size() - if netIP.To4() == nil { - defaultMaskSize, _ = net.CIDRMask(128, 128).Size() - } - prefix.Len = byte(defaultMaskSize) - prefix.Address, err = ParseAddress(netIP.String()) - %[2]s - } - return prefix, nil -}`, structName, fErr(), nil) - fmt.Fprintln(w) - - // Prefix to IP - fmt.Fprintln(w) - fmt.Fprintf(w, `func (p *%[1]s) ToString() string { - ip := p.Address.ToString() - return ip + "/" + strconv.Itoa(int(p.Len)) - }`, structName) -} + // encoding methods + generateMessageSize(g, msg.GoIdent.GoName, msg.Fields) + generateMessageMarshal(g, msg.GoIdent.GoName, msg.Fields) + generateMessageUnmarshal(g, msg.GoIdent.GoName, msg.Fields) -func generateMacAddressConversion(w io.Writer, structName string) { - // string to MAC - fmt.Fprintf(w, `func ParseMAC(mac string) (parsed %[1]s, err error) { - var hw net.HardwareAddr - if hw, err = net.ParseMAC(mac); err != nil { - return - } - copy(parsed[:], hw[:]) - return -}`, structName) - fmt.Fprintln(w) - - // MAC to string - fmt.Fprintln(w) - fmt.Fprintf(w, `func (m *%[1]s) ToString() string { - return net.HardwareAddr(m[:]).String() - }`, structName) + g.P() } -func generateCrcGetter(w io.Writer, structName, crc string) { - crc = strings.TrimPrefix(crc, "0x") - fmt.Fprintf(w, "func (*%s) GetCrcString() string { return %q }\n", structName, crc) -} +func generateMessageMethods(g *GenFile, msg *Message) { + // Reset method + g.P("func (m *", msg.GoIdent.GoName, ") Reset() { *m = ", msg.GoIdent.GoName, "{} }") -func generateMessageTypeGetter(w io.Writer, structName string, msgType MessageType) { - fmt.Fprintf(w, "func (*"+structName+") GetMessageType() api.MessageType {") - if msgType == requestMessage { - fmt.Fprintf(w, "\treturn api.RequestMessage") - } else if msgType == replyMessage { - fmt.Fprintf(w, "\treturn api.ReplyMessage") - } else if msgType == eventMessage { - fmt.Fprintf(w, "\treturn api.EventMessage") - } else { - fmt.Fprintf(w, "\treturn api.OtherMessage") - } - fmt.Fprintln(w, "}") - fmt.Fprintln(w) -} + // GetMessageName method + g.P("func (*", msg.GoIdent.GoName, ") GetMessageName() string { return ", strconv.Quote(msg.Name), " }") + + // GetCrcString method + g.P("func (*", msg.GoIdent.GoName, ") GetCrcString() string { return ", strconv.Quote(msg.CRC), " }") + + // GetMessageType method + g.P("func (*", msg.GoIdent.GoName, ") GetMessageType() api.MessageType {") + g.P(" return ", apiMsgType(msg.msgType)) + g.P("}") -func logf(f string, v ...interface{}) { - logrus.Debugf(f, v...) + g.P() } diff --git a/binapigen/generate_rpc.go b/binapigen/generate_rpc.go deleted file mode 100644 index 4beec04..0000000 --- a/binapigen/generate_rpc.go +++ /dev/null @@ -1,203 +0,0 @@ -// Copyright (c) 2020 Cisco and/or its affiliates. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
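// Putting genMessage and generateMessageMethods together, each message ends up
// with Reset/name/CRC/type accessors plus the Size/Marshal/Unmarshal encoding
// methods generated elsewhere in this change. Roughly, with invented names and CRC:
//
//	// SwInterfaceSetFlags defines message 'sw_interface_set_flags'.
//	type SwInterfaceSetFlags struct { ... }
//
//	func (m *SwInterfaceSetFlags) Reset() { *m = SwInterfaceSetFlags{} }
//	func (*SwInterfaceSetFlags) GetMessageName() string { return "sw_interface_set_flags" }
//	func (*SwInterfaceSetFlags) GetCrcString() string { return "6a2b491a" }
//	func (*SwInterfaceSetFlags) GetMessageType() api.MessageType {
//		return api.RequestMessage
//	}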
-// You may obtain a copy of the License at: -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package binapigen - -import ( - "fmt" - "io" - "strings" -) - -// generated service names -const ( - serviceApiName = "RPCService" // name for the RPC service interface - serviceImplName = "serviceClient" // name for the RPC service implementation - serviceClientName = "ServiceClient" // name for the RPC service client - - // TODO: register service descriptor - //serviceDescType = "ServiceDesc" // name for service descriptor type - //serviceDescName = "_ServiceRPC_serviceDesc" // name for service descriptor var -) - -func generateFileRPC(ctx *GenFile, w io.Writer) { - logf("----------------------------") - logf("generating RPC file package: %q", ctx.file.PackageName) - logf("----------------------------") - - // generate file header - fmt.Fprintln(w, "// Code generated by GoVPP's binapi-generator. DO NOT EDIT.") - fmt.Fprintln(w) - - // generate package header - fmt.Fprintf(w, "package %s\n", ctx.file.PackageName) - fmt.Fprintln(w) - - // generate imports - fmt.Fprintln(w, "import (") - fmt.Fprintln(w, ` "context"`) - fmt.Fprintln(w, ` "io"`) - fmt.Fprintln(w) - fmt.Fprintf(w, "\tapi \"%s\"\n", "git.fd.io/govpp.git/api") - fmt.Fprintln(w, ")") - fmt.Fprintln(w) - - // generate RPC service - if ctx.file.Service != nil && len(ctx.file.Service.RPCs) > 0 { - generateService(ctx, w, ctx.file.Service) - } - - // generate message registrations - /*fmt.Fprintln(w, "var _RPCService_desc = api.RPCDesc{") - - fmt.Fprintln(w, "}") - fmt.Fprintln(w)*/ - - // generate import refs - fmt.Fprintf(w, "// Reference imports to suppress errors if they are not otherwise used.\n") - fmt.Fprintf(w, "var _ = api.RegisterMessage\n") - fmt.Fprintf(w, "var _ = context.Background\n") - fmt.Fprintf(w, "var _ = io.Copy\n") - -} - -func generateService(ctx *GenFile, w io.Writer, svc *Service) { - // generate services comment - generateComment(ctx, w, serviceApiName, "services", "service") - - // generate service api - fmt.Fprintf(w, "type %s interface {\n", serviceApiName) - for _, rpc := range svc.RPCs { - generateRPCMethod(ctx, w, &rpc) - fmt.Fprintln(w) - } - fmt.Fprintln(w, "}") - fmt.Fprintln(w) - - // generate client implementation - fmt.Fprintf(w, "type %s struct {\n", serviceImplName) - fmt.Fprintf(w, "\tch api.Channel\n") - fmt.Fprintln(w, "}") - fmt.Fprintln(w) - - // generate client constructor - fmt.Fprintf(w, "func New%s(ch api.Channel) %s {\n", serviceClientName, serviceApiName) - fmt.Fprintf(w, "\treturn &%s{ch}\n", serviceImplName) - fmt.Fprintln(w, "}") - fmt.Fprintln(w) - - for _, rpc := range svc.RPCs { - method := camelCaseName(rpc.RequestMsg) - if m := strings.TrimSuffix(method, "Dump"); method != m { - method = "Dump" + m - } - - fmt.Fprintf(w, "func (c *%s) ", serviceImplName) - generateRPCMethod(ctx, w, &rpc) - fmt.Fprintln(w, " {") - if rpc.Stream { - streamImpl := fmt.Sprintf("%s_%sClient", serviceImplName, method) - fmt.Fprintf(w, "\tstream := c.ch.SendMultiRequest(in)\n") - fmt.Fprintf(w, "\tx := &%s{stream}\n", streamImpl) - fmt.Fprintf(w, "\treturn x, nil\n") - } else if replyTyp := camelCaseName(rpc.ReplyMsg); replyTyp != "" { - fmt.Fprintf(w, "\tout := new(%s)\n", 
replyTyp) - fmt.Fprintf(w, "\terr:= c.ch.SendRequest(in).ReceiveReply(out)\n") - fmt.Fprintf(w, "\tif err != nil { return nil, err }\n") - fmt.Fprintf(w, "\treturn out, nil\n") - } else { - fmt.Fprintf(w, "\tc.ch.SendRequest(in)\n") - fmt.Fprintf(w, "\treturn nil\n") - } - fmt.Fprintln(w, "}") - fmt.Fprintln(w) - - if rpc.Stream { - replyTyp := camelCaseName(rpc.ReplyMsg) - method := camelCaseName(rpc.RequestMsg) - if m := strings.TrimSuffix(method, "Dump"); method != m { - method = "Dump" + m - } - streamApi := fmt.Sprintf("%s_%sClient", serviceApiName, method) - - fmt.Fprintf(w, "type %s interface {\n", streamApi) - fmt.Fprintf(w, "\tRecv() (*%s, error)\n", replyTyp) - fmt.Fprintln(w, "}") - fmt.Fprintln(w) - - streamImpl := fmt.Sprintf("%s_%sClient", serviceImplName, method) - fmt.Fprintf(w, "type %s struct {\n", streamImpl) - fmt.Fprintf(w, "\tapi.MultiRequestCtx\n") - fmt.Fprintln(w, "}") - fmt.Fprintln(w) - - fmt.Fprintf(w, "func (c *%s) Recv() (*%s, error) {\n", streamImpl, replyTyp) - fmt.Fprintf(w, "\tm := new(%s)\n", replyTyp) - fmt.Fprintf(w, "\tstop, err := c.MultiRequestCtx.ReceiveReply(m)\n") - fmt.Fprintf(w, "\tif err != nil { return nil, err }\n") - fmt.Fprintf(w, "\tif stop { return nil, io.EOF }\n") - fmt.Fprintf(w, "\treturn m, nil\n") - fmt.Fprintln(w, "}") - fmt.Fprintln(w) - } - } - - // TODO: generate service descriptor - /*fmt.Fprintf(w, "var %s = api.%s{\n", serviceDescName, serviceDescType) - fmt.Fprintf(w, "\tServiceName: \"%s\",\n", ctx.moduleName) - fmt.Fprintf(w, "\tHandlerType: (*%s)(nil),\n", serviceApiName) - fmt.Fprintf(w, "\tMethods: []api.MethodDesc{\n") - for _, method := range rpcs { - fmt.Fprintf(w, "\t {\n") - fmt.Fprintf(w, "\t MethodName: \"%s\",\n", method.Name) - fmt.Fprintf(w, "\t },\n") - } - fmt.Fprintf(w, "\t},\n") - //fmt.Fprintf(w, "\tCompatibility: %s,\n", messageCrcName) - //fmt.Fprintf(w, "\tMetadata: reflect.TypeOf((*%s)(nil)).Elem().PkgPath(),\n", serviceApiName) - fmt.Fprintf(w, "\tMetadata: \"%s\",\n", ctx.inputFile) - fmt.Fprintln(w, "}")*/ - - fmt.Fprintln(w) -} - -func generateRPCMethod(ctx *GenFile, w io.Writer, rpc *RPC) { - reqTyp := camelCaseName(rpc.RequestMsg) - - logf(" writing RPC: %+v", reqTyp) - - // method name is same as parameter type name by default - method := reqTyp - if rpc.Stream { - // use Dump as prefix instead of suffix for stream services - if m := strings.TrimSuffix(method, "Dump"); method != m { - method = "Dump" + m - } - } - - params := fmt.Sprintf("in *%s", reqTyp) - returns := "error" - - if replyType := camelCaseName(rpc.ReplyMsg); replyType != "" { - var replyTyp string - if rpc.Stream { - replyTyp = fmt.Sprintf("%s_%sClient", serviceApiName, method) - } else { - replyTyp = fmt.Sprintf("*%s", replyType) - } - returns = fmt.Sprintf("(%s, error)", replyTyp) - } - - fmt.Fprintf(w, "\t%s(ctx context.Context, %s) %s", method, params, returns) -} diff --git a/binapigen/generate_test.go b/binapigen/generate_test.go index 46cc5eb..2fa5dc6 100644 --- a/binapigen/generate_test.go +++ b/binapigen/generate_test.go @@ -15,43 +15,35 @@ package binapigen import ( - "git.fd.io/govpp.git/examples/binapi/interfaces" - "git.fd.io/govpp.git/examples/binapi/ip_types" "os" - "strings" "testing" . 
"github.com/onsi/gomega" + "git.fd.io/govpp.git/binapi/ip_types" "git.fd.io/govpp.git/binapigen/vppapi" ) -const testOutputDir = "test_output_directory" +const testOutputDir = "test_output_dir" -func GenerateFromFile(file, outputDir string, opts Options) error { +func GenerateFromFile(file string, opts Options) error { apifile, err := vppapi.ParseFile(file) if err != nil { return err } - - g, err := New(opts, []*vppapi.File{apifile}) + gen, err := New(opts, []*vppapi.File{apifile}, nil) if err != nil { return err } - for _, file := range g.Files { + for _, file := range gen.Files { if !file.Generate { continue } - GenerateBinapi(g, file, outputDir) - if file.Service != nil { - GenerateRPC(g, file, outputDir) - } + GenerateAPI(gen, file) } - - if err = g.Generate(); err != nil { + if err = gen.Generate(); err != nil { return err } - return nil } @@ -61,7 +53,8 @@ func TestGenerateFromFile(t *testing.T) { // remove directory created during test defer os.RemoveAll(testOutputDir) - err := GenerateFromFile("vppapi/testdata/acl.api.json", testOutputDir, Options{FilesToGenerate: []string{"acl"}}) + opts := Options{OutputDir: testOutputDir} + err := GenerateFromFile("vppapi/testdata/acl.api.json", opts) Expect(err).ShouldNot(HaveOccurred()) fileInfo, err := os.Stat(testOutputDir + "/acl/acl.ba.go") Expect(err).ShouldNot(HaveOccurred()) @@ -72,7 +65,8 @@ func TestGenerateFromFile(t *testing.T) { func TestGenerateFromFileInputError(t *testing.T) { RegisterTestingT(t) - err := GenerateFromFile("vppapi/testdata/nonexisting.json", testOutputDir, Options{}) + opts := Options{OutputDir: testOutputDir} + err := GenerateFromFile("vppapi/testdata/nonexisting.json", opts) Expect(err).Should(HaveOccurred()) Expect(err.Error()).To(ContainSubstring("unsupported")) } @@ -80,7 +74,8 @@ func TestGenerateFromFileInputError(t *testing.T) { func TestGenerateFromFileReadJsonError(t *testing.T) { RegisterTestingT(t) - err := GenerateFromFile("vppapi/testdata/input-read-json-error.json", testOutputDir, Options{}) + opts := Options{OutputDir: testOutputDir} + err := GenerateFromFile("vppapi/testdata/input-read-json-error.json", opts) Expect(err).Should(HaveOccurred()) Expect(err.Error()).To(ContainSubstring("unsupported")) } @@ -96,139 +91,23 @@ func TestGenerateFromFileGeneratePackageError(t *testing.T) { os.RemoveAll(testOutputDir) }() - err := GenerateFromFile("vppapi/testdata/input-generate-error.json", testOutputDir, Options{}) - Expect(err).Should(HaveOccurred()) -} - -func TestGeneratedParseAddress(t *testing.T) { - RegisterTestingT(t) - - var data = []struct { - input string - result ip_types.Address - }{ - {"192.168.0.1", ip_types.Address{ - Af: ip_types.ADDRESS_IP4, - Un: ip_types.AddressUnionIP4(ip_types.IP4Address{192, 168, 0, 1}), - }}, - {"aac1:0:ab45::", ip_types.Address{ - Af: ip_types.ADDRESS_IP6, - Un: ip_types.AddressUnionIP6(ip_types.IP6Address{170, 193, 0, 0, 171, 69, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}), - }}, - } - - for _, entry := range data { - t.Run(entry.input, func(t *testing.T) { - parsedAddress, err := ip_types.ParseAddress(entry.input) - Expect(err).ShouldNot(HaveOccurred()) - Expect(parsedAddress).To(Equal(entry.result)) - - originAddress := parsedAddress.ToString() - Expect(originAddress).To(Equal(entry.input)) - }) - } -} - -func TestGeneratedParseAddressError(t *testing.T) { - RegisterTestingT(t) - - _, err := ip_types.ParseAddress("malformed_ip") + opts := Options{OutputDir: testOutputDir} + err := GenerateFromFile("vppapi/testdata/input-generate-error.json", opts) 
Expect(err).Should(HaveOccurred()) } -func TestGeneratedParsePrefix(t *testing.T) { +func TestAddress(t *testing.T) { RegisterTestingT(t) - var data = []struct { - input string - result ip_types.Prefix - }{ - {"192.168.0.1/24", ip_types.Prefix{ - Address: ip_types.Address{ - Af: ip_types.ADDRESS_IP4, - Un: ip_types.AddressUnionIP4(ip_types.IP4Address{192, 168, 0, 1}), - }, - Len: 24, - }}, - {"192.168.0.1", ip_types.Prefix{ - Address: ip_types.Address{ - Af: ip_types.ADDRESS_IP4, - Un: ip_types.AddressUnionIP4(ip_types.IP4Address{192, 168, 0, 1}), - }, - Len: 32, - }}, - {"aac1:0:ab45::/96", ip_types.Prefix{ - Address: ip_types.Address{ - Af: ip_types.ADDRESS_IP6, - Un: ip_types.AddressUnionIP6(ip_types.IP6Address{170, 193, 0, 0, 171, 69, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}), - }, - Len: 96, - }}, - {"aac1:0:ab45::", ip_types.Prefix{ - Address: ip_types.Address{ - Af: ip_types.ADDRESS_IP6, - Un: ip_types.AddressUnionIP6(ip_types.IP6Address{170, 193, 0, 0, 171, 69, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}), - }, - Len: 128, - }}, - } + addr := ip_types.AddressUnionIP4(ip_types.IP4Address{10, 20, 0, 1}) + t.Logf("addr: %v (%#v)", addr, addr) - for _, entry := range data { - t.Run(entry.input, func(t *testing.T) { - parsedAddress, err := ip_types.ParsePrefix(entry.input) - Expect(err).ShouldNot(HaveOccurred()) - Expect(parsedAddress).To(Equal(entry.result)) + ip4 := addr.GetIP4() + t.Logf("ip4: %v", ip4) + addr.SetIP4(ip_types.IP4Address{192, 168, 1, 1}) + t.Logf("ip4: %v", addr.GetIP4()) - // Parsed IP without prefix receives a default one - // so the input data must be adjusted - if entry.result.Address.Af == ip_types.ADDRESS_IP4 && !strings.Contains(entry.input, "/") { - entry.input = entry.input + "/32" - } - if entry.result.Address.Af == ip_types.ADDRESS_IP6 && !strings.Contains(entry.input, "/") { - entry.input = entry.input + "/128" - } - originAddress := parsedAddress.ToString() - Expect(originAddress).To(Equal(entry.input)) - }) - } -} - -func TestGeneratedParsePrefixError(t *testing.T) { - RegisterTestingT(t) - - _, err := ip_types.ParsePrefix("malformed_ip") - Expect(err).Should(HaveOccurred()) -} - -func TestGeneratedParseMAC(t *testing.T) { - RegisterTestingT(t) - - var data = []struct { - input string - result interfaces.MacAddress - }{ - {"b7:b9:bb:a1:5c:af", interfaces.MacAddress{183, 185, 187, 161, 92, 175}}, - {"47:4b:c7:3e:06:c8", interfaces.MacAddress{71, 75, 199, 62, 6, 200}}, - {"a7:cc:9f:10:18:e3", interfaces.MacAddress{167, 204, 159, 16, 24, 227}}, - } - - for _, entry := range data { - t.Run(entry.input, func(t *testing.T) { - parsedMac, err := interfaces.ParseMAC(entry.input) - Expect(err).ShouldNot(HaveOccurred()) - Expect(parsedMac).To(Equal(entry.result)) - - originAddress := parsedMac.ToString() - Expect(originAddress).To(Equal(entry.input)) - }) - } -} - -func TestGeneratedParseMACError(t *testing.T) { - RegisterTestingT(t) - - _, err := interfaces.ParseMAC("malformed_mac") - Expect(err).Should(HaveOccurred()) + Expect(addr.GetIP4()).To(Equal(ip_types.IP4Address{192, 168, 1, 1})) } /*func TestGetContext(t *testing.T) { @@ -280,7 +159,7 @@ func TestGetContextInterfaceJson(t *testing.T) { // prepare writer writer := bufio.NewWriter(outFile) Expect(writer.Buffered()).To(BeZero()) - err = generateFileBinapi(testCtx, writer) + err = GenerateFileBinapi(testCtx, writer) Expect(err).ShouldNot(HaveOccurred()) } @@ -306,7 +185,7 @@ func TestGenerateMessageType(t *testing.T) { writer := bufio.NewWriter(outFile) for _, msg := range testCtx.file.Messages { - generateMessage(testCtx, writer, 
&msg) + genMessage(testCtx, writer, &msg) Expect(writer.Buffered()).ToNot(BeZero()) } }*/ @@ -335,7 +214,7 @@ func TestGenerateMessageType(t *testing.T) { for i := 0; i < types.Len(); i++ { typ := types.At(i) Expect(writer.Buffered()).To(BeZero()) - err := generateMessage(testCtx, writer, typ, false) + err := genMessage(testCtx, writer, typ, false) Expect(err).ShouldNot(HaveOccurred()) Expect(writer.Buffered()).ToNot(BeZero()) @@ -446,7 +325,7 @@ func TestGeneratePackageHeader(t *testing.T) { // prepare writer writer := bufio.NewWriter(outFile) Expect(writer.Buffered()).To(BeZero()) - generatePackageHeader(testCtx, writer, inFile) + genPackageComment(testCtx, writer, inFile) Expect(writer.Buffered()).ToNot(BeZero()) } diff --git a/binapigen/generator.go b/binapigen/generator.go index 07c1b13..e42e7fb 100644 --- a/binapigen/generator.go +++ b/binapigen/generator.go @@ -15,130 +15,122 @@ package binapigen import ( + "bufio" "bytes" "fmt" + "go/ast" "go/format" + "go/parser" + "go/printer" + "go/token" "io/ioutil" "os" "path" "path/filepath" - "regexp" + "sort" + "strconv" + "strings" "github.com/sirupsen/logrus" "git.fd.io/govpp.git/binapigen/vppapi" ) -type Options struct { - VPPVersion string // version of VPP that produced API files - - FilesToGenerate []string // list of API files to generate - - ImportPrefix string // defines import path prefix for importing types - ImportTypes bool // generate packages for import types - IncludeAPIVersion bool // include constant with API version string - IncludeComments bool // include parts of original source in comments - IncludeBinapiNames bool // include binary API names as struct tag - IncludeServices bool // include service interface with client implementation - IncludeVppVersion bool // include info about used VPP version -} - type Generator struct { - Options - Files []*File - FilesByPath map[string]*File FilesByName map[string]*File - enumsByName map[string]*Enum - aliasesByName map[string]*Alias - structsByName map[string]*Struct - unionsByName map[string]*Union + opts Options + apifiles []*vppapi.File + vppVersion string + + filesToGen []string + genfiles []*GenFile - genfiles []*GenFile + enumsByName map[string]*Enum + aliasesByName map[string]*Alias + structsByName map[string]*Struct + unionsByName map[string]*Union + messagesByName map[string]*Message } -func New(opts Options, apifiles []*vppapi.File) (*Generator, error) { - g := &Generator{ - Options: opts, - FilesByPath: make(map[string]*File), - FilesByName: make(map[string]*File), - enumsByName: map[string]*Enum{}, - aliasesByName: map[string]*Alias{}, - structsByName: map[string]*Struct{}, - unionsByName: map[string]*Union{}, +func New(opts Options, apifiles []*vppapi.File, filesToGen []string) (*Generator, error) { + gen := &Generator{ + FilesByName: make(map[string]*File), + opts: opts, + apifiles: apifiles, + filesToGen: filesToGen, + enumsByName: map[string]*Enum{}, + aliasesByName: map[string]*Alias{}, + structsByName: map[string]*Struct{}, + unionsByName: map[string]*Union{}, + messagesByName: map[string]*Message{}, } - logrus.Debugf("adding %d VPP API files to generator", len(apifiles)) + // Normalize API files + SortFilesByImports(gen.apifiles) for _, apifile := range apifiles { - filename := apifile.Path - if filename == "" { - filename = apifile.Name - } - if _, ok := g.FilesByPath[filename]; ok { - return nil, fmt.Errorf("duplicate file name: %q", filename) - } - if _, ok := g.FilesByName[apifile.Name]; ok { + RemoveImportedTypes(gen.apifiles, apifile) + 
SortFileObjectsByName(apifile) + } + + // prepare package names and import paths + packageNames := make(map[string]GoPackageName) + importPaths := make(map[string]GoImportPath) + for _, apifile := range gen.apifiles { + filename := getFilename(apifile) + packageNames[filename] = cleanPackageName(apifile.Name) + importPaths[filename] = GoImportPath(path.Join(gen.opts.ImportPrefix, baseName(apifile.Name))) + } + + logrus.Debugf("adding %d VPP API files to generator", len(gen.apifiles)) + + for _, apifile := range gen.apifiles { + filename := getFilename(apifile) + + if _, ok := gen.FilesByName[apifile.Name]; ok { return nil, fmt.Errorf("duplicate file: %q", apifile.Name) } - file, err := newFile(g, apifile) + file, err := newFile(gen, apifile, packageNames[filename], importPaths[filename]) if err != nil { - return nil, err + return nil, fmt.Errorf("loading file %s failed: %w", apifile.Name, err) } - g.Files = append(g.Files, file) - g.FilesByPath[filename] = file - g.FilesByName[apifile.Name] = file + gen.Files = append(gen.Files, file) + gen.FilesByName[apifile.Name] = file logrus.Debugf("added file %q (path: %v)", apifile.Name, apifile.Path) - if len(file.Imports) > 0 { - logrus.Debugf(" - %d imports: %v", len(file.Imports), file.Imports) - } } - if len(opts.FilesToGenerate) > 0 { - logrus.Debugf("Checking %d files to generate: %v", len(opts.FilesToGenerate), opts.FilesToGenerate) - for _, genfile := range opts.FilesToGenerate { - file, ok := g.FilesByPath[genfile] + // mark files for generation + if len(gen.filesToGen) > 0 { + logrus.Debugf("Checking %d files to generate: %v", len(gen.filesToGen), gen.filesToGen) + for _, genfile := range gen.filesToGen { + file, ok := gen.FilesByName[genfile] if !ok { - file, ok = g.FilesByName[genfile] - if !ok { - return nil, fmt.Errorf("no API file found for: %v", genfile) - } + return nil, fmt.Errorf("no API file found for: %v", genfile) } file.Generate = true - if opts.ImportTypes { - // generate all imported files - for _, impFile := range file.importedFiles(g) { - impFile.Generate = true - } + // generate all imported files + for _, impFile := range file.importedFiles(gen) { + impFile.Generate = true } } } else { - logrus.Debugf("Files to generate not specified, marking all %d files to generate", len(g.Files)) - for _, file := range g.Files { + logrus.Debugf("Files to generate not specified, marking all %d files for generate", len(gen.Files)) + for _, file := range gen.Files { file.Generate = true } } - logrus.Debugf("Resolving imported types") - for _, file := range g.Files { - if !file.Generate { - // skip resolving for non-generated files - continue - } - var importedFiles []*File - for _, impFile := range file.importedFiles(g) { - if !impFile.Generate { - // exclude imports of non-generated files - continue - } - importedFiles = append(importedFiles, impFile) - } - file.loadTypeImports(g, importedFiles) - } + return gen, nil +} - return g, nil +func getFilename(file *vppapi.File) string { + if file.Path == "" { + return file.Name + } + return file.Path } func (g *Generator) Generate() error { @@ -147,127 +139,238 @@ func (g *Generator) Generate() error { } logrus.Infof("Generating %d files", len(g.genfiles)) + for _, genfile := range g.genfiles { - if err := writeSourceTo(genfile.filename, genfile.Content()); err != nil { - return fmt.Errorf("writing source for RPC package %s failed: %v", genfile.filename, err) + content, err := genfile.Content() + if err != nil { + return err + } + if err := writeSourceTo(genfile.filename, content); err != 
nil { + return fmt.Errorf("writing source package %s failed: %v", genfile.filename, err) } } return nil } type GenFile struct { - *Generator - filename string - file *File - outputDir string - buf bytes.Buffer + gen *Generator + file *File + filename string + goImportPath GoImportPath + buf bytes.Buffer + manualImports map[GoImportPath]bool + packageNames map[GoImportPath]GoPackageName } -func (g *Generator) NewGenFile(filename string) *GenFile { +func (g *Generator) NewGenFile(filename string, importPath GoImportPath) *GenFile { f := &GenFile{ - Generator: g, - filename: filename, + gen: g, + filename: filename, + goImportPath: importPath, + manualImports: make(map[GoImportPath]bool), + packageNames: make(map[GoImportPath]GoPackageName), } g.genfiles = append(g.genfiles, f) return f } -func (f *GenFile) Content() []byte { - return f.buf.Bytes() +func (g *GenFile) Write(p []byte) (n int, err error) { + return g.buf.Write(p) } -func writeSourceTo(outputFile string, b []byte) error { - // create output directory - packageDir := filepath.Dir(outputFile) - if err := os.MkdirAll(packageDir, 0775); err != nil { - return fmt.Errorf("creating output dir %s failed: %v", packageDir, err) - } +func (g *GenFile) Import(importPath GoImportPath) { + g.manualImports[importPath] = true +} - // format generated source code - gosrc, err := format.Source(b) - if err != nil { - _ = ioutil.WriteFile(outputFile, b, 0666) - return fmt.Errorf("formatting source code failed: %v", err) +func (g *GenFile) GoIdent(ident GoIdent) string { + if ident.GoImportPath == g.goImportPath { + return ident.GoName } - - // write generated code to output file - if err := ioutil.WriteFile(outputFile, gosrc, 0666); err != nil { - return fmt.Errorf("writing to output file %s failed: %v", outputFile, err) + if packageName, ok := g.packageNames[ident.GoImportPath]; ok { + return string(packageName) + "." + ident.GoName } + packageName := cleanPackageName(baseName(string(ident.GoImportPath))) + g.packageNames[ident.GoImportPath] = packageName + return string(packageName) + "." 
+ ident.GoName +} - lines := bytes.Count(gosrc, []byte("\n")) - logf("wrote %d lines (%d bytes) of code to: %q", lines, len(gosrc), outputFile) +func (g *GenFile) P(v ...interface{}) { + for _, x := range v { + switch x := x.(type) { + case GoIdent: + fmt.Fprint(&g.buf, g.GoIdent(x)) + default: + fmt.Fprint(&g.buf, x) + } + } + fmt.Fprintln(&g.buf) +} - return nil +func (g *GenFile) Content() ([]byte, error) { + if !strings.HasSuffix(g.filename, ".go") { + return g.buf.Bytes(), nil + } + return g.injectImports(g.buf.Bytes()) } -func listImports(genfile *GenFile) map[string]string { - var importPath = genfile.ImportPrefix - if importPath == "" { - importPath = resolveImportPath(genfile.outputDir) - logrus.Debugf("resolved import path: %s", importPath) +func (g *GenFile) injectImports(original []byte) ([]byte, error) { + // Parse source code + fset := token.NewFileSet() + file, err := parser.ParseFile(fset, "", original, parser.ParseComments) + if err != nil { + var src bytes.Buffer + s := bufio.NewScanner(bytes.NewReader(original)) + for line := 1; s.Scan(); line++ { + fmt.Fprintf(&src, "%5d\t%s\n", line, s.Bytes()) + } + return nil, fmt.Errorf("%v: unparsable Go source: %v\n%v", g.filename, err, src.String()) + } + type Import struct { + Name string + Path string + } + // Prepare list of all imports + var importPaths []Import + for importPath := range g.packageNames { + importPaths = append(importPaths, Import{ + Name: string(g.packageNames[GoImportPath(importPath)]), + Path: string(importPath), + }) } - imports := map[string]string{} - for _, imp := range genfile.file.imports { - if _, ok := imports[imp]; !ok { - imports[imp] = path.Join(importPath, imp) + for importPath := range g.manualImports { + if _, ok := g.packageNames[importPath]; ok { + continue } + importPaths = append(importPaths, Import{ + Name: "_", + Path: string(importPath), + }) } - return imports -} + // Sort imports by import path + sort.Slice(importPaths, func(i, j int) bool { + return importPaths[i].Path < importPaths[j].Path + }) + // Inject new import block into parsed AST + if len(importPaths) > 0 { + // Find import block position + pos := file.Package + tokFile := fset.File(file.Package) + pkgLine := tokFile.Line(file.Package) + for _, c := range file.Comments { + if tokFile.Line(c.Pos()) > pkgLine { + break + } + pos = c.End() + } + // Prepare the import block + impDecl := &ast.GenDecl{Tok: token.IMPORT, TokPos: pos, Lparen: pos, Rparen: pos} + for _, importPath := range importPaths { + var name *ast.Ident + if importPath.Name == "_" || strings.Contains(importPath.Path, ".") { + name = &ast.Ident{Name: importPath.Name, NamePos: pos} + } + impDecl.Specs = append(impDecl.Specs, &ast.ImportSpec{ + Name: name, + Path: &ast.BasicLit{Kind: token.STRING, Value: strconv.Quote(importPath.Path), ValuePos: pos}, + EndPos: pos, + }) + } -func resolveImportPath(outputDir string) string { - absPath, err := filepath.Abs(outputDir) - if err != nil { - panic(err) + file.Decls = append([]ast.Decl{impDecl}, file.Decls...) 
} - modRoot := findModuleRoot(absPath) - if modRoot == "" { - logrus.Fatalf("module root not found at: %s", absPath) + // Reformat source code + var out bytes.Buffer + cfg := &printer.Config{ + Mode: printer.TabIndent | printer.UseSpaces, + Tabwidth: 8, } - modPath := findModulePath(path.Join(modRoot, "go.mod")) - if modPath == "" { - logrus.Fatalf("module path not found") + if err = cfg.Fprint(&out, fset, file); err != nil { + return nil, fmt.Errorf("%v: can not reformat Go source: %v", g.filename, err) } - relDir, err := filepath.Rel(modRoot, absPath) - if err != nil { - panic(err) + return out.Bytes(), nil +} + +// GoIdent is a Go identifier, consisting of a name and import path. +// The name is a single identifier and may not be a dot-qualified selector. +type GoIdent struct { + GoName string + GoImportPath GoImportPath +} + +func (id GoIdent) String() string { + return fmt.Sprintf("%q.%v", id.GoImportPath, id.GoName) +} + +func newGoIdent(f *File, fullName string) GoIdent { + name := strings.TrimPrefix(fullName, string(f.PackageName)+".") + return GoIdent{ + GoName: camelCaseName(name), + GoImportPath: f.GoImportPath, } - return filepath.Join(modPath, relDir) } -func findModuleRoot(dir string) (root string) { - if dir == "" { - panic("dir not set") +// GoImportPath is a Go import path for a package. +type GoImportPath string + +func (p GoImportPath) String() string { + return strconv.Quote(string(p)) +} + +func (p GoImportPath) Ident(s string) GoIdent { + return GoIdent{GoName: s, GoImportPath: p} +} + +type GoPackageName string + +func cleanPackageName(name string) GoPackageName { + return GoPackageName(sanitizedName(name)) +} + +func sanitizedName(name string) string { + switch name { + case "interface": + return "interfaces" + case "map": + return "maps" + default: + return name } - dir = filepath.Clean(dir) +} - // Look for enclosing go.mod. - for { - if fi, err := os.Stat(filepath.Join(dir, "go.mod")); err == nil && !fi.IsDir() { - return dir - } - d := filepath.Dir(dir) - if d == dir { - break - } - dir = d +// baseName returns the last path element of the name, with the last dotted suffix removed. 
+func baseName(name string) string { + // First, find the last element + if i := strings.LastIndex(name, "/"); i >= 0 { + name = name[i+1:] + } + // Now drop the suffix + if i := strings.LastIndex(name, "."); i >= 0 { + name = name[:i] } - return "" + return name } -var ( - modulePathRE = regexp.MustCompile(`module[ \t]+([^ \t\r\n]+)`) -) +func writeSourceTo(outputFile string, b []byte) error { + // create output directory + packageDir := filepath.Dir(outputFile) + if err := os.MkdirAll(packageDir, 0775); err != nil { + return fmt.Errorf("creating output dir %s failed: %v", packageDir, err) + } -func findModulePath(file string) string { - data, err := ioutil.ReadFile(file) + // format generated source code + gosrc, err := format.Source(b) if err != nil { - return "" + _ = ioutil.WriteFile(outputFile, b, 0666) + return fmt.Errorf("formatting source code failed: %v", err) } - m := modulePathRE.FindSubmatch(data) - if m == nil { - return "" + + // write generated code to output file + if err := ioutil.WriteFile(outputFile, gosrc, 0666); err != nil { + return fmt.Errorf("writing to output file %s failed: %v", outputFile, err) } - return string(m[1]) + + lines := bytes.Count(gosrc, []byte("\n")) + logf("wrote %d lines (%d bytes) to: %q", lines, len(gosrc), outputFile) + + return nil } diff --git a/binapigen/generator_test.go b/binapigen/generator_test.go index 9e5b342..aa4ee04 100644 --- a/binapigen/generator_test.go +++ b/binapigen/generator_test.go @@ -18,10 +18,10 @@ import ( "testing" ) -func TestModule(t *testing.T) { - const expected = "git.fd.io/govpp.git/examples/binapi" +func TestGoModule(t *testing.T) { + const expected = "git.fd.io/govpp.git/binapi" - impPath := resolveImportPath("../examples/binapi") + impPath := resolveImportPath("../binapi") if impPath != expected { t.Fatalf("expected: %q, got: %q", expected, impPath) } @@ -42,78 +42,10 @@ func TestBinapiTypeSizes(t *testing.T) { } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - size := getBinapiTypeSize(test.input) + size := getSizeOfBinapiTypeLength(test.input, 1) if size != test.expsize { t.Errorf("expected %d, got %d", test.expsize, size) } }) } } - -/*func TestSizeOfType(t *testing.T) { - tests := []struct { - name string - input StructType - expsize int - }{ - { - name: "basic1", - input: StructType{ - Fields: []Field{ - {Type: "u8"}, - }, - }, - expsize: 1, - }, - { - name: "basic2", - input: Type{ - Fields: []Field{ - {Type: "u8", Length: 4}, - }, - }, - expsize: 4, - }, - { - name: "basic3", - input: Type{ - Fields: []Field{ - {Type: "u8", Length: 16}, - }, - }, - expsize: 16, - }, - { - name: "withEnum", - input: Type{ - Fields: []Field{ - {Type: "u16"}, - {Type: "vl_api_myenum_t"}, - }, - }, - expsize: 6, - }, - { - name: "invalid1", - input: Type{ - Fields: []Field{ - {Type: "x", Length: 16}, - }, - }, - expsize: 0, - }, - } - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - module := &File{ - Enums: []Enum{ - {Name: "myenum", Type: "u32"}, - }, - } - size := getSizeOfType(module, &test.input) - if size != test.expsize { - t.Errorf("expected %d, got %d", test.expsize, size) - } - }) - } -}*/ diff --git a/binapigen/plugin.go b/binapigen/plugin.go new file mode 100644 index 0000000..b57cc68 --- /dev/null +++ b/binapigen/plugin.go @@ -0,0 +1,51 @@ +// Copyright (c) 2020 Cisco and/or its affiliates. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package binapigen + +import "fmt" + +type Plugin struct { + Name string + GenerateFile func(*Generator, *File) *GenFile +} + +var Plugins = map[string]*Plugin{} +var plugins []*Plugin + +func RegisterPlugin(name string, genfn func(*Generator, *File) *GenFile) { + if name == "" { + panic("plugin name empty") + } + for _, p := range plugins { + if p.Name == name { + panic("duplicate plugin name: " + name) + } + } + plugin := &Plugin{ + Name: name, + GenerateFile: genfn, + } + plugins = append(plugins, plugin) + Plugins[name] = plugin +} + +func RunPlugin(name string, gen *Generator, file *File) error { + p, ok := Plugins[name] + if !ok { + return fmt.Errorf("plugin not found: %q", name) + } + p.GenerateFile(gen, file) + return nil +} diff --git a/binapigen/run.go b/binapigen/run.go index e6086ee..88e32b7 100644 --- a/binapigen/run.go +++ b/binapigen/run.go @@ -16,69 +16,145 @@ package binapigen import ( "fmt" + "io/ioutil" "os" + "path" "path/filepath" + "regexp" + "strings" + + "github.com/sirupsen/logrus" "git.fd.io/govpp.git/binapigen/vppapi" ) -const ( - outputFileExt = ".ba.go" // file extension of the Go generated files - rpcFileSuffix = "_rpc" // file name suffix for the RPC services -) +type Options struct { + OutputDir string // output directory for generated files + ImportPrefix string // prefix for import paths + NoVersionInfo bool // disables generating version info +} -func Run(apiDir string, opts Options, f func(*Generator) error) { - if err := run(apiDir, opts, f); err != nil { +func Run(apiDir string, filesToGenerate []string, opts Options, f func(*Generator) error) { + if err := run(apiDir, filesToGenerate, opts, f); err != nil { fmt.Fprintf(os.Stderr, "%s: %v\n", filepath.Base(os.Args[0]), err) os.Exit(1) } } -func run(apiDir string, opts Options, f func(*Generator) error) error { - // parse API files +func run(apiDir string, filesToGenerate []string, opts Options, fn func(*Generator) error) error { apifiles, err := vppapi.ParseDir(apiDir) if err != nil { return err } - g, err := New(opts, apifiles) + if opts.ImportPrefix == "" { + opts.ImportPrefix = resolveImportPath(opts.OutputDir) + logrus.Debugf("resolved import prefix: %s", opts.ImportPrefix) + } + + gen, err := New(opts, apifiles, filesToGenerate) if err != nil { return err } - if err := f(g); err != nil { - return err + gen.vppVersion = vppapi.ResolveVPPVersion(apiDir) + if gen.vppVersion == "" { + gen.vppVersion = "unknown" } - if err = g.Generate(); err != nil { + if fn == nil { + GenerateDefault(gen) + } else { + if err := fn(gen); err != nil { + return err + } + } + + if err = gen.Generate(); err != nil { return err } return nil } -func GenerateBinapi(gen *Generator, file *File, outputDir string) *GenFile { - packageDir := filepath.Join(outputDir, file.PackageName) - filename := filepath.Join(packageDir, file.PackageName+outputFileExt) +func GenerateDefault(gen *Generator) { + for _, file := range gen.Files { + if !file.Generate { + continue + } + GenerateAPI(gen, file) + GenerateRPC(gen, file) + } +} - g := gen.NewGenFile(filename) - g.file = file - g.outputDir = outputDir +var Logger = 
logrus.New() - generateFileBinapi(g, &g.buf) +func init() { + if debug := os.Getenv("DEBUG_GOVPP"); strings.Contains(debug, "binapigen") { + Logger.SetLevel(logrus.DebugLevel) + logrus.SetLevel(logrus.DebugLevel) + } else if debug != "" { + Logger.SetLevel(logrus.InfoLevel) + } else { + Logger.SetLevel(logrus.WarnLevel) + } +} - return g +func logf(f string, v ...interface{}) { + Logger.Debugf(f, v...) } -func GenerateRPC(gen *Generator, file *File, outputDir string) *GenFile { - packageDir := filepath.Join(outputDir, file.PackageName) - filename := filepath.Join(packageDir, file.PackageName+rpcFileSuffix+outputFileExt) +func resolveImportPath(dir string) string { + absPath, err := filepath.Abs(dir) + if err != nil { + panic(err) + } + modRoot := findGoModuleRoot(absPath) + if modRoot == "" { + logrus.Fatalf("module root not found at: %s", absPath) + } + modPath := findModulePath(path.Join(modRoot, "go.mod")) + if modPath == "" { + logrus.Fatalf("module path not found") + } + relDir, err := filepath.Rel(modRoot, absPath) + if err != nil { + panic(err) + } + return filepath.Join(modPath, relDir) +} - g := gen.NewGenFile(filename) - g.file = file - g.outputDir = outputDir +func findGoModuleRoot(dir string) (root string) { + if dir == "" { + panic("dir not set") + } + dir = filepath.Clean(dir) + // Look for enclosing go.mod. + for { + if fi, err := os.Stat(filepath.Join(dir, "go.mod")); err == nil && !fi.IsDir() { + return dir + } + d := filepath.Dir(dir) + if d == dir { + break + } + dir = d + } + return "" +} - generateFileRPC(g, &g.buf) +var ( + modulePathRE = regexp.MustCompile(`module[ \t]+([^ \t\r\n]+)`) +) - return g +func findModulePath(file string) string { + data, err := ioutil.ReadFile(file) + if err != nil { + return "" + } + m := modulePathRE.FindSubmatch(data) + if m == nil { + return "" + } + return string(m[1]) } diff --git a/binapigen/types.go b/binapigen/types.go index 96ae870..0a21622 100644 --- a/binapigen/types.go +++ b/binapigen/types.go @@ -27,53 +27,33 @@ const ( defineApiSuffix = "_t" ) -// BaseType represents base types in VPP binary API. 
-type BaseType int +// toApiType returns name that is used as type reference in VPP binary API +func toApiType(name string) string { + return defineApiPrefix + name + defineApiSuffix +} -const ( - U8 BaseType = iota + 1 - I8 - U16 - I16 - U32 - I32 - U64 - I64 - F64 - BOOL - STRING -) +func fromApiType(typ string) string { + name := typ + name = strings.TrimPrefix(name, defineApiPrefix) + name = strings.TrimSuffix(name, defineApiSuffix) + return name +} -var ( - BaseTypes = map[BaseType]string{ - U8: "u8", - I8: "i8", - U16: "u16", - I16: "i16", - U32: "u32", - I32: "i32", - U64: "u64", - I64: "i64", - F64: "f64", - BOOL: "bool", - STRING: "string", - } - BaseTypeNames = map[string]BaseType{ - "u8": U8, - "i8": I8, - "u16": U16, - "i16": I16, - "u32": U32, - "i32": I32, - "u64": U64, - "i64": I64, - "f64": F64, - "bool": BOOL, - "string": STRING, - } +const ( + U8 = "u8" + I8 = "i8" + U16 = "u16" + I16 = "i16" + U32 = "u32" + I32 = "i32" + U64 = "u64" + I64 = "i64" + F64 = "f64" + BOOL = "bool" + STRING = "string" ) -var BaseTypeSizes = map[BaseType]int{ +var BaseTypeSizes = map[string]int{ U8: 1, I8: 1, U16: 2, @@ -87,106 +67,7 @@ var BaseTypeSizes = map[BaseType]int{ STRING: 1, } -type Kind int - -const ( - _ = iota - Uint8Kind - Int8Kind - Uint16Kind - Int16Kind - Uint32Kind - Int32Kind - Uint64Kind - Int64Kind - Float64Kind - BoolKind - StringKind - EnumKind - AliasKind - StructKind - UnionKind - MessageKind -) - -// toApiType returns name that is used as type reference in VPP binary API -func toApiType(name string) string { - return defineApiPrefix + name + defineApiSuffix -} - -func fromApiType(typ string) string { - name := typ - name = strings.TrimPrefix(name, defineApiPrefix) - name = strings.TrimSuffix(name, defineApiSuffix) - return name -} - -func getSizeOfType(module *File, typ *Struct) (size int) { - for _, field := range typ.Fields { - enum := getEnumByRef(module, field.Type) - if enum != nil { - size += getSizeOfBinapiTypeLength(enum.Type, field.Length) - continue - } - size += getSizeOfBinapiTypeLength(field.Type, field.Length) - } - return size -} - -func getEnumByRef(file *File, ref string) *Enum { - for _, typ := range file.Enums { - if ref == toApiType(typ.Name) { - return typ - } - } - return nil -} - -func getTypeByRef(file *File, ref string) *Struct { - for _, typ := range file.Structs { - if ref == toApiType(typ.Name) { - return typ - } - } - return nil -} - -func getAliasByRef(file *File, ref string) *Alias { - for _, alias := range file.Aliases { - if ref == toApiType(alias.Name) { - return alias - } - } - return nil -} - -func getUnionByRef(file *File, ref string) *Union { - for _, union := range file.Unions { - if ref == toApiType(union.Name) { - return union - } - } - return nil -} - -func getBinapiTypeSize(binapiType string) (size int) { - typName := BaseTypeNames[binapiType] - return BaseTypeSizes[typName] -} - -// binapiTypes is a set of types used VPP binary API for translation to Go types -var binapiTypes = map[string]string{ - "u8": "uint8", - "i8": "int8", - "u16": "uint16", - "i16": "int16", - "u32": "uint32", - "i32": "int32", - "u64": "uint64", - "i64": "int64", - "f64": "float64", -} -var BaseTypesGo = map[BaseType]string{ +var BaseTypesGo = map[string]string{ U8: "uint8", I8: "int8", U16: "uint16", @@ -200,82 +81,90 @@ var BaseTypesGo = map[BaseType]string{ STRING: "string", } -func getActualType(file *File, typ string) (actual string) { - for _, enum := range file.Enums { - if enum.GoName == typ { - return enum.Type - } +func fieldActualType(field 
*Field) (actual string) { + switch { + case field.TypeAlias != nil: + actual = field.TypeAlias.Type + case field.TypeEnum != nil: + actual = field.TypeEnum.Type } - for _, alias := range file.Aliases { - if alias.GoName == typ { - return alias.Type - } + return field.Type +} + +func fieldGoType(g *GenFile, field *Field) string { + switch { + case field.TypeAlias != nil: + return g.GoIdent(field.TypeAlias.GoIdent) + case field.TypeEnum != nil: + return g.GoIdent(field.TypeEnum.GoIdent) + case field.TypeStruct != nil: + return g.GoIdent(field.TypeStruct.GoIdent) + case field.TypeUnion != nil: + return g.GoIdent(field.TypeUnion.GoIdent) + } + t, ok := BaseTypesGo[field.Type] + if !ok { + logrus.Panicf("type %s is not base type", field.Type) } - return typ + return t } -// convertToGoType translates the VPP binary API type into Go type. -// Imported types are with import prefix. -func convertToGoType(file *File, binapiType string) (typ string) { - if t, ok := binapiTypes[binapiType]; ok { - // basic types - typ = t - } else if r, ok := file.refmap[binapiType]; ok { - // specific types (enums/types/unions) - var prefix string - typ = camelCaseName(r) - // look in imports using name and type name eventually - if imp, ok := file.imports[typ]; ok { - prefix = fmt.Sprintf("%s.", imp) - } else if imp, ok := file.imports[fromApiType(binapiType)]; ok { - prefix = fmt.Sprintf("%s.", imp) +func getFieldType(g *GenFile, field *Field) string { + gotype := fieldGoType(g, field) + if field.Array { + switch gotype { + case "uint8": + return "[]byte" + case "string": + return "string" } - typ = fmt.Sprintf("%s%s", prefix, typ) - } else { - switch binapiType { - case "bool", "string": - typ = binapiType - default: - // fallback type - logrus.Warnf("found unknown VPP binary API type %q, using byte", binapiType) - typ = "byte" + if _, ok := BaseTypesGo[field.Type]; !ok && field.Length > 0 { + return fmt.Sprintf("[%d]%s", field.Length, gotype) } + return "[]" + gotype } - return typ + return gotype } func getSizeOfBinapiTypeLength(typ string, length int) (size int) { - if n := getBinapiTypeSize(typ); n > 0 { + if n := BaseTypeSizes[typ]; n > 0 { if length > 0 { return n * length } else { return n } } - return } -func getUnionSize(file *File, union *Union) (maxSize int) { +func getSizeOfType(typ *Struct) (size int) { + for _, field := range typ.Fields { + if enum := field.TypeEnum; enum != nil { + size += getSizeOfBinapiTypeLength(enum.Type, field.Length) + continue + } + size += getSizeOfBinapiTypeLength(field.Type, field.Length) + } + return size +} + +func getUnionSize(union *Union) (maxSize int) { for _, field := range union.Fields { - typ := getTypeByRef(file, field.Type) - if typ != nil { - if size := getSizeOfType(file, typ); size > maxSize { + if typ := field.TypeStruct; typ != nil { + if size := getSizeOfType(typ); size > maxSize { maxSize = size } continue } - alias := getAliasByRef(file, field.Type) - if alias != nil { + if alias := field.TypeAlias; alias != nil { if size := getSizeOfBinapiTypeLength(alias.Type, alias.Length); size > maxSize { maxSize = size } continue } else { - logf("no type or alias found for union %s field type %q", union.Name, field.Type) - continue + logrus.Panicf("no type or alias found for union %s field type %q", union.Name, field.Type) } } - logf("getUnionSize: %s %+v max=%v", union.Name, union.Fields, maxSize) + //logf("getUnionSize: %s %+v max=%v", union.Name, union.Fields, maxSize) return } diff --git a/binapigen/validate.go b/binapigen/validate.go deleted file mode 100644 
index a79e148..0000000 --- a/binapigen/validate.go +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright (c) 2020 Cisco and/or its affiliates. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at: -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package binapigen - -import ( - "strings" - - "github.com/sirupsen/logrus" - - "git.fd.io/govpp.git/binapigen/vppapi" -) - -const ( - serviceEventPrefix = "want_" - serviceDumpSuffix = "_dump" - serviceDetailsSuffix = "_details" - serviceReplySuffix = "_reply" -) - -func validateService(svc vppapi.Service) { - for _, rpc := range svc.RPCs { - validateRPC(rpc) - } -} - -func validateRPC(rpc vppapi.RPC) { - if len(rpc.Events) > 0 { - // EVENT service - if !strings.HasPrefix(rpc.RequestMsg, serviceEventPrefix) { - logrus.Warnf("unusual EVENTS service: %+v\n"+ - "- events service %q does not have %q prefix in request.", - rpc, rpc.Name, serviceEventPrefix) - } - } else if rpc.Stream { - // STREAM service - if !strings.HasSuffix(rpc.RequestMsg, serviceDumpSuffix) { - logrus.Warnf("unusual STREAM service: %+v\n"+ - "- stream service %q does not have %q suffix in request.", - rpc, rpc.Name, serviceDumpSuffix) - } - if !strings.HasSuffix(rpc.ReplyMsg, serviceDetailsSuffix) && !strings.HasSuffix(rpc.StreamMsg, serviceDetailsSuffix) { - logrus.Warnf("unusual STREAM service: %+v\n"+ - "- stream service %q does not have %q suffix in reply or stream msg.", - rpc, rpc.Name, serviceDetailsSuffix) - } - } else if rpc.ReplyMsg != "" { - // REQUEST service - // some messages might have `null` reply (for example: memclnt) - if !strings.HasSuffix(rpc.ReplyMsg, serviceReplySuffix) { - logrus.Warnf("unusual REQUEST service: %+v\n"+ - "- service %q does not have %q suffix in reply.", - rpc, rpc.Name, serviceReplySuffix) - } - } -} diff --git a/binapigen/vppapi.go b/binapigen/vppapi.go new file mode 100644 index 0000000..7388ad5 --- /dev/null +++ b/binapigen/vppapi.go @@ -0,0 +1,211 @@ +// Copyright (c) 2020 Cisco and/or its affiliates. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package binapigen + +import ( + "log" + "sort" + + "git.fd.io/govpp.git/binapigen/vppapi" +) + +func SortFileObjectsByName(file *vppapi.File) { + sort.SliceStable(file.Imports, func(i, j int) bool { + return file.Imports[i] < file.Imports[j] + }) + sort.SliceStable(file.EnumTypes, func(i, j int) bool { + return file.EnumTypes[i].Name < file.EnumTypes[j].Name + }) + sort.Slice(file.AliasTypes, func(i, j int) bool { + return file.AliasTypes[i].Name < file.AliasTypes[j].Name + }) + sort.SliceStable(file.StructTypes, func(i, j int) bool { + return file.StructTypes[i].Name < file.StructTypes[j].Name + }) + sort.SliceStable(file.UnionTypes, func(i, j int) bool { + return file.UnionTypes[i].Name < file.UnionTypes[j].Name + }) + sort.SliceStable(file.Messages, func(i, j int) bool { + return file.Messages[i].Name < file.Messages[j].Name + }) + if file.Service != nil { + sort.Slice(file.Service.RPCs, func(i, j int) bool { + return file.Service.RPCs[i].Request < file.Service.RPCs[j].Request + }) + } +} + +func importedFiles(files []*vppapi.File, file *vppapi.File) []*vppapi.File { + var list []*vppapi.File + byName := func(s string) *vppapi.File { + for _, f := range files { + if f.Name == s { + return f + } + } + return nil + } + imported := map[string]struct{}{} + for _, imp := range file.Imports { + imp = normalizeImport(imp) + impFile := byName(imp) + if impFile == nil { + log.Fatalf("file %q not found", imp) + } + for _, nest := range importedFiles(files, impFile) { + if _, ok := imported[nest.Name]; !ok { + list = append(list, nest) + imported[nest.Name] = struct{}{} + } + } + if _, ok := imported[impFile.Name]; !ok { + list = append(list, impFile) + imported[impFile.Name] = struct{}{} + } + } + return list +} + +func SortFilesByImports(apifiles []*vppapi.File) { + dependsOn := func(file *vppapi.File, dep string) bool { + for _, imp := range importedFiles(apifiles, file) { + if imp.Name == dep { + return true + } + } + return false + } + sort.Slice(apifiles, func(i, j int) bool { + a := apifiles[i] + b := apifiles[j] + if dependsOn(a, b.Name) { + return false + } + if dependsOn(b, a.Name) { + return true + } + return len(b.Imports) > len(a.Imports) + }) +} + +func ListImportedTypes(apifiles []*vppapi.File, file *vppapi.File) []string { + var importedTypes []string + typeFiles := importedFiles(apifiles, file) + for _, t := range file.StructTypes { + var imported bool + for _, imp := range typeFiles { + for _, at := range imp.StructTypes { + if at.Name != t.Name { + continue + } + importedTypes = append(importedTypes, t.Name) + imported = true + break + } + if imported { + break + } + } + } + for _, t := range file.AliasTypes { + var imported bool + for _, imp := range typeFiles { + for _, at := range imp.AliasTypes { + if at.Name != t.Name { + continue + } + importedTypes = append(importedTypes, t.Name) + imported = true + break + } + if imported { + break + } + } + } + for _, t := range file.EnumTypes { + var imported bool + for _, imp := range typeFiles { + for _, at := range imp.EnumTypes { + if at.Name != t.Name { + continue + } + importedTypes = append(importedTypes, t.Name) + imported = true + break + } + if imported { + break + } + } + } + for _, t := range file.UnionTypes { + var imported bool + for _, imp := range typeFiles { + for _, at := range imp.UnionTypes { + if at.Name != t.Name { + continue + } + importedTypes = append(importedTypes, t.Name) + imported = true + break + } + if imported { + break + } + } + } + return importedTypes +} + +func RemoveImportedTypes(apifiles 
[]*vppapi.File, apifile *vppapi.File) { + importedTypes := ListImportedTypes(apifiles, apifile) + isImportedType := func(s string) bool { + for _, t := range importedTypes { + if t == s { + return true + } + } + return false + } + var enums []vppapi.EnumType + for _, enumType := range apifile.EnumTypes { + if !isImportedType(enumType.Name) { + enums = append(enums, enumType) + } + } + var aliases []vppapi.AliasType + for _, aliasType := range apifile.AliasTypes { + if !isImportedType(aliasType.Name) { + aliases = append(aliases, aliasType) + } + } + var structs []vppapi.StructType + for _, structType := range apifile.StructTypes { + if !isImportedType(structType.Name) { + structs = append(structs, structType) + } + } + var unions []vppapi.UnionType + for _, unionType := range apifile.UnionTypes { + if !isImportedType(unionType.Name) { + unions = append(unions, unionType) + } + } + apifile.EnumTypes = enums + apifile.AliasTypes = aliases + apifile.StructTypes = structs + apifile.UnionTypes = unions +} diff --git a/binapigen/vppapi/api.go b/binapigen/vppapi/api.go deleted file mode 100644 index 06d9046..0000000 --- a/binapigen/vppapi/api.go +++ /dev/null @@ -1,94 +0,0 @@ -// Copyright (c) 2020 Cisco and/or its affiliates. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at: -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package vppapi - -type File struct { - Name string - Path string - - CRC string - Options map[string]string `json:",omitempty"` - - Imports []string `json:",omitempty"` - - AliasTypes []AliasType `json:",omitempty"` - EnumTypes []EnumType `json:",omitempty"` - StructTypes []StructType `json:",omitempty"` - UnionTypes []UnionType `json:",omitempty"` - Messages []Message `json:",omitempty"` - Service *Service `json:",omitempty"` -} - -func (x File) Version() string { - if x.Options != nil { - return x.Options[fileOptionVersion] - } - return "" -} - -type AliasType struct { - Name string - Type string - Length int `json:",omitempty"` -} - -type EnumType struct { - Name string - Type string - Entries []EnumEntry -} - -type EnumEntry struct { - Name string - Value uint32 -} - -type StructType struct { - Name string - Fields []Field -} - -type UnionType struct { - Name string - Fields []Field -} - -type Message struct { - Name string - Fields []Field - CRC string -} - -type Field struct { - Name string - Type string - Length int `json:",omitempty"` - Array bool `json:",omitempty"` - SizeFrom string `json:",omitempty"` - Meta map[string]interface{} `json:",omitempty"` -} - -type Service struct { - RPCs []RPC `json:",omitempty"` -} - -type RPC struct { - Name string - RequestMsg string - ReplyMsg string - Stream bool `json:",omitempty"` - StreamMsg string `json:",omitempty"` - Events []string `json:",omitempty"` -} diff --git a/binapigen/vppapi/api_schema.go b/binapigen/vppapi/api_schema.go new file mode 100644 index 0000000..7eceab3 --- /dev/null +++ b/binapigen/vppapi/api_schema.go @@ -0,0 +1,89 @@ +// Copyright (c) 2020 Cisco and/or its affiliates. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package vppapi parses VPP API files without any additional processing. +package vppapi + +type ( + File struct { + Name string + Path string + CRC string + + Options map[string]string `json:",omitempty"` + Imports []string `json:",omitempty"` + + AliasTypes []AliasType `json:",omitempty"` + EnumTypes []EnumType `json:",omitempty"` + StructTypes []StructType `json:",omitempty"` + UnionTypes []UnionType `json:",omitempty"` + + Messages []Message `json:",omitempty"` + Service *Service `json:",omitempty"` + } + + AliasType struct { + Name string + Type string + Length int `json:",omitempty"` + } + + EnumType struct { + Name string + Type string + Entries []EnumEntry + } + + EnumEntry struct { + Name string + Value uint32 + } + + StructType struct { + Name string + Fields []Field + } + + UnionType struct { + Name string + Fields []Field + } + + Message struct { + Name string + Fields []Field + CRC string + } + + Field struct { + Name string + Type string + Length int `json:",omitempty"` + Array bool `json:",omitempty"` + SizeFrom string `json:",omitempty"` + Meta map[string]interface{} `json:",omitempty"` + } + + Service struct { + RPCs []RPC `json:",omitempty"` + } + + RPC struct { + Request string + Reply string + Stream bool `json:",omitempty"` + StreamMsg string `json:",omitempty"` + Events []string `json:",omitempty"` + } +) diff --git a/binapigen/vppapi/parse_json.go b/binapigen/vppapi/parse_json.go index 45b5796..d14865c 100644 --- a/binapigen/vppapi/parse_json.go +++ b/binapigen/vppapi/parse_json.go @@ -18,79 +18,52 @@ import ( "encoding/json" "errors" "fmt" + "log" "os" "strings" "github.com/bennyscetbun/jsongo" - "github.com/sirupsen/logrus" ) -var Logger *logrus.Logger - -func init() { - if strings.Contains(os.Getenv("DEBUG_GOVPP"), "parser") { - Logger = logrus.StandardLogger() - } -} +var debug = strings.Contains(os.Getenv("DEBUG_GOVPP"), "parser") func logf(f string, v ...interface{}) { - if Logger != nil { - Logger.Debugf(f, v...) + if debug { + log.Printf(f, v...) 
} } const ( - // file - objAPIVersion = "vl_api_version" - objTypes = "types" - objMessages = "messages" - objUnions = "unions" - objEnums = "enums" - objServices = "services" - objAliases = "aliases" - objOptions = "options" - objImports = "imports" - - // message - messageFieldCrc = "crc" - - // alias - aliasFieldLength = "length" - aliasFieldType = "type" - - // service - serviceFieldReply = "reply" - serviceFieldStream = "stream" - serviceFieldStreamMsg = "stream_msg" - serviceFieldEvents = "events" -) - -const ( - // file - fileOptionVersion = "version" - - // field - fieldOptionLimit = "limit" - fieldOptionDefault = "default" - + // root keys + fileAPIVersion = "vl_api_version" + fileOptions = "options" + fileTypes = "types" + fileMessages = "messages" + fileUnions = "unions" + fileEnums = "enums" + fileAliases = "aliases" + fileServices = "services" + fileImports = "imports" + // type keys + messageCrc = "crc" + enumType = "enumtype" + aliasLength = "length" + aliasType = "type" // service - serviceReplyNull = "null" + serviceReply = "reply" + serviceStream = "stream" + serviceStreamMsg = "stream_msg" + serviceEvents = "events" ) func parseJSON(data []byte) (module *File, err error) { - defer func() { - if e := recover(); e != nil { - err = fmt.Errorf("recovered panic: %v", e) - } - }() - - // parse JSON data into objects + // parse root jsonRoot := new(jsongo.Node) if err := json.Unmarshal(data, jsonRoot); err != nil { return nil, fmt.Errorf("unmarshalling JSON failed: %v", err) } - logf("file contents:") + logf("file contains:") for _, key := range jsonRoot.GetKeys() { if jsonRoot.At(key).Len() > 0 { logf(" - %2d %s", jsonRoot.At(key).Len(), key) @@ -100,38 +73,35 @@ func parseJSON(data []byte) (module *File, err error) { module = new(File) // parse CRC - if crc := jsonRoot.At(objAPIVersion); crc.GetType() == jsongo.TypeValue { - module.CRC = crc.Get().(string) + crc := jsonRoot.At(fileAPIVersion) + if crc.GetType() == jsongo.TypeValue { + module.CRC = crc.MustGetString() } // parse options - opt := jsonRoot.Map(objOptions) + opt := jsonRoot.Map(fileOptions) if opt.GetType() == jsongo.TypeMap { - module.Options = make(map[string]string, 0) + module.Options = make(map[string]string) for _, key := range opt.GetKeys() { - optionsNode := opt.At(key) optionKey := key.(string) - optionValue := optionsNode.Get().(string) - module.Options[optionKey] = optionValue + optionVal := opt.At(key).MustGetString() + module.Options[optionKey] = optionVal } } // parse imports - imports := jsonRoot.Map(objImports) - module.Imports = make([]string, 0) - imported := make(map[string]struct{}) - for i := 0; i < imports.Len(); i++ { - importNode := imports.At(i) - imp, err := parseImport(importNode) - if err != nil { - return nil, err - } - if _, ok := imported[*imp]; ok { - logf("duplicate import found: %v", *imp) + importsNode := jsonRoot.Map(fileImports) + module.Imports = make([]string, 0, importsNode.Len()) + uniq := make(map[string]struct{}) + for i := 0; i < importsNode.Len(); i++ { + importNode := importsNode.At(i) + imp := importNode.MustGetString() + if _, ok := uniq[imp]; ok { + logf("duplicate import found: %v", imp) continue } - imported[*imp] = struct{}{} - module.Imports = append(module.Imports, *imp) + uniq[imp] = struct{}{} + module.Imports = append(module.Imports, imp) } // avoid duplicate objects @@ -146,11 +116,10 @@ func parseJSON(data []byte) (module *File, err error) { } // parse enum types - enumsNode := jsonRoot.Map(objEnums) + enumsNode := jsonRoot.Map(fileEnums) module.EnumTypes 
= make([]EnumType, 0) for i := 0; i < enumsNode.Len(); i++ { - enumNode := enumsNode.At(i) - enum, err := parseEnum(enumNode) + enum, err := parseEnum(enumsNode.At(i)) if err != nil { return nil, err } @@ -161,13 +130,12 @@ func parseJSON(data []byte) (module *File, err error) { } // parse alias types - aliasesNode := jsonRoot.Map(objAliases) + aliasesNode := jsonRoot.Map(fileAliases) if aliasesNode.GetType() == jsongo.TypeMap { module.AliasTypes = make([]AliasType, 0) for _, key := range aliasesNode.GetKeys() { - aliasNode := aliasesNode.At(key) aliasName := key.(string) - alias, err := parseAlias(aliasName, aliasNode) + alias, err := parseAlias(aliasName, aliasesNode.At(key)) if err != nil { return nil, err } @@ -179,11 +147,10 @@ func parseJSON(data []byte) (module *File, err error) { } // parse struct types - typesNode := jsonRoot.Map(objTypes) + typesNode := jsonRoot.Map(fileTypes) module.StructTypes = make([]StructType, 0) for i := 0; i < typesNode.Len(); i++ { - typNode := typesNode.At(i) - structyp, err := parseStruct(typNode) + structyp, err := parseStruct(typesNode.At(i)) if err != nil { return nil, err } @@ -194,11 +161,10 @@ func parseJSON(data []byte) (module *File, err error) { } // parse union types - unionsNode := jsonRoot.Map(objUnions) + unionsNode := jsonRoot.Map(fileUnions) module.UnionTypes = make([]UnionType, 0) for i := 0; i < unionsNode.Len(); i++ { - unionNode := unionsNode.At(i) - union, err := parseUnion(unionNode) + union, err := parseUnion(unionsNode.At(i)) if err != nil { return nil, err } @@ -209,12 +175,11 @@ func parseJSON(data []byte) (module *File, err error) { } // parse messages - messagesNode := jsonRoot.Map(objMessages) + messagesNode := jsonRoot.Map(fileMessages) if messagesNode.GetType() == jsongo.TypeArray { module.Messages = make([]Message, messagesNode.Len()) for i := 0; i < messagesNode.Len(); i++ { - msgNode := messagesNode.At(i) - msg, err := parseMessage(msgNode) + msg, err := parseMessage(messagesNode.At(i)) if err != nil { return nil, err } @@ -223,15 +188,14 @@ func parseJSON(data []byte) (module *File, err error) { } // parse services - servicesNode := jsonRoot.Map(objServices) + servicesNode := jsonRoot.Map(fileServices) if servicesNode.GetType() == jsongo.TypeMap { module.Service = &Service{ RPCs: make([]RPC, servicesNode.Len()), } for i, key := range servicesNode.GetKeys() { - rpcNode := servicesNode.At(key) rpcName := key.(string) - svc, err := parseServiceRPC(rpcName, rpcNode) + svc, err := parseServiceRPC(rpcName, servicesNode.At(key)) if err != nil { return nil, err } @@ -242,20 +206,6 @@ func parseJSON(data []byte) (module *File, err error) { return module, nil } -// parseImport parses VPP binary API import from JSON node -func parseImport(importNode *jsongo.Node) (*string, error) { - if importNode.GetType() != jsongo.TypeValue { - return nil, errors.New("invalid JSON for import specified") - } - - importName, ok := importNode.Get().(string) - if !ok { - return nil, fmt.Errorf("import name is %T, not a string", importNode.Get()) - } - - return &importName, nil -} - // parseEnum parses VPP binary API enum object from JSON node func parseEnum(enumNode *jsongo.Node) (*EnumType, error) { if enumNode.Len() == 0 || enumNode.At(0).GetType() != jsongo.TypeValue { @@ -266,7 +216,7 @@ func parseEnum(enumNode *jsongo.Node) (*EnumType, error) { if !ok { return nil, fmt.Errorf("enum name is %T, not a string", enumNode.At(0).Get()) } - enumType, ok := enumNode.At(enumNode.Len() - 1).At("enumtype").Get().(string) + enumType, ok := 
@@ -266,7 +216,7 @@ func parseEnum(enumNode *jsongo.Node) (*EnumType, error) {
 	if !ok {
 		return nil, fmt.Errorf("enum name is %T, not a string", enumNode.At(0).Get())
 	}
-	enumType, ok := enumNode.At(enumNode.Len() - 1).At("enumtype").Get().(string)
+	enumType, ok := enumNode.At(enumNode.Len() - 1).At(enumType).Get().(string)
 	if !ok {
 		return nil, fmt.Errorf("enum type invalid or missing")
 	}
@@ -367,7 +317,7 @@ func parseStruct(typeNode *jsongo.Node) (*StructType, error) {
 
 // parseAlias parses VPP binary API alias object from JSON node
 func parseAlias(aliasName string, aliasNode *jsongo.Node) (*AliasType, error) {
-	if aliasNode.Len() == 0 || aliasNode.At(aliasFieldType).GetType() != jsongo.TypeValue {
+	if aliasNode.Len() == 0 || aliasNode.At(aliasType).GetType() != jsongo.TypeValue {
 		return nil, errors.New("invalid JSON for alias specified")
 	}
 
@@ -375,7 +325,7 @@ func parseAlias(aliasName string, aliasNode *jsongo.Node) (*AliasType, error) {
 		Name: aliasName,
 	}
 
-	if typeNode := aliasNode.At(aliasFieldType); typeNode.GetType() == jsongo.TypeValue {
+	if typeNode := aliasNode.At(aliasType); typeNode.GetType() == jsongo.TypeValue {
 		typ, ok := typeNode.Get().(string)
 		if !ok {
 			return nil, fmt.Errorf("alias type is %T, not a string", typeNode.Get())
@@ -385,7 +335,7 @@ func parseAlias(aliasName string, aliasNode *jsongo.Node) (*AliasType, error) {
 		}
 	}
 
-	if lengthNode := aliasNode.At(aliasFieldLength); lengthNode.GetType() == jsongo.TypeValue {
+	if lengthNode := aliasNode.At(aliasLength); lengthNode.GetType() == jsongo.TypeValue {
 		length, ok := lengthNode.Get().(float64)
 		if !ok {
 			return nil, fmt.Errorf("alias length is %T, not a float64", lengthNode.Get())
@@ -398,7 +348,7 @@ func parseAlias(aliasName string, aliasNode *jsongo.Node) (*AliasType, error) {
 
 // parseMessage parses VPP binary API message object from JSON node
 func parseMessage(msgNode *jsongo.Node) (*Message, error) {
-	if msgNode.Len() == 0 || msgNode.At(0).GetType() != jsongo.TypeValue {
+	if msgNode.Len() < 2 || msgNode.At(0).GetType() != jsongo.TypeValue {
 		return nil, errors.New("invalid JSON for message specified")
 	}
 
@@ -406,9 +356,8 @@ func parseMessage(msgNode *jsongo.Node) (*Message, error) {
 	if !ok {
 		return nil, fmt.Errorf("message name is %T, not a string", msgNode.At(0).Get())
 	}
-	msgCRC, ok := msgNode.At(msgNode.Len() - 1).At(messageFieldCrc).Get().(string)
+	msgCRC, ok := msgNode.At(msgNode.Len() - 1).At(messageCrc).Get().(string)
 	if !ok {
-
 		return nil, fmt.Errorf("message crc invalid or missing")
 	}
 
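The stricter msgNode.Len() < 2 check reflects the shape of a message entry in .api.json: a JSON array whose first element is the message name, whose middle elements are fields, and whose last element is an object carrying the CRC, so a valid entry needs at least two elements. A hypothetical example (field layout and CRC value are illustrative):

    // Illustrative message entry: element 0 is the name, the middle elements
    // are fields, and the last element carries the CRC that parseMessage reads.
    const exampleMessageJSON = `[
        "show_version",
        ["u16", "_vl_msg_id"],
        ["u32", "client_index"],
        ["u32", "context"],
        {"crc": "0x51077d14"}
    ]`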
@@ -466,26 +415,16 @@ func parseField(field *jsongo.Node) (*Field, error) {
 	case jsongo.TypeMap:
 		fieldMeta := field.At(2)
-
+		if fieldMeta.Len() == 0 {
+			break
+		}
+		f.Meta = map[string]interface{}{}
 		for _, key := range fieldMeta.GetKeys() {
-			metaNode := fieldMeta.At(key)
 			metaName := key.(string)
-			metaValue := metaNode.Get()
-
-			switch metaName {
-			case fieldOptionLimit:
-				metaValue = int(metaNode.Get().(float64))
-			case fieldOptionDefault:
-				metaValue = metaNode.Get()
-			default:
-				logrus.Warnf("unknown meta info (%s=%v) for field (%s)", metaName, metaValue, fieldName)
-			}
-
-			if f.Meta == nil {
-				f.Meta = map[string]interface{}{}
-			}
+			metaValue := fieldMeta.At(key).Get()
 			f.Meta[metaName] = metaValue
 		}
+
 	default:
 		return nil, errors.New("invalid JSON for field specified")
 	}
 
@@ -503,27 +442,24 @@ func parseField(field *jsongo.Node) (*Field, error) {
 
 // parseServiceRPC parses VPP binary API service object from JSON node
 func parseServiceRPC(rpcName string, rpcNode *jsongo.Node) (*RPC, error) {
-	if rpcNode.Len() == 0 || rpcNode.At(serviceFieldReply).GetType() != jsongo.TypeValue {
+	if rpcNode.Len() == 0 || rpcNode.At(serviceReply).GetType() != jsongo.TypeValue {
 		return nil, errors.New("invalid JSON for service RPC specified")
 	}
 
 	rpc := RPC{
-		Name:       rpcName,
-		RequestMsg: rpcName,
+		Request: rpcName,
 	}
 
-	if replyNode := rpcNode.At(serviceFieldReply); replyNode.GetType() == jsongo.TypeValue {
+	if replyNode := rpcNode.At(serviceReply); replyNode.GetType() == jsongo.TypeValue {
 		reply, ok := replyNode.Get().(string)
 		if !ok {
 			return nil, fmt.Errorf("service RPC reply is %T, not a string", replyNode.Get())
 		}
-		if reply != serviceReplyNull {
-			rpc.ReplyMsg = reply
-		}
+		rpc.Reply = reply
 	}
 
 	// is stream (dump)
-	if streamNode := rpcNode.At(serviceFieldStream); streamNode.GetType() == jsongo.TypeValue {
+	if streamNode := rpcNode.At(serviceStream); streamNode.GetType() == jsongo.TypeValue {
 		var ok bool
 		rpc.Stream, ok = streamNode.Get().(bool)
 		if !ok {
@@ -532,7 +468,7 @@ func parseServiceRPC(rpcName string, rpcNode *jsongo.Node) (*RPC, error) {
 	}
 
 	// stream message
-	if streamMsgNode := rpcNode.At(serviceFieldStreamMsg); streamMsgNode.GetType() == jsongo.TypeValue {
+	if streamMsgNode := rpcNode.At(serviceStreamMsg); streamMsgNode.GetType() == jsongo.TypeValue {
 		var ok bool
 		rpc.StreamMsg, ok = streamMsgNode.Get().(string)
 		if !ok {
@@ -541,7 +477,7 @@ func parseServiceRPC(rpcName string, rpcNode *jsongo.Node) (*RPC, error) {
 	}
 
 	// events service (event subscription)
-	if eventsNode := rpcNode.At(serviceFieldEvents); eventsNode.GetType() == jsongo.TypeArray {
+	if eventsNode := rpcNode.At(serviceEvents); eventsNode.GetType() == jsongo.TypeArray {
 		for j := 0; j < eventsNode.Len(); j++ {
 			event := eventsNode.At(j).Get().(string)
 			rpc.Events = append(rpc.Events, event)
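parseServiceRPC maps one entry of the "services" object onto the RPC struct: the key becomes Request, "reply" becomes Reply, "stream" and "stream_msg" mark dump-style RPCs, and "events" lists notification messages. A hypothetical fragment sketching these three shapes (message names and layout are illustrative, not taken from a specific VPP release):

    // Illustrative "services" entries: plain request/reply, a dump (stream),
    // and an event-subscription RPC. The optional "stream_msg" key parsed
    // above names the per-item details message where a file provides it.
    const exampleServicesJSON = `{
        "show_version": {"reply": "show_version_reply"},
        "sw_interface_dump": {"reply": "sw_interface_details", "stream": true},
        "want_interface_events": {
            "reply": "want_interface_events_reply",
            "events": ["sw_interface_event"]
        }
    }`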
diff --git a/binapigen/vppapi/util.go b/binapigen/vppapi/util.go
new file mode 100644
index 0000000..87f2e55
--- /dev/null
+++ b/binapigen/vppapi/util.go
@@ -0,0 +1,112 @@
+// Copyright (c) 2020 Cisco and/or its affiliates.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at:
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package vppapi
+
+import (
+	"fmt"
+	"io/ioutil"
+	"os"
+	"os/exec"
+	"path"
+	"strings"
+
+	"github.com/sirupsen/logrus"
+)
+
+const (
+	VPPVersionEnvVar = "VPP_VERSION"
+)
+
+// ResolveVPPVersion resolves version of the VPP for target directory.
+//
+// Version resolved here can be overriden by setting VPP_VERSION env var.
+func ResolveVPPVersion(apidir string) string {
+	// check env variable override
+	if ver := os.Getenv(VPPVersionEnvVar); ver != "" {
+		logrus.Debugf("VPP version was manually set to %q via %s env var", ver, VPPVersionEnvVar)
+		return ver
+	}
+
+	// assuming VPP package is installed
+	if path.Clean(apidir) == DefaultDir {
+		version, err := GetVPPVersionInstalled()
+		if err != nil {
+			logrus.Warnf("resolving VPP version from installed package failed: %v", err)
+		} else {
+			logrus.Debugf("resolved VPP version from installed package: %v", version)
+			return version
+		}
+	}
+
+	// check if inside VPP repo
+	repoDir, err := findGitRepoRootDir(apidir)
+	if err != nil {
+		logrus.Warnf("checking VPP git repo failed: %v", err)
+	} else {
+		logrus.Debugf("resolved git repo root directory: %v", repoDir)
+		version, err := GetVPPVersionRepo(repoDir)
+		if err != nil {
+			logrus.Warnf("resolving VPP version from version script failed: %v", err)
+		} else {
+			logrus.Debugf("resolved VPP version from version script: %v", version)
+			return version
+		}
+	}
+
+	// try to read VPP_VERSION file
+	data, err := ioutil.ReadFile(path.Join(repoDir, "VPP_VERSION"))
+	if err == nil {
+		return strings.TrimSpace(string(data))
+	}
+
+	logrus.Warnf("VPP version could not be resolved, you can set it manually using %s env var", VPPVersionEnvVar)
+	return ""
+}
+
+// GetVPPVersionInstalled retrieves VPP version of installed package using dpkg-query.
+func GetVPPVersionInstalled() (string, error) {
+	cmd := exec.Command("dpkg-query", "-f", "${Version}", "-W", "vpp")
+	out, err := cmd.CombinedOutput()
+	if err != nil {
+		return "", fmt.Errorf("dpkg-query command failed: %v\noutput: %s", err, out)
+	}
+	return strings.TrimSpace(string(out)), nil
+}
+
+const versionScriptPath = "./src/scripts/version"
+
+// GetVPPVersionRepo retrieves VPP version using script in repo directory.
+func GetVPPVersionRepo(repoDir string) (string, error) {
+	if _, err := os.Stat(versionScriptPath); err != nil {
+		return "", err
+	}
+	cmd := exec.Command(versionScriptPath)
+	cmd.Dir = repoDir
+	out, err := cmd.CombinedOutput()
+	if err != nil {
+		return "", fmt.Errorf("version script failed: %v\noutput: %s", err, out)
+	}
+	return strings.TrimSpace(string(out)), nil
+}
+
+func findGitRepoRootDir(dir string) (string, error) {
+	cmd := exec.Command("git", "rev-parse", "--show-toplevel")
+	cmd.Dir = dir
+	out, err := cmd.CombinedOutput()
+	if err != nil {
+		return "", fmt.Errorf("git command failed: %v\noutput: %s", err, out)
+	}
+	return strings.TrimSpace(string(out)), nil
+}
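A short usage sketch for the new version resolution helper, assuming a VPP package or repo is available on the machine; the fallback string is illustrative:

    package main

    import (
        "fmt"

        "git.fd.io/govpp.git/binapigen/vppapi"
    )

    func main() {
        // Resolution order as implemented above: VPP_VERSION env var, installed
        // VPP package (for the default API dir), VPP repo version script, and
        // finally a VPP_VERSION file in the repo root.
        version := vppapi.ResolveVPPVersion(vppapi.DefaultDir)
        if version == "" {
            version = "unknown"
        }
        fmt.Println("VPP version:", version)
    }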
diff --git a/binapigen/vppapi/parser.go b/binapigen/vppapi/vppapi.go
index 312dd0e..665fa81 100644
--- a/binapigen/vppapi/parser.go
+++ b/binapigen/vppapi/vppapi.go
@@ -19,18 +19,15 @@ import (
 	"io/ioutil"
 	"path/filepath"
 	"strings"
-
-	"github.com/sirupsen/logrus"
 )
 
 const (
-	DefaultAPIDir = "/usr/share/vpp/api"
+	// DefaultDir is default location of API files.
+	DefaultDir = "/usr/share/vpp/api"
 )
 
-const apifileSuffixJson = ".api.json"
-
-// FindFiles returns all input files located in specified directory
-func FindFiles(dir string, deep int) (paths []string, err error) {
+// FindFiles finds API files located in dir or in a nested directory that is not nested deeper than deep.
+func FindFiles(dir string, deep int) (files []string, err error) {
 	entries, err := ioutil.ReadDir(dir)
 	if err != nil {
 		return nil, fmt.Errorf("reading directory %s failed: %v", dir, err)
@@ -41,43 +38,44 @@ func FindFiles(dir string, deep int) (paths []string, err error) {
 			if nested, err := FindFiles(nestedDir, deep-1); err != nil {
 				return nil, err
 			} else {
-				paths = append(paths, nested...)
+				files = append(files, nested...)
 			}
-		} else if strings.HasSuffix(e.Name(), apifileSuffixJson) {
-			paths = append(paths, filepath.Join(dir, e.Name()))
+		} else if !e.IsDir() && strings.HasSuffix(e.Name(), ".api.json") {
+			files = append(files, filepath.Join(dir, e.Name()))
 		}
 	}
-	return paths, nil
+	return files, nil
 }
 
+// Parse parses API files in directory DefaultDir.
 func Parse() ([]*File, error) {
-	return ParseDir(DefaultAPIDir)
+	return ParseDir(DefaultDir)
 }
 
+// ParseDir finds and parses API files in given directory and returns parsed files.
+// Supports API files in JSON format (.api.json) only.
 func ParseDir(apidir string) ([]*File, error) {
-	files, err := FindFiles(apidir, 1)
+	list, err := FindFiles(apidir, 1)
 	if err != nil {
 		return nil, err
 	}
-
-	logrus.Infof("found %d files in API dir %q", len(files), apidir)
-
-	var modules []*File
+	logf("found %d files in API dir %q", len(list), apidir)
 
-	for _, file := range files {
+	var files []*File
+	for _, file := range list {
 		module, err := ParseFile(file)
 		if err != nil {
 			return nil, err
 		}
-		modules = append(modules, module)
+		files = append(files, module)
 	}
-
-	return modules, nil
+	return files, nil
 }
 
-// ParseFile parses API file contents and returns File.
+// ParseFile parses API file and returns File.
 func ParseFile(apifile string) (*File, error) {
-	if !strings.HasSuffix(apifile, apifileSuffixJson) {
+	if !strings.HasSuffix(apifile, ".api.json") {
 		return nil, fmt.Errorf("unsupported file format: %q", apifile)
 	}
 
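With deep set to 1, ParseDir picks up .api.json files in the given directory and its immediate subdirectories (e.g. core/ and plugins/ under the default API dir). A minimal usage sketch; the printed fields are illustrative:

    package main

    import (
        "fmt"
        "log"

        "git.fd.io/govpp.git/binapigen/vppapi"
    )

    func main() {
        // Finds and parses all .api.json files one level below the API dir.
        files, err := vppapi.ParseDir(vppapi.DefaultDir)
        if err != nil {
            log.Fatal(err)
        }
        for _, f := range files {
            fmt.Printf("%s: %d messages, %d types\n",
                f.Name, len(f.Messages), len(f.StructTypes))
        }
    }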
@@ -101,7 +99,14 @@ func ParseFile(apifile string) (*File, error) {
 	return module, nil
 }
 
+// ParseRaw parses raw API file data and returns File.
 func ParseRaw(data []byte) (file *File, err error) {
+	defer func() {
+		if e := recover(); e != nil {
+			err = fmt.Errorf("panic occurred: %v", e)
+		}
+	}()
+
 	file, err = parseJSON(data)
 	if err != nil {
 		return nil, err
diff --git a/binapigen/vppapi/parser_test.go b/binapigen/vppapi/vppapi_test.go
index 2dc82e4..027cc1f 100644
--- a/binapigen/vppapi/parser_test.go
+++ b/binapigen/vppapi/vppapi_test.go
@@ -46,7 +46,7 @@ func TestReadJson(t *testing.T) {
 	inputData, err := ioutil.ReadFile("testdata/af_packet.api.json")
 	Expect(err).ShouldNot(HaveOccurred())
 
-	result, err := parseJSON(inputData)
+	result, err := ParseRaw(inputData)
 	Expect(err).ShouldNot(HaveOccurred())
 	Expect(result).ToNot(BeNil())
 	Expect(result.EnumTypes).To(HaveLen(0))
@@ -60,7 +60,7 @@ func TestReadJsonError(t *testing.T) {
 	inputData, err := ioutil.ReadFile("testdata/input-read-json-error.json")
 	Expect(err).ShouldNot(HaveOccurred())
 
-	result, err := parseJSON(inputData)
+	result, err := ParseRaw(inputData)
 	Expect(err).Should(HaveOccurred())
 	Expect(result).To(BeNil())
 }
@@ -80,17 +80,21 @@ func TestParseFile(t *testing.T) {
 	if module.Name != "vpe" {
 		t.Errorf("expected Name=%s, got %v", "vpe", module.Name)
 	}
+	if module.Path != "testdata/vpe.api.json" {
+		t.Errorf("expected Path=%s, got %v", "testdata/vpe.api.json", module.Path)
+	}
 	if module.CRC != "0xbd2c94f4" {
 		t.Errorf("expected CRC=%s, got %v", "0xbd2c94f4", module.CRC)
 	}
-	if module.Version() != "1.6.1" {
-		t.Errorf("expected Version=%s, got %v", "1.6.1", module.Version())
+
+	if version := module.Options["version"]; version != "1.6.1" {
+		t.Errorf("expected option[version]=%s, got %v", "1.6.1", version)
 	}
 	if len(module.Imports) == 0 {
 		t.Errorf("expected imports, got none")
 	}
-	if len(module.Options) == 0 {
-		t.Errorf("expected options, got none")
+	if len(module.EnumTypes) == 0 {
+		t.Errorf("expected enums, got none")
 	}
 	if len(module.AliasTypes) == 0 {
 		t.Errorf("expected aliases, got none")
@@ -98,12 +102,12 @@ func TestParseFile(t *testing.T) {
 	if len(module.StructTypes) == 0 {
 		t.Errorf("expected types, got none")
 	}
-	if len(module.Service.RPCs) == 0 {
-		t.Errorf("expected service method, got none")
-	}
 	if len(module.Messages) == 0 {
 		t.Errorf("expected messages, got none")
 	}
+	if len(module.Service.RPCs) == 0 {
+		t.Errorf("expected service RPCs, got none")
+	}
 }
 
 func TestParseFileUnsupported(t *testing.T) {
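With the panic recovery now living in the exported ParseRaw rather than in parseJSON, callers get a regular error for malformed input instead of a crash. A sketch of a test in the style of the reworked TestParseFile, using an external test package for illustration and hypothetical expectations:

    package vppapi_test

    import (
        "testing"

        "git.fd.io/govpp.git/binapigen/vppapi"
    )

    func TestParseFileSketch(t *testing.T) {
        // Exercises the fields the reworked test above checks: Name, Path and Options.
        module, err := vppapi.ParseFile("testdata/vpe.api.json")
        if err != nil {
            t.Fatal(err)
        }
        if module.Name != "vpe" || module.Path != "testdata/vpe.api.json" {
            t.Errorf("unexpected Name/Path: %q %q", module.Name, module.Path)
        }
        if version := module.Options["version"]; version == "" {
            t.Error("expected a version option")
        }
    }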