author    mo khan <mo@mokhan.ca>  2025-05-20 14:28:06 -0600
committer mo khan <mo@mokhan.ca>  2025-05-23 14:49:19 -0600
commit    4beee46dc6c7642316e118a4d3aa51e4b407256e (patch)
tree      039bdf57b99061844aeb0fe55ad0bc1c864166af /vendor/github.com/bufbuild/protocompile/parser
parent    0ba49bfbde242920d8675a193d7af89420456fc0 (diff)
feat: add external authorization service (authzd) with JWT authentication
- Add new authzd gRPC service implementing Envoy's external authorization API
- Integrate JWT authentication filter in Envoy configuration with claim extraction
- Update middleware to support both cookie-based and header-based user authentication
- Add comprehensive test coverage for authorization service and server
- Configure proper service orchestration with authzd, sparkled, and Envoy
- Update build system and Docker configuration for multi-service deployment
- Add grpcurl tool for gRPC service debugging and testing

This enables fine-grained authorization control through Envoy's ext_authz filter while maintaining backward compatibility with existing cookie-based authentication.
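For context on the ext_authz flow described in this commit message, the sketch below shows the general shape of an Envoy external-authorization Check handler in Go. It is illustrative only, not the authzd implementation added by this commit: the go-control-plane types are real, but the x-user header name and the allow/deny rule are assumptions, and server registration/wiring is omitted.

    package authzexample

    import (
        "context"

        authv3 "github.com/envoyproxy/go-control-plane/envoy/service/auth/v3"
        "google.golang.org/genproto/googleapis/rpc/status"
        "google.golang.org/grpc/codes"
    )

    // checker is a toy ext_authz handler: it allows requests that carry an
    // x-user header (e.g. populated after Envoy's JWT filter extracts claims)
    // and denies everything else. Header name and policy are assumptions.
    type checker struct{}

    func (c *checker) Check(ctx context.Context, req *authv3.CheckRequest) (*authv3.CheckResponse, error) {
        headers := req.GetAttributes().GetRequest().GetHttp().GetHeaders()
        code := codes.OK
        if headers["x-user"] == "" {
            code = codes.PermissionDenied
        }
        return &authv3.CheckResponse{
            Status: &status.Status{Code: int32(code)},
        }, nil
    }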
Diffstat (limited to 'vendor/github.com/bufbuild/protocompile/parser')
-rw-r--r--  vendor/github.com/bufbuild/protocompile/parser/.gitignore      1
-rw-r--r--  vendor/github.com/bufbuild/protocompile/parser/ast.go        144
-rw-r--r--  vendor/github.com/bufbuild/protocompile/parser/clone.go      183
-rw-r--r--  vendor/github.com/bufbuild/protocompile/parser/doc.go         25
-rw-r--r--  vendor/github.com/bufbuild/protocompile/parser/errors.go      22
-rw-r--r--  vendor/github.com/bufbuild/protocompile/parser/lexer.go      771
-rw-r--r--  vendor/github.com/bufbuild/protocompile/parser/parser.go     201
-rw-r--r--  vendor/github.com/bufbuild/protocompile/parser/proto.y      1498
-rw-r--r--  vendor/github.com/bufbuild/protocompile/parser/proto.y.go   2659
-rw-r--r--  vendor/github.com/bufbuild/protocompile/parser/result.go    1012
-rw-r--r--  vendor/github.com/bufbuild/protocompile/parser/validate.go   568
11 files changed, 7084 insertions, 0 deletions
diff --git a/vendor/github.com/bufbuild/protocompile/parser/.gitignore b/vendor/github.com/bufbuild/protocompile/parser/.gitignore
new file mode 100644
index 0000000..2652053
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/parser/.gitignore
@@ -0,0 +1 @@
+y.output
diff --git a/vendor/github.com/bufbuild/protocompile/parser/ast.go b/vendor/github.com/bufbuild/protocompile/parser/ast.go
new file mode 100644
index 0000000..f58f7ae
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/parser/ast.go
@@ -0,0 +1,144 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package parser
+
+import (
+ "github.com/bufbuild/protocompile/ast"
+)
+
+// the types below are accumulator types, just used in intermediate productions
+// to accumulate slices that will get stored in AST nodes
+
+type compactOptionSlices struct {
+ options []*ast.OptionNode
+ commas []*ast.RuneNode
+}
+
+func toStringValueNode(strs []*ast.StringLiteralNode) ast.StringValueNode {
+ if len(strs) == 1 {
+ return strs[0]
+ }
+ return ast.NewCompoundLiteralStringNode(strs...)
+}
+
+type nameSlices struct {
+ // only names or idents will be set, never both
+ names []ast.StringValueNode
+ idents []*ast.IdentNode
+ commas []*ast.RuneNode
+}
+
+type rangeSlices struct {
+ ranges []*ast.RangeNode
+ commas []*ast.RuneNode
+}
+
+type valueSlices struct {
+ vals []ast.ValueNode
+ commas []*ast.RuneNode
+}
+
+type fieldRefSlices struct {
+ refs []*ast.FieldReferenceNode
+ dots []*ast.RuneNode
+}
+
+type identSlices struct {
+ idents []*ast.IdentNode
+ dots []*ast.RuneNode
+}
+
+func (s *identSlices) toIdentValueNode(leadingDot *ast.RuneNode) ast.IdentValueNode {
+ if len(s.idents) == 1 && leadingDot == nil {
+ // single simple name
+ return s.idents[0]
+ }
+ return ast.NewCompoundIdentNode(leadingDot, s.idents, s.dots)
+}
+
+type messageFieldList struct {
+ field *ast.MessageFieldNode
+ delimiter *ast.RuneNode
+ next *messageFieldList
+}
+
+func (list *messageFieldList) toNodes() ([]*ast.MessageFieldNode, []*ast.RuneNode) {
+ if list == nil {
+ return nil, nil
+ }
+ l := 0
+ for cur := list; cur != nil; cur = cur.next {
+ l++
+ }
+ fields := make([]*ast.MessageFieldNode, l)
+ delimiters := make([]*ast.RuneNode, l)
+ for cur, i := list, 0; cur != nil; cur, i = cur.next, i+1 {
+ fields[i] = cur.field
+ if cur.delimiter != nil {
+ delimiters[i] = cur.delimiter
+ }
+ }
+ return fields, delimiters
+}
+
+func prependRunes[T ast.Node](convert func(*ast.RuneNode) T, runes []*ast.RuneNode, elements []T) []T {
+ elems := make([]T, 0, len(runes)+len(elements))
+ for _, rune := range runes {
+ elems = append(elems, convert(rune))
+ }
+ elems = append(elems, elements...)
+ return elems
+}
+
+func toServiceElement(semi *ast.RuneNode) ast.ServiceElement {
+ return ast.NewEmptyDeclNode(semi)
+}
+
+func toMethodElement(semi *ast.RuneNode) ast.RPCElement {
+ return ast.NewEmptyDeclNode(semi)
+}
+
+func toFileElement(semi *ast.RuneNode) ast.FileElement {
+ return ast.NewEmptyDeclNode(semi)
+}
+
+func toEnumElement(semi *ast.RuneNode) ast.EnumElement {
+ return ast.NewEmptyDeclNode(semi)
+}
+
+func toMessageElement(semi *ast.RuneNode) ast.MessageElement {
+ return ast.NewEmptyDeclNode(semi)
+}
+
+type nodeWithRunes[T ast.Node] struct {
+ Node T
+ Runes []*ast.RuneNode
+}
+
+func newNodeWithRunes[T ast.Node](node T, trailingRunes ...*ast.RuneNode) nodeWithRunes[T] {
+ return nodeWithRunes[T]{
+ Node: node,
+ Runes: trailingRunes,
+ }
+}
+
+func toElements[T ast.Node](convert func(*ast.RuneNode) T, node T, runes []*ast.RuneNode) []T {
+ elements := make([]T, 1+len(runes))
+ elements[0] = node
+ for i, rune := range runes {
+ elements[i+1] = convert(rune)
+ }
+ return elements
+}
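The helpers above let grammar actions fold a declaration plus any stray trailing semicolons into one slice of elements, turning each extra ';' into an EmptyDeclNode. A conceptual sketch of that flow (these helpers are unexported, and msgNode, semi1, and semi2 stand in for values produced by a grammar action):

    // One message declaration followed by two stray semicolons becomes three
    // MessageElements: the message itself plus two *ast.EmptyDeclNode placeholders.
    wrapped := newNodeWithRunes(msgNode, semi1, semi2) // nodeWithRunes[*ast.MessageNode]
    elems := toElements[ast.MessageElement](toMessageElement, wrapped.Node, wrapped.Runes)
    // len(elems) == 3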
diff --git a/vendor/github.com/bufbuild/protocompile/parser/clone.go b/vendor/github.com/bufbuild/protocompile/parser/clone.go
new file mode 100644
index 0000000..0432248
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/parser/clone.go
@@ -0,0 +1,183 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package parser
+
+import (
+ "google.golang.org/protobuf/proto"
+ "google.golang.org/protobuf/types/descriptorpb"
+
+ "github.com/bufbuild/protocompile/ast"
+ "github.com/bufbuild/protocompile/reporter"
+)
+
+// Clone returns a copy of the given result. Since descriptor protos may be
+// mutated during linking, this can return a defensive copy so that mutations
+// don't impact concurrent operations in an unsafe way. This is called if the
+// parse result could be re-used across concurrent operations and has unresolved
+// references and options which will require mutation by the linker.
+//
+// If the given value has a method with the following signature, it will be
+// called to perform the operation:
+//
+// Clone() Result
+//
+// If the given value does not provide a Clone method and is not the implementation
+// provided by this package, it is possible for an error to occur in creating the
+// copy, which may result in a panic. This can happen if the AST of the given result
+// is not actually valid and a file descriptor proto cannot be successfully derived
+// from it.
+func Clone(r Result) Result {
+ if cl, ok := r.(interface{ Clone() Result }); ok {
+ return cl.Clone()
+ }
+ if res, ok := r.(*result); ok {
+ newProto := proto.Clone(res.proto).(*descriptorpb.FileDescriptorProto) //nolint:errcheck
+ newNodes := make(map[proto.Message]ast.Node, len(res.nodes))
+ newResult := &result{
+ file: res.file,
+ proto: newProto,
+ nodes: newNodes,
+ }
+ recreateNodeIndexForFile(res, newResult, res.proto, newProto)
+ return newResult
+ }
+
+ // Can't do the deep-copy we know how to do. So we have to take a
+ // different tactic.
+ if r.AST() == nil {
+ // no AST? all we have to do is copy the proto
+ fileProto := proto.Clone(r.FileDescriptorProto()).(*descriptorpb.FileDescriptorProto) //nolint:errcheck
+ return ResultWithoutAST(fileProto)
+ }
+ // Otherwise, we have an AST, but no way to clone the result's
+ // internals. So just re-create them from scratch.
+ res, err := ResultFromAST(r.AST(), false, reporter.NewHandler(nil))
+ if err != nil {
+ panic(err)
+ }
+ return res
+}
+
+func recreateNodeIndexForFile(orig, clone *result, origProto, cloneProto *descriptorpb.FileDescriptorProto) {
+ updateNodeIndexWithOptions[*descriptorpb.FileOptions](orig, clone, origProto, cloneProto)
+ for i, origMd := range origProto.MessageType {
+ cloneMd := cloneProto.MessageType[i]
+ recreateNodeIndexForMessage(orig, clone, origMd, cloneMd)
+ }
+ for i, origEd := range origProto.EnumType {
+ cloneEd := cloneProto.EnumType[i]
+ recreateNodeIndexForEnum(orig, clone, origEd, cloneEd)
+ }
+ for i, origExtd := range origProto.Extension {
+ cloneExtd := cloneProto.Extension[i]
+ updateNodeIndexWithOptions[*descriptorpb.FieldOptions](orig, clone, origExtd, cloneExtd)
+ }
+ for i, origSd := range origProto.Service {
+ cloneSd := cloneProto.Service[i]
+ updateNodeIndexWithOptions[*descriptorpb.ServiceOptions](orig, clone, origSd, cloneSd)
+ for j, origMtd := range origSd.Method {
+ cloneMtd := cloneSd.Method[j]
+ updateNodeIndexWithOptions[*descriptorpb.MethodOptions](orig, clone, origMtd, cloneMtd)
+ }
+ }
+}
+
+func recreateNodeIndexForMessage(orig, clone *result, origProto, cloneProto *descriptorpb.DescriptorProto) {
+ updateNodeIndexWithOptions[*descriptorpb.MessageOptions](orig, clone, origProto, cloneProto)
+ for i, origFld := range origProto.Field {
+ cloneFld := cloneProto.Field[i]
+ updateNodeIndexWithOptions[*descriptorpb.FieldOptions](orig, clone, origFld, cloneFld)
+ }
+ for i, origOod := range origProto.OneofDecl {
+ cloneOod := cloneProto.OneofDecl[i]
+ updateNodeIndexWithOptions[*descriptorpb.OneofOptions](orig, clone, origOod, cloneOod)
+ }
+ for i, origExtr := range origProto.ExtensionRange {
+ cloneExtr := cloneProto.ExtensionRange[i]
+ updateNodeIndex(orig, clone, asExtsNode(origExtr), asExtsNode(cloneExtr))
+ updateNodeIndexWithOptions[*descriptorpb.ExtensionRangeOptions](orig, clone, origExtr, cloneExtr)
+ }
+ for i, origRr := range origProto.ReservedRange {
+ cloneRr := cloneProto.ReservedRange[i]
+ updateNodeIndex(orig, clone, origRr, cloneRr)
+ }
+ for i, origNmd := range origProto.NestedType {
+ cloneNmd := cloneProto.NestedType[i]
+ recreateNodeIndexForMessage(orig, clone, origNmd, cloneNmd)
+ }
+ for i, origEd := range origProto.EnumType {
+ cloneEd := cloneProto.EnumType[i]
+ recreateNodeIndexForEnum(orig, clone, origEd, cloneEd)
+ }
+ for i, origExtd := range origProto.Extension {
+ cloneExtd := cloneProto.Extension[i]
+ updateNodeIndexWithOptions[*descriptorpb.FieldOptions](orig, clone, origExtd, cloneExtd)
+ }
+}
+
+func recreateNodeIndexForEnum(orig, clone *result, origProto, cloneProto *descriptorpb.EnumDescriptorProto) {
+ updateNodeIndexWithOptions[*descriptorpb.EnumOptions](orig, clone, origProto, cloneProto)
+ for i, origEvd := range origProto.Value {
+ cloneEvd := cloneProto.Value[i]
+ updateNodeIndexWithOptions[*descriptorpb.EnumValueOptions](orig, clone, origEvd, cloneEvd)
+ }
+ for i, origRr := range origProto.ReservedRange {
+ cloneRr := cloneProto.ReservedRange[i]
+ updateNodeIndex(orig, clone, origRr, cloneRr)
+ }
+}
+
+func recreateNodeIndexForOptions(orig, clone *result, origProtos, cloneProtos []*descriptorpb.UninterpretedOption) {
+ for i, origOpt := range origProtos {
+ cloneOpt := cloneProtos[i]
+ updateNodeIndex(orig, clone, origOpt, cloneOpt)
+ for j, origName := range origOpt.Name {
+ cloneName := cloneOpt.Name[j]
+ updateNodeIndex(orig, clone, origName, cloneName)
+ }
+ }
+}
+
+func updateNodeIndex[M proto.Message](orig, clone *result, origProto, cloneProto M) {
+ node := orig.nodes[origProto]
+ if node != nil {
+ clone.nodes[cloneProto] = node
+ }
+}
+
+type pointerMessage[T any] interface {
+ *T
+ proto.Message
+}
+
+type options[T any] interface {
+ // need this type instead of just proto.Message so we can check for nil pointer
+ pointerMessage[T]
+ GetUninterpretedOption() []*descriptorpb.UninterpretedOption
+}
+
+type withOptions[O options[T], T any] interface {
+ proto.Message
+ GetOptions() O
+}
+
+func updateNodeIndexWithOptions[O options[T], M withOptions[O, T], T any](orig, clone *result, origProto, cloneProto M) {
+ updateNodeIndex(orig, clone, origProto, cloneProto)
+ origOpts := origProto.GetOptions()
+ cloneOpts := cloneProto.GetOptions()
+ if origOpts != nil {
+ recreateNodeIndexForOptions(orig, clone, origOpts.GetUninterpretedOption(), cloneOpts.GetUninterpretedOption())
+ }
+}
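A brief usage sketch for Clone (conceptual: res is assumed to be a parser.Result obtained from an earlier parse, and the mutation is purely illustrative):

    // Take a defensive copy before mutating descriptor protos during linking.
    // The clone gets its own FileDescriptorProto; the original stays untouched.
    clone := parser.Clone(res)
    clone.FileDescriptorProto().Name = proto.String("renamed.proto") // illustrative mutation
    // res.FileDescriptorProto().GetName() still reports the original name.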
diff --git a/vendor/github.com/bufbuild/protocompile/parser/doc.go b/vendor/github.com/bufbuild/protocompile/parser/doc.go
new file mode 100644
index 0000000..4055554
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/parser/doc.go
@@ -0,0 +1,25 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package parser contains the logic for parsing protobuf source code into an
+// AST (abstract syntax tree) and also for converting an AST into a descriptor
+// proto.
+//
+// A FileDescriptorProto is very similar to an AST, but the AST this package
+// uses is more useful because it contains more information about the source
+// code, including details about whitespace and comments, that cannot be
+// represented by a descriptor proto. This makes it ideal for things like
+// code formatters, which may want to preserve things like whitespace and
+// comment format.
+package parser
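To make the package comment concrete, here is a minimal end-to-end sketch that parses a file into an AST and then derives a FileDescriptorProto from it. The input file name is hypothetical; Parse and ResultFromAST are defined elsewhere in this package (parser.go and result.go in this diff).

    package main

    import (
        "log"
        "os"

        "github.com/bufbuild/protocompile/parser"
        "github.com/bufbuild/protocompile/reporter"
    )

    func main() {
        f, err := os.Open("example.proto") // hypothetical input file
        if err != nil {
            log.Fatal(err)
        }
        defer f.Close()

        handler := reporter.NewHandler(nil) // default behavior: fail on the first error
        fileNode, err := parser.Parse("example.proto", f, handler)
        if err != nil {
            log.Fatal(err)
        }
        res, err := parser.ResultFromAST(fileNode, true /* validate */, handler)
        if err != nil {
            log.Fatal(err)
        }
        fd := res.FileDescriptorProto()
        log.Printf("parsed %s: %d top-level message(s)", fd.GetName(), len(fd.GetMessageType()))
    }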
diff --git a/vendor/github.com/bufbuild/protocompile/parser/errors.go b/vendor/github.com/bufbuild/protocompile/parser/errors.go
new file mode 100644
index 0000000..e78bdda
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/parser/errors.go
@@ -0,0 +1,22 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package parser
+
+import "errors"
+
+// ErrNoSyntax is a sentinel error that may be passed to a warning reporter.
+// The error the reporter receives will be wrapped with source position that
+// indicates the file that had no syntax statement.
+var ErrNoSyntax = errors.New("no syntax specified; defaulting to proto2 syntax")
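A sketch of how a caller might treat this warning, assuming the reporter package's NewReporter constructor takes an error callback and a warning callback, and that the position-wrapped warning unwraps to ErrNoSyntax (both are assumptions about code outside this diff):

    package parseexample

    import (
        "errors"
        "log"

        "github.com/bufbuild/protocompile/parser"
        "github.com/bufbuild/protocompile/reporter"
    )

    // newLenientHandler ignores the "no syntax" warning but logs any other
    // warning, and aborts on the first real error.
    func newLenientHandler() *reporter.Handler {
        rep := reporter.NewReporter(
            func(err reporter.ErrorWithPos) error { return err },
            func(warn reporter.ErrorWithPos) {
                if errors.Is(warn, parser.ErrNoSyntax) {
                    return // tolerate files that omit the syntax statement
                }
                log.Println("warning:", warn)
            },
        )
        return reporter.NewHandler(rep)
    }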
diff --git a/vendor/github.com/bufbuild/protocompile/parser/lexer.go b/vendor/github.com/bufbuild/protocompile/parser/lexer.go
new file mode 100644
index 0000000..71cbc7a
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/parser/lexer.go
@@ -0,0 +1,771 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package parser
+
+import (
+ "bufio"
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "math"
+ "strconv"
+ "strings"
+ "unicode/utf8"
+
+ "github.com/bufbuild/protocompile/ast"
+ "github.com/bufbuild/protocompile/reporter"
+)
+
+type runeReader struct {
+ data []byte
+ pos int
+ err error
+ mark int
+ // Enable this check to make input required to be valid UTF-8.
+ // For now, since protoc allows invalid UTF-8, default to false.
+ utf8Strict bool
+}
+
+func (rr *runeReader) readRune() (r rune, size int, err error) {
+ if rr.err != nil {
+ return 0, 0, rr.err
+ }
+ if rr.pos == len(rr.data) {
+ rr.err = io.EOF
+ return 0, 0, rr.err
+ }
+ r, sz := utf8.DecodeRune(rr.data[rr.pos:])
+ if rr.utf8Strict && r == utf8.RuneError {
+ rr.err = fmt.Errorf("invalid UTF8 at offset %d: %x", rr.pos, rr.data[rr.pos])
+ return 0, 0, rr.err
+ }
+ rr.pos += sz
+ return r, sz, nil
+}
+
+func (rr *runeReader) offset() int {
+ return rr.pos
+}
+
+func (rr *runeReader) unreadRune(sz int) {
+ newPos := rr.pos - sz
+ if newPos < rr.mark {
+ panic("unread past mark")
+ }
+ rr.pos = newPos
+}
+
+func (rr *runeReader) setMark() {
+ rr.mark = rr.pos
+}
+
+func (rr *runeReader) getMark() string {
+ return string(rr.data[rr.mark:rr.pos])
+}
+
+type protoLex struct {
+ input *runeReader
+ info *ast.FileInfo
+ handler *reporter.Handler
+ res *ast.FileNode
+
+ prevSym ast.TerminalNode
+ prevOffset int
+ eof ast.Token
+
+ comments []ast.Token
+}
+
+var utf8Bom = []byte{0xEF, 0xBB, 0xBF}
+
+func newLexer(in io.Reader, filename string, handler *reporter.Handler) (*protoLex, error) {
+ br := bufio.NewReader(in)
+
+ // if file has UTF8 byte order marker preface, consume it
+ marker, err := br.Peek(3)
+ if err == nil && bytes.Equal(marker, utf8Bom) {
+ _, _ = br.Discard(3)
+ }
+
+ contents, err := io.ReadAll(br)
+ if err != nil {
+ return nil, err
+ }
+ return &protoLex{
+ input: &runeReader{data: contents},
+ info: ast.NewFileInfo(filename, contents),
+ handler: handler,
+ }, nil
+}
+
+var keywords = map[string]int{
+ "syntax": _SYNTAX,
+ "edition": _EDITION,
+ "import": _IMPORT,
+ "weak": _WEAK,
+ "public": _PUBLIC,
+ "package": _PACKAGE,
+ "option": _OPTION,
+ "true": _TRUE,
+ "false": _FALSE,
+ "inf": _INF,
+ "nan": _NAN,
+ "repeated": _REPEATED,
+ "optional": _OPTIONAL,
+ "required": _REQUIRED,
+ "double": _DOUBLE,
+ "float": _FLOAT,
+ "int32": _INT32,
+ "int64": _INT64,
+ "uint32": _UINT32,
+ "uint64": _UINT64,
+ "sint32": _SINT32,
+ "sint64": _SINT64,
+ "fixed32": _FIXED32,
+ "fixed64": _FIXED64,
+ "sfixed32": _SFIXED32,
+ "sfixed64": _SFIXED64,
+ "bool": _BOOL,
+ "string": _STRING,
+ "bytes": _BYTES,
+ "group": _GROUP,
+ "oneof": _ONEOF,
+ "map": _MAP,
+ "extensions": _EXTENSIONS,
+ "to": _TO,
+ "max": _MAX,
+ "reserved": _RESERVED,
+ "enum": _ENUM,
+ "message": _MESSAGE,
+ "extend": _EXTEND,
+ "service": _SERVICE,
+ "rpc": _RPC,
+ "stream": _STREAM,
+ "returns": _RETURNS,
+}
+
+func (l *protoLex) maybeNewLine(r rune) {
+ if r == '\n' {
+ l.info.AddLine(l.input.offset())
+ }
+}
+
+func (l *protoLex) prev() ast.SourcePos {
+ return l.info.SourcePos(l.prevOffset)
+}
+
+func (l *protoLex) Lex(lval *protoSymType) int {
+ if l.handler.ReporterError() != nil {
+ // if error reporter already returned non-nil error,
+ // we can skip the rest of the input
+ return 0
+ }
+
+ l.comments = nil
+
+ for {
+ l.input.setMark()
+
+ l.prevOffset = l.input.offset()
+ c, _, err := l.input.readRune()
+ if err == io.EOF {
+ // we're not actually returning a rune, but this will associate
+ // accumulated comments as a trailing comment on last symbol
+ // (if appropriate)
+ l.setRune(lval, 0)
+ l.eof = lval.b.Token()
+ return 0
+ }
+ if err != nil {
+ l.setError(lval, err)
+ return _ERROR
+ }
+
+ if strings.ContainsRune("\n\r\t\f\v ", c) {
+ // skip whitespace
+ l.maybeNewLine(c)
+ continue
+ }
+
+ if c == '.' {
+ // decimal literals could start with a dot
+ cn, szn, err := l.input.readRune()
+ if err != nil {
+ l.setRune(lval, c)
+ return int(c)
+ }
+ if cn >= '0' && cn <= '9' {
+ l.readNumber()
+ token := l.input.getMark()
+ f, err := parseFloat(token)
+ if err != nil {
+ l.setError(lval, numError(err, "float", token))
+ return _ERROR
+ }
+ l.setFloat(lval, f)
+ return _FLOAT_LIT
+ }
+ l.input.unreadRune(szn)
+ l.setRune(lval, c)
+ return int(c)
+ }
+
+ if c == '_' || (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') {
+ // identifier
+ l.readIdentifier()
+ str := l.input.getMark()
+ if t, ok := keywords[str]; ok {
+ l.setIdent(lval, str)
+ return t
+ }
+ l.setIdent(lval, str)
+ return _NAME
+ }
+
+ if c >= '0' && c <= '9' {
+ // integer or float literal
+ l.readNumber()
+ token := l.input.getMark()
+ if strings.HasPrefix(token, "0x") || strings.HasPrefix(token, "0X") {
+ // hexadecimal
+ ui, err := strconv.ParseUint(token[2:], 16, 64)
+ if err != nil {
+ l.setError(lval, numError(err, "hexadecimal integer", token[2:]))
+ return _ERROR
+ }
+ l.setInt(lval, ui)
+ return _INT_LIT
+ }
+ if strings.ContainsAny(token, ".eE") {
+ // floating point!
+ f, err := parseFloat(token)
+ if err != nil {
+ l.setError(lval, numError(err, "float", token))
+ return _ERROR
+ }
+ l.setFloat(lval, f)
+ return _FLOAT_LIT
+ }
+ // integer! (decimal or octal)
+ base := 10
+ if token[0] == '0' {
+ base = 8
+ }
+ ui, err := strconv.ParseUint(token, base, 64)
+ if err != nil {
+ kind := "integer"
+ if base == 8 {
+ kind = "octal integer"
+ } else if numErr, ok := err.(*strconv.NumError); ok && numErr.Err == strconv.ErrRange {
+ // if it's too big to be an int, parse it as a float
+ var f float64
+ kind = "float"
+ f, err = parseFloat(token)
+ if err == nil {
+ l.setFloat(lval, f)
+ return _FLOAT_LIT
+ }
+ }
+ l.setError(lval, numError(err, kind, token))
+ return _ERROR
+ }
+ l.setInt(lval, ui)
+ return _INT_LIT
+ }
+
+ if c == '\'' || c == '"' {
+ // string literal
+ str, err := l.readStringLiteral(c)
+ if err != nil {
+ l.setError(lval, err)
+ return _ERROR
+ }
+ l.setString(lval, str)
+ return _STRING_LIT
+ }
+
+ if c == '/' {
+ // comment
+ cn, szn, err := l.input.readRune()
+ if err != nil {
+ l.setRune(lval, '/')
+ return int(c)
+ }
+ if cn == '/' {
+ if hasErr := l.skipToEndOfLineComment(lval); hasErr {
+ return _ERROR
+ }
+ l.comments = append(l.comments, l.newToken())
+ continue
+ }
+ if cn == '*' {
+ ok, hasErr := l.skipToEndOfBlockComment(lval)
+ if hasErr {
+ return _ERROR
+ }
+ if !ok {
+ l.setError(lval, errors.New("block comment never terminates, unexpected EOF"))
+ return _ERROR
+ }
+ l.comments = append(l.comments, l.newToken())
+ continue
+ }
+ l.input.unreadRune(szn)
+ }
+
+ if c < 32 || c == 127 {
+ l.setError(lval, errors.New("invalid control character"))
+ return _ERROR
+ }
+ if !strings.ContainsRune(";,.:=-+(){}[]<>/", c) {
+ l.setError(lval, errors.New("invalid character"))
+ return _ERROR
+ }
+ l.setRune(lval, c)
+ return int(c)
+ }
+}
+
+func parseFloat(token string) (float64, error) {
+ // strconv.ParseFloat allows _ to separate digits, but protobuf does not
+ if strings.ContainsRune(token, '_') {
+ return 0, &strconv.NumError{
+ Func: "parseFloat",
+ Num: token,
+ Err: strconv.ErrSyntax,
+ }
+ }
+ f, err := strconv.ParseFloat(token, 64)
+ if err == nil {
+ return f, nil
+ }
+ if numErr, ok := err.(*strconv.NumError); ok && numErr.Err == strconv.ErrRange && math.IsInf(f, 1) {
+ // protoc doesn't complain about float overflow and instead just uses "infinity"
+ // so we mirror that behavior by just returning infinity and ignoring the error
+ return f, nil
+ }
+ return f, err
+}
+
+func (l *protoLex) newToken() ast.Token {
+ offset := l.input.mark
+ length := l.input.pos - l.input.mark
+ return l.info.AddToken(offset, length)
+}
+
+func (l *protoLex) setPrevAndAddComments(n ast.TerminalNode) {
+ comments := l.comments
+ l.comments = nil
+ var prevTrailingComments []ast.Token
+ if l.prevSym != nil && len(comments) > 0 {
+ prevEnd := l.info.NodeInfo(l.prevSym).End().Line
+ info := l.info.NodeInfo(n)
+ nStart := info.Start().Line
+ if nStart == prevEnd {
+ if rn, ok := n.(*ast.RuneNode); ok && rn.Rune == 0 {
+ // if current token is EOF, pretend it's on a separate line
+ // so that the logic below can attribute a final trailing
+ // comment to the previous token
+ nStart++
+ }
+ }
+ c := comments[0]
+ commentInfo := l.info.TokenInfo(c)
+ commentStart := commentInfo.Start().Line
+ if nStart > prevEnd && commentStart == prevEnd {
+ // Comment starts right after the previous token. If it's a
+ // line comment, we record that as a trailing comment.
+ //
+ // But if it's a block comment, it is only a trailing comment
+ // if there are multiple comments or if the block comment ends
+ // on a line before n.
+ canDonate := strings.HasPrefix(commentInfo.RawText(), "//") ||
+ len(comments) > 1 || commentInfo.End().Line < nStart
+
+ if canDonate {
+ prevTrailingComments = comments[:1]
+ comments = comments[1:]
+ }
+ }
+ }
+
+ // now we can associate comments
+ for _, c := range prevTrailingComments {
+ l.info.AddComment(c, l.prevSym.Token())
+ }
+ for _, c := range comments {
+ l.info.AddComment(c, n.Token())
+ }
+
+ l.prevSym = n
+}
+
+func (l *protoLex) setString(lval *protoSymType, val string) {
+ lval.s = ast.NewStringLiteralNode(val, l.newToken())
+ l.setPrevAndAddComments(lval.s)
+}
+
+func (l *protoLex) setIdent(lval *protoSymType, val string) {
+ lval.id = ast.NewIdentNode(val, l.newToken())
+ l.setPrevAndAddComments(lval.id)
+}
+
+func (l *protoLex) setInt(lval *protoSymType, val uint64) {
+ lval.i = ast.NewUintLiteralNode(val, l.newToken())
+ l.setPrevAndAddComments(lval.i)
+}
+
+func (l *protoLex) setFloat(lval *protoSymType, val float64) {
+ lval.f = ast.NewFloatLiteralNode(val, l.newToken())
+ l.setPrevAndAddComments(lval.f)
+}
+
+func (l *protoLex) setRune(lval *protoSymType, val rune) {
+ lval.b = ast.NewRuneNode(val, l.newToken())
+ l.setPrevAndAddComments(lval.b)
+}
+
+func (l *protoLex) setError(lval *protoSymType, err error) {
+ lval.err, _ = l.addSourceError(err)
+}
+
+func (l *protoLex) readNumber() {
+ allowExpSign := false
+ for {
+ c, sz, err := l.input.readRune()
+ if err != nil {
+ break
+ }
+ if (c == '-' || c == '+') && !allowExpSign {
+ l.input.unreadRune(sz)
+ break
+ }
+ allowExpSign = false
+ if c != '.' && c != '_' && (c < '0' || c > '9') &&
+ (c < 'a' || c > 'z') && (c < 'A' || c > 'Z') &&
+ c != '-' && c != '+' {
+ // no more chars in the number token
+ l.input.unreadRune(sz)
+ break
+ }
+ if c == 'e' || c == 'E' {
+ // scientific notation char can be followed by
+ // an exponent sign
+ allowExpSign = true
+ }
+ }
+}
+
+func numError(err error, kind, s string) error {
+ ne, ok := err.(*strconv.NumError)
+ if !ok {
+ return err
+ }
+ if ne.Err == strconv.ErrRange {
+ return fmt.Errorf("value out of range for %s: %s", kind, s)
+ }
+ // syntax error
+ return fmt.Errorf("invalid syntax in %s value: %s", kind, s)
+}
+
+func (l *protoLex) readIdentifier() {
+ for {
+ c, sz, err := l.input.readRune()
+ if err != nil {
+ break
+ }
+ if c != '_' && (c < 'a' || c > 'z') && (c < 'A' || c > 'Z') && (c < '0' || c > '9') {
+ l.input.unreadRune(sz)
+ break
+ }
+ }
+}
+
+func (l *protoLex) readStringLiteral(quote rune) (string, error) {
+ var buf bytes.Buffer
+ var escapeError reporter.ErrorWithPos
+ var noMoreErrors bool
+ reportErr := func(msg, badEscape string) {
+ if noMoreErrors {
+ return
+ }
+ if escapeError != nil {
+ // report previous one
+ _, ok := l.addSourceError(escapeError)
+ if !ok {
+ noMoreErrors = true
+ }
+ }
+ var err error
+ if strings.HasSuffix(msg, "%s") {
+ err = fmt.Errorf(msg, badEscape)
+ } else {
+ err = errors.New(msg)
+ }
+ // we've now consumed the bad escape and lexer position is after it, so we need
+ // to back up to the beginning of the escape to report the correct position
+ escapeError = l.errWithCurrentPos(err, -len(badEscape))
+ }
+ for {
+ c, _, err := l.input.readRune()
+ if err != nil {
+ if err == io.EOF {
+ err = io.ErrUnexpectedEOF
+ }
+ return "", err
+ }
+ if c == '\n' {
+ return "", errors.New("encountered end-of-line before end of string literal")
+ }
+ if c == quote {
+ break
+ }
+ if c == 0 {
+ reportErr("null character ('\\0') not allowed in string literal", string(rune(0)))
+ continue
+ }
+ if c == '\\' {
+ // escape sequence
+ c, _, err = l.input.readRune()
+ if err != nil {
+ return "", err
+ }
+ switch {
+ case c == 'x' || c == 'X':
+ // hex escape
+ c1, sz1, err := l.input.readRune()
+ if err != nil {
+ return "", err
+ }
+ if c1 == quote || c1 == '\\' {
+ l.input.unreadRune(sz1)
+ reportErr("invalid hex escape: %s", "\\"+string(c))
+ continue
+ }
+ c2, sz2, err := l.input.readRune()
+ if err != nil {
+ return "", err
+ }
+ var hex string
+ if (c2 < '0' || c2 > '9') && (c2 < 'a' || c2 > 'f') && (c2 < 'A' || c2 > 'F') {
+ l.input.unreadRune(sz2)
+ hex = string(c1)
+ } else {
+ hex = string([]rune{c1, c2})
+ }
+ i, err := strconv.ParseInt(hex, 16, 32)
+ if err != nil {
+ reportErr("invalid hex escape: %s", "\\"+string(c)+hex)
+ continue
+ }
+ buf.WriteByte(byte(i))
+ case c >= '0' && c <= '7':
+ // octal escape
+ c2, sz2, err := l.input.readRune()
+ if err != nil {
+ return "", err
+ }
+ var octal string
+ if c2 < '0' || c2 > '7' {
+ l.input.unreadRune(sz2)
+ octal = string(c)
+ } else {
+ c3, sz3, err := l.input.readRune()
+ if err != nil {
+ return "", err
+ }
+ if c3 < '0' || c3 > '7' {
+ l.input.unreadRune(sz3)
+ octal = string([]rune{c, c2})
+ } else {
+ octal = string([]rune{c, c2, c3})
+ }
+ }
+ i, err := strconv.ParseInt(octal, 8, 32)
+ if err != nil {
+ reportErr("invalid octal escape: %s", "\\"+octal)
+ continue
+ }
+ if i > 0xff {
+ reportErr("octal escape is out range, must be between 0 and 377: %s", "\\"+octal)
+ continue
+ }
+ buf.WriteByte(byte(i))
+ case c == 'u':
+ // short unicode escape
+ u := make([]rune, 4)
+ for i := range u {
+ c2, sz2, err := l.input.readRune()
+ if err != nil {
+ return "", err
+ }
+ if c2 == quote || c2 == '\\' {
+ l.input.unreadRune(sz2)
+ u = u[:i]
+ break
+ }
+ u[i] = c2
+ }
+ codepointStr := string(u)
+ if len(u) < 4 {
+ reportErr("invalid unicode escape: %s", "\\u"+codepointStr)
+ continue
+ }
+ i, err := strconv.ParseInt(codepointStr, 16, 32)
+ if err != nil {
+ reportErr("invalid unicode escape: %s", "\\u"+codepointStr)
+ continue
+ }
+ buf.WriteRune(rune(i))
+ case c == 'U':
+ // long unicode escape
+ u := make([]rune, 8)
+ for i := range u {
+ c2, sz2, err := l.input.readRune()
+ if err != nil {
+ return "", err
+ }
+ if c2 == quote || c2 == '\\' {
+ l.input.unreadRune(sz2)
+ u = u[:i]
+ break
+ }
+ u[i] = c2
+ }
+ codepointStr := string(u)
+ if len(u) < 8 {
+ reportErr("invalid unicode escape: %s", "\\U"+codepointStr)
+ continue
+ }
+ i, err := strconv.ParseInt(string(u), 16, 32)
+ if err != nil {
+ reportErr("invalid unicode escape: %s", "\\U"+codepointStr)
+ continue
+ }
+ if i > 0x10ffff || i < 0 {
+ reportErr("unicode escape is out of range, must be between 0 and 0x10ffff: %s", "\\U"+codepointStr)
+ continue
+ }
+ buf.WriteRune(rune(i))
+ case c == 'a':
+ buf.WriteByte('\a')
+ case c == 'b':
+ buf.WriteByte('\b')
+ case c == 'f':
+ buf.WriteByte('\f')
+ case c == 'n':
+ buf.WriteByte('\n')
+ case c == 'r':
+ buf.WriteByte('\r')
+ case c == 't':
+ buf.WriteByte('\t')
+ case c == 'v':
+ buf.WriteByte('\v')
+ case c == '\\':
+ buf.WriteByte('\\')
+ case c == '\'':
+ buf.WriteByte('\'')
+ case c == '"':
+ buf.WriteByte('"')
+ case c == '?':
+ buf.WriteByte('?')
+ default:
+ reportErr("invalid escape sequence: %s", "\\"+string(c))
+ continue
+ }
+ } else {
+ buf.WriteRune(c)
+ }
+ }
+ if escapeError != nil {
+ return "", escapeError
+ }
+ return buf.String(), nil
+}
+
+func (l *protoLex) skipToEndOfLineComment(lval *protoSymType) (hasErr bool) {
+ for {
+ c, sz, err := l.input.readRune()
+ if err != nil {
+ // eof
+ return false
+ }
+ switch c {
+ case '\n':
+ // don't include newline in the comment
+ l.input.unreadRune(sz)
+ return false
+ case 0:
+ l.setError(lval, errors.New("invalid control character"))
+ return true
+ }
+ }
+}
+
+func (l *protoLex) skipToEndOfBlockComment(lval *protoSymType) (ok, hasErr bool) {
+ for {
+ c, _, err := l.input.readRune()
+ if err != nil {
+ return false, false
+ }
+ if c == 0 {
+ l.setError(lval, errors.New("invalid control character"))
+ return false, true
+ }
+ l.maybeNewLine(c)
+ if c == '*' {
+ c, sz, err := l.input.readRune()
+ if err != nil {
+ return false, false
+ }
+ if c == '/' {
+ return true, false
+ }
+ l.input.unreadRune(sz)
+ }
+ }
+}
+
+func (l *protoLex) addSourceError(err error) (reporter.ErrorWithPos, bool) {
+ ewp, ok := err.(reporter.ErrorWithPos)
+ if !ok {
+ // TODO: Store the previous span instead of just the position.
+ ewp = reporter.Error(ast.NewSourceSpan(l.prev(), l.prev()), err)
+ }
+ handlerErr := l.handler.HandleError(ewp)
+ return ewp, handlerErr == nil
+}
+
+func (l *protoLex) Error(s string) {
+ _, _ = l.addSourceError(errors.New(s))
+}
+
+// TODO: Accept both a start and end offset, and use that to create a span.
+func (l *protoLex) errWithCurrentPos(err error, offset int) reporter.ErrorWithPos {
+ if ewp, ok := err.(reporter.ErrorWithPos); ok {
+ return ewp
+ }
+ pos := l.info.SourcePos(l.input.offset() + offset)
+ return reporter.Error(ast.NewSourceSpan(pos, pos), err)
+}
+
+func (l *protoLex) requireSemicolon(semicolons []*ast.RuneNode) (*ast.RuneNode, []*ast.RuneNode) {
+ if len(semicolons) == 0 {
+ l.Error("syntax error: expecting ';'")
+ return nil, nil
+ }
+ return semicolons[0], semicolons[1:]
+}
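The lexer above is unexported, but its numeric-literal classification is easy to restate on its own: a 0x/0X prefix means hexadecimal, a '.', 'e', or 'E' means a float, and an otherwise leading '0' selects octal. A standalone sketch mirroring that order of checks (not the lexer's API):

    package lexexample

    import "strings"

    // classifyNumericToken mirrors the order of checks in protoLex.Lex above.
    // A lone "0" is labeled octal here, just as the lexer parses it with base 8;
    // the value is the same either way.
    func classifyNumericToken(token string) string {
        switch {
        case strings.HasPrefix(token, "0x") || strings.HasPrefix(token, "0X"):
            return "hexadecimal integer"
        case strings.ContainsAny(token, ".eE"):
            return "float"
        case strings.HasPrefix(token, "0"):
            return "octal integer"
        default:
            return "decimal integer"
        }
    }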
diff --git a/vendor/github.com/bufbuild/protocompile/parser/parser.go b/vendor/github.com/bufbuild/protocompile/parser/parser.go
new file mode 100644
index 0000000..21314d5
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/parser/parser.go
@@ -0,0 +1,201 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package parser
+
+import (
+ "fmt"
+ "io"
+
+ "google.golang.org/protobuf/proto"
+ "google.golang.org/protobuf/types/descriptorpb"
+
+ "github.com/bufbuild/protocompile/ast"
+ "github.com/bufbuild/protocompile/reporter"
+)
+
+// The path ../.tmp/bin/goyacc is built when using `make generate` from repo root.
+//go:generate ../.tmp/bin/goyacc -o proto.y.go -l -p proto proto.y
+
+func init() {
+ protoErrorVerbose = true
+
+ // fix up the generated "token name" array so that error messages are nicer
+ setTokenName(_STRING_LIT, "string literal")
+ setTokenName(_INT_LIT, "int literal")
+ setTokenName(_FLOAT_LIT, "float literal")
+ setTokenName(_NAME, "identifier")
+ setTokenName(_ERROR, "error")
+ // for keywords, just show the keyword itself wrapped in quotes
+ for str, i := range keywords {
+ setTokenName(i, fmt.Sprintf(`"%s"`, str))
+ }
+}
+
+func setTokenName(token int, text string) {
+ // NB: this is based on logic in generated parse code that translates the
+ // int returned from the lexer into an internal token number.
+ var intern int8
+ if token < len(protoTok1) {
+ intern = protoTok1[token]
+ } else {
+ if token >= protoPrivate {
+ if token < protoPrivate+len(protoTok2) {
+ intern = protoTok2[token-protoPrivate]
+ }
+ }
+ if intern == 0 {
+ for i := 0; i+1 < len(protoTok3); i += 2 {
+ if int(protoTok3[i]) == token {
+ intern = protoTok3[i+1]
+ break
+ }
+ }
+ }
+ }
+
+ if intern >= 1 && int(intern-1) < len(protoToknames) {
+ protoToknames[intern-1] = text
+ return
+ }
+
+ panic(fmt.Sprintf("Unknown token value: %d", token))
+}
+
+// Parse parses the given source code and returns an AST. The given filename
+// is used to construct error messages and position information. The given reader
+// supplies the source code. The given handler is used to report errors and
+// warnings encountered while parsing. If any errors are reported, this function
+// returns a non-nil error.
+//
+// If the error returned is due to a syntax error in the source, then a non-nil
+// AST is also returned. If the handler chooses to not abort the parse (e.g. the
+// underlying error reporter returns nil instead of an error), the parser will
+// attempt to recover and keep going. This allows multiple syntax errors to be
+// reported in a single pass. And it also means that more of the AST can be
+// populated (erroneous productions around the syntax error will of course be
+// absent).
+//
+// The degree to which the parser can recover from errors and populate the AST
+// depends on the nature of the syntax error and if there are any tokens after the
+// syntax error that can help the parser recover. This error recovery and partial
+// AST production is best effort.
+func Parse(filename string, r io.Reader, handler *reporter.Handler) (*ast.FileNode, error) {
+ lx, err := newLexer(r, filename, handler)
+ if err != nil {
+ return nil, err
+ }
+ protoParse(lx)
+ if lx.res == nil {
+ // nil AST means there was an error that prevented any parsing
+ // or the file was empty; synthesize empty non-nil AST
+ lx.res = ast.NewEmptyFileNode(filename)
+ }
+ return lx.res, handler.Error()
+}
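As the comment above notes, Parse can surface several syntax errors in one pass when the error reporter chooses not to abort. A hedged sketch of such a collecting handler (again assuming reporter.NewReporter's callback shape):

    package recoveryexample

    import (
        "strings"

        "github.com/bufbuild/protocompile/ast"
        "github.com/bufbuild/protocompile/parser"
        "github.com/bufbuild/protocompile/reporter"
    )

    // parseCollectingErrors returns nil from the error callback so parsing keeps
    // going, collecting every reported syntax error along the way.
    func parseCollectingErrors(filename, source string) (*ast.FileNode, []reporter.ErrorWithPos) {
        var errs []reporter.ErrorWithPos
        handler := reporter.NewHandler(reporter.NewReporter(
            func(err reporter.ErrorWithPos) error {
                errs = append(errs, err)
                return nil // keep parsing to find more errors
            },
            func(reporter.ErrorWithPos) {}, // ignore warnings in this sketch
        ))
        fileNode, _ := parser.Parse(filename, strings.NewReader(source), handler)
        return fileNode, errs
    }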
+
+// Result is the result of constructing a descriptor proto from a parsed AST.
+// From this result, the AST and the file descriptor proto can be had. This
+// also contains numerous lookup functions, for looking up AST nodes that
+// correspond to various elements of the descriptor hierarchy.
+//
+// Results can be created without AST information, using the ResultWithoutAST()
+// function. All functions other than AST() will still return non-nil values,
+// allowing compile operations to work with files that have only intermediate
+// descriptor protos and no source code. For such results, the function that
+// return AST nodes will return placeholder nodes. The position information for
+// placeholder nodes contains only the filename.
+type Result interface {
+ // AST returns the parsed abstract syntax tree. This returns nil if the
+ // Result was created without an AST.
+ AST() *ast.FileNode
+ // FileDescriptorProto returns the file descriptor proto.
+ FileDescriptorProto() *descriptorpb.FileDescriptorProto
+
+ // FileNode returns the root of the AST. If this result has no AST then a
+ // placeholder node is returned.
+ FileNode() ast.FileDeclNode
+ // Node returns the AST node from which the given message was created. This
+ // can return nil, such as if the given message is not part of the
+ // FileDescriptorProto hierarchy. If this result has no AST, this returns a
+ // placeholder node.
+ Node(proto.Message) ast.Node
+ // OptionNode returns the AST node corresponding to the given uninterpreted
+ // option. This can return nil, such as if the given option is not part of
+ // the FileDescriptorProto hierarchy. If this result has no AST, this
+ // returns a placeholder node.
+ OptionNode(*descriptorpb.UninterpretedOption) ast.OptionDeclNode
+ // OptionNamePartNode returns the AST node corresponding to the given name
+ // part for an uninterpreted option. This can return nil, such as if the
+ // given name part is not part of the FileDescriptorProto hierarchy. If this
+ // result has no AST, this returns a placeholder node.
+ OptionNamePartNode(*descriptorpb.UninterpretedOption_NamePart) ast.Node
+ // MessageNode returns the AST node corresponding to the given message. This
+ // can return nil, such as if the given message is not part of the
+ // FileDescriptorProto hierarchy. If this result has no AST, this returns a
+ // placeholder node.
+ MessageNode(*descriptorpb.DescriptorProto) ast.MessageDeclNode
+ // FieldNode returns the AST node corresponding to the given field. This can
+ // return nil, such as if the given field is not part of the
+ // FileDescriptorProto hierarchy. If this result has no AST, this returns a
+ // placeholder node.
+ FieldNode(*descriptorpb.FieldDescriptorProto) ast.FieldDeclNode
+ // OneofNode returns the AST node corresponding to the given oneof. This can
+ // return nil, such as if the given oneof is not part of the
+ // FileDescriptorProto hierarchy. If this result has no AST, this returns a
+ // placeholder node.
+ OneofNode(*descriptorpb.OneofDescriptorProto) ast.OneofDeclNode
+ // ExtensionRangeNode returns the AST node corresponding to the given
+ // extension range. This can return nil, such as if the given range is not
+ // part of the FileDescriptorProto hierarchy. If this result has no AST,
+ // this returns a placeholder node.
+ ExtensionRangeNode(*descriptorpb.DescriptorProto_ExtensionRange) ast.RangeDeclNode
+
+ // ExtensionsNode returns the AST node corresponding to the "extensions"
+ // statement in a message that corresponds to the given range. This will be
+ // the parent of the node returned by ExtensionRangeNode, which contains the
+ // options that apply to all child ranges.
+ ExtensionsNode(*descriptorpb.DescriptorProto_ExtensionRange) ast.NodeWithOptions
+
+ // MessageReservedRangeNode returns the AST node corresponding to the given
+ // reserved range. This can return nil, such as if the given range is not
+ // part of the FileDescriptorProto hierarchy. If this result has no AST,
+ // this returns a placeholder node.
+ MessageReservedRangeNode(*descriptorpb.DescriptorProto_ReservedRange) ast.RangeDeclNode
+ // EnumNode returns the AST node corresponding to the given enum. This can
+ // return nil, such as if the given enum is not part of the
+ // FileDescriptorProto hierarchy. If this result has no AST, this returns a
+ // placeholder node.
+ EnumNode(*descriptorpb.EnumDescriptorProto) ast.NodeWithOptions
+ // EnumValueNode returns the AST node corresponding to the given enum. This
+ // can return nil, such as if the given enum value is not part of the
+ // FileDescriptorProto hierarchy. If this result has no AST, this returns a
+ // placeholder node.
+ EnumValueNode(*descriptorpb.EnumValueDescriptorProto) ast.EnumValueDeclNode
+ // EnumReservedRangeNode returns the AST node corresponding to the given
+ // reserved range. This can return nil, such as if the given range is not
+ // part of the FileDescriptorProto hierarchy. If this result has no AST,
+ // this returns a placeholder node.
+ EnumReservedRangeNode(*descriptorpb.EnumDescriptorProto_EnumReservedRange) ast.RangeDeclNode
+ // ServiceNode returns the AST node corresponding to the given service. This
+ // can return nil, such as if the given service is not part of the
+ // FileDescriptorProto hierarchy. If this result has no AST, this returns a
+ // placeholder node.
+ ServiceNode(*descriptorpb.ServiceDescriptorProto) ast.NodeWithOptions
+ // MethodNode returns the AST node corresponding to the given method. This
+ // can return nil, such as if the given method is not part of the
+ // FileDescriptorProto hierarchy. If this result has no AST, this returns a
+ // placeholder node.
+ MethodNode(*descriptorpb.MethodDescriptorProto) ast.RPCDeclNode
+}
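A brief sketch of using these lookups to map a descriptor back to its source position (conceptual: res is a Result with an AST, fieldProto is a *descriptorpb.FieldDescriptorProto taken from it, and the NodeInfo accessor is assumed to live on ast.FileNode outside this diff):

    // Find where a field was declared in the original .proto source.
    node := res.FieldNode(fieldProto)
    info := res.AST().NodeInfo(node)
    fmt.Printf("field %s declared at %v\n", fieldProto.GetName(), info.Start())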
diff --git a/vendor/github.com/bufbuild/protocompile/parser/proto.y b/vendor/github.com/bufbuild/protocompile/parser/proto.y
new file mode 100644
index 0000000..e66cabd
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/parser/proto.y
@@ -0,0 +1,1498 @@
+%{
+package parser
+
+//lint:file-ignore SA4006 generated parser has unused values
+
+import (
+ "math"
+ "strings"
+
+ "github.com/bufbuild/protocompile/ast"
+)
+
+%}
+
+// fields inside this union end up as the fields in a structure known
+// as ${PREFIX}SymType, of which a reference is passed to the lexer.
+%union{
+ file *ast.FileNode
+ syn *ast.SyntaxNode
+ ed *ast.EditionNode
+ fileElements []ast.FileElement
+ pkg nodeWithRunes[*ast.PackageNode]
+ imprt nodeWithRunes[*ast.ImportNode]
+ msg nodeWithRunes[*ast.MessageNode]
+ msgElements []ast.MessageElement
+ fld *ast.FieldNode
+ msgFld nodeWithRunes[*ast.FieldNode]
+ mapFld nodeWithRunes[*ast.MapFieldNode]
+ mapType *ast.MapTypeNode
+ grp *ast.GroupNode
+ msgGrp nodeWithRunes[*ast.GroupNode]
+ oo nodeWithRunes[*ast.OneofNode]
+ ooElement ast.OneofElement
+ ooElements []ast.OneofElement
+ ext nodeWithRunes[*ast.ExtensionRangeNode]
+ resvd nodeWithRunes[*ast.ReservedNode]
+ en nodeWithRunes[*ast.EnumNode]
+ enElements []ast.EnumElement
+ env nodeWithRunes[*ast.EnumValueNode]
+ extend nodeWithRunes[*ast.ExtendNode]
+ extElement ast.ExtendElement
+ extElements []ast.ExtendElement
+ svc nodeWithRunes[*ast.ServiceNode]
+ svcElements []ast.ServiceElement
+ mtd nodeWithRunes[*ast.RPCNode]
+ mtdMsgType *ast.RPCTypeNode
+ mtdElements []ast.RPCElement
+ optRaw *ast.OptionNode
+ opt nodeWithRunes[*ast.OptionNode]
+ opts *compactOptionSlices
+ refRaw *ast.FieldReferenceNode
+ ref nodeWithRunes[*ast.FieldReferenceNode]
+ optNms *fieldRefSlices
+ cmpctOpts *ast.CompactOptionsNode
+ rng *ast.RangeNode
+ rngs *rangeSlices
+ names *nameSlices
+ cidPart nodeWithRunes[*ast.IdentNode]
+ cid *identSlices
+ tid ast.IdentValueNode
+ sl *valueSlices
+ msgLitFlds *messageFieldList
+ msgLitFld *ast.MessageFieldNode
+ v ast.ValueNode
+ il ast.IntValueNode
+ str []*ast.StringLiteralNode
+ s *ast.StringLiteralNode
+ i *ast.UintLiteralNode
+ f *ast.FloatLiteralNode
+ id *ast.IdentNode
+ b *ast.RuneNode
+ bs []*ast.RuneNode
+ err error
+}
+
+// any non-terminal which returns a value needs a type, which is
+// really a field name in the above union struct
+%type <file> file
+%type <syn> syntaxDecl
+%type <ed> editionDecl
+%type <fileElements> fileBody fileElement fileElements
+%type <imprt> importDecl
+%type <pkg> packageDecl
+%type <optRaw> compactOption oneofOptionDecl
+%type <opt> optionDecl compactOptionEntry compactOptionFinal
+%type <opts> compactOptionDecls compactOptionLeadingDecls
+%type <refRaw> extensionName messageLiteralFieldName optionNamePart
+%type <ref> optionNameEntry optionNameFinal
+%type <optNms> optionName optionNameLeading
+%type <cmpctOpts> compactOptions
+%type <v> fieldValue optionValue scalarValue fieldScalarValue messageLiteralWithBraces messageLiteral numLit specialFloatLit listLiteral listElement listOfMessagesLiteral messageValue
+%type <il> enumValueNumber
+%type <id> identifier mapKeyType msgElementName extElementName oneofElementName notGroupElementName mtdElementName enumValueName fieldCardinality
+%type <cidPart> qualifiedIdentifierEntry qualifiedIdentifierFinal mtdElementIdentEntry mtdElementIdentFinal
+%type <cid> qualifiedIdentifier msgElementIdent extElementIdent oneofElementIdent notGroupElementIdent mtdElementIdent qualifiedIdentifierDot qualifiedIdentifierLeading mtdElementIdentLeading
+%type <tid> typeName msgElementTypeIdent extElementTypeIdent oneofElementTypeIdent notGroupElementTypeIdent mtdElementTypeIdent
+%type <sl> listElements messageLiterals
+%type <msgLitFlds> messageLiteralFieldEntry messageLiteralFields messageTextFormat
+%type <msgLitFld> messageLiteralField
+%type <msgFld> messageFieldDecl
+%type <fld> oneofFieldDecl extensionFieldDecl
+%type <oo> oneofDecl
+%type <grp> groupDecl oneofGroupDecl
+%type <msgGrp> messageGroupDecl
+%type <mapFld> mapFieldDecl
+%type <mapType> mapType
+%type <msg> messageDecl
+%type <msgElements> messageElement messageElements messageBody
+%type <ooElement> oneofElement
+%type <ooElements> oneofElements oneofBody
+%type <names> fieldNameStrings fieldNameIdents
+%type <resvd> msgReserved enumReserved reservedNames
+%type <rng> tagRange enumValueRange
+%type <rngs> tagRanges enumValueRanges
+%type <ext> extensionRangeDecl
+%type <en> enumDecl
+%type <enElements> enumElement enumElements enumBody
+%type <env> enumValueDecl
+%type <extend> extensionDecl
+%type <extElement> extensionElement
+%type <extElements> extensionElements extensionBody
+%type <str> stringLit
+%type <svc> serviceDecl
+%type <svcElements> serviceElement serviceElements serviceBody
+%type <mtd> methodDecl
+%type <mtdElements> methodElement methodElements methodBody
+%type <mtdMsgType> methodMessageType
+%type <b> semicolon
+%type <bs> semicolons semicolonList
+
+// same for terminals
+%token <s> _STRING_LIT
+%token <i> _INT_LIT
+%token <f> _FLOAT_LIT
+%token <id> _NAME
+%token <id> _SYNTAX _EDITION _IMPORT _WEAK _PUBLIC _PACKAGE _OPTION _TRUE _FALSE _INF _NAN _REPEATED _OPTIONAL _REQUIRED
+%token <id> _DOUBLE _FLOAT _INT32 _INT64 _UINT32 _UINT64 _SINT32 _SINT64 _FIXED32 _FIXED64 _SFIXED32 _SFIXED64
+%token <id> _BOOL _STRING _BYTES _GROUP _ONEOF _MAP _EXTENSIONS _TO _MAX _RESERVED _ENUM _MESSAGE _EXTEND
+%token <id> _SERVICE _RPC _STREAM _RETURNS
+%token <err> _ERROR
+// we define all of these, even ones that aren't used, to improve error messages
+// so it shows the unexpected symbol instead of showing "$unk"
+%token <b> '=' ';' ':' '{' '}' '\\' '/' '?' '.' ',' '>' '<' '+' '-' '(' ')' '[' ']' '*' '&' '^' '%' '$' '#' '@' '!' '~' '`'
+
+%%
+
+file : syntaxDecl {
+ lex := protolex.(*protoLex)
+ $$ = ast.NewFileNode(lex.info, $1, nil, lex.eof)
+ lex.res = $$
+ }
+ | editionDecl {
+ lex := protolex.(*protoLex)
+ $$ = ast.NewFileNodeWithEdition(lex.info, $1, nil, lex.eof)
+ lex.res = $$
+ }
+ | fileBody {
+ lex := protolex.(*protoLex)
+ $$ = ast.NewFileNode(lex.info, nil, $1, lex.eof)
+ lex.res = $$
+ }
+ | syntaxDecl fileBody {
+ lex := protolex.(*protoLex)
+ $$ = ast.NewFileNode(lex.info, $1, $2, lex.eof)
+ lex.res = $$
+ }
+ | editionDecl fileBody {
+ lex := protolex.(*protoLex)
+ $$ = ast.NewFileNodeWithEdition(lex.info, $1, $2, lex.eof)
+ lex.res = $$
+ }
+ | {
+ lex := protolex.(*protoLex)
+ $$ = ast.NewFileNode(lex.info, nil, nil, lex.eof)
+ lex.res = $$
+ }
+
+fileBody : semicolons fileElements {
+ $$ = prependRunes(toFileElement, $1, $2)
+ }
+
+fileElements : fileElements fileElement {
+ $$ = append($1, $2...)
+ }
+ | fileElement {
+ $$ = $1
+ }
+
+fileElement : importDecl {
+ $$ = toElements[ast.FileElement](toFileElement, $1.Node, $1.Runes)
+ }
+ | packageDecl {
+ $$ = toElements[ast.FileElement](toFileElement, $1.Node, $1.Runes)
+ }
+ | optionDecl {
+ $$ = toElements[ast.FileElement](toFileElement, $1.Node, $1.Runes)
+ }
+ | messageDecl {
+ $$ = toElements[ast.FileElement](toFileElement, $1.Node, $1.Runes)
+ }
+ | enumDecl {
+ $$ = toElements[ast.FileElement](toFileElement, $1.Node, $1.Runes)
+ }
+ | extensionDecl {
+ $$ = toElements[ast.FileElement](toFileElement, $1.Node, $1.Runes)
+ }
+ | serviceDecl {
+ $$ = toElements[ast.FileElement](toFileElement, $1.Node, $1.Runes)
+ }
+ | error {
+ $$ = nil
+ }
+
+semicolonList : ';' {
+ $$ = []*ast.RuneNode{$1}
+ }
+ | semicolonList ';' {
+ $$ = append($1, $2)
+ }
+
+semicolons : semicolonList {
+ $$ = $1
+ }
+ | {
+ $$ = nil
+ }
+
+semicolon : ';' {
+ $$ = $1
+ } |
+ {
+ protolex.(*protoLex).Error("syntax error: expecting ';'")
+ $$ = nil
+ }
+
+syntaxDecl : _SYNTAX '=' stringLit ';' {
+ $$ = ast.NewSyntaxNode($1.ToKeyword(), $2, toStringValueNode($3), $4)
+ }
+
+editionDecl : _EDITION '=' stringLit ';' {
+ $$ = ast.NewEditionNode($1.ToKeyword(), $2, toStringValueNode($3), $4)
+ }
+
+importDecl : _IMPORT stringLit semicolons {
+ semi, extra := protolex.(*protoLex).requireSemicolon($3)
+ $$ = newNodeWithRunes(ast.NewImportNode($1.ToKeyword(), nil, nil, toStringValueNode($2), semi), extra...)
+ }
+ | _IMPORT _WEAK stringLit semicolons {
+ semi, extra := protolex.(*protoLex).requireSemicolon($4)
+ $$ = newNodeWithRunes(ast.NewImportNode($1.ToKeyword(), nil, $2.ToKeyword(), toStringValueNode($3), semi), extra...)
+ }
+ | _IMPORT _PUBLIC stringLit semicolons {
+ semi, extra := protolex.(*protoLex).requireSemicolon($4)
+ $$ = newNodeWithRunes(ast.NewImportNode($1.ToKeyword(), $2.ToKeyword(), nil, toStringValueNode($3), semi), extra...)
+ }
+
+packageDecl : _PACKAGE qualifiedIdentifier semicolons {
+ semi, extra := protolex.(*protoLex).requireSemicolon($3)
+ $$ = newNodeWithRunes(ast.NewPackageNode($1.ToKeyword(), $2.toIdentValueNode(nil), semi), extra...)
+ }
+
+qualifiedIdentifier : identifier {
+ $$ = &identSlices{idents: []*ast.IdentNode{$1}}
+ }
+ | qualifiedIdentifier '.' identifier {
+ $1.idents = append($1.idents, $3)
+ $1.dots = append($1.dots, $2)
+ $$ = $1
+ }
+
+qualifiedIdentifierDot : qualifiedIdentifierFinal {
+ $$ = &identSlices{idents: []*ast.IdentNode{$1.Node}, dots: $1.Runes}
+ }
+ | qualifiedIdentifierLeading qualifiedIdentifierFinal {
+ $1.idents = append($1.idents, $2.Node)
+ $1.dots = append($1.dots, $2.Runes...)
+ $$ = $1
+ }
+
+qualifiedIdentifierLeading : qualifiedIdentifierEntry {
+ $$ = &identSlices{idents: []*ast.IdentNode{$1.Node}, dots: $1.Runes}
+ }
+ | qualifiedIdentifierLeading qualifiedIdentifierEntry {
+ $1.idents = append($1.idents, $2.Node)
+ $1.dots = append($1.dots, $2.Runes...)
+ $$ = $1
+ }
+
+qualifiedIdentifierFinal : identifier {
+ $$ = newNodeWithRunes($1)
+ }
+ | qualifiedIdentifierEntry {
+ protolex.(*protoLex).Error("syntax error: unexpected '.'")
+ $$ = $1
+ }
+
+qualifiedIdentifierEntry : identifier '.' {
+ $$ = newNodeWithRunes($1, $2)
+ }
+
+// to mimic limitations of protoc recursive-descent parser,
+// we don't allow message statement keywords as identifiers
+// (or oneof statement keywords [e.g. "option"] below)
+
+msgElementIdent : msgElementName {
+ $$ = &identSlices{idents: []*ast.IdentNode{$1}}
+ }
+ | msgElementIdent '.' identifier {
+ $1.idents = append($1.idents, $3)
+ $1.dots = append($1.dots, $2)
+ $$ = $1
+ }
+
+extElementIdent : extElementName {
+ $$ = &identSlices{idents: []*ast.IdentNode{$1}}
+ }
+ | extElementIdent '.' identifier {
+ $1.idents = append($1.idents, $3)
+ $1.dots = append($1.dots, $2)
+ $$ = $1
+ }
+
+oneofElementIdent : oneofElementName {
+ $$ = &identSlices{idents: []*ast.IdentNode{$1}}
+ }
+ | oneofElementIdent '.' identifier {
+ $1.idents = append($1.idents, $3)
+ $1.dots = append($1.dots, $2)
+ $$ = $1
+ }
+
+notGroupElementIdent : notGroupElementName {
+ $$ = &identSlices{idents: []*ast.IdentNode{$1}}
+ }
+ | notGroupElementIdent '.' identifier {
+ $1.idents = append($1.idents, $3)
+ $1.dots = append($1.dots, $2)
+ $$ = $1
+ }
+
+mtdElementIdent : mtdElementIdentFinal {
+ $$ = &identSlices{idents: []*ast.IdentNode{$1.Node}, dots: $1.Runes}
+ }
+ | mtdElementIdentLeading mtdElementIdentFinal {
+ $1.idents = append($1.idents, $2.Node)
+ $1.dots = append($1.dots, $2.Runes...)
+ $$ = $1
+ }
+
+mtdElementIdentLeading : mtdElementIdentEntry {
+ $$ = &identSlices{idents: []*ast.IdentNode{$1.Node}, dots: $1.Runes}
+ }
+ | mtdElementIdentLeading mtdElementIdentEntry {
+ $1.idents = append($1.idents, $2.Node)
+ $1.dots = append($1.dots, $2.Runes...)
+ $$ = $1
+ }
+
+mtdElementIdentFinal : mtdElementName {
+ $$ = newNodeWithRunes($1)
+ }
+ | mtdElementIdentEntry {
+ protolex.(*protoLex).Error("syntax error: unexpected '.'")
+ $$ = $1
+ }
+
+mtdElementIdentEntry : mtdElementName '.' {
+ $$ = newNodeWithRunes($1, $2)
+ }
+
+oneofOptionDecl : _OPTION optionName '=' optionValue semicolon {
+ optName := ast.NewOptionNameNode($2.refs, $2.dots)
+ $$ = ast.NewOptionNode($1.ToKeyword(), optName, $3, $4, $5)
+ }
+
+optionDecl : _OPTION optionName '=' optionValue semicolons {
+ optName := ast.NewOptionNameNode($2.refs, $2.dots)
+ semi, extra := protolex.(*protoLex).requireSemicolon($5)
+ $$ = newNodeWithRunes(ast.NewOptionNode($1.ToKeyword(), optName, $3, $4, semi), extra...)
+ }
+
+optionNamePart : identifier {
+ $$ = ast.NewFieldReferenceNode($1)
+ }
+ | extensionName {
+ $$ = $1
+ }
+
+optionNameEntry : optionNamePart '.' {
+ $$ = newNodeWithRunes($1, $2)
+ }
+
+optionNameFinal : optionNamePart {
+ $$ = newNodeWithRunes($1)
+ }
+ | optionNameEntry {
+ protolex.(*protoLex).Error("syntax error: unexpected '.'")
+ $$ = $1
+ }
+
+optionNameLeading : optionNameEntry {
+ $$ = &fieldRefSlices{refs: []*ast.FieldReferenceNode{$1.Node}, dots: $1.Runes}
+ }
+ | optionNameLeading optionNameEntry {
+ $1.refs = append($1.refs, $2.Node)
+ $1.dots = append($1.dots, $2.Runes...)
+ $$ = $1
+ }
+
+optionName : optionNameFinal {
+ $$ = &fieldRefSlices{refs: []*ast.FieldReferenceNode{$1.Node}, dots: $1.Runes}
+ }
+ | optionNameLeading optionNameFinal {
+ $1.refs = append($1.refs, $2.Node)
+ $1.dots = append($1.dots, $2.Runes...)
+ $$ = $1
+ }
+
+extensionName : '(' typeName ')' {
+ $$ = ast.NewExtensionFieldReferenceNode($1, $2, $3)
+ }
+
+optionValue : scalarValue
+ | messageLiteralWithBraces
+
+scalarValue : stringLit {
+ $$ = toStringValueNode($1)
+ }
+ | numLit
+ | specialFloatLit
+ | identifier {
+ $$ = $1
+ }
+
+numLit : _FLOAT_LIT {
+ $$ = $1
+ }
+ | '-' _FLOAT_LIT {
+ $$ = ast.NewSignedFloatLiteralNode($1, $2)
+ }
+ | _INT_LIT {
+ $$ = $1
+ }
+ | '-' _INT_LIT {
+ if $2.Val > math.MaxInt64 + 1 {
+ // can't represent as int so treat as float literal
+ $$ = ast.NewSignedFloatLiteralNode($1, $2)
+ } else {
+ $$ = ast.NewNegativeIntLiteralNode($1, $2)
+ }
+ }
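+// Note: a magnitude of exactly math.MaxInt64+1 (9223372036854775808) is still
+// accepted as a negative integer literal because it equals -math.MinInt64;
+// anything larger cannot be negated into an int64 and is modeled as a signed
+// float literal instead.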
+
+specialFloatLit : '-' _INF {
+ f := ast.NewSpecialFloatLiteralNode($2.ToKeyword())
+ $$ = ast.NewSignedFloatLiteralNode($1, f)
+ }
+ | '-' _NAN {
+ f := ast.NewSpecialFloatLiteralNode($2.ToKeyword())
+ $$ = ast.NewSignedFloatLiteralNode($1, f)
+ }
+
+stringLit : _STRING_LIT {
+ $$ = []*ast.StringLiteralNode{$1}
+ }
+ | stringLit _STRING_LIT {
+ $$ = append($1, $2)
+ }
+
+messageLiteralWithBraces : '{' messageTextFormat '}' {
+ if $2 == nil {
+ $$ = ast.NewMessageLiteralNode($1, nil, nil, $3)
+ } else {
+ fields, delimiters := $2.toNodes()
+ $$ = ast.NewMessageLiteralNode($1, fields, delimiters, $3)
+ }
+ }
+ | '{' '}' {
+ $$ = ast.NewMessageLiteralNode($1, nil, nil, $2)
+ }
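+// e.g. the value in `option (my.custom_opt) = { name: "x", id: 42 };` is a
+// message literal; as in text format, fields inside it may be separated by
+// ',' or ';' or by nothing at all.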
+
+messageTextFormat : messageLiteralFields
+
+messageLiteralFields : messageLiteralFieldEntry
+ | messageLiteralFieldEntry messageLiteralFields {
+ if $1 != nil {
+ $1.next = $2
+ $$ = $1
+ } else {
+ $$ = $2
+ }
+ }
+
+messageLiteralFieldEntry : messageLiteralField {
+ if $1 != nil {
+ $$ = &messageFieldList{field: $1}
+ } else {
+ $$ = nil
+ }
+ }
+ | messageLiteralField ',' {
+ if $1 != nil {
+ $$ = &messageFieldList{field: $1, delimiter: $2}
+ } else {
+ $$ = nil
+ }
+ }
+ | messageLiteralField ';' {
+ if $1 != nil {
+ $$ = &messageFieldList{field: $1, delimiter: $2}
+ } else {
+ $$ = nil
+ }
+ }
+ | error ',' {
+ $$ = nil
+ }
+ | error ';' {
+ $$ = nil
+ }
+ | error {
+ $$ = nil
+ }
+
+messageLiteralField : messageLiteralFieldName ':' fieldValue {
+ if $1 != nil && $2 != nil {
+ $$ = ast.NewMessageFieldNode($1, $2, $3)
+ } else {
+ $$ = nil
+ }
+ }
+ | messageLiteralFieldName messageValue {
+ if $1 != nil && $2 != nil {
+ $$ = ast.NewMessageFieldNode($1, nil, $2)
+ } else {
+ $$ = nil
+ }
+ }
+ | error ':' fieldValue {
+ $$ = nil
+ }
+
+messageLiteralFieldName : identifier {
+ $$ = ast.NewFieldReferenceNode($1)
+ }
+ | '[' qualifiedIdentifierDot ']' {
+ $$ = ast.NewExtensionFieldReferenceNode($1, $2.toIdentValueNode(nil), $3)
+ }
+ | '[' qualifiedIdentifierDot '/' qualifiedIdentifierDot ']' {
+ $$ = ast.NewAnyTypeReferenceNode($1, $2.toIdentValueNode(nil), $3, $4.toIdentValueNode(nil), $5)
+ }
+ | '[' error ']' {
+ $$ = nil
+ }
+
+fieldValue : fieldScalarValue
+ | messageLiteral
+ | listLiteral
+
+fieldScalarValue : stringLit {
+ $$ = toStringValueNode($1)
+ }
+ | numLit
+ | '-' identifier {
+ kw := $2.ToKeyword()
+ switch strings.ToLower(kw.Val) {
+ case "inf", "infinity", "nan":
+ // these are acceptable
+ default:
+ // anything else is not
+ protolex.(*protoLex).Error(`only identifiers "inf", "infinity", or "nan" may appear after negative sign`)
+ }
+ // we'll validate the identifier later
+ f := ast.NewSpecialFloatLiteralNode(kw)
+ $$ = ast.NewSignedFloatLiteralNode($1, f)
+ }
+ | identifier {
+ $$ = $1
+ }
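+// Only the identifiers "inf", "infinity", and "nan" (in any case) may follow a
+// '-' in field values, e.g. `weight: -inf`; anything else is reported as an
+// error above, though a node is still produced so parsing can continue.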
+
+messageValue : messageLiteral
+ | listOfMessagesLiteral
+
+messageLiteral : messageLiteralWithBraces
+ | '<' messageTextFormat '>' {
+ if $2 == nil {
+ $$ = ast.NewMessageLiteralNode($1, nil, nil, $3)
+ } else {
+ fields, delimiters := $2.toNodes()
+ $$ = ast.NewMessageLiteralNode($1, fields, delimiters, $3)
+ }
+ }
+ | '<' '>' {
+ $$ = ast.NewMessageLiteralNode($1, nil, nil, $2)
+ }
+
+listLiteral : '[' listElements ']' {
+ if $2 == nil {
+ $$ = ast.NewArrayLiteralNode($1, nil, nil, $3)
+ } else {
+ $$ = ast.NewArrayLiteralNode($1, $2.vals, $2.commas, $3)
+ }
+ }
+ | '[' ']' {
+ $$ = ast.NewArrayLiteralNode($1, nil, nil, $2)
+ }
+ | '[' error ']' {
+ $$ = ast.NewArrayLiteralNode($1, nil, nil, $3)
+ }
+
+listElements : listElement {
+ $$ = &valueSlices{vals: []ast.ValueNode{$1}}
+ }
+ | listElements ',' listElement {
+ $1.vals = append($1.vals, $3)
+ $1.commas = append($1.commas, $2)
+ $$ = $1
+ }
+
+listElement : fieldScalarValue
+ | messageLiteral
+
+listOfMessagesLiteral : '[' messageLiterals ']' {
+ if $2 == nil {
+ $$ = ast.NewArrayLiteralNode($1, nil, nil, $3)
+ } else {
+ $$ = ast.NewArrayLiteralNode($1, $2.vals, $2.commas, $3)
+ }
+ }
+ | '[' ']' {
+ $$ = ast.NewArrayLiteralNode($1, nil, nil, $2)
+ }
+ | '[' error ']' {
+ $$ = ast.NewArrayLiteralNode($1, nil, nil, $3)
+ }
+
+messageLiterals : messageLiteral {
+ $$ = &valueSlices{vals: []ast.ValueNode{$1}}
+ }
+ | messageLiterals ',' messageLiteral {
+ $1.vals = append($1.vals, $3)
+ $1.commas = append($1.commas, $2)
+ $$ = $1
+ }
+
+typeName : qualifiedIdentifierDot {
+ $$ = $1.toIdentValueNode(nil)
+ }
+ | '.' qualifiedIdentifierDot {
+ $$ = $2.toIdentValueNode($1)
+ }
+
+msgElementTypeIdent : msgElementIdent {
+ $$ = $1.toIdentValueNode(nil)
+ }
+ | '.' qualifiedIdentifier {
+ $$ = $2.toIdentValueNode($1)
+ }
+
+extElementTypeIdent : extElementIdent {
+ $$ = $1.toIdentValueNode(nil)
+ }
+ | '.' qualifiedIdentifier {
+ $$ = $2.toIdentValueNode($1)
+ }
+
+oneofElementTypeIdent : oneofElementIdent {
+ $$ = $1.toIdentValueNode(nil)
+ }
+ | '.' qualifiedIdentifier {
+ $$ = $2.toIdentValueNode($1)
+ }
+
+notGroupElementTypeIdent : notGroupElementIdent {
+ $$ = $1.toIdentValueNode(nil)
+ }
+ | '.' qualifiedIdentifier {
+ $$ = $2.toIdentValueNode($1)
+ }
+
+mtdElementTypeIdent : mtdElementIdent {
+ $$ = $1.toIdentValueNode(nil)
+ }
+ | '.' qualifiedIdentifierDot {
+ $$ = $2.toIdentValueNode($1)
+ }
+
+fieldCardinality : _REQUIRED
+ | _OPTIONAL
+ | _REPEATED
+
+compactOptions : '[' compactOptionDecls ']' {
+ $$ = ast.NewCompactOptionsNode($1, $2.options, $2.commas, $3)
+ }
+ | '[' ']' {
+ protolex.(*protoLex).Error("compact options must have at least one option")
+ $$ = ast.NewCompactOptionsNode($1, nil, nil, $2)
+ }
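+// e.g. `[deprecated = true, (my.opt).flag = false]`; an empty `[]` is reported
+// as an error but still yields a node so parsing can continue.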
+
+compactOptionDecls : compactOptionFinal {
+ $$ = &compactOptionSlices{options: []*ast.OptionNode{$1.Node}, commas: $1.Runes}
+ }
+ | compactOptionLeadingDecls compactOptionFinal {
+ $1.options = append($1.options, $2.Node)
+ $1.commas = append($1.commas, $2.Runes...)
+ $$ = $1
+ }
+
+compactOptionLeadingDecls : compactOptionEntry {
+ $$ = &compactOptionSlices{options: []*ast.OptionNode{$1.Node}, commas: $1.Runes}
+ }
+ | compactOptionLeadingDecls compactOptionEntry {
+ $1.options = append($1.options, $2.Node)
+ $1.commas = append($1.commas, $2.Runes...)
+ $$ = $1
+ }
+
+compactOptionFinal : compactOption {
+ $$ = newNodeWithRunes($1)
+ }
+ | compactOptionEntry {
+ protolex.(*protoLex).Error("syntax error: unexpected ','")
+ $$ = $1
+ }
+
+compactOptionEntry : compactOption ',' {
+ $$ = newNodeWithRunes($1, $2)
+ }
+
+compactOption : optionName '=' optionValue {
+ optName := ast.NewOptionNameNode($1.refs, $1.dots)
+ $$ = ast.NewCompactOptionNode(optName, $2, $3)
+ }
+ | optionName {
+ optName := ast.NewOptionNameNode($1.refs, $1.dots)
+ protolex.(*protoLex).Error("compact option must have a value")
+ $$ = ast.NewCompactOptionNode(optName, nil, nil)
+ }
+
+
+groupDecl : fieldCardinality _GROUP identifier '=' _INT_LIT '{' messageBody '}' {
+ $$ = ast.NewGroupNode($1.ToKeyword(), $2.ToKeyword(), $3, $4, $5, nil, $6, $7, $8)
+ }
+ | fieldCardinality _GROUP identifier '=' _INT_LIT compactOptions '{' messageBody '}' {
+ $$ = ast.NewGroupNode($1.ToKeyword(), $2.ToKeyword(), $3, $4, $5, $6, $7, $8, $9)
+ }
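+// e.g. `optional group Result = 1 { optional string url = 2; }` (proto2 only)
+// declares both a nested message named Result and a field named result.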
+
+messageGroupDecl : fieldCardinality _GROUP identifier '=' _INT_LIT '{' messageBody '}' semicolons {
+ $$ = newNodeWithRunes(ast.NewGroupNode($1.ToKeyword(), $2.ToKeyword(), $3, $4, $5, nil, $6, $7, $8), $9...)
+ }
+ | fieldCardinality _GROUP identifier '=' _INT_LIT compactOptions '{' messageBody '}' semicolons {
+ $$ = newNodeWithRunes(ast.NewGroupNode($1.ToKeyword(), $2.ToKeyword(), $3, $4, $5, $6, $7, $8, $9), $10...)
+ }
+ | fieldCardinality _GROUP identifier '{' messageBody '}' semicolons {
+ $$ = newNodeWithRunes(ast.NewGroupNode($1.ToKeyword(), $2.ToKeyword(), $3, nil, nil, nil, $4, $5, $6), $7...)
+ }
+ | fieldCardinality _GROUP identifier compactOptions '{' messageBody '}' semicolons {
+ $$ = newNodeWithRunes(ast.NewGroupNode($1.ToKeyword(), $2.ToKeyword(), $3, nil, nil, $4, $5, $6, $7), $8...)
+ }
+
+oneofDecl : _ONEOF identifier '{' oneofBody '}' semicolons {
+ $$ = newNodeWithRunes(ast.NewOneofNode($1.ToKeyword(), $2, $3, $4, $5), $6...)
+ }
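+// e.g. `oneof result { string name = 1; SubMessage sub_message = 2; }`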
+
+oneofBody : {
+ $$ = nil
+ }
+ | oneofElements
+
+oneofElements : oneofElements oneofElement {
+ if $2 != nil {
+ $$ = append($1, $2)
+ } else {
+ $$ = $1
+ }
+ }
+ | oneofElement {
+ if $1 != nil {
+ $$ = []ast.OneofElement{$1}
+ } else {
+ $$ = nil
+ }
+ }
+
+oneofElement : oneofOptionDecl {
+ $$ = $1
+ }
+ | oneofFieldDecl {
+ $$ = $1
+ }
+ | oneofGroupDecl {
+ $$ = $1
+ }
+ | error ';' {
+ $$ = nil
+ }
+ | error {
+ $$ = nil
+ }
+
+oneofFieldDecl : oneofElementTypeIdent identifier '=' _INT_LIT semicolon {
+ $$ = ast.NewFieldNode(nil, $1, $2, $3, $4, nil, $5)
+ }
+ | oneofElementTypeIdent identifier '=' _INT_LIT compactOptions semicolon {
+ $$ = ast.NewFieldNode(nil, $1, $2, $3, $4, $5, $6)
+ }
+ | oneofElementTypeIdent identifier semicolon {
+ $$ = ast.NewFieldNode(nil, $1, $2, nil, nil, nil, $3)
+ }
+ | oneofElementTypeIdent identifier compactOptions semicolon {
+ $$ = ast.NewFieldNode(nil, $1, $2, nil, nil, $3, $4)
+ }
+
+oneofGroupDecl : _GROUP identifier '=' _INT_LIT '{' messageBody '}' {
+ $$ = ast.NewGroupNode(nil, $1.ToKeyword(), $2, $3, $4, nil, $5, $6, $7)
+ }
+ | _GROUP identifier '=' _INT_LIT compactOptions '{' messageBody '}' {
+ $$ = ast.NewGroupNode(nil, $1.ToKeyword(), $2, $3, $4, $5, $6, $7, $8)
+ }
+ | _GROUP identifier '{' messageBody '}' {
+ $$ = ast.NewGroupNode(nil, $1.ToKeyword(), $2, nil, nil, nil, $3, $4, $5)
+ }
+ | _GROUP identifier compactOptions '{' messageBody '}' {
+ $$ = ast.NewGroupNode(nil, $1.ToKeyword(), $2, nil, nil, $3, $4, $5, $6)
+ }
+
+
+mapFieldDecl : mapType identifier '=' _INT_LIT semicolons {
+ semi, extra := protolex.(*protoLex).requireSemicolon($5)
+ $$ = newNodeWithRunes(ast.NewMapFieldNode($1, $2, $3, $4, nil, semi), extra...)
+ }
+ | mapType identifier '=' _INT_LIT compactOptions semicolons {
+ semi, extra := protolex.(*protoLex).requireSemicolon($6)
+ $$ = newNodeWithRunes(ast.NewMapFieldNode($1, $2, $3, $4, $5, semi), extra...)
+ }
+ | mapType identifier semicolons {
+ semi, extra := protolex.(*protoLex).requireSemicolon($3)
+ $$ = newNodeWithRunes(ast.NewMapFieldNode($1, $2, nil, nil, nil, semi), extra...)
+ }
+ | mapType identifier compactOptions semicolons {
+ semi, extra := protolex.(*protoLex).requireSemicolon($4)
+ $$ = newNodeWithRunes(ast.NewMapFieldNode($1, $2, nil, nil, $3, semi), extra...)
+ }
+
+mapType : _MAP '<' mapKeyType ',' typeName '>' {
+ $$ = ast.NewMapTypeNode($1.ToKeyword(), $2, $3, $4, $5, $6)
+ }
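+// e.g. `map<string, Project> projects = 3;`; the key must be an integer type,
+// bool, or string, which is why mapKeyType below is restricted.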
+
+mapKeyType : _INT32
+ | _INT64
+ | _UINT32
+ | _UINT64
+ | _SINT32
+ | _SINT64
+ | _FIXED32
+ | _FIXED64
+ | _SFIXED32
+ | _SFIXED64
+ | _BOOL
+ | _STRING
+
+extensionRangeDecl : _EXTENSIONS tagRanges ';' semicolons {
+ // TODO: Tolerate a missing semicolon here. This currently creates a shift/reduce conflict
+ // between `extensions 1 to 10` and `extensions 1` followed by `to = 10`.
+ $$ = newNodeWithRunes(ast.NewExtensionRangeNode($1.ToKeyword(), $2.ranges, $2.commas, nil, $3), $4...)
+ }
+ | _EXTENSIONS tagRanges compactOptions semicolons {
+ semi, extra := protolex.(*protoLex).requireSemicolon($4)
+ $$ = newNodeWithRunes(ast.NewExtensionRangeNode($1.ToKeyword(), $2.ranges, $2.commas, $3, semi), extra...)
+ }
+
+tagRanges : tagRange {
+ $$ = &rangeSlices{ranges: []*ast.RangeNode{$1}}
+ }
+ | tagRanges ',' tagRange {
+ $1.ranges = append($1.ranges, $3)
+ $1.commas = append($1.commas, $2)
+ $$ = $1
+ }
+
+tagRange : _INT_LIT {
+ $$ = ast.NewRangeNode($1, nil, nil, nil)
+ }
+ | _INT_LIT _TO _INT_LIT {
+ $$ = ast.NewRangeNode($1, $2.ToKeyword(), $3, nil)
+ }
+ | _INT_LIT _TO _MAX {
+ $$ = ast.NewRangeNode($1, $2.ToKeyword(), nil, $3.ToKeyword())
+ }
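+// e.g. `extensions 100 to 199;` or `extensions 1000 to max;`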
+
+enumValueRanges : enumValueRange {
+ $$ = &rangeSlices{ranges: []*ast.RangeNode{$1}}
+ }
+ | enumValueRanges ',' enumValueRange {
+ $1.ranges = append($1.ranges, $3)
+ $1.commas = append($1.commas, $2)
+ $$ = $1
+ }
+
+enumValueRange : enumValueNumber {
+ $$ = ast.NewRangeNode($1, nil, nil, nil)
+ }
+ | enumValueNumber _TO enumValueNumber {
+ $$ = ast.NewRangeNode($1, $2.ToKeyword(), $3, nil)
+ }
+ | enumValueNumber _TO _MAX {
+ $$ = ast.NewRangeNode($1, $2.ToKeyword(), nil, $3.ToKeyword())
+ }
+
+enumValueNumber : _INT_LIT {
+ $$ = $1
+ }
+ | '-' _INT_LIT {
+ $$ = ast.NewNegativeIntLiteralNode($1, $2)
+ }
+
+msgReserved : _RESERVED tagRanges ';' semicolons {
+ // TODO: Tolerate a missing semicolon here. This currently creates a shift/reduce conflict
+ // between `reserved 1 to 10` and `reserved 1` followed by `to = 10`.
+ $$ = newNodeWithRunes(ast.NewReservedRangesNode($1.ToKeyword(), $2.ranges, $2.commas, $3), $4...)
+ }
+ | reservedNames
+
+enumReserved : _RESERVED enumValueRanges ';' semicolons {
+ // TODO: Tolerate a missing semicolon here. This currently creates a shift/reduce conflict
+ // between `reserved 1 to 10` and `reserved 1` followed by `to = 10`.
+ $$ = newNodeWithRunes(ast.NewReservedRangesNode($1.ToKeyword(), $2.ranges, $2.commas, $3), $4...)
+ }
+ | reservedNames
+
+reservedNames : _RESERVED fieldNameStrings semicolons {
+ semi, extra := protolex.(*protoLex).requireSemicolon($3)
+ $$ = newNodeWithRunes(ast.NewReservedNamesNode($1.ToKeyword(), $2.names, $2.commas, semi), extra...)
+ }
+ | _RESERVED fieldNameIdents semicolons {
+ semi, extra := protolex.(*protoLex).requireSemicolon($3)
+ $$ = newNodeWithRunes(ast.NewReservedIdentifiersNode($1.ToKeyword(), $2.idents, $2.commas, semi), extra...)
+ }
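+// e.g. `reserved 2, 15, 9 to 11;` or `reserved "foo", "bar";`; fieldNameIdents
+// covers the unquoted form (`reserved foo, bar;`) used by editions syntax.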
+
+fieldNameStrings : stringLit {
+ $$ = &nameSlices{names: []ast.StringValueNode{toStringValueNode($1)}}
+ }
+ | fieldNameStrings ',' stringLit {
+ $1.names = append($1.names, toStringValueNode($3))
+ $1.commas = append($1.commas, $2)
+ $$ = $1
+ }
+
+fieldNameIdents : identifier {
+ $$ = &nameSlices{idents: []*ast.IdentNode{$1}}
+ }
+ | fieldNameIdents ',' identifier {
+ $1.idents = append($1.idents, $3)
+ $1.commas = append($1.commas, $2)
+ $$ = $1
+ }
+
+enumDecl : _ENUM identifier '{' enumBody '}' semicolons {
+ $$ = newNodeWithRunes(ast.NewEnumNode($1.ToKeyword(), $2, $3, $4, $5), $6...)
+ }
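+// e.g. `enum Status { STATUS_UNKNOWN = 0; STATUS_STARTED = 1 [deprecated = true]; }`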
+
+enumBody : semicolons {
+ $$ = prependRunes(toEnumElement, $1, nil)
+ }
+ | semicolons enumElements {
+ $$ = prependRunes(toEnumElement, $1, $2)
+ }
+
+enumElements : enumElements enumElement {
+ $$ = append($1, $2...)
+ }
+ | enumElement {
+ $$ = $1
+ }
+
+enumElement : optionDecl {
+ $$ = toElements[ast.EnumElement](toEnumElement, $1.Node, $1.Runes)
+ }
+ | enumValueDecl {
+ $$ = toElements[ast.EnumElement](toEnumElement, $1.Node, $1.Runes)
+ }
+ | enumReserved {
+ $$ = toElements[ast.EnumElement](toEnumElement, $1.Node, $1.Runes)
+ }
+ | error {
+ $$ = nil
+ }
+
+enumValueDecl : enumValueName '=' enumValueNumber semicolons {
+ semi, extra := protolex.(*protoLex).requireSemicolon($4)
+ $$ = newNodeWithRunes(ast.NewEnumValueNode($1, $2, $3, nil, semi), extra...)
+ }
+ | enumValueName '=' enumValueNumber compactOptions semicolons {
+ semi, extra := protolex.(*protoLex).requireSemicolon($5)
+ $$ = newNodeWithRunes(ast.NewEnumValueNode($1, $2, $3, $4, semi), extra...)
+ }
+
+messageDecl : _MESSAGE identifier '{' messageBody '}' semicolons {
+ $$ = newNodeWithRunes(ast.NewMessageNode($1.ToKeyword(), $2, $3, $4, $5), $6...)
+ }
+
+messageBody : semicolons {
+ $$ = prependRunes(toMessageElement, $1, nil)
+ }
+ | semicolons messageElements {
+ $$ = prependRunes(toMessageElement, $1, $2)
+ }
+
+messageElements : messageElements messageElement {
+ $$ = append($1, $2...)
+ }
+ | messageElement {
+ $$ = $1
+ }
+
+messageElement : messageFieldDecl {
+ $$ = toElements[ast.MessageElement](toMessageElement, $1.Node, $1.Runes)
+ }
+ | enumDecl {
+ $$ = toElements[ast.MessageElement](toMessageElement, $1.Node, $1.Runes)
+ }
+ | messageDecl {
+ $$ = toElements[ast.MessageElement](toMessageElement, $1.Node, $1.Runes)
+ }
+ | extensionDecl {
+ $$ = toElements[ast.MessageElement](toMessageElement, $1.Node, $1.Runes)
+ }
+ | extensionRangeDecl {
+ $$ = toElements[ast.MessageElement](toMessageElement, $1.Node, $1.Runes)
+ }
+ | messageGroupDecl {
+ $$ = toElements[ast.MessageElement](toMessageElement, $1.Node, $1.Runes)
+ }
+ | optionDecl {
+ $$ = toElements[ast.MessageElement](toMessageElement, $1.Node, $1.Runes)
+ }
+ | oneofDecl {
+ $$ = toElements[ast.MessageElement](toMessageElement, $1.Node, $1.Runes)
+ }
+ | mapFieldDecl {
+ $$ = toElements[ast.MessageElement](toMessageElement, $1.Node, $1.Runes)
+ }
+ | msgReserved {
+ $$ = toElements[ast.MessageElement](toMessageElement, $1.Node, $1.Runes)
+ }
+ | error {
+ $$ = nil
+ }
+
+messageFieldDecl : fieldCardinality notGroupElementTypeIdent identifier '=' _INT_LIT semicolons {
+ semis, extra := protolex.(*protoLex).requireSemicolon($6)
+ $$ = newNodeWithRunes(ast.NewFieldNode($1.ToKeyword(), $2, $3, $4, $5, nil, semis), extra...)
+ }
+ | fieldCardinality notGroupElementTypeIdent identifier '=' _INT_LIT compactOptions semicolons {
+ semis, extra := protolex.(*protoLex).requireSemicolon($7)
+ $$ = newNodeWithRunes(ast.NewFieldNode($1.ToKeyword(), $2, $3, $4, $5, $6, semis), extra...)
+ }
+ | msgElementTypeIdent identifier '=' _INT_LIT semicolons {
+ semis, extra := protolex.(*protoLex).requireSemicolon($5)
+ $$ = newNodeWithRunes(ast.NewFieldNode(nil, $1, $2, $3, $4, nil, semis), extra...)
+ }
+ | msgElementTypeIdent identifier '=' _INT_LIT compactOptions semicolons {
+ semis, extra := protolex.(*protoLex).requireSemicolon($6)
+ $$ = newNodeWithRunes(ast.NewFieldNode(nil, $1, $2, $3, $4, $5, semis), extra...)
+ }
+ | fieldCardinality notGroupElementTypeIdent identifier semicolons {
+ semis, extra := protolex.(*protoLex).requireSemicolon($4)
+ $$ = newNodeWithRunes(ast.NewFieldNode($1.ToKeyword(), $2, $3, nil, nil, nil, semis), extra...)
+ }
+ | fieldCardinality notGroupElementTypeIdent identifier compactOptions semicolons {
+ semis, extra := protolex.(*protoLex).requireSemicolon($5)
+ $$ = newNodeWithRunes(ast.NewFieldNode($1.ToKeyword(), $2, $3, nil, nil, $4, semis), extra...)
+ }
+ | msgElementTypeIdent identifier semicolons {
+ semis, extra := protolex.(*protoLex).requireSemicolon($3)
+ $$ = newNodeWithRunes(ast.NewFieldNode(nil, $1, $2, nil, nil, nil, semis), extra...)
+ }
+ | msgElementTypeIdent identifier compactOptions semicolons {
+ semis, extra := protolex.(*protoLex).requireSemicolon($4)
+ $$ = newNodeWithRunes(ast.NewFieldNode(nil, $1, $2, nil, nil, $3, semis), extra...)
+ }
+
+
+extensionDecl : _EXTEND typeName '{' extensionBody '}' semicolons {
+ $$ = newNodeWithRunes(ast.NewExtendNode($1.ToKeyword(), $2, $3, $4, $5), $6...)
+ }
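+// e.g. `extend Foo { optional int32 bar = 126; }` declares an extension field
+// bar for message Foo.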
+
+extensionBody : {
+ $$ = nil
+ }
+ | extensionElements
+
+extensionElements : extensionElements extensionElement {
+ if $2 != nil {
+ $$ = append($1, $2)
+ } else {
+ $$ = $1
+ }
+ }
+ | extensionElement {
+ if $1 != nil {
+ $$ = []ast.ExtendElement{$1}
+ } else {
+ $$ = nil
+ }
+ }
+
+extensionElement : extensionFieldDecl {
+ $$ = $1
+ }
+ | groupDecl {
+ $$ = $1
+ }
+ | error ';' {
+ $$ = nil
+ }
+ | error {
+ $$ = nil
+ }
+
+extensionFieldDecl : fieldCardinality notGroupElementTypeIdent identifier '=' _INT_LIT semicolon {
+ $$ = ast.NewFieldNode($1.ToKeyword(), $2, $3, $4, $5, nil, $6)
+ }
+ | fieldCardinality notGroupElementTypeIdent identifier '=' _INT_LIT compactOptions semicolon {
+ $$ = ast.NewFieldNode($1.ToKeyword(), $2, $3, $4, $5, $6, $7)
+ }
+ | extElementTypeIdent identifier '=' _INT_LIT semicolon {
+ $$ = ast.NewFieldNode(nil, $1, $2, $3, $4, nil, $5)
+ }
+ | extElementTypeIdent identifier '=' _INT_LIT compactOptions semicolon {
+ $$ = ast.NewFieldNode(nil, $1, $2, $3, $4, $5, $6)
+ }
+
+serviceDecl : _SERVICE identifier '{' serviceBody '}' semicolons {
+ $$ = newNodeWithRunes(ast.NewServiceNode($1.ToKeyword(), $2, $3, $4, $5), $6...)
+ }
+
+serviceBody : semicolons {
+ $$ = prependRunes(toServiceElement, $1, nil)
+ }
+ | semicolons serviceElements {
+ $$ = prependRunes(toServiceElement, $1, $2)
+ }
+
+serviceElements : serviceElements serviceElement {
+ $$ = append($1, $2...)
+ }
+ | serviceElement {
+ $$ = $1
+ }
+
+// NB: the documented grammar suggests support for a "stream" declaration,
+// separate from "rpc", but it does not appear to be supported in protoc (the
+// doc likely comes from the grammar for a Google-internal version of protoc
+// that supports streaming Stubby)
+serviceElement : optionDecl {
+ $$ = toElements[ast.ServiceElement](toServiceElement, $1.Node, $1.Runes)
+ }
+ | methodDecl {
+ $$ = toElements[ast.ServiceElement](toServiceElement, $1.Node, $1.Runes)
+ }
+ | error {
+ $$ = nil
+ }
+
+methodDecl : _RPC identifier methodMessageType _RETURNS methodMessageType semicolons {
+ semi, extra := protolex.(*protoLex).requireSemicolon($6)
+ $$ = newNodeWithRunes(ast.NewRPCNode($1.ToKeyword(), $2, $3, $4.ToKeyword(), $5, semi), extra...)
+ }
+ | _RPC identifier methodMessageType _RETURNS methodMessageType '{' methodBody '}' semicolons {
+ $$ = newNodeWithRunes(ast.NewRPCNodeWithBody($1.ToKeyword(), $2, $3, $4.ToKeyword(), $5, $6, $7, $8), $9...)
+ }
+
+methodMessageType : '(' _STREAM typeName ')' {
+ $$ = ast.NewRPCTypeNode($1, $2.ToKeyword(), $3, $4)
+ }
+ | '(' mtdElementTypeIdent ')' {
+ $$ = ast.NewRPCTypeNode($1, nil, $2, $3)
+ }
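+// e.g. `rpc Watch (stream WatchRequest) returns (stream WatchResponse);`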
+
+methodBody : semicolons {
+ $$ = prependRunes(toMethodElement, $1, nil)
+ }
+ | semicolons methodElements {
+ $$ = prependRunes(toMethodElement, $1, $2)
+ }
+
+methodElements : methodElements methodElement {
+ $$ = append($1, $2...)
+ }
+ | methodElement {
+ $$ = $1
+ }
+
+methodElement : optionDecl {
+ $$ = toElements[ast.RPCElement](toMethodElement, $1.Node, $1.Runes)
+ }
+ | error {
+ $$ = nil
+ }
+
+// excludes message, enum, oneof, extensions, reserved, extend,
+// option, group, optional, required, and repeated
+msgElementName : _NAME
+ | _SYNTAX
+ | _EDITION
+ | _IMPORT
+ | _WEAK
+ | _PUBLIC
+ | _PACKAGE
+ | _TRUE
+ | _FALSE
+ | _INF
+ | _NAN
+ | _DOUBLE
+ | _FLOAT
+ | _INT32
+ | _INT64
+ | _UINT32
+ | _UINT64
+ | _SINT32
+ | _SINT64
+ | _FIXED32
+ | _FIXED64
+ | _SFIXED32
+ | _SFIXED64
+ | _BOOL
+ | _STRING
+ | _BYTES
+ | _MAP
+ | _TO
+ | _MAX
+ | _SERVICE
+ | _RPC
+ | _STREAM
+ | _RETURNS
+
+// excludes group, optional, required, and repeated
+extElementName : _NAME
+ | _SYNTAX
+ | _EDITION
+ | _IMPORT
+ | _WEAK
+ | _PUBLIC
+ | _PACKAGE
+ | _OPTION
+ | _TRUE
+ | _FALSE
+ | _INF
+ | _NAN
+ | _DOUBLE
+ | _FLOAT
+ | _INT32
+ | _INT64
+ | _UINT32
+ | _UINT64
+ | _SINT32
+ | _SINT64
+ | _FIXED32
+ | _FIXED64
+ | _SFIXED32
+ | _SFIXED64
+ | _BOOL
+ | _STRING
+ | _BYTES
+ | _ONEOF
+ | _MAP
+ | _EXTENSIONS
+ | _TO
+ | _MAX
+ | _RESERVED
+ | _ENUM
+ | _MESSAGE
+ | _EXTEND
+ | _SERVICE
+ | _RPC
+ | _STREAM
+ | _RETURNS
+
+// excludes reserved, option
+enumValueName : _NAME
+ | _SYNTAX
+ | _EDITION
+ | _IMPORT
+ | _WEAK
+ | _PUBLIC
+ | _PACKAGE
+ | _TRUE
+ | _FALSE
+ | _INF
+ | _NAN
+ | _REPEATED
+ | _OPTIONAL
+ | _REQUIRED
+ | _DOUBLE
+ | _FLOAT
+ | _INT32
+ | _INT64
+ | _UINT32
+ | _UINT64
+ | _SINT32
+ | _SINT64
+ | _FIXED32
+ | _FIXED64
+ | _SFIXED32
+ | _SFIXED64
+ | _BOOL
+ | _STRING
+ | _BYTES
+ | _GROUP
+ | _ONEOF
+ | _MAP
+ | _EXTENSIONS
+ | _TO
+ | _MAX
+ | _ENUM
+ | _MESSAGE
+ | _EXTEND
+ | _SERVICE
+ | _RPC
+ | _STREAM
+ | _RETURNS
+
+// excludes group, option, optional, required, and repeated
+oneofElementName : _NAME
+ | _SYNTAX
+ | _EDITION
+ | _IMPORT
+ | _WEAK
+ | _PUBLIC
+ | _PACKAGE
+ | _TRUE
+ | _FALSE
+ | _INF
+ | _NAN
+ | _DOUBLE
+ | _FLOAT
+ | _INT32
+ | _INT64
+ | _UINT32
+ | _UINT64
+ | _SINT32
+ | _SINT64
+ | _FIXED32
+ | _FIXED64
+ | _SFIXED32
+ | _SFIXED64
+ | _BOOL
+ | _STRING
+ | _BYTES
+ | _ONEOF
+ | _MAP
+ | _EXTENSIONS
+ | _TO
+ | _MAX
+ | _RESERVED
+ | _ENUM
+ | _MESSAGE
+ | _EXTEND
+ | _SERVICE
+ | _RPC
+ | _STREAM
+ | _RETURNS
+
+// excludes group
+notGroupElementName : _NAME
+ | _SYNTAX
+ | _EDITION
+ | _IMPORT
+ | _WEAK
+ | _PUBLIC
+ | _PACKAGE
+ | _OPTION
+ | _TRUE
+ | _FALSE
+ | _INF
+ | _NAN
+ | _REPEATED
+ | _OPTIONAL
+ | _REQUIRED
+ | _DOUBLE
+ | _FLOAT
+ | _INT32
+ | _INT64
+ | _UINT32
+ | _UINT64
+ | _SINT32
+ | _SINT64
+ | _FIXED32
+ | _FIXED64
+ | _SFIXED32
+ | _SFIXED64
+ | _BOOL
+ | _STRING
+ | _BYTES
+ | _ONEOF
+ | _MAP
+ | _EXTENSIONS
+ | _TO
+ | _MAX
+ | _RESERVED
+ | _ENUM
+ | _MESSAGE
+ | _EXTEND
+ | _SERVICE
+ | _RPC
+ | _STREAM
+ | _RETURNS
+
+// excludes stream
+mtdElementName : _NAME
+ | _SYNTAX
+ | _EDITION
+ | _IMPORT
+ | _WEAK
+ | _PUBLIC
+ | _PACKAGE
+ | _OPTION
+ | _TRUE
+ | _FALSE
+ | _INF
+ | _NAN
+ | _REPEATED
+ | _OPTIONAL
+ | _REQUIRED
+ | _DOUBLE
+ | _FLOAT
+ | _INT32
+ | _INT64
+ | _UINT32
+ | _UINT64
+ | _SINT32
+ | _SINT64
+ | _FIXED32
+ | _FIXED64
+ | _SFIXED32
+ | _SFIXED64
+ | _BOOL
+ | _STRING
+ | _BYTES
+ | _GROUP
+ | _ONEOF
+ | _MAP
+ | _EXTENSIONS
+ | _TO
+ | _MAX
+ | _RESERVED
+ | _ENUM
+ | _MESSAGE
+ | _EXTEND
+ | _SERVICE
+ | _RPC
+ | _RETURNS
+
+identifier : _NAME
+ | _SYNTAX
+ | _EDITION
+ | _IMPORT
+ | _WEAK
+ | _PUBLIC
+ | _PACKAGE
+ | _OPTION
+ | _TRUE
+ | _FALSE
+ | _INF
+ | _NAN
+ | _REPEATED
+ | _OPTIONAL
+ | _REQUIRED
+ | _DOUBLE
+ | _FLOAT
+ | _INT32
+ | _INT64
+ | _UINT32
+ | _UINT64
+ | _SINT32
+ | _SINT64
+ | _FIXED32
+ | _FIXED64
+ | _SFIXED32
+ | _SFIXED64
+ | _BOOL
+ | _STRING
+ | _BYTES
+ | _GROUP
+ | _ONEOF
+ | _MAP
+ | _EXTENSIONS
+ | _TO
+ | _MAX
+ | _RESERVED
+ | _ENUM
+ | _MESSAGE
+ | _EXTEND
+ | _SERVICE
+ | _RPC
+ | _STREAM
+ | _RETURNS
+
+%%
diff --git a/vendor/github.com/bufbuild/protocompile/parser/proto.y.go b/vendor/github.com/bufbuild/protocompile/parser/proto.y.go
new file mode 100644
index 0000000..048e5cc
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/parser/proto.y.go
@@ -0,0 +1,2659 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Code generated by goyacc -o proto.y.go -l -p proto proto.y. DO NOT EDIT.
+package parser
+
+import __yyfmt__ "fmt"
+
+//lint:file-ignore SA4006 generated parser has unused values
+
+import (
+ "math"
+ "strings"
+
+ "github.com/bufbuild/protocompile/ast"
+)
+
+type protoSymType struct {
+ yys int
+ file *ast.FileNode
+ syn *ast.SyntaxNode
+ ed *ast.EditionNode
+ fileElements []ast.FileElement
+ pkg nodeWithRunes[*ast.PackageNode]
+ imprt nodeWithRunes[*ast.ImportNode]
+ msg nodeWithRunes[*ast.MessageNode]
+ msgElements []ast.MessageElement
+ fld *ast.FieldNode
+ msgFld nodeWithRunes[*ast.FieldNode]
+ mapFld nodeWithRunes[*ast.MapFieldNode]
+ mapType *ast.MapTypeNode
+ grp *ast.GroupNode
+ msgGrp nodeWithRunes[*ast.GroupNode]
+ oo nodeWithRunes[*ast.OneofNode]
+ ooElement ast.OneofElement
+ ooElements []ast.OneofElement
+ ext nodeWithRunes[*ast.ExtensionRangeNode]
+ resvd nodeWithRunes[*ast.ReservedNode]
+ en nodeWithRunes[*ast.EnumNode]
+ enElements []ast.EnumElement
+ env nodeWithRunes[*ast.EnumValueNode]
+ extend nodeWithRunes[*ast.ExtendNode]
+ extElement ast.ExtendElement
+ extElements []ast.ExtendElement
+ svc nodeWithRunes[*ast.ServiceNode]
+ svcElements []ast.ServiceElement
+ mtd nodeWithRunes[*ast.RPCNode]
+ mtdMsgType *ast.RPCTypeNode
+ mtdElements []ast.RPCElement
+ optRaw *ast.OptionNode
+ opt nodeWithRunes[*ast.OptionNode]
+ opts *compactOptionSlices
+ refRaw *ast.FieldReferenceNode
+ ref nodeWithRunes[*ast.FieldReferenceNode]
+ optNms *fieldRefSlices
+ cmpctOpts *ast.CompactOptionsNode
+ rng *ast.RangeNode
+ rngs *rangeSlices
+ names *nameSlices
+ cidPart nodeWithRunes[*ast.IdentNode]
+ cid *identSlices
+ tid ast.IdentValueNode
+ sl *valueSlices
+ msgLitFlds *messageFieldList
+ msgLitFld *ast.MessageFieldNode
+ v ast.ValueNode
+ il ast.IntValueNode
+ str []*ast.StringLiteralNode
+ s *ast.StringLiteralNode
+ i *ast.UintLiteralNode
+ f *ast.FloatLiteralNode
+ id *ast.IdentNode
+ b *ast.RuneNode
+ bs []*ast.RuneNode
+ err error
+}
+
+const _STRING_LIT = 57346
+const _INT_LIT = 57347
+const _FLOAT_LIT = 57348
+const _NAME = 57349
+const _SYNTAX = 57350
+const _EDITION = 57351
+const _IMPORT = 57352
+const _WEAK = 57353
+const _PUBLIC = 57354
+const _PACKAGE = 57355
+const _OPTION = 57356
+const _TRUE = 57357
+const _FALSE = 57358
+const _INF = 57359
+const _NAN = 57360
+const _REPEATED = 57361
+const _OPTIONAL = 57362
+const _REQUIRED = 57363
+const _DOUBLE = 57364
+const _FLOAT = 57365
+const _INT32 = 57366
+const _INT64 = 57367
+const _UINT32 = 57368
+const _UINT64 = 57369
+const _SINT32 = 57370
+const _SINT64 = 57371
+const _FIXED32 = 57372
+const _FIXED64 = 57373
+const _SFIXED32 = 57374
+const _SFIXED64 = 57375
+const _BOOL = 57376
+const _STRING = 57377
+const _BYTES = 57378
+const _GROUP = 57379
+const _ONEOF = 57380
+const _MAP = 57381
+const _EXTENSIONS = 57382
+const _TO = 57383
+const _MAX = 57384
+const _RESERVED = 57385
+const _ENUM = 57386
+const _MESSAGE = 57387
+const _EXTEND = 57388
+const _SERVICE = 57389
+const _RPC = 57390
+const _STREAM = 57391
+const _RETURNS = 57392
+const _ERROR = 57393
+
+var protoToknames = [...]string{
+ "$end",
+ "error",
+ "$unk",
+ "_STRING_LIT",
+ "_INT_LIT",
+ "_FLOAT_LIT",
+ "_NAME",
+ "_SYNTAX",
+ "_EDITION",
+ "_IMPORT",
+ "_WEAK",
+ "_PUBLIC",
+ "_PACKAGE",
+ "_OPTION",
+ "_TRUE",
+ "_FALSE",
+ "_INF",
+ "_NAN",
+ "_REPEATED",
+ "_OPTIONAL",
+ "_REQUIRED",
+ "_DOUBLE",
+ "_FLOAT",
+ "_INT32",
+ "_INT64",
+ "_UINT32",
+ "_UINT64",
+ "_SINT32",
+ "_SINT64",
+ "_FIXED32",
+ "_FIXED64",
+ "_SFIXED32",
+ "_SFIXED64",
+ "_BOOL",
+ "_STRING",
+ "_BYTES",
+ "_GROUP",
+ "_ONEOF",
+ "_MAP",
+ "_EXTENSIONS",
+ "_TO",
+ "_MAX",
+ "_RESERVED",
+ "_ENUM",
+ "_MESSAGE",
+ "_EXTEND",
+ "_SERVICE",
+ "_RPC",
+ "_STREAM",
+ "_RETURNS",
+ "_ERROR",
+ "'='",
+ "';'",
+ "':'",
+ "'{'",
+ "'}'",
+ "'\\\\'",
+ "'/'",
+ "'?'",
+ "'.'",
+ "','",
+ "'>'",
+ "'<'",
+ "'+'",
+ "'-'",
+ "'('",
+ "')'",
+ "'['",
+ "']'",
+ "'*'",
+ "'&'",
+ "'^'",
+ "'%'",
+ "'$'",
+ "'#'",
+ "'@'",
+ "'!'",
+ "'~'",
+ "'`'",
+}
+
+var protoStatenames = [...]string{}
+
+const protoEofCode = 1
+const protoErrCode = 2
+const protoInitialStackSize = 16
+
+var protoExca = [...]int16{
+ -1, 0,
+ 1, 6,
+ -2, 21,
+ -1, 1,
+ 1, -1,
+ -2, 0,
+ -1, 2,
+ 1, 1,
+ -2, 21,
+ -1, 3,
+ 1, 2,
+ -2, 21,
+ -1, 14,
+ 1, 7,
+ -2, 0,
+ -1, 89,
+ 52, 60,
+ 61, 60,
+ 69, 60,
+ -2, 61,
+ -1, 101,
+ 55, 37,
+ 58, 37,
+ 62, 37,
+ 67, 37,
+ 69, 37,
+ -2, 34,
+ -1, 112,
+ 52, 60,
+ 61, 60,
+ 69, 60,
+ -2, 62,
+ -1, 118,
+ 56, 249,
+ -2, 0,
+ -1, 121,
+ 55, 37,
+ 58, 37,
+ 62, 37,
+ 67, 37,
+ 69, 37,
+ -2, 35,
+ -1, 140,
+ 56, 225,
+ -2, 0,
+ -1, 142,
+ 56, 214,
+ -2, 0,
+ -1, 144,
+ 56, 250,
+ -2, 0,
+ -1, 198,
+ 56, 262,
+ -2, 0,
+ -1, 203,
+ 56, 83,
+ 62, 83,
+ -2, 0,
+ -1, 214,
+ 56, 226,
+ -2, 0,
+ -1, 271,
+ 56, 215,
+ -2, 0,
+ -1, 377,
+ 56, 263,
+ -2, 0,
+ -1, 464,
+ 56, 155,
+ -2, 0,
+ -1, 523,
+ 69, 144,
+ -2, 141,
+ -1, 531,
+ 56, 156,
+ -2, 0,
+ -1, 607,
+ 67, 52,
+ -2, 49,
+ -1, 665,
+ 69, 144,
+ -2, 142,
+ -1, 690,
+ 67, 52,
+ -2, 50,
+ -1, 732,
+ 56, 273,
+ -2, 0,
+ -1, 745,
+ 56, 274,
+ -2, 0,
+}
+
+const protoPrivate = 57344
+
+const protoLast = 2053
+
+var protoAct = [...]int16{
+ 140, 7, 746, 7, 7, 100, 139, 18, 440, 394,
+ 604, 436, 607, 439, 502, 39, 524, 596, 95, 532,
+ 496, 127, 437, 422, 520, 200, 32, 34, 523, 233,
+ 421, 40, 90, 93, 94, 405, 102, 106, 36, 96,
+ 109, 435, 272, 85, 378, 458, 326, 404, 21, 20,
+ 19, 107, 108, 149, 215, 202, 145, 98, 101, 86,
+ 663, 89, 449, 390, 134, 706, 703, 598, 707, 513,
+ 9, 652, 395, 510, 465, 9, 511, 396, 717, 651,
+ 507, 459, 459, 460, 452, 459, 456, 9, 506, 459,
+ 459, 462, 739, 90, 693, 451, 655, 598, 459, 9,
+ 680, 653, 459, 687, 508, 459, 423, 459, 124, 125,
+ 453, 115, 459, 459, 459, 134, 126, 133, 142, 138,
+ 131, 129, 497, 395, 198, 130, 423, 134, 199, 448,
+ 416, 388, 389, 711, 489, 395, 505, 119, 9, 387,
+ 207, 666, 488, 593, 9, 468, 472, 113, 222, 112,
+ 273, 386, 470, 462, 587, 9, 373, 120, 121, 385,
+ 110, 40, 110, 691, 674, 428, 424, 414, 374, 122,
+ 114, 375, 279, 760, 758, 754, 750, 104, 744, 743,
+ 741, 733, 729, 721, 695, 9, 424, 716, 753, 219,
+ 217, 218, 668, 383, 227, 376, 322, 270, 213, 728,
+ 719, 323, 713, 658, 464, 123, 379, 118, 117, 207,
+ 116, 5, 6, 104, 399, 9, 598, 104, 670, 324,
+ 31, 702, 222, 667, 493, 490, 9, 492, 430, 392,
+ 419, 111, 13, 12, 403, 599, 407, 408, 413, 528,
+ 463, 40, 381, 748, 726, 8, 412, 724, 397, 659,
+ 33, 415, 15, 656, 26, 26, 9, 37, 38, 384,
+ 210, 209, 105, 219, 217, 218, 103, 35, 227, 400,
+ 595, 417, 211, 212, 402, 23, 529, 594, 104, 273,
+ 409, 582, 406, 24, 413, 516, 25, 26, 382, 495,
+ 491, 4, 412, 33, 10, 11, 731, 745, 380, 197,
+ 377, 279, 475, 476, 477, 478, 479, 480, 481, 482,
+ 483, 484, 485, 486, 418, 22, 143, 28, 27, 29,
+ 30, 144, 274, 425, 141, 271, 220, 420, 275, 225,
+ 411, 426, 427, 410, 40, 530, 531, 214, 231, 224,
+ 221, 535, 147, 223, 429, 146, 534, 216, 204, 203,
+ 447, 499, 601, 538, 150, 228, 605, 99, 602, 327,
+ 540, 154, 234, 277, 606, 329, 542, 156, 237, 474,
+ 391, 393, 438, 132, 128, 87, 88, 432, 206, 91,
+ 431, 521, 518, 533, 522, 379, 17, 16, 434, 14,
+ 3, 2, 1, 0, 0, 442, 442, 0, 0, 0,
+ 0, 207, 0, 0, 457, 0, 0, 454, 455, 466,
+ 0, 469, 471, 0, 0, 0, 0, 0, 0, 450,
+ 473, 445, 433, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 444, 0, 494, 0, 0, 0, 0, 0,
+ 0, 0, 0, 487, 0, 0, 0, 498, 0, 442,
+ 461, 0, 0, 0, 467, 503, 514, 0, 0, 517,
+ 0, 525, 526, 0, 0, 90, 504, 0, 583, 584,
+ 0, 0, 0, 0, 0, 0, 0, 0, 586, 0,
+ 0, 0, 0, 0, 585, 0, 0, 0, 588, 0,
+ 591, 0, 509, 0, 0, 0, 0, 0, 527, 0,
+ 512, 515, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 592, 0, 660, 661, 657, 590,
+ 0, 0, 0, 0, 0, 0, 0, 90, 0, 0,
+ 654, 0, 0, 589, 0, 0, 0, 0, 0, 0,
+ 0, 597, 0, 90, 672, 673, 664, 40, 0, 0,
+ 665, 669, 0, 0, 671, 0, 0, 675, 0, 0,
+ 0, 0, 662, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 676, 0, 0, 0, 0, 0, 0,
+ 679, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 678, 0, 0, 0,
+ 0, 0, 0, 682, 0, 684, 689, 0, 690, 686,
+ 685, 0, 0, 0, 0, 0, 0, 0, 677, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 681, 683, 0, 688, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 442, 0,
+ 697, 0, 0, 699, 503, 696, 0, 692, 0, 701,
+ 0, 0, 0, 133, 0, 504, 131, 129, 710, 0,
+ 709, 130, 0, 0, 0, 0, 715, 712, 0, 700,
+ 704, 0, 0, 0, 0, 0, 720, 0, 0, 722,
+ 718, 714, 694, 0, 0, 698, 0, 0, 133, 0,
+ 0, 131, 129, 0, 727, 0, 130, 732, 705, 708,
+ 730, 0, 735, 725, 723, 0, 734, 0, 0, 0,
+ 0, 0, 0, 0, 749, 742, 0, 0, 0, 0,
+ 747, 736, 737, 0, 0, 755, 752, 0, 756, 0,
+ 0, 757, 0, 747, 0, 0, 751, 0, 0, 0,
+ 759, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 738, 501, 740, 33, 137, 135, 41, 42, 43, 44,
+ 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+ 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
+ 65, 66, 67, 68, 69, 70, 71, 72, 73, 74,
+ 75, 76, 77, 78, 79, 80, 81, 82, 83, 84,
+ 0, 0, 0, 0, 134, 0, 0, 0, 0, 0,
+ 0, 0, 395, 0, 441, 0, 0, 0, 500, 33,
+ 137, 135, 41, 42, 43, 44, 45, 46, 47, 48,
+ 49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
+ 59, 60, 61, 62, 63, 64, 65, 66, 67, 68,
+ 69, 70, 71, 72, 73, 74, 75, 76, 77, 78,
+ 79, 80, 81, 82, 83, 84, 0, 0, 0, 0,
+ 134, 0, 0, 0, 0, 0, 0, 0, 395, 0,
+ 441, 0, 0, 443, 33, 137, 135, 41, 42, 43,
+ 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+ 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+ 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
+ 74, 75, 76, 77, 78, 79, 80, 81, 82, 83,
+ 84, 0, 0, 0, 0, 134, 0, 0, 0, 0,
+ 0, 0, 0, 395, 0, 441, 41, 42, 43, 44,
+ 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+ 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
+ 65, 66, 67, 68, 69, 70, 71, 72, 73, 74,
+ 75, 76, 77, 78, 79, 80, 81, 82, 83, 84,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 205, 92, 0, 0, 519, 41,
+ 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
+ 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
+ 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
+ 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
+ 82, 83, 84, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 446, 0, 205, 0, 0, 0,
+ 208, 41, 42, 43, 44, 45, 46, 47, 48, 49,
+ 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
+ 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
+ 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 0, 0, 0, 0, 0,
+ 201, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 208, 33, 137, 135, 41, 42, 43, 44,
+ 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+ 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
+ 65, 66, 67, 68, 69, 70, 71, 72, 73, 74,
+ 75, 76, 77, 78, 79, 80, 81, 82, 83, 84,
+ 0, 0, 0, 0, 134, 0, 0, 0, 0, 0,
+ 205, 0, 0, 0, 136, 41, 42, 43, 44, 45,
+ 46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
+ 56, 57, 58, 59, 60, 61, 62, 63, 64, 65,
+ 66, 67, 68, 69, 70, 71, 72, 73, 74, 75,
+ 76, 77, 78, 79, 80, 81, 82, 83, 84, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 33, 423, 208, 41, 42, 43,
+ 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+ 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+ 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
+ 74, 75, 76, 77, 78, 79, 80, 81, 82, 83,
+ 84, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 424, 41, 42, 43, 44,
+ 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+ 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
+ 65, 66, 67, 68, 69, 70, 71, 72, 73, 74,
+ 75, 76, 77, 78, 79, 80, 81, 82, 83, 84,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 92, 41, 42, 43, 44,
+ 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+ 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
+ 65, 66, 67, 68, 69, 70, 71, 72, 73, 74,
+ 75, 76, 77, 78, 79, 80, 81, 82, 83, 84,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 97,
+ 608, 609, 610, 611, 612, 613, 614, 615, 616, 617,
+ 618, 619, 620, 621, 622, 623, 624, 625, 626, 627,
+ 628, 629, 630, 631, 632, 633, 634, 635, 636, 637,
+ 638, 639, 640, 641, 642, 643, 644, 645, 646, 647,
+ 648, 649, 600, 650, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 603, 330, 331, 332, 333, 334, 335,
+ 336, 337, 338, 339, 340, 341, 342, 343, 344, 345,
+ 346, 347, 348, 349, 350, 351, 352, 353, 354, 355,
+ 356, 357, 358, 359, 401, 360, 361, 362, 363, 364,
+ 365, 366, 367, 368, 369, 370, 371, 372, 0, 0,
+ 0, 0, 0, 226, 0, 0, 0, 328, 238, 239,
+ 240, 241, 242, 243, 244, 26, 245, 246, 247, 248,
+ 153, 152, 151, 249, 250, 251, 252, 253, 254, 255,
+ 256, 257, 258, 259, 260, 261, 262, 263, 0, 230,
+ 236, 229, 264, 265, 232, 28, 27, 29, 266, 267,
+ 268, 269, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 235, 330, 331, 332, 333, 334, 335, 336, 337,
+ 338, 339, 340, 341, 342, 343, 344, 345, 346, 347,
+ 348, 349, 350, 351, 352, 353, 354, 355, 356, 357,
+ 358, 359, 325, 360, 361, 362, 363, 364, 365, 366,
+ 367, 368, 369, 370, 371, 372, 0, 0, 0, 0,
+ 0, 148, 0, 0, 0, 328, 157, 158, 159, 160,
+ 161, 162, 163, 164, 165, 166, 167, 168, 153, 152,
+ 151, 169, 170, 171, 172, 173, 174, 175, 176, 177,
+ 178, 179, 180, 181, 182, 183, 0, 184, 185, 186,
+ 187, 188, 189, 190, 191, 192, 193, 194, 195, 196,
+ 0, 0, 0, 0, 0, 536, 0, 0, 0, 155,
+ 543, 544, 545, 546, 547, 548, 549, 537, 550, 551,
+ 552, 553, 0, 0, 0, 554, 555, 556, 557, 558,
+ 559, 560, 561, 562, 563, 564, 565, 566, 567, 568,
+ 539, 569, 570, 571, 572, 573, 574, 575, 576, 577,
+ 578, 579, 580, 581, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 541, 210, 209, 41, 42, 43, 44,
+ 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+ 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
+ 65, 66, 67, 68, 69, 70, 71, 72, 73, 74,
+ 75, 76, 77, 78, 79, 80, 81, 82, 83, 84,
+ 33, 406, 0, 41, 42, 43, 44, 45, 46, 47,
+ 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+ 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+ 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
+ 78, 79, 80, 81, 82, 83, 84, 276, 0, 0,
+ 0, 0, 280, 281, 282, 283, 284, 285, 286, 26,
+ 287, 288, 289, 290, 291, 292, 293, 294, 295, 296,
+ 297, 298, 299, 300, 301, 302, 303, 304, 305, 306,
+ 307, 308, 309, 310, 311, 312, 313, 314, 278, 315,
+ 316, 317, 318, 319, 320, 321, 398, 0, 0, 0,
+ 0, 41, 42, 43, 44, 45, 46, 47, 48, 49,
+ 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
+ 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
+ 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 41, 42, 43, 44, 45,
+ 46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
+ 56, 57, 58, 59, 60, 61, 62, 63, 64, 65,
+ 66, 67, 68, 69, 70, 71, 72, 73, 74, 75,
+ 76, 77, 78, 79, 80, 81, 82, 83, 84, 608,
+ 609, 610, 611, 612, 613, 614, 615, 616, 617, 618,
+ 619, 620, 621, 622, 623, 624, 625, 626, 627, 628,
+ 629, 630, 631, 632, 633, 634, 635, 636, 637, 638,
+ 639, 640, 641, 642, 643, 644, 645, 646, 647, 648,
+ 649, 0, 650,
+}
+
+var protoPact = [...]int16{
+ 203, -1000, 162, 162, -1000, 181, 180, 273, 167, -1000,
+ -1000, -1000, 289, 289, 273, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, 246, 1958, 1329, 1958, 1958, 1389,
+ 1958, -1000, 213, -1000, 209, -1000, 173, 289, 289, 102,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, 179, -1000, 1329, 110, -1000,
+ -1000, -1000, 1389, 155, 153, 152, -1000, 1958, -1000, 1958,
+ 109, -1000, 150, -1000, -1000, -1000, -1000, 173, 173, -1000,
+ 1958, 1149, -1000, -1000, -1000, 52, 162, 162, 1659, -1000,
+ -1000, -1000, -1000, 162, -1000, -1000, -1000, 162, -1000, -1000,
+ 274, -1000, -1000, -1000, 1084, -1000, 255, -1000, -1000, 142,
+ 1551, 141, 1865, 140, 1659, -1000, -1000, -1000, 166, 1605,
+ 1958, -1000, -1000, -1000, 108, 1958, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, 139, 240, -1000,
+ 137, -1000, -1000, 1208, 98, 78, 9, -1000, 1914, -1000,
+ -1000, -1000, -1000, 162, 1551, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, 1497, 1958, 277,
+ 1958, 1958, 1816, -1000, 107, 1958, 67, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ 162, 1865, -1000, -1000, -1000, -1000, -1000, 178, 1270, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, 162, -1000, -1000, 1958, 1958, 105, 1958, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, 176, 1958, 100, 162, 240, -1000, -1000,
+ -1000, -1000, 1958, -1000, -1000, -1000, -1000, -1000, -1000, 835,
+ 835, -1000, -1000, -1000, -1000, 1022, 60, 26, 41, -1000,
+ -1000, 1958, 1958, 34, 30, -1000, 199, 149, 22, 92,
+ 91, 85, 274, -1000, 1958, 100, 278, -1000, -1000, 121,
+ 81, -1000, 184, -1000, 285, -1000, 175, 172, 1958, 100,
+ 284, -1000, -1000, -1000, 56, -1000, -1000, -1000, -1000, 274,
+ -1000, 1769, -1000, 769, -1000, 74, -1000, 19, -1000, 35,
+ -1000, -1000, 1958, -1000, 21, 17, 280, -1000, 162, 959,
+ 162, 162, 277, 234, 1713, 276, -1000, 162, 162, -1000,
+ 289, -1000, 1958, -1000, 93, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, 46, 121, 162,
+ 101, -1000, 272, 265, -1000, 44, 185, 1443, -1000, 10,
+ -1000, 32, -1000, -1000, -1000, -1000, -1000, 72, -1000, 27,
+ 248, 162, 148, 244, -1000, 162, 46, -1000, -9, -1000,
+ -1000, 1329, 80, -1000, 171, -1000, -1000, -1000, -1000, -1000,
+ 136, 1713, -1000, -1000, -1000, -1000, 165, 1329, 1958, 1958,
+ 104, 1958, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, 46, -1000, -1000, 274, -1000, 1389, -1000, 162,
+ -1000, -1000, -1000, -1000, 45, 44, -1000, 163, -1000, 56,
+ 1389, 36, -1000, 1958, -1000, 2002, 103, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, 900, -1000, -1000, -1000, 39, 128, 162, 46,
+ -1000, -1000, 162, -1000, -1000, -1000, -1000, 1149, 162, -1000,
+ -1000, 169, 14, 13, 1958, 100, -1000, 162, 71, -1000,
+ 162, 147, -1000, 163, -1000, 132, 11, -1000, -1000, -1000,
+ -1000, -1000, -1000, 162, 145, 162, 127, -1000, 162, -1000,
+ -1000, -1000, 1149, 242, -1000, 163, 239, 162, 144, -1000,
+ -1000, -1000, 126, 162, -1000, -1000, 162, -1000, 125, 162,
+ -1000, 162, -1000, 163, 44, -1000, 37, 124, 162, -1000,
+ 123, 122, 241, 162, 120, -1000, -1000, -1000, 163, 162,
+ 133, -1000, 119, -1000, 162, 241, -1000, -1000, -1000, -1000,
+ 162, -1000, 118, 162, -1000, -1000, -1000, -1000, -1000, 117,
+ -1000,
+}
+
+var protoPgo = [...]int16{
+ 0, 392, 391, 390, 291, 252, 389, 387, 386, 384,
+ 383, 7, 28, 24, 382, 381, 379, 378, 376, 61,
+ 59, 16, 375, 45, 41, 21, 374, 11, 9, 22,
+ 8, 373, 372, 14, 371, 370, 23, 5, 369, 368,
+ 367, 366, 365, 364, 363, 53, 58, 57, 12, 10,
+ 15, 362, 361, 360, 359, 358, 39, 357, 356, 18,
+ 355, 354, 353, 46, 352, 351, 350, 349, 55, 25,
+ 348, 347, 346, 345, 343, 342, 341, 340, 339, 338,
+ 50, 54, 337, 6, 19, 336, 335, 333, 330, 329,
+ 328, 29, 35, 30, 47, 327, 326, 49, 42, 325,
+ 324, 322, 48, 56, 321, 316, 13, 315, 44, 300,
+ 299, 298, 2, 297, 296, 20, 17, 0, 245,
+}
+
+var protoR1 = [...]int8{
+ 0, 1, 1, 1, 1, 1, 1, 4, 6, 6,
+ 5, 5, 5, 5, 5, 5, 5, 5, 118, 118,
+ 117, 117, 116, 116, 2, 3, 7, 7, 7, 8,
+ 50, 50, 56, 56, 57, 57, 47, 47, 46, 51,
+ 51, 52, 52, 53, 53, 54, 54, 55, 55, 58,
+ 58, 49, 49, 48, 10, 11, 18, 18, 19, 20,
+ 20, 22, 22, 21, 21, 16, 25, 25, 26, 26,
+ 26, 26, 30, 30, 30, 30, 31, 31, 106, 106,
+ 28, 28, 69, 68, 68, 67, 67, 67, 67, 67,
+ 67, 70, 70, 70, 17, 17, 17, 17, 24, 24,
+ 24, 27, 27, 27, 27, 35, 35, 29, 29, 29,
+ 32, 32, 32, 65, 65, 33, 33, 34, 34, 34,
+ 66, 66, 59, 59, 60, 60, 61, 61, 62, 62,
+ 63, 63, 64, 64, 45, 45, 45, 23, 23, 14,
+ 14, 15, 15, 13, 13, 12, 9, 9, 75, 75,
+ 77, 77, 77, 77, 74, 86, 86, 85, 85, 84,
+ 84, 84, 84, 84, 72, 72, 72, 72, 76, 76,
+ 76, 76, 78, 78, 78, 78, 79, 38, 38, 38,
+ 38, 38, 38, 38, 38, 38, 38, 38, 38, 96,
+ 96, 94, 94, 92, 92, 92, 95, 95, 93, 93,
+ 93, 36, 36, 89, 89, 90, 90, 91, 91, 87,
+ 87, 88, 88, 97, 100, 100, 99, 99, 98, 98,
+ 98, 98, 101, 101, 80, 83, 83, 82, 82, 81,
+ 81, 81, 81, 81, 81, 81, 81, 81, 81, 81,
+ 71, 71, 71, 71, 71, 71, 71, 71, 102, 105,
+ 105, 104, 104, 103, 103, 103, 103, 73, 73, 73,
+ 73, 107, 110, 110, 109, 109, 108, 108, 108, 111,
+ 111, 115, 115, 114, 114, 113, 113, 112, 112, 39,
+ 39, 39, 39, 39, 39, 39, 39, 39, 39, 39,
+ 39, 39, 39, 39, 39, 39, 39, 39, 39, 39,
+ 39, 39, 39, 39, 39, 39, 39, 39, 39, 39,
+ 39, 39, 40, 40, 40, 40, 40, 40, 40, 40,
+ 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
+ 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
+ 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
+ 40, 40, 44, 44, 44, 44, 44, 44, 44, 44,
+ 44, 44, 44, 44, 44, 44, 44, 44, 44, 44,
+ 44, 44, 44, 44, 44, 44, 44, 44, 44, 44,
+ 44, 44, 44, 44, 44, 44, 44, 44, 44, 44,
+ 44, 44, 44, 44, 41, 41, 41, 41, 41, 41,
+ 41, 41, 41, 41, 41, 41, 41, 41, 41, 41,
+ 41, 41, 41, 41, 41, 41, 41, 41, 41, 41,
+ 41, 41, 41, 41, 41, 41, 41, 41, 41, 41,
+ 41, 41, 41, 42, 42, 42, 42, 42, 42, 42,
+ 42, 42, 42, 42, 42, 42, 42, 42, 42, 42,
+ 42, 42, 42, 42, 42, 42, 42, 42, 42, 42,
+ 42, 42, 42, 42, 42, 42, 42, 42, 42, 42,
+ 42, 42, 42, 42, 42, 42, 43, 43, 43, 43,
+ 43, 43, 43, 43, 43, 43, 43, 43, 43, 43,
+ 43, 43, 43, 43, 43, 43, 43, 43, 43, 43,
+ 43, 43, 43, 43, 43, 43, 43, 43, 43, 43,
+ 43, 43, 43, 43, 43, 43, 43, 43, 43, 37,
+ 37, 37, 37, 37, 37, 37, 37, 37, 37, 37,
+ 37, 37, 37, 37, 37, 37, 37, 37, 37, 37,
+ 37, 37, 37, 37, 37, 37, 37, 37, 37, 37,
+ 37, 37, 37, 37, 37, 37, 37, 37, 37, 37,
+ 37, 37, 37,
+}
+
+var protoR2 = [...]int8{
+ 0, 1, 1, 1, 2, 2, 0, 2, 2, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 2,
+ 1, 0, 1, 0, 4, 4, 3, 4, 4, 3,
+ 1, 3, 1, 2, 1, 2, 1, 1, 2, 1,
+ 3, 1, 3, 1, 3, 1, 3, 1, 2, 1,
+ 2, 1, 1, 2, 5, 5, 1, 1, 2, 1,
+ 1, 1, 2, 1, 2, 3, 1, 1, 1, 1,
+ 1, 1, 1, 2, 1, 2, 2, 2, 1, 2,
+ 3, 2, 1, 1, 2, 1, 2, 2, 2, 2,
+ 1, 3, 2, 3, 1, 3, 5, 3, 1, 1,
+ 1, 1, 1, 2, 1, 1, 1, 1, 3, 2,
+ 3, 2, 3, 1, 3, 1, 1, 3, 2, 3,
+ 1, 3, 1, 2, 1, 2, 1, 2, 1, 2,
+ 1, 2, 1, 2, 1, 1, 1, 3, 2, 1,
+ 2, 1, 2, 1, 1, 2, 3, 1, 8, 9,
+ 9, 10, 7, 8, 6, 0, 1, 2, 1, 1,
+ 1, 1, 2, 1, 5, 6, 3, 4, 7, 8,
+ 5, 6, 5, 6, 3, 4, 6, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 4,
+ 4, 1, 3, 1, 3, 3, 1, 3, 1, 3,
+ 3, 1, 2, 4, 1, 4, 1, 3, 3, 1,
+ 3, 1, 3, 6, 1, 2, 2, 1, 1, 1,
+ 1, 1, 4, 5, 6, 1, 2, 2, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 6, 7, 5, 6, 4, 5, 3, 4, 6, 0,
+ 1, 2, 1, 1, 1, 2, 1, 6, 7, 5,
+ 6, 6, 1, 2, 2, 1, 1, 1, 1, 6,
+ 9, 4, 3, 1, 2, 2, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1,
+}
+
+var protoChk = [...]int16{
+ -1000, -1, -2, -3, -4, 8, 9, -117, -118, 53,
+ -4, -4, 52, 52, -6, -5, -7, -8, -11, -80,
+ -97, -102, -107, 2, 10, 13, 14, 45, 44, 46,
+ 47, 53, -106, 4, -106, -5, -106, 11, 12, -50,
+ -37, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
+ 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
+ 36, 37, 38, 39, 40, 41, 42, 43, 44, 45,
+ 46, 47, 48, 49, 50, -21, -20, -22, -18, -19,
+ -37, -16, 66, -37, -37, -59, -56, 60, -47, -57,
+ -37, -46, -37, 53, 4, 53, -117, -106, -106, -117,
+ 60, 52, -19, -20, 60, -59, 55, 55, 55, -56,
+ -47, -46, 60, 55, -117, -117, -37, -25, -26, -28,
+ -106, -30, -31, -37, 55, 6, 65, 5, 67, -83,
+ -117, -100, -117, -105, -104, -103, -73, -75, 2, -45,
+ -61, 21, 20, 19, -52, 60, -40, 7, 8, 9,
+ 10, 11, 12, 13, 14, 15, 16, 17, 18, 22,
+ 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
+ 33, 34, 35, 36, 38, 39, 40, 41, 42, 43,
+ 44, 45, 46, 47, 48, 49, 50, -110, -117, -117,
+ -69, 56, -68, -67, -70, 2, -17, -37, 68, 6,
+ 5, 17, 18, 56, -82, -81, -71, -97, -80, -102,
+ -96, -77, -11, -74, -78, -89, 2, -45, -60, 40,
+ 38, -79, 43, -91, -51, 60, 39, -39, 7, 8,
+ 9, 10, 11, 12, 13, 15, 16, 17, 18, 22,
+ 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
+ 33, 34, 35, 36, 41, 42, 47, 48, 49, 50,
+ 56, -99, -98, -11, -101, -90, 2, -44, 43, -91,
+ 7, 8, 9, 10, 11, 12, 13, 15, 16, 17,
+ 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
+ 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
+ 38, 39, 40, 41, 42, 44, 45, 46, 47, 48,
+ 49, 50, 56, -103, 53, 37, -63, -54, 60, -42,
+ 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
+ 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
+ 27, 28, 29, 30, 31, 32, 33, 34, 35, 36,
+ 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+ 48, 49, 50, -37, 60, -50, 56, -109, -108, -11,
+ -111, 2, 48, 56, -68, 61, 53, 61, 53, 54,
+ 54, -35, -29, -34, -28, 63, 68, -56, 2, -117,
+ -81, 37, -63, -37, -94, -92, 5, -37, -37, -94,
+ -87, -88, -106, -37, 60, -50, 63, -117, -98, 52,
+ -95, -93, -36, 5, 65, -117, -37, -37, 60, -50,
+ 52, -37, -117, -108, -37, -24, -27, -29, -32, -106,
+ -30, 65, -37, 68, -24, -69, 62, -66, 69, 2,
+ -29, 69, 58, 69, -37, -37, 52, -117, -23, 68,
+ 53, -23, 61, 41, 55, 52, -117, -23, 53, -117,
+ 61, -117, 61, -37, -38, 24, 25, 26, 27, 28,
+ 29, 30, 31, 32, 33, 34, 35, -36, 61, 53,
+ 41, 5, 52, 52, -37, 5, -115, 66, -37, -65,
+ 69, 2, -33, -27, -29, 62, 69, 61, 69, -56,
+ 52, 55, -23, 52, -117, -23, 5, -117, -14, 69,
+ -13, -15, -9, -12, -21, -117, -117, -92, 5, 42,
+ -86, -85, -84, -10, -72, -76, 2, 14, -62, 37,
+ -53, 60, -41, 7, 8, 9, 10, 11, 12, 13,
+ 15, 16, 17, 18, 22, 23, 24, 25, 26, 27,
+ 28, 29, 30, 31, 32, 33, 34, 35, 36, 38,
+ 39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
+ 49, 50, 5, -117, -117, -106, -37, 61, -117, -23,
+ -93, -117, -36, 42, 5, 5, -116, -23, 53, 50,
+ 49, -64, -55, 60, -49, -58, -43, -48, 7, 8,
+ 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
+ 19, 20, 21, 22, 23, 24, 25, 26, 27, 28,
+ 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,
+ 39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
+ 50, 69, 61, 69, -29, 69, 5, -83, 55, 5,
+ -117, -117, -23, 69, -13, -12, 61, 52, 56, -84,
+ 53, -21, -37, -37, 60, -50, -117, -23, -59, -117,
+ 55, -23, -116, -23, -116, -115, -59, 67, -56, -49,
+ -48, 60, -33, 55, -23, 56, -83, -117, -23, -117,
+ -25, -117, 52, 52, -116, -23, 52, 55, -23, -37,
+ -117, 62, -83, 55, -116, -117, 55, 67, -83, 55,
+ -117, 56, -117, -25, 5, -116, 5, -83, 55, 56,
+ -83, -114, -117, 56, -83, -117, -116, -116, -23, 55,
+ -23, 56, -83, 56, 56, -113, -112, -11, 2, -117,
+ 56, -116, -83, 55, 56, -117, -112, -117, 56, -83,
+ 56,
+}
+
+var protoDef = [...]int16{
+ -2, -2, -2, -2, 3, 0, 0, 0, 20, 18,
+ 4, 5, 0, 0, -2, 9, 10, 11, 12, 13,
+ 14, 15, 16, 17, 0, 0, 0, 0, 0, 0,
+ 0, 19, 0, 78, 0, 8, 21, 0, 0, 21,
+ 30, 519, 520, 521, 522, 523, 524, 525, 526, 527,
+ 528, 529, 530, 531, 532, 533, 534, 535, 536, 537,
+ 538, 539, 540, 541, 542, 543, 544, 545, 546, 547,
+ 548, 549, 550, 551, 552, 553, 554, 555, 556, 557,
+ 558, 559, 560, 561, 562, 0, 63, 0, 59, -2,
+ 56, 57, 0, 0, 0, 0, 122, 0, 32, 0,
+ 36, -2, 0, 24, 79, 25, 26, 21, 21, 29,
+ 0, 0, -2, 64, 58, 0, 21, 21, -2, 123,
+ 33, -2, 38, 21, 27, 28, 31, 21, 66, 67,
+ 68, 69, 70, 71, 0, 72, 0, 74, 65, 0,
+ -2, 0, -2, 0, -2, 252, 253, 254, 256, 0,
+ 0, 134, 135, 136, 126, 0, 41, 312, 313, 314,
+ 315, 316, 317, 318, 319, 320, 321, 322, 323, 324,
+ 325, 326, 327, 328, 329, 330, 331, 332, 333, 334,
+ 335, 336, 337, 338, 339, 340, 341, 342, 343, 344,
+ 345, 346, 347, 348, 349, 350, 351, 0, -2, 55,
+ 0, 81, 82, -2, 85, 90, 0, 94, 0, 73,
+ 75, 76, 77, 21, -2, 228, 229, 230, 231, 232,
+ 233, 234, 235, 236, 237, 238, 239, 0, 0, 0,
+ 0, 0, 0, 204, 124, 0, 305, 39, 279, 280,
+ 281, 282, 283, 284, 285, 286, 287, 288, 289, 290,
+ 291, 292, 293, 294, 295, 296, 297, 298, 299, 300,
+ 301, 302, 303, 304, 306, 307, 308, 309, 310, 311,
+ 21, -2, 217, 218, 219, 220, 221, 0, 0, 206,
+ 352, 353, 354, 355, 356, 357, 358, 359, 360, 361,
+ 362, 363, 364, 365, 366, 367, 368, 369, 370, 371,
+ 372, 373, 374, 375, 376, 377, 378, 379, 380, 381,
+ 382, 383, 384, 385, 386, 387, 388, 389, 390, 391,
+ 392, 393, 21, 251, 255, 0, 0, 130, 0, 45,
+ 433, 434, 435, 436, 437, 438, 439, 440, 441, 442,
+ 443, 444, 445, 446, 447, 448, 449, 450, 451, 452,
+ 453, 454, 455, 456, 457, 458, 459, 460, 461, 462,
+ 463, 464, 465, 466, 467, 468, 469, 470, 471, 472,
+ 473, 474, 475, 0, 0, 127, 21, -2, 265, 266,
+ 267, 268, 0, 80, 84, 86, 87, 88, 89, 0,
+ 0, 92, 105, 106, 107, 0, 0, 0, 0, 224,
+ 227, 0, 0, 21, 0, 191, 193, 0, 21, 0,
+ 21, 21, 209, 211, 0, 125, 0, 213, 216, 0,
+ 0, 196, 198, 201, 0, 248, 0, 0, 0, 131,
+ 0, 42, 261, 264, 0, 93, 98, 99, 100, 101,
+ 102, 0, 104, 0, 91, 0, 109, 0, 118, 0,
+ 120, 95, 0, 97, 0, 21, 0, 246, 21, 0,
+ 21, 21, 0, 0, -2, 0, 174, 21, 21, 207,
+ 0, 208, 0, 40, 0, 177, 178, 179, 180, 181,
+ 182, 183, 184, 185, 186, 187, 188, 21, 0, 21,
+ 0, 202, 0, 0, 46, 23, 0, 0, 103, 0,
+ 111, 0, 113, 115, 116, 108, 117, 0, 119, 0,
+ 0, 21, 0, 0, 244, 21, 21, 247, 0, 138,
+ 139, 0, 143, -2, 147, 189, 190, 192, 194, 195,
+ 0, -2, 158, 159, 160, 161, 163, 0, 0, 0,
+ 128, 0, 43, 394, 395, 396, 397, 398, 399, 400,
+ 401, 402, 403, 404, 405, 406, 407, 408, 409, 410,
+ 411, 412, 413, 414, 415, 416, 417, 418, 419, 420,
+ 421, 422, 423, 424, 425, 426, 427, 428, 429, 430,
+ 431, 432, 21, 175, 203, 210, 212, 0, 222, 21,
+ 197, 205, 199, 200, 0, 23, 259, 23, 22, 0,
+ 0, 0, 132, 0, 47, 0, 51, -2, 476, 477,
+ 478, 479, 480, 481, 482, 483, 484, 485, 486, 487,
+ 488, 489, 490, 491, 492, 493, 494, 495, 496, 497,
+ 498, 499, 500, 501, 502, 503, 504, 505, 506, 507,
+ 508, 509, 510, 511, 512, 513, 514, 515, 516, 517,
+ 518, 110, 0, 112, 121, 96, 0, 0, 21, 21,
+ 245, 242, 21, 137, 140, -2, 145, 0, 21, 157,
+ 162, 0, 23, 0, 0, 129, 172, 21, 0, 223,
+ 21, 0, 257, 23, 260, 21, 0, 272, 133, 48,
+ -2, 53, 114, 21, 0, 21, 0, 240, 21, 243,
+ 146, 154, 0, 0, 166, 23, 0, 21, 0, 44,
+ 173, 176, 0, 21, 258, 269, 21, 271, 0, 21,
+ 152, 21, 241, 23, 23, 167, 0, 0, 21, 148,
+ 0, 0, -2, 21, 0, 153, 54, 164, 23, 21,
+ 0, 170, 0, 149, 21, -2, 276, 277, 278, 150,
+ 21, 165, 0, 21, 171, 270, 275, 151, 168, 0,
+ 169,
+}
+
+var protoTok1 = [...]int8{
+ 1, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 77, 3, 75, 74, 73, 71, 3,
+ 66, 67, 70, 64, 61, 65, 60, 58, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 54, 53,
+ 63, 52, 62, 59, 76, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 68, 57, 69, 72, 3, 79, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 55, 3, 56, 78,
+}
+
+var protoTok2 = [...]int8{
+ 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
+ 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
+ 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
+ 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
+ 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
+}
+
+var protoTok3 = [...]int8{
+ 0,
+}
+
+var protoErrorMessages = [...]struct {
+ state int
+ token int
+ msg string
+}{}
+
+/* parser for yacc output */
+
+var (
+ protoDebug = 0
+ protoErrorVerbose = false
+)
+
+type protoLexer interface {
+ Lex(lval *protoSymType) int
+ Error(s string)
+}
+
+type protoParser interface {
+ Parse(protoLexer) int
+ Lookahead() int
+}
+
+type protoParserImpl struct {
+ lval protoSymType
+ stack [protoInitialStackSize]protoSymType
+ char int
+}
+
+func (p *protoParserImpl) Lookahead() int {
+ return p.char
+}
+
+func protoNewParser() protoParser {
+ return &protoParserImpl{}
+}
+
+const protoFlag = -1000
+
+func protoTokname(c int) string {
+ if c >= 1 && c-1 < len(protoToknames) {
+ if protoToknames[c-1] != "" {
+ return protoToknames[c-1]
+ }
+ }
+ return __yyfmt__.Sprintf("tok-%v", c)
+}
+
+func protoStatname(s int) string {
+ if s >= 0 && s < len(protoStatenames) {
+ if protoStatenames[s] != "" {
+ return protoStatenames[s]
+ }
+ }
+ return __yyfmt__.Sprintf("state-%v", s)
+}
+
+func protoErrorMessage(state, lookAhead int) string {
+ const TOKSTART = 4
+
+ if !protoErrorVerbose {
+ return "syntax error"
+ }
+
+ for _, e := range protoErrorMessages {
+ if e.state == state && e.token == lookAhead {
+ return "syntax error: " + e.msg
+ }
+ }
+
+ res := "syntax error: unexpected " + protoTokname(lookAhead)
+
+ // To match Bison, suggest at most four expected tokens.
+ expected := make([]int, 0, 4)
+
+ // Look for shiftable tokens.
+ base := int(protoPact[state])
+ for tok := TOKSTART; tok-1 < len(protoToknames); tok++ {
+ if n := base + tok; n >= 0 && n < protoLast && int(protoChk[int(protoAct[n])]) == tok {
+ if len(expected) == cap(expected) {
+ return res
+ }
+ expected = append(expected, tok)
+ }
+ }
+
+ if protoDef[state] == -2 {
+ i := 0
+ for protoExca[i] != -1 || int(protoExca[i+1]) != state {
+ i += 2
+ }
+
+ // Look for tokens that we accept or reduce.
+ for i += 2; protoExca[i] >= 0; i += 2 {
+ tok := int(protoExca[i])
+ if tok < TOKSTART || protoExca[i+1] == 0 {
+ continue
+ }
+ if len(expected) == cap(expected) {
+ return res
+ }
+ expected = append(expected, tok)
+ }
+
+ // If the default action is to accept or reduce, give up.
+ if protoExca[i+1] != 0 {
+ return res
+ }
+ }
+
+ for i, tok := range expected {
+ if i == 0 {
+ res += ", expecting "
+ } else {
+ res += " or "
+ }
+ res += protoTokname(tok)
+ }
+ return res
+}
+
+func protolex1(lex protoLexer, lval *protoSymType) (char, token int) {
+ token = 0
+ char = lex.Lex(lval)
+ if char <= 0 {
+ token = int(protoTok1[0])
+ goto out
+ }
+ if char < len(protoTok1) {
+ token = int(protoTok1[char])
+ goto out
+ }
+ if char >= protoPrivate {
+ if char < protoPrivate+len(protoTok2) {
+ token = int(protoTok2[char-protoPrivate])
+ goto out
+ }
+ }
+ for i := 0; i < len(protoTok3); i += 2 {
+ token = int(protoTok3[i+0])
+ if token == char {
+ token = int(protoTok3[i+1])
+ goto out
+ }
+ }
+
+out:
+ if token == 0 {
+ token = int(protoTok2[1]) /* unknown char */
+ }
+ if protoDebug >= 3 {
+ __yyfmt__.Printf("lex %s(%d)\n", protoTokname(token), uint(char))
+ }
+ return char, token
+}
+
+func protoParse(protolex protoLexer) int {
+ return protoNewParser().Parse(protolex)
+}
+
+func (protorcvr *protoParserImpl) Parse(protolex protoLexer) int {
+ var proton int
+ var protoVAL protoSymType
+ var protoDollar []protoSymType
+ _ = protoDollar // silence set and not used
+ protoS := protorcvr.stack[:]
+
+ Nerrs := 0 /* number of errors */
+ Errflag := 0 /* error recovery flag */
+ protostate := 0
+ protorcvr.char = -1
+ prototoken := -1 // protorcvr.char translated into internal numbering
+ defer func() {
+ // Make sure we report no lookahead when not parsing.
+ protostate = -1
+ protorcvr.char = -1
+ prototoken = -1
+ }()
+ protop := -1
+ goto protostack
+
+ret0:
+ return 0
+
+ret1:
+ return 1
+
+protostack:
+ /* put a state and value onto the stack */
+ if protoDebug >= 4 {
+ __yyfmt__.Printf("char %v in %v\n", protoTokname(prototoken), protoStatname(protostate))
+ }
+
+ protop++
+ if protop >= len(protoS) {
+ nyys := make([]protoSymType, len(protoS)*2)
+ copy(nyys, protoS)
+ protoS = nyys
+ }
+ protoS[protop] = protoVAL
+ protoS[protop].yys = protostate
+
+protonewstate:
+ proton = int(protoPact[protostate])
+ if proton <= protoFlag {
+ goto protodefault /* simple state */
+ }
+ if protorcvr.char < 0 {
+ protorcvr.char, prototoken = protolex1(protolex, &protorcvr.lval)
+ }
+ proton += prototoken
+ if proton < 0 || proton >= protoLast {
+ goto protodefault
+ }
+ proton = int(protoAct[proton])
+ if int(protoChk[proton]) == prototoken { /* valid shift */
+ protorcvr.char = -1
+ prototoken = -1
+ protoVAL = protorcvr.lval
+ protostate = proton
+ if Errflag > 0 {
+ Errflag--
+ }
+ goto protostack
+ }
+
+protodefault:
+ /* default state action */
+ proton = int(protoDef[protostate])
+ if proton == -2 {
+ if protorcvr.char < 0 {
+ protorcvr.char, prototoken = protolex1(protolex, &protorcvr.lval)
+ }
+
+ /* look through exception table */
+ xi := 0
+ for {
+ if protoExca[xi+0] == -1 && int(protoExca[xi+1]) == protostate {
+ break
+ }
+ xi += 2
+ }
+ for xi += 2; ; xi += 2 {
+ proton = int(protoExca[xi+0])
+ if proton < 0 || proton == prototoken {
+ break
+ }
+ }
+ proton = int(protoExca[xi+1])
+ if proton < 0 {
+ goto ret0
+ }
+ }
+ if proton == 0 {
+ /* error ... attempt to resume parsing */
+ switch Errflag {
+ case 0: /* brand new error */
+ protolex.Error(protoErrorMessage(protostate, prototoken))
+ Nerrs++
+ if protoDebug >= 1 {
+ __yyfmt__.Printf("%s", protoStatname(protostate))
+ __yyfmt__.Printf(" saw %s\n", protoTokname(prototoken))
+ }
+ fallthrough
+
+ case 1, 2: /* incompletely recovered error ... try again */
+ Errflag = 3
+
+ /* find a state where "error" is a legal shift action */
+ for protop >= 0 {
+ proton = int(protoPact[protoS[protop].yys]) + protoErrCode
+ if proton >= 0 && proton < protoLast {
+ protostate = int(protoAct[proton]) /* simulate a shift of "error" */
+ if int(protoChk[protostate]) == protoErrCode {
+ goto protostack
+ }
+ }
+
+ /* the current p has no shift on "error", pop stack */
+ if protoDebug >= 2 {
+ __yyfmt__.Printf("error recovery pops state %d\n", protoS[protop].yys)
+ }
+ protop--
+ }
+ /* there is no state on the stack with an error shift ... abort */
+ goto ret1
+
+ case 3: /* no shift yet; clobber input char */
+ if protoDebug >= 2 {
+ __yyfmt__.Printf("error recovery discards %s\n", protoTokname(prototoken))
+ }
+ if prototoken == protoEofCode {
+ goto ret1
+ }
+ protorcvr.char = -1
+ prototoken = -1
+ goto protonewstate /* try again in the same state */
+ }
+ }
+
+ /* reduction by production proton */
+ if protoDebug >= 2 {
+ __yyfmt__.Printf("reduce %v in:\n\t%v\n", proton, protoStatname(protostate))
+ }
+
+ protont := proton
+ protopt := protop
+ _ = protopt // guard against "declared and not used"
+
+ protop -= int(protoR2[proton])
+ // protop is now the index of $0. Perform the default action. Iff the
+ // reduced production is ε, $1 is possibly out of range.
+ if protop+1 >= len(protoS) {
+ nyys := make([]protoSymType, len(protoS)*2)
+ copy(nyys, protoS)
+ protoS = nyys
+ }
+ protoVAL = protoS[protop+1]
+
+ /* consult goto table to find next state */
+ proton = int(protoR1[proton])
+ protog := int(protoPgo[proton])
+ protoj := protog + protoS[protop].yys + 1
+
+ if protoj >= protoLast {
+ protostate = int(protoAct[protog])
+ } else {
+ protostate = int(protoAct[protoj])
+ if int(protoChk[protostate]) != -proton {
+ protostate = int(protoAct[protog])
+ }
+ }
+ // dummy call; replaced with literal code
+ switch protont {
+
+ case 1:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ lex := protolex.(*protoLex)
+ protoVAL.file = ast.NewFileNode(lex.info, protoDollar[1].syn, nil, lex.eof)
+ lex.res = protoVAL.file
+ }
+ case 2:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ lex := protolex.(*protoLex)
+ protoVAL.file = ast.NewFileNodeWithEdition(lex.info, protoDollar[1].ed, nil, lex.eof)
+ lex.res = protoVAL.file
+ }
+ case 3:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ lex := protolex.(*protoLex)
+ protoVAL.file = ast.NewFileNode(lex.info, nil, protoDollar[1].fileElements, lex.eof)
+ lex.res = protoVAL.file
+ }
+ case 4:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ lex := protolex.(*protoLex)
+ protoVAL.file = ast.NewFileNode(lex.info, protoDollar[1].syn, protoDollar[2].fileElements, lex.eof)
+ lex.res = protoVAL.file
+ }
+ case 5:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ lex := protolex.(*protoLex)
+ protoVAL.file = ast.NewFileNodeWithEdition(lex.info, protoDollar[1].ed, protoDollar[2].fileElements, lex.eof)
+ lex.res = protoVAL.file
+ }
+ case 6:
+ protoDollar = protoS[protopt-0 : protopt+1]
+ {
+ lex := protolex.(*protoLex)
+ protoVAL.file = ast.NewFileNode(lex.info, nil, nil, lex.eof)
+ lex.res = protoVAL.file
+ }
+ case 7:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.fileElements = prependRunes(toFileElement, protoDollar[1].bs, protoDollar[2].fileElements)
+ }
+ case 8:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.fileElements = append(protoDollar[1].fileElements, protoDollar[2].fileElements...)
+ }
+ case 9:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.fileElements = protoDollar[1].fileElements
+ }
+ case 10:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.fileElements = toElements[ast.FileElement](toFileElement, protoDollar[1].imprt.Node, protoDollar[1].imprt.Runes)
+ }
+ case 11:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.fileElements = toElements[ast.FileElement](toFileElement, protoDollar[1].pkg.Node, protoDollar[1].pkg.Runes)
+ }
+ case 12:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.fileElements = toElements[ast.FileElement](toFileElement, protoDollar[1].opt.Node, protoDollar[1].opt.Runes)
+ }
+ case 13:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.fileElements = toElements[ast.FileElement](toFileElement, protoDollar[1].msg.Node, protoDollar[1].msg.Runes)
+ }
+ case 14:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.fileElements = toElements[ast.FileElement](toFileElement, protoDollar[1].en.Node, protoDollar[1].en.Runes)
+ }
+ case 15:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.fileElements = toElements[ast.FileElement](toFileElement, protoDollar[1].extend.Node, protoDollar[1].extend.Runes)
+ }
+ case 16:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.fileElements = toElements[ast.FileElement](toFileElement, protoDollar[1].svc.Node, protoDollar[1].svc.Runes)
+ }
+ case 17:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.fileElements = nil
+ }
+ case 18:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.bs = []*ast.RuneNode{protoDollar[1].b}
+ }
+ case 19:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.bs = append(protoDollar[1].bs, protoDollar[2].b)
+ }
+ case 20:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.bs = protoDollar[1].bs
+ }
+ case 21:
+ protoDollar = protoS[protopt-0 : protopt+1]
+ {
+ protoVAL.bs = nil
+ }
+ case 22:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.b = protoDollar[1].b
+ }
+ case 23:
+ protoDollar = protoS[protopt-0 : protopt+1]
+ {
+ protolex.(*protoLex).Error("syntax error: expecting ';'")
+ protoVAL.b = nil
+ }
+ case 24:
+ protoDollar = protoS[protopt-4 : protopt+1]
+ {
+ protoVAL.syn = ast.NewSyntaxNode(protoDollar[1].id.ToKeyword(), protoDollar[2].b, toStringValueNode(protoDollar[3].str), protoDollar[4].b)
+ }
+ case 25:
+ protoDollar = protoS[protopt-4 : protopt+1]
+ {
+ protoVAL.ed = ast.NewEditionNode(protoDollar[1].id.ToKeyword(), protoDollar[2].b, toStringValueNode(protoDollar[3].str), protoDollar[4].b)
+ }
+ case 26:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ semi, extra := protolex.(*protoLex).requireSemicolon(protoDollar[3].bs)
+ protoVAL.imprt = newNodeWithRunes(ast.NewImportNode(protoDollar[1].id.ToKeyword(), nil, nil, toStringValueNode(protoDollar[2].str), semi), extra...)
+ }
+ case 27:
+ protoDollar = protoS[protopt-4 : protopt+1]
+ {
+ semi, extra := protolex.(*protoLex).requireSemicolon(protoDollar[4].bs)
+ protoVAL.imprt = newNodeWithRunes(ast.NewImportNode(protoDollar[1].id.ToKeyword(), nil, protoDollar[2].id.ToKeyword(), toStringValueNode(protoDollar[3].str), semi), extra...)
+ }
+ case 28:
+ protoDollar = protoS[protopt-4 : protopt+1]
+ {
+ semi, extra := protolex.(*protoLex).requireSemicolon(protoDollar[4].bs)
+ protoVAL.imprt = newNodeWithRunes(ast.NewImportNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id.ToKeyword(), nil, toStringValueNode(protoDollar[3].str), semi), extra...)
+ }
+ case 29:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ semi, extra := protolex.(*protoLex).requireSemicolon(protoDollar[3].bs)
+ protoVAL.pkg = newNodeWithRunes(ast.NewPackageNode(protoDollar[1].id.ToKeyword(), protoDollar[2].cid.toIdentValueNode(nil), semi), extra...)
+ }
+ case 30:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.cid = &identSlices{idents: []*ast.IdentNode{protoDollar[1].id}}
+ }
+ case 31:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoDollar[1].cid.idents = append(protoDollar[1].cid.idents, protoDollar[3].id)
+ protoDollar[1].cid.dots = append(protoDollar[1].cid.dots, protoDollar[2].b)
+ protoVAL.cid = protoDollar[1].cid
+ }
+ case 32:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.cid = &identSlices{idents: []*ast.IdentNode{protoDollar[1].cidPart.Node}, dots: protoDollar[1].cidPart.Runes}
+ }
+ case 33:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoDollar[1].cid.idents = append(protoDollar[1].cid.idents, protoDollar[2].cidPart.Node)
+ protoDollar[1].cid.dots = append(protoDollar[1].cid.dots, protoDollar[2].cidPart.Runes...)
+ protoVAL.cid = protoDollar[1].cid
+ }
+ case 34:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.cid = &identSlices{idents: []*ast.IdentNode{protoDollar[1].cidPart.Node}, dots: protoDollar[1].cidPart.Runes}
+ }
+ case 35:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoDollar[1].cid.idents = append(protoDollar[1].cid.idents, protoDollar[2].cidPart.Node)
+ protoDollar[1].cid.dots = append(protoDollar[1].cid.dots, protoDollar[2].cidPart.Runes...)
+ protoVAL.cid = protoDollar[1].cid
+ }
+ case 36:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.cidPart = newNodeWithRunes(protoDollar[1].id)
+ }
+ case 37:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protolex.(*protoLex).Error("syntax error: unexpected '.'")
+ protoVAL.cidPart = protoDollar[1].cidPart
+ }
+ case 38:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.cidPart = newNodeWithRunes(protoDollar[1].id, protoDollar[2].b)
+ }
+ case 39:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.cid = &identSlices{idents: []*ast.IdentNode{protoDollar[1].id}}
+ }
+ case 40:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoDollar[1].cid.idents = append(protoDollar[1].cid.idents, protoDollar[3].id)
+ protoDollar[1].cid.dots = append(protoDollar[1].cid.dots, protoDollar[2].b)
+ protoVAL.cid = protoDollar[1].cid
+ }
+ case 41:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.cid = &identSlices{idents: []*ast.IdentNode{protoDollar[1].id}}
+ }
+ case 42:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoDollar[1].cid.idents = append(protoDollar[1].cid.idents, protoDollar[3].id)
+ protoDollar[1].cid.dots = append(protoDollar[1].cid.dots, protoDollar[2].b)
+ protoVAL.cid = protoDollar[1].cid
+ }
+ case 43:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.cid = &identSlices{idents: []*ast.IdentNode{protoDollar[1].id}}
+ }
+ case 44:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoDollar[1].cid.idents = append(protoDollar[1].cid.idents, protoDollar[3].id)
+ protoDollar[1].cid.dots = append(protoDollar[1].cid.dots, protoDollar[2].b)
+ protoVAL.cid = protoDollar[1].cid
+ }
+ case 45:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.cid = &identSlices{idents: []*ast.IdentNode{protoDollar[1].id}}
+ }
+ case 46:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoDollar[1].cid.idents = append(protoDollar[1].cid.idents, protoDollar[3].id)
+ protoDollar[1].cid.dots = append(protoDollar[1].cid.dots, protoDollar[2].b)
+ protoVAL.cid = protoDollar[1].cid
+ }
+ case 47:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.cid = &identSlices{idents: []*ast.IdentNode{protoDollar[1].cidPart.Node}, dots: protoDollar[1].cidPart.Runes}
+ }
+ case 48:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoDollar[1].cid.idents = append(protoDollar[1].cid.idents, protoDollar[2].cidPart.Node)
+ protoDollar[1].cid.dots = append(protoDollar[1].cid.dots, protoDollar[2].cidPart.Runes...)
+ protoVAL.cid = protoDollar[1].cid
+ }
+ case 49:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.cid = &identSlices{idents: []*ast.IdentNode{protoDollar[1].cidPart.Node}, dots: protoDollar[1].cidPart.Runes}
+ }
+ case 50:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoDollar[1].cid.idents = append(protoDollar[1].cid.idents, protoDollar[2].cidPart.Node)
+ protoDollar[1].cid.dots = append(protoDollar[1].cid.dots, protoDollar[2].cidPart.Runes...)
+ protoVAL.cid = protoDollar[1].cid
+ }
+ case 51:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.cidPart = newNodeWithRunes(protoDollar[1].id)
+ }
+ case 52:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protolex.(*protoLex).Error("syntax error: unexpected '.'")
+ protoVAL.cidPart = protoDollar[1].cidPart
+ }
+ case 53:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.cidPart = newNodeWithRunes(protoDollar[1].id, protoDollar[2].b)
+ }
+ case 54:
+ protoDollar = protoS[protopt-5 : protopt+1]
+ {
+ optName := ast.NewOptionNameNode(protoDollar[2].optNms.refs, protoDollar[2].optNms.dots)
+ protoVAL.optRaw = ast.NewOptionNode(protoDollar[1].id.ToKeyword(), optName, protoDollar[3].b, protoDollar[4].v, protoDollar[5].b)
+ }
+ case 55:
+ protoDollar = protoS[protopt-5 : protopt+1]
+ {
+ optName := ast.NewOptionNameNode(protoDollar[2].optNms.refs, protoDollar[2].optNms.dots)
+ semi, extra := protolex.(*protoLex).requireSemicolon(protoDollar[5].bs)
+ protoVAL.opt = newNodeWithRunes(ast.NewOptionNode(protoDollar[1].id.ToKeyword(), optName, protoDollar[3].b, protoDollar[4].v, semi), extra...)
+ }
+ case 56:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.refRaw = ast.NewFieldReferenceNode(protoDollar[1].id)
+ }
+ case 57:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.refRaw = protoDollar[1].refRaw
+ }
+ case 58:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.ref = newNodeWithRunes(protoDollar[1].refRaw, protoDollar[2].b)
+ }
+ case 59:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.ref = newNodeWithRunes(protoDollar[1].refRaw)
+ }
+ case 60:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protolex.(*protoLex).Error("syntax error: unexpected '.'")
+ protoVAL.ref = protoDollar[1].ref
+ }
+ case 61:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.optNms = &fieldRefSlices{refs: []*ast.FieldReferenceNode{protoDollar[1].ref.Node}, dots: protoDollar[1].ref.Runes}
+ }
+ case 62:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoDollar[1].optNms.refs = append(protoDollar[1].optNms.refs, protoDollar[2].ref.Node)
+ protoDollar[1].optNms.dots = append(protoDollar[1].optNms.dots, protoDollar[2].ref.Runes...)
+ protoVAL.optNms = protoDollar[1].optNms
+ }
+ case 63:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.optNms = &fieldRefSlices{refs: []*ast.FieldReferenceNode{protoDollar[1].ref.Node}, dots: protoDollar[1].ref.Runes}
+ }
+ case 64:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoDollar[1].optNms.refs = append(protoDollar[1].optNms.refs, protoDollar[2].ref.Node)
+ protoDollar[1].optNms.dots = append(protoDollar[1].optNms.dots, protoDollar[2].ref.Runes...)
+ protoVAL.optNms = protoDollar[1].optNms
+ }
+ case 65:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoVAL.refRaw = ast.NewExtensionFieldReferenceNode(protoDollar[1].b, protoDollar[2].tid, protoDollar[3].b)
+ }
+ case 68:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.v = toStringValueNode(protoDollar[1].str)
+ }
+ case 71:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.v = protoDollar[1].id
+ }
+ case 72:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.v = protoDollar[1].f
+ }
+ case 73:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.v = ast.NewSignedFloatLiteralNode(protoDollar[1].b, protoDollar[2].f)
+ }
+ case 74:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.v = protoDollar[1].i
+ }
+ case 75:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ if protoDollar[2].i.Val > math.MaxInt64+1 {
+ // can't represent as int so treat as float literal
+ protoVAL.v = ast.NewSignedFloatLiteralNode(protoDollar[1].b, protoDollar[2].i)
+ } else {
+ protoVAL.v = ast.NewNegativeIntLiteralNode(protoDollar[1].b, protoDollar[2].i)
+ }
+ }
+ case 76:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ f := ast.NewSpecialFloatLiteralNode(protoDollar[2].id.ToKeyword())
+ protoVAL.v = ast.NewSignedFloatLiteralNode(protoDollar[1].b, f)
+ }
+ case 77:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ f := ast.NewSpecialFloatLiteralNode(protoDollar[2].id.ToKeyword())
+ protoVAL.v = ast.NewSignedFloatLiteralNode(protoDollar[1].b, f)
+ }
+ case 78:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.str = []*ast.StringLiteralNode{protoDollar[1].s}
+ }
+ case 79:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.str = append(protoDollar[1].str, protoDollar[2].s)
+ }
+ case 80:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ if protoDollar[2].msgLitFlds == nil {
+ protoVAL.v = ast.NewMessageLiteralNode(protoDollar[1].b, nil, nil, protoDollar[3].b)
+ } else {
+ fields, delimiters := protoDollar[2].msgLitFlds.toNodes()
+ protoVAL.v = ast.NewMessageLiteralNode(protoDollar[1].b, fields, delimiters, protoDollar[3].b)
+ }
+ }
+ case 81:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.v = ast.NewMessageLiteralNode(protoDollar[1].b, nil, nil, protoDollar[2].b)
+ }
+ case 84:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ if protoDollar[1].msgLitFlds != nil {
+ protoDollar[1].msgLitFlds.next = protoDollar[2].msgLitFlds
+ protoVAL.msgLitFlds = protoDollar[1].msgLitFlds
+ } else {
+ protoVAL.msgLitFlds = protoDollar[2].msgLitFlds
+ }
+ }
+ case 85:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ if protoDollar[1].msgLitFld != nil {
+ protoVAL.msgLitFlds = &messageFieldList{field: protoDollar[1].msgLitFld}
+ } else {
+ protoVAL.msgLitFlds = nil
+ }
+ }
+ case 86:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ if protoDollar[1].msgLitFld != nil {
+ protoVAL.msgLitFlds = &messageFieldList{field: protoDollar[1].msgLitFld, delimiter: protoDollar[2].b}
+ } else {
+ protoVAL.msgLitFlds = nil
+ }
+ }
+ case 87:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ if protoDollar[1].msgLitFld != nil {
+ protoVAL.msgLitFlds = &messageFieldList{field: protoDollar[1].msgLitFld, delimiter: protoDollar[2].b}
+ } else {
+ protoVAL.msgLitFlds = nil
+ }
+ }
+ case 88:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.msgLitFlds = nil
+ }
+ case 89:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.msgLitFlds = nil
+ }
+ case 90:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.msgLitFlds = nil
+ }
+ case 91:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ if protoDollar[1].refRaw != nil && protoDollar[2].b != nil {
+ protoVAL.msgLitFld = ast.NewMessageFieldNode(protoDollar[1].refRaw, protoDollar[2].b, protoDollar[3].v)
+ } else {
+ protoVAL.msgLitFld = nil
+ }
+ }
+ case 92:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ if protoDollar[1].refRaw != nil && protoDollar[2].v != nil {
+ protoVAL.msgLitFld = ast.NewMessageFieldNode(protoDollar[1].refRaw, nil, protoDollar[2].v)
+ } else {
+ protoVAL.msgLitFld = nil
+ }
+ }
+ case 93:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoVAL.msgLitFld = nil
+ }
+ case 94:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.refRaw = ast.NewFieldReferenceNode(protoDollar[1].id)
+ }
+ case 95:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoVAL.refRaw = ast.NewExtensionFieldReferenceNode(protoDollar[1].b, protoDollar[2].cid.toIdentValueNode(nil), protoDollar[3].b)
+ }
+ case 96:
+ protoDollar = protoS[protopt-5 : protopt+1]
+ {
+ protoVAL.refRaw = ast.NewAnyTypeReferenceNode(protoDollar[1].b, protoDollar[2].cid.toIdentValueNode(nil), protoDollar[3].b, protoDollar[4].cid.toIdentValueNode(nil), protoDollar[5].b)
+ }
+ case 97:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoVAL.refRaw = nil
+ }
+ case 101:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.v = toStringValueNode(protoDollar[1].str)
+ }
+ case 103:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ kw := protoDollar[2].id.ToKeyword()
+ switch strings.ToLower(kw.Val) {
+ case "inf", "infinity", "nan":
+ // these are acceptable
+ default:
+ // anything else is not
+ protolex.(*protoLex).Error(`only identifiers "inf", "infinity", or "nan" may appear after negative sign`)
+ }
+ // we'll validate the identifier later
+ f := ast.NewSpecialFloatLiteralNode(kw)
+ protoVAL.v = ast.NewSignedFloatLiteralNode(protoDollar[1].b, f)
+ }
+ case 104:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.v = protoDollar[1].id
+ }
+ case 108:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ if protoDollar[2].msgLitFlds == nil {
+ protoVAL.v = ast.NewMessageLiteralNode(protoDollar[1].b, nil, nil, protoDollar[3].b)
+ } else {
+ fields, delimiters := protoDollar[2].msgLitFlds.toNodes()
+ protoVAL.v = ast.NewMessageLiteralNode(protoDollar[1].b, fields, delimiters, protoDollar[3].b)
+ }
+ }
+ case 109:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.v = ast.NewMessageLiteralNode(protoDollar[1].b, nil, nil, protoDollar[2].b)
+ }
+ case 110:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ if protoDollar[2].sl == nil {
+ protoVAL.v = ast.NewArrayLiteralNode(protoDollar[1].b, nil, nil, protoDollar[3].b)
+ } else {
+ protoVAL.v = ast.NewArrayLiteralNode(protoDollar[1].b, protoDollar[2].sl.vals, protoDollar[2].sl.commas, protoDollar[3].b)
+ }
+ }
+ case 111:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.v = ast.NewArrayLiteralNode(protoDollar[1].b, nil, nil, protoDollar[2].b)
+ }
+ case 112:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoVAL.v = ast.NewArrayLiteralNode(protoDollar[1].b, nil, nil, protoDollar[3].b)
+ }
+ case 113:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.sl = &valueSlices{vals: []ast.ValueNode{protoDollar[1].v}}
+ }
+ case 114:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoDollar[1].sl.vals = append(protoDollar[1].sl.vals, protoDollar[3].v)
+ protoDollar[1].sl.commas = append(protoDollar[1].sl.commas, protoDollar[2].b)
+ protoVAL.sl = protoDollar[1].sl
+ }
+ case 117:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ if protoDollar[2].sl == nil {
+ protoVAL.v = ast.NewArrayLiteralNode(protoDollar[1].b, nil, nil, protoDollar[3].b)
+ } else {
+ protoVAL.v = ast.NewArrayLiteralNode(protoDollar[1].b, protoDollar[2].sl.vals, protoDollar[2].sl.commas, protoDollar[3].b)
+ }
+ }
+ case 118:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.v = ast.NewArrayLiteralNode(protoDollar[1].b, nil, nil, protoDollar[2].b)
+ }
+ case 119:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoVAL.v = ast.NewArrayLiteralNode(protoDollar[1].b, nil, nil, protoDollar[3].b)
+ }
+ case 120:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.sl = &valueSlices{vals: []ast.ValueNode{protoDollar[1].v}}
+ }
+ case 121:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoDollar[1].sl.vals = append(protoDollar[1].sl.vals, protoDollar[3].v)
+ protoDollar[1].sl.commas = append(protoDollar[1].sl.commas, protoDollar[2].b)
+ protoVAL.sl = protoDollar[1].sl
+ }
+ case 122:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.tid = protoDollar[1].cid.toIdentValueNode(nil)
+ }
+ case 123:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.tid = protoDollar[2].cid.toIdentValueNode(protoDollar[1].b)
+ }
+ case 124:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.tid = protoDollar[1].cid.toIdentValueNode(nil)
+ }
+ case 125:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.tid = protoDollar[2].cid.toIdentValueNode(protoDollar[1].b)
+ }
+ case 126:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.tid = protoDollar[1].cid.toIdentValueNode(nil)
+ }
+ case 127:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.tid = protoDollar[2].cid.toIdentValueNode(protoDollar[1].b)
+ }
+ case 128:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.tid = protoDollar[1].cid.toIdentValueNode(nil)
+ }
+ case 129:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.tid = protoDollar[2].cid.toIdentValueNode(protoDollar[1].b)
+ }
+ case 130:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.tid = protoDollar[1].cid.toIdentValueNode(nil)
+ }
+ case 131:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.tid = protoDollar[2].cid.toIdentValueNode(protoDollar[1].b)
+ }
+ case 132:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.tid = protoDollar[1].cid.toIdentValueNode(nil)
+ }
+ case 133:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.tid = protoDollar[2].cid.toIdentValueNode(protoDollar[1].b)
+ }
+ case 137:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoVAL.cmpctOpts = ast.NewCompactOptionsNode(protoDollar[1].b, protoDollar[2].opts.options, protoDollar[2].opts.commas, protoDollar[3].b)
+ }
+ case 138:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protolex.(*protoLex).Error("compact options must have at least one option")
+ protoVAL.cmpctOpts = ast.NewCompactOptionsNode(protoDollar[1].b, nil, nil, protoDollar[2].b)
+ }
+ case 139:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.opts = &compactOptionSlices{options: []*ast.OptionNode{protoDollar[1].opt.Node}, commas: protoDollar[1].opt.Runes}
+ }
+ case 140:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoDollar[1].opts.options = append(protoDollar[1].opts.options, protoDollar[2].opt.Node)
+ protoDollar[1].opts.commas = append(protoDollar[1].opts.commas, protoDollar[2].opt.Runes...)
+ protoVAL.opts = protoDollar[1].opts
+ }
+ case 141:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.opts = &compactOptionSlices{options: []*ast.OptionNode{protoDollar[1].opt.Node}, commas: protoDollar[1].opt.Runes}
+ }
+ case 142:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoDollar[1].opts.options = append(protoDollar[1].opts.options, protoDollar[2].opt.Node)
+ protoDollar[1].opts.commas = append(protoDollar[1].opts.commas, protoDollar[2].opt.Runes...)
+ protoVAL.opts = protoDollar[1].opts
+ }
+ case 143:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.opt = newNodeWithRunes(protoDollar[1].optRaw)
+ }
+ case 144:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protolex.(*protoLex).Error("syntax error: unexpected ','")
+ protoVAL.opt = protoDollar[1].opt
+ }
+ case 145:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.opt = newNodeWithRunes(protoDollar[1].optRaw, protoDollar[2].b)
+ }
+ case 146:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ optName := ast.NewOptionNameNode(protoDollar[1].optNms.refs, protoDollar[1].optNms.dots)
+ protoVAL.optRaw = ast.NewCompactOptionNode(optName, protoDollar[2].b, protoDollar[3].v)
+ }
+ case 147:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ optName := ast.NewOptionNameNode(protoDollar[1].optNms.refs, protoDollar[1].optNms.dots)
+ protolex.(*protoLex).Error("compact option must have a value")
+ protoVAL.optRaw = ast.NewCompactOptionNode(optName, nil, nil)
+ }
+ case 148:
+ protoDollar = protoS[protopt-8 : protopt+1]
+ {
+ protoVAL.grp = ast.NewGroupNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id.ToKeyword(), protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, nil, protoDollar[6].b, protoDollar[7].msgElements, protoDollar[8].b)
+ }
+ case 149:
+ protoDollar = protoS[protopt-9 : protopt+1]
+ {
+ protoVAL.grp = ast.NewGroupNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id.ToKeyword(), protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, protoDollar[6].cmpctOpts, protoDollar[7].b, protoDollar[8].msgElements, protoDollar[9].b)
+ }
+ case 150:
+ protoDollar = protoS[protopt-9 : protopt+1]
+ {
+ protoVAL.msgGrp = newNodeWithRunes(ast.NewGroupNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id.ToKeyword(), protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, nil, protoDollar[6].b, protoDollar[7].msgElements, protoDollar[8].b), protoDollar[9].bs...)
+ }
+ case 151:
+ protoDollar = protoS[protopt-10 : protopt+1]
+ {
+ protoVAL.msgGrp = newNodeWithRunes(ast.NewGroupNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id.ToKeyword(), protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, protoDollar[6].cmpctOpts, protoDollar[7].b, protoDollar[8].msgElements, protoDollar[9].b), protoDollar[10].bs...)
+ }
+ case 152:
+ protoDollar = protoS[protopt-7 : protopt+1]
+ {
+ protoVAL.msgGrp = newNodeWithRunes(ast.NewGroupNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id.ToKeyword(), protoDollar[3].id, nil, nil, nil, protoDollar[4].b, protoDollar[5].msgElements, protoDollar[6].b), protoDollar[7].bs...)
+ }
+ case 153:
+ protoDollar = protoS[protopt-8 : protopt+1]
+ {
+ protoVAL.msgGrp = newNodeWithRunes(ast.NewGroupNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id.ToKeyword(), protoDollar[3].id, nil, nil, protoDollar[4].cmpctOpts, protoDollar[5].b, protoDollar[6].msgElements, protoDollar[7].b), protoDollar[8].bs...)
+ }
+ case 154:
+ protoDollar = protoS[protopt-6 : protopt+1]
+ {
+ protoVAL.oo = newNodeWithRunes(ast.NewOneofNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id, protoDollar[3].b, protoDollar[4].ooElements, protoDollar[5].b), protoDollar[6].bs...)
+ }
+ case 155:
+ protoDollar = protoS[protopt-0 : protopt+1]
+ {
+ protoVAL.ooElements = nil
+ }
+ case 157:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ if protoDollar[2].ooElement != nil {
+ protoVAL.ooElements = append(protoDollar[1].ooElements, protoDollar[2].ooElement)
+ } else {
+ protoVAL.ooElements = protoDollar[1].ooElements
+ }
+ }
+ case 158:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ if protoDollar[1].ooElement != nil {
+ protoVAL.ooElements = []ast.OneofElement{protoDollar[1].ooElement}
+ } else {
+ protoVAL.ooElements = nil
+ }
+ }
+ case 159:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.ooElement = protoDollar[1].optRaw
+ }
+ case 160:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.ooElement = protoDollar[1].fld
+ }
+ case 161:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.ooElement = protoDollar[1].grp
+ }
+ case 162:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.ooElement = nil
+ }
+ case 163:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.ooElement = nil
+ }
+ case 164:
+ protoDollar = protoS[protopt-5 : protopt+1]
+ {
+ protoVAL.fld = ast.NewFieldNode(nil, protoDollar[1].tid, protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, nil, protoDollar[5].b)
+ }
+ case 165:
+ protoDollar = protoS[protopt-6 : protopt+1]
+ {
+ protoVAL.fld = ast.NewFieldNode(nil, protoDollar[1].tid, protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, protoDollar[5].cmpctOpts, protoDollar[6].b)
+ }
+ case 166:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoVAL.fld = ast.NewFieldNode(nil, protoDollar[1].tid, protoDollar[2].id, nil, nil, nil, protoDollar[3].b)
+ }
+ case 167:
+ protoDollar = protoS[protopt-4 : protopt+1]
+ {
+ protoVAL.fld = ast.NewFieldNode(nil, protoDollar[1].tid, protoDollar[2].id, nil, nil, protoDollar[3].cmpctOpts, protoDollar[4].b)
+ }
+ case 168:
+ protoDollar = protoS[protopt-7 : protopt+1]
+ {
+ protoVAL.grp = ast.NewGroupNode(nil, protoDollar[1].id.ToKeyword(), protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, nil, protoDollar[5].b, protoDollar[6].msgElements, protoDollar[7].b)
+ }
+ case 169:
+ protoDollar = protoS[protopt-8 : protopt+1]
+ {
+ protoVAL.grp = ast.NewGroupNode(nil, protoDollar[1].id.ToKeyword(), protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, protoDollar[5].cmpctOpts, protoDollar[6].b, protoDollar[7].msgElements, protoDollar[8].b)
+ }
+ case 170:
+ protoDollar = protoS[protopt-5 : protopt+1]
+ {
+ protoVAL.grp = ast.NewGroupNode(nil, protoDollar[1].id.ToKeyword(), protoDollar[2].id, nil, nil, nil, protoDollar[3].b, protoDollar[4].msgElements, protoDollar[5].b)
+ }
+ case 171:
+ protoDollar = protoS[protopt-6 : protopt+1]
+ {
+ protoVAL.grp = ast.NewGroupNode(nil, protoDollar[1].id.ToKeyword(), protoDollar[2].id, nil, nil, protoDollar[3].cmpctOpts, protoDollar[4].b, protoDollar[5].msgElements, protoDollar[6].b)
+ }
+ case 172:
+ protoDollar = protoS[protopt-5 : protopt+1]
+ {
+ semi, extra := protolex.(*protoLex).requireSemicolon(protoDollar[5].bs)
+ protoVAL.mapFld = newNodeWithRunes(ast.NewMapFieldNode(protoDollar[1].mapType, protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, nil, semi), extra...)
+ }
+ case 173:
+ protoDollar = protoS[protopt-6 : protopt+1]
+ {
+ semi, extra := protolex.(*protoLex).requireSemicolon(protoDollar[6].bs)
+ protoVAL.mapFld = newNodeWithRunes(ast.NewMapFieldNode(protoDollar[1].mapType, protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, protoDollar[5].cmpctOpts, semi), extra...)
+ }
+ case 174:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ semi, extra := protolex.(*protoLex).requireSemicolon(protoDollar[3].bs)
+ protoVAL.mapFld = newNodeWithRunes(ast.NewMapFieldNode(protoDollar[1].mapType, protoDollar[2].id, nil, nil, nil, semi), extra...)
+ }
+ case 175:
+ protoDollar = protoS[protopt-4 : protopt+1]
+ {
+ semi, extra := protolex.(*protoLex).requireSemicolon(protoDollar[4].bs)
+ protoVAL.mapFld = newNodeWithRunes(ast.NewMapFieldNode(protoDollar[1].mapType, protoDollar[2].id, nil, nil, protoDollar[3].cmpctOpts, semi), extra...)
+ }
+ case 176:
+ protoDollar = protoS[protopt-6 : protopt+1]
+ {
+ protoVAL.mapType = ast.NewMapTypeNode(protoDollar[1].id.ToKeyword(), protoDollar[2].b, protoDollar[3].id, protoDollar[4].b, protoDollar[5].tid, protoDollar[6].b)
+ }
+ case 189:
+ protoDollar = protoS[protopt-4 : protopt+1]
+ {
+ // TODO: Tolerate a missing semicolon here. This currently creates a shift/reduce conflict
+ // between `extensions 1 to 10` and `extensions 1` followed by `to = 10`.
+ protoVAL.ext = newNodeWithRunes(ast.NewExtensionRangeNode(protoDollar[1].id.ToKeyword(), protoDollar[2].rngs.ranges, protoDollar[2].rngs.commas, nil, protoDollar[3].b), protoDollar[4].bs...)
+ }
+ case 190:
+ protoDollar = protoS[protopt-4 : protopt+1]
+ {
+ semi, extra := protolex.(*protoLex).requireSemicolon(protoDollar[4].bs)
+ protoVAL.ext = newNodeWithRunes(ast.NewExtensionRangeNode(protoDollar[1].id.ToKeyword(), protoDollar[2].rngs.ranges, protoDollar[2].rngs.commas, protoDollar[3].cmpctOpts, semi), extra...)
+ }
+ case 191:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.rngs = &rangeSlices{ranges: []*ast.RangeNode{protoDollar[1].rng}}
+ }
+ case 192:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoDollar[1].rngs.ranges = append(protoDollar[1].rngs.ranges, protoDollar[3].rng)
+ protoDollar[1].rngs.commas = append(protoDollar[1].rngs.commas, protoDollar[2].b)
+ protoVAL.rngs = protoDollar[1].rngs
+ }
+ case 193:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.rng = ast.NewRangeNode(protoDollar[1].i, nil, nil, nil)
+ }
+ case 194:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoVAL.rng = ast.NewRangeNode(protoDollar[1].i, protoDollar[2].id.ToKeyword(), protoDollar[3].i, nil)
+ }
+ case 195:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoVAL.rng = ast.NewRangeNode(protoDollar[1].i, protoDollar[2].id.ToKeyword(), nil, protoDollar[3].id.ToKeyword())
+ }
+ case 196:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.rngs = &rangeSlices{ranges: []*ast.RangeNode{protoDollar[1].rng}}
+ }
+ case 197:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoDollar[1].rngs.ranges = append(protoDollar[1].rngs.ranges, protoDollar[3].rng)
+ protoDollar[1].rngs.commas = append(protoDollar[1].rngs.commas, protoDollar[2].b)
+ protoVAL.rngs = protoDollar[1].rngs
+ }
+ case 198:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.rng = ast.NewRangeNode(protoDollar[1].il, nil, nil, nil)
+ }
+ case 199:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoVAL.rng = ast.NewRangeNode(protoDollar[1].il, protoDollar[2].id.ToKeyword(), protoDollar[3].il, nil)
+ }
+ case 200:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoVAL.rng = ast.NewRangeNode(protoDollar[1].il, protoDollar[2].id.ToKeyword(), nil, protoDollar[3].id.ToKeyword())
+ }
+ case 201:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.il = protoDollar[1].i
+ }
+ case 202:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.il = ast.NewNegativeIntLiteralNode(protoDollar[1].b, protoDollar[2].i)
+ }
+ case 203:
+ protoDollar = protoS[protopt-4 : protopt+1]
+ {
+ // TODO: Tolerate a missing semicolon here. This currently creates a shift/reduce conflict
+ // between `reserved 1 to 10` and `reserved 1` followed by `to = 10`.
+ protoVAL.resvd = newNodeWithRunes(ast.NewReservedRangesNode(protoDollar[1].id.ToKeyword(), protoDollar[2].rngs.ranges, protoDollar[2].rngs.commas, protoDollar[3].b), protoDollar[4].bs...)
+ }
+ case 205:
+ protoDollar = protoS[protopt-4 : protopt+1]
+ {
+ // TODO: Tolerate a missing semicolon here. This currently creates a shift/reduce conflict
+ // between `reserved 1 to 10` and `reserved 1` followed by `to = 10`.
+ protoVAL.resvd = newNodeWithRunes(ast.NewReservedRangesNode(protoDollar[1].id.ToKeyword(), protoDollar[2].rngs.ranges, protoDollar[2].rngs.commas, protoDollar[3].b), protoDollar[4].bs...)
+ }
+ case 207:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ semi, extra := protolex.(*protoLex).requireSemicolon(protoDollar[3].bs)
+ protoVAL.resvd = newNodeWithRunes(ast.NewReservedNamesNode(protoDollar[1].id.ToKeyword(), protoDollar[2].names.names, protoDollar[2].names.commas, semi), extra...)
+ }
+ case 208:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ semi, extra := protolex.(*protoLex).requireSemicolon(protoDollar[3].bs)
+ protoVAL.resvd = newNodeWithRunes(ast.NewReservedIdentifiersNode(protoDollar[1].id.ToKeyword(), protoDollar[2].names.idents, protoDollar[2].names.commas, semi), extra...)
+ }
+ case 209:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.names = &nameSlices{names: []ast.StringValueNode{toStringValueNode(protoDollar[1].str)}}
+ }
+ case 210:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoDollar[1].names.names = append(protoDollar[1].names.names, toStringValueNode(protoDollar[3].str))
+ protoDollar[1].names.commas = append(protoDollar[1].names.commas, protoDollar[2].b)
+ protoVAL.names = protoDollar[1].names
+ }
+ case 211:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.names = &nameSlices{idents: []*ast.IdentNode{protoDollar[1].id}}
+ }
+ case 212:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoDollar[1].names.idents = append(protoDollar[1].names.idents, protoDollar[3].id)
+ protoDollar[1].names.commas = append(protoDollar[1].names.commas, protoDollar[2].b)
+ protoVAL.names = protoDollar[1].names
+ }
+ case 213:
+ protoDollar = protoS[protopt-6 : protopt+1]
+ {
+ protoVAL.en = newNodeWithRunes(ast.NewEnumNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id, protoDollar[3].b, protoDollar[4].enElements, protoDollar[5].b), protoDollar[6].bs...)
+ }
+ case 214:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.enElements = prependRunes(toEnumElement, protoDollar[1].bs, nil)
+ }
+ case 215:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.enElements = prependRunes(toEnumElement, protoDollar[1].bs, protoDollar[2].enElements)
+ }
+ case 216:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.enElements = append(protoDollar[1].enElements, protoDollar[2].enElements...)
+ }
+ case 217:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.enElements = protoDollar[1].enElements
+ }
+ case 218:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.enElements = toElements[ast.EnumElement](toEnumElement, protoDollar[1].opt.Node, protoDollar[1].opt.Runes)
+ }
+ case 219:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.enElements = toElements[ast.EnumElement](toEnumElement, protoDollar[1].env.Node, protoDollar[1].env.Runes)
+ }
+ case 220:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.enElements = toElements[ast.EnumElement](toEnumElement, protoDollar[1].resvd.Node, protoDollar[1].resvd.Runes)
+ }
+ case 221:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.enElements = nil
+ }
+ case 222:
+ protoDollar = protoS[protopt-4 : protopt+1]
+ {
+ semi, extra := protolex.(*protoLex).requireSemicolon(protoDollar[4].bs)
+ protoVAL.env = newNodeWithRunes(ast.NewEnumValueNode(protoDollar[1].id, protoDollar[2].b, protoDollar[3].il, nil, semi), extra...)
+ }
+ case 223:
+ protoDollar = protoS[protopt-5 : protopt+1]
+ {
+ semi, extra := protolex.(*protoLex).requireSemicolon(protoDollar[5].bs)
+ protoVAL.env = newNodeWithRunes(ast.NewEnumValueNode(protoDollar[1].id, protoDollar[2].b, protoDollar[3].il, protoDollar[4].cmpctOpts, semi), extra...)
+ }
+ case 224:
+ protoDollar = protoS[protopt-6 : protopt+1]
+ {
+ protoVAL.msg = newNodeWithRunes(ast.NewMessageNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id, protoDollar[3].b, protoDollar[4].msgElements, protoDollar[5].b), protoDollar[6].bs...)
+ }
+ case 225:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.msgElements = prependRunes(toMessageElement, protoDollar[1].bs, nil)
+ }
+ case 226:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.msgElements = prependRunes(toMessageElement, protoDollar[1].bs, protoDollar[2].msgElements)
+ }
+ case 227:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.msgElements = append(protoDollar[1].msgElements, protoDollar[2].msgElements...)
+ }
+ case 228:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.msgElements = protoDollar[1].msgElements
+ }
+ case 229:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.msgElements = toElements[ast.MessageElement](toMessageElement, protoDollar[1].msgFld.Node, protoDollar[1].msgFld.Runes)
+ }
+ case 230:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.msgElements = toElements[ast.MessageElement](toMessageElement, protoDollar[1].en.Node, protoDollar[1].en.Runes)
+ }
+ case 231:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.msgElements = toElements[ast.MessageElement](toMessageElement, protoDollar[1].msg.Node, protoDollar[1].msg.Runes)
+ }
+ case 232:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.msgElements = toElements[ast.MessageElement](toMessageElement, protoDollar[1].extend.Node, protoDollar[1].extend.Runes)
+ }
+ case 233:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.msgElements = toElements[ast.MessageElement](toMessageElement, protoDollar[1].ext.Node, protoDollar[1].ext.Runes)
+ }
+ case 234:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.msgElements = toElements[ast.MessageElement](toMessageElement, protoDollar[1].msgGrp.Node, protoDollar[1].msgGrp.Runes)
+ }
+ case 235:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.msgElements = toElements[ast.MessageElement](toMessageElement, protoDollar[1].opt.Node, protoDollar[1].opt.Runes)
+ }
+ case 236:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.msgElements = toElements[ast.MessageElement](toMessageElement, protoDollar[1].oo.Node, protoDollar[1].oo.Runes)
+ }
+ case 237:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.msgElements = toElements[ast.MessageElement](toMessageElement, protoDollar[1].mapFld.Node, protoDollar[1].mapFld.Runes)
+ }
+ case 238:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.msgElements = toElements[ast.MessageElement](toMessageElement, protoDollar[1].resvd.Node, protoDollar[1].resvd.Runes)
+ }
+ case 239:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.msgElements = nil
+ }
+ case 240:
+ protoDollar = protoS[protopt-6 : protopt+1]
+ {
+ semis, extra := protolex.(*protoLex).requireSemicolon(protoDollar[6].bs)
+ protoVAL.msgFld = newNodeWithRunes(ast.NewFieldNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, nil, semis), extra...)
+ }
+ case 241:
+ protoDollar = protoS[protopt-7 : protopt+1]
+ {
+ semis, extra := protolex.(*protoLex).requireSemicolon(protoDollar[7].bs)
+ protoVAL.msgFld = newNodeWithRunes(ast.NewFieldNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, protoDollar[6].cmpctOpts, semis), extra...)
+ }
+ case 242:
+ protoDollar = protoS[protopt-5 : protopt+1]
+ {
+ semis, extra := protolex.(*protoLex).requireSemicolon(protoDollar[5].bs)
+ protoVAL.msgFld = newNodeWithRunes(ast.NewFieldNode(nil, protoDollar[1].tid, protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, nil, semis), extra...)
+ }
+ case 243:
+ protoDollar = protoS[protopt-6 : protopt+1]
+ {
+ semis, extra := protolex.(*protoLex).requireSemicolon(protoDollar[6].bs)
+ protoVAL.msgFld = newNodeWithRunes(ast.NewFieldNode(nil, protoDollar[1].tid, protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, protoDollar[5].cmpctOpts, semis), extra...)
+ }
+ case 244:
+ protoDollar = protoS[protopt-4 : protopt+1]
+ {
+ semis, extra := protolex.(*protoLex).requireSemicolon(protoDollar[4].bs)
+ protoVAL.msgFld = newNodeWithRunes(ast.NewFieldNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].id, nil, nil, nil, semis), extra...)
+ }
+ case 245:
+ protoDollar = protoS[protopt-5 : protopt+1]
+ {
+ semis, extra := protolex.(*protoLex).requireSemicolon(protoDollar[5].bs)
+ protoVAL.msgFld = newNodeWithRunes(ast.NewFieldNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].id, nil, nil, protoDollar[4].cmpctOpts, semis), extra...)
+ }
+ case 246:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ semis, extra := protolex.(*protoLex).requireSemicolon(protoDollar[3].bs)
+ protoVAL.msgFld = newNodeWithRunes(ast.NewFieldNode(nil, protoDollar[1].tid, protoDollar[2].id, nil, nil, nil, semis), extra...)
+ }
+ case 247:
+ protoDollar = protoS[protopt-4 : protopt+1]
+ {
+ semis, extra := protolex.(*protoLex).requireSemicolon(protoDollar[4].bs)
+ protoVAL.msgFld = newNodeWithRunes(ast.NewFieldNode(nil, protoDollar[1].tid, protoDollar[2].id, nil, nil, protoDollar[3].cmpctOpts, semis), extra...)
+ }
+ case 248:
+ protoDollar = protoS[protopt-6 : protopt+1]
+ {
+ protoVAL.extend = newNodeWithRunes(ast.NewExtendNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].b, protoDollar[4].extElements, protoDollar[5].b), protoDollar[6].bs...)
+ }
+ case 249:
+ protoDollar = protoS[protopt-0 : protopt+1]
+ {
+ protoVAL.extElements = nil
+ }
+ case 251:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ if protoDollar[2].extElement != nil {
+ protoVAL.extElements = append(protoDollar[1].extElements, protoDollar[2].extElement)
+ } else {
+ protoVAL.extElements = protoDollar[1].extElements
+ }
+ }
+ case 252:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ if protoDollar[1].extElement != nil {
+ protoVAL.extElements = []ast.ExtendElement{protoDollar[1].extElement}
+ } else {
+ protoVAL.extElements = nil
+ }
+ }
+ case 253:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.extElement = protoDollar[1].fld
+ }
+ case 254:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.extElement = protoDollar[1].grp
+ }
+ case 255:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.extElement = nil
+ }
+ case 256:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.extElement = nil
+ }
+ case 257:
+ protoDollar = protoS[protopt-6 : protopt+1]
+ {
+ protoVAL.fld = ast.NewFieldNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, nil, protoDollar[6].b)
+ }
+ case 258:
+ protoDollar = protoS[protopt-7 : protopt+1]
+ {
+ protoVAL.fld = ast.NewFieldNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, protoDollar[6].cmpctOpts, protoDollar[7].b)
+ }
+ case 259:
+ protoDollar = protoS[protopt-5 : protopt+1]
+ {
+ protoVAL.fld = ast.NewFieldNode(nil, protoDollar[1].tid, protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, nil, protoDollar[5].b)
+ }
+ case 260:
+ protoDollar = protoS[protopt-6 : protopt+1]
+ {
+ protoVAL.fld = ast.NewFieldNode(nil, protoDollar[1].tid, protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, protoDollar[5].cmpctOpts, protoDollar[6].b)
+ }
+ case 261:
+ protoDollar = protoS[protopt-6 : protopt+1]
+ {
+ protoVAL.svc = newNodeWithRunes(ast.NewServiceNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id, protoDollar[3].b, protoDollar[4].svcElements, protoDollar[5].b), protoDollar[6].bs...)
+ }
+ case 262:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.svcElements = prependRunes(toServiceElement, protoDollar[1].bs, nil)
+ }
+ case 263:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.svcElements = prependRunes(toServiceElement, protoDollar[1].bs, protoDollar[2].svcElements)
+ }
+ case 264:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.svcElements = append(protoDollar[1].svcElements, protoDollar[2].svcElements...)
+ }
+ case 265:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.svcElements = protoDollar[1].svcElements
+ }
+ case 266:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.svcElements = toElements[ast.ServiceElement](toServiceElement, protoDollar[1].opt.Node, protoDollar[1].opt.Runes)
+ }
+ case 267:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.svcElements = toElements[ast.ServiceElement](toServiceElement, protoDollar[1].mtd.Node, protoDollar[1].mtd.Runes)
+ }
+ case 268:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.svcElements = nil
+ }
+ case 269:
+ protoDollar = protoS[protopt-6 : protopt+1]
+ {
+ semi, extra := protolex.(*protoLex).requireSemicolon(protoDollar[6].bs)
+ protoVAL.mtd = newNodeWithRunes(ast.NewRPCNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id, protoDollar[3].mtdMsgType, protoDollar[4].id.ToKeyword(), protoDollar[5].mtdMsgType, semi), extra...)
+ }
+ case 270:
+ protoDollar = protoS[protopt-9 : protopt+1]
+ {
+ protoVAL.mtd = newNodeWithRunes(ast.NewRPCNodeWithBody(protoDollar[1].id.ToKeyword(), protoDollar[2].id, protoDollar[3].mtdMsgType, protoDollar[4].id.ToKeyword(), protoDollar[5].mtdMsgType, protoDollar[6].b, protoDollar[7].mtdElements, protoDollar[8].b), protoDollar[9].bs...)
+ }
+ case 271:
+ protoDollar = protoS[protopt-4 : protopt+1]
+ {
+ protoVAL.mtdMsgType = ast.NewRPCTypeNode(protoDollar[1].b, protoDollar[2].id.ToKeyword(), protoDollar[3].tid, protoDollar[4].b)
+ }
+ case 272:
+ protoDollar = protoS[protopt-3 : protopt+1]
+ {
+ protoVAL.mtdMsgType = ast.NewRPCTypeNode(protoDollar[1].b, nil, protoDollar[2].tid, protoDollar[3].b)
+ }
+ case 273:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.mtdElements = prependRunes(toMethodElement, protoDollar[1].bs, nil)
+ }
+ case 274:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.mtdElements = prependRunes(toMethodElement, protoDollar[1].bs, protoDollar[2].mtdElements)
+ }
+ case 275:
+ protoDollar = protoS[protopt-2 : protopt+1]
+ {
+ protoVAL.mtdElements = append(protoDollar[1].mtdElements, protoDollar[2].mtdElements...)
+ }
+ case 276:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.mtdElements = protoDollar[1].mtdElements
+ }
+ case 277:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.mtdElements = toElements[ast.RPCElement](toMethodElement, protoDollar[1].opt.Node, protoDollar[1].opt.Runes)
+ }
+ case 278:
+ protoDollar = protoS[protopt-1 : protopt+1]
+ {
+ protoVAL.mtdElements = nil
+ }
+ }
+ goto protostack /* stack new state and value */
+}
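
The semantic actions above (cases 252 through 278) assemble the AST nodes for extend blocks, extension fields, services, and rpc methods. The sketch below is not part of the patch; it is a minimal, hedged example of walking the AST that this generated state machine produces, assuming the package's exported Parse function and reporter.NewHandler have the signatures they appear to have elsewhere in this vendored package.

// Hypothetical usage sketch; not part of the vendored diff.
package main

import (
	"fmt"
	"strings"

	"github.com/bufbuild/protocompile/ast"
	"github.com/bufbuild/protocompile/parser"
	"github.com/bufbuild/protocompile/reporter"
)

func main() {
	src := `
syntax = "proto3";
package demo;
message Ping { string msg = 1; }
service Echo {
  rpc Send(Ping) returns (Ping);
}
`
	// Assumption: a nil reporter makes the handler fail fast on the first error.
	handler := reporter.NewHandler(nil)
	fileNode, err := parser.Parse("demo.proto", strings.NewReader(src), handler)
	if err != nil {
		panic(err)
	}
	// Walk top-level declarations; ServiceNode and MessageNode are the node
	// types appended by the grammar actions above.
	for _, decl := range fileNode.Decls {
		switch d := decl.(type) {
		case *ast.ServiceNode:
			fmt.Println("service:", d.Name.Val)
		case *ast.MessageNode:
			fmt.Println("message:", d.Name.Val)
		}
	}
}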
diff --git a/vendor/github.com/bufbuild/protocompile/parser/result.go b/vendor/github.com/bufbuild/protocompile/parser/result.go
new file mode 100644
index 0000000..4aa83e7
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/parser/result.go
@@ -0,0 +1,1012 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package parser
+
+import (
+ "bytes"
+ "fmt"
+ "math"
+ "sort"
+ "strings"
+ "unicode"
+
+ "google.golang.org/protobuf/proto"
+ "google.golang.org/protobuf/reflect/protoreflect"
+ "google.golang.org/protobuf/types/descriptorpb"
+
+ "github.com/bufbuild/protocompile/ast"
+ "github.com/bufbuild/protocompile/internal"
+ "github.com/bufbuild/protocompile/internal/editions"
+ "github.com/bufbuild/protocompile/reporter"
+)
+
+type result struct {
+ file *ast.FileNode
+ proto *descriptorpb.FileDescriptorProto
+
+ nodes map[proto.Message]ast.Node
+ ifNoAST *ast.NoSourceNode
+}
+
+// ResultWithoutAST returns a parse result that has no AST. All methods for
+// looking up AST nodes return a placeholder node that contains only the filename
+// in position information.
+func ResultWithoutAST(proto *descriptorpb.FileDescriptorProto) Result {
+ return &result{proto: proto, ifNoAST: ast.NewNoSourceNode(proto.GetName())}
+}
+
+// ResultFromAST constructs a descriptor proto from the given AST. The returned
+// result includes the descriptor proto and also contains an index that can be
+// used to lookup AST node information for elements in the descriptor proto
+// hierarchy.
+//
+// If validate is true, some basic validation is performed, to make sure the
+// resulting descriptor proto is valid per protobuf rules and semantics. Only
+// some language elements can be validated since some rules and semantics can
+// only be checked after all symbols are resolved, which happens in the
+// linking step.
+//
+// The given handler is used to report any errors or warnings encountered. If any
+// errors are reported, this function returns a non-nil error.
+func ResultFromAST(file *ast.FileNode, validate bool, handler *reporter.Handler) (Result, error) {
+ filename := file.Name()
+ r := &result{file: file, nodes: map[proto.Message]ast.Node{}}
+ r.createFileDescriptor(filename, file, handler)
+ if validate {
+ validateBasic(r, handler)
+ }
+ // Now that we're done validating, we can set any missing labels to optional
+ // (we leave them absent in first pass if label was missing in source, so we
+ // can do validation on presence of label, but final descriptors are expected
+ // to always have them present).
+ fillInMissingLabels(r.proto)
+ return r, handler.Error()
+}
+
+func (r *result) AST() *ast.FileNode {
+ return r.file
+}
+
+func (r *result) FileDescriptorProto() *descriptorpb.FileDescriptorProto {
+ return r.proto
+}
+
+func (r *result) createFileDescriptor(filename string, file *ast.FileNode, handler *reporter.Handler) {
+ fd := &descriptorpb.FileDescriptorProto{Name: proto.String(filename)}
+ r.proto = fd
+
+ r.putFileNode(fd, file)
+
+ var syntax protoreflect.Syntax
+ switch {
+ case file.Syntax != nil:
+ switch file.Syntax.Syntax.AsString() {
+ case "proto3":
+ syntax = protoreflect.Proto3
+ case "proto2":
+ syntax = protoreflect.Proto2
+ default:
+ nodeInfo := file.NodeInfo(file.Syntax.Syntax)
+ if handler.HandleErrorf(nodeInfo, `syntax value must be "proto2" or "proto3"`) != nil {
+ return
+ }
+ }
+
+ // proto2 is the default, so no need to set for that value
+ if syntax != protoreflect.Proto2 {
+ fd.Syntax = proto.String(file.Syntax.Syntax.AsString())
+ }
+ case file.Edition != nil:
+ edition := file.Edition.Edition.AsString()
+ syntax = protoreflect.Editions
+
+ fd.Syntax = proto.String("editions")
+ editionEnum, ok := editions.SupportedEditions[edition]
+ if !ok {
+ nodeInfo := file.NodeInfo(file.Edition.Edition)
+ editionStrs := make([]string, 0, len(editions.SupportedEditions))
+ for supportedEdition := range editions.SupportedEditions {
+ editionStrs = append(editionStrs, fmt.Sprintf("%q", supportedEdition))
+ }
+ sort.Strings(editionStrs)
+ if handler.HandleErrorf(nodeInfo, `edition value %q not recognized; should be one of [%s]`, edition, strings.Join(editionStrs, ",")) != nil {
+ return
+ }
+ }
+ fd.Edition = editionEnum.Enum()
+ default:
+ syntax = protoreflect.Proto2
+ nodeInfo := file.NodeInfo(file)
+ handler.HandleWarningWithPos(nodeInfo, ErrNoSyntax)
+ }
+
+ for _, decl := range file.Decls {
+ if handler.ReporterError() != nil {
+ return
+ }
+ switch decl := decl.(type) {
+ case *ast.EnumNode:
+ fd.EnumType = append(fd.EnumType, r.asEnumDescriptor(decl, syntax, handler))
+ case *ast.ExtendNode:
+ r.addExtensions(decl, &fd.Extension, &fd.MessageType, syntax, handler, 0)
+ case *ast.ImportNode:
+ index := len(fd.Dependency)
+ fd.Dependency = append(fd.Dependency, decl.Name.AsString())
+ if decl.Public != nil {
+ fd.PublicDependency = append(fd.PublicDependency, int32(index))
+ } else if decl.Weak != nil {
+ fd.WeakDependency = append(fd.WeakDependency, int32(index))
+ }
+ case *ast.MessageNode:
+ fd.MessageType = append(fd.MessageType, r.asMessageDescriptor(decl, syntax, handler, 1))
+ case *ast.OptionNode:
+ if fd.Options == nil {
+ fd.Options = &descriptorpb.FileOptions{}
+ }
+ fd.Options.UninterpretedOption = append(fd.Options.UninterpretedOption, r.asUninterpretedOption(decl))
+ case *ast.ServiceNode:
+ fd.Service = append(fd.Service, r.asServiceDescriptor(decl))
+ case *ast.PackageNode:
+ if fd.Package != nil {
+ nodeInfo := file.NodeInfo(decl)
+ if handler.HandleErrorf(nodeInfo, "files should have only one package declaration") != nil {
+ return
+ }
+ }
+ pkgName := string(decl.Name.AsIdentifier())
+ if len(pkgName) >= 512 {
+ nodeInfo := file.NodeInfo(decl.Name)
+ if handler.HandleErrorf(nodeInfo, "package name (with whitespace removed) must be less than 512 characters long") != nil {
+ return
+ }
+ }
+ if strings.Count(pkgName, ".") > 100 {
+ nodeInfo := file.NodeInfo(decl.Name)
+ if handler.HandleErrorf(nodeInfo, "package name may not contain more than 100 periods") != nil {
+ return
+ }
+ }
+ fd.Package = proto.String(string(decl.Name.AsIdentifier()))
+ }
+ }
+}
+
+func (r *result) asUninterpretedOptions(nodes []*ast.OptionNode) []*descriptorpb.UninterpretedOption {
+ if len(nodes) == 0 {
+ return nil
+ }
+ opts := make([]*descriptorpb.UninterpretedOption, len(nodes))
+ for i, n := range nodes {
+ opts[i] = r.asUninterpretedOption(n)
+ }
+ return opts
+}
+
+func (r *result) asUninterpretedOption(node *ast.OptionNode) *descriptorpb.UninterpretedOption {
+ opt := &descriptorpb.UninterpretedOption{Name: r.asUninterpretedOptionName(node.Name.Parts)}
+ r.putOptionNode(opt, node)
+
+ switch val := node.Val.Value().(type) {
+ case bool:
+ if val {
+ opt.IdentifierValue = proto.String("true")
+ } else {
+ opt.IdentifierValue = proto.String("false")
+ }
+ case int64:
+ opt.NegativeIntValue = proto.Int64(val)
+ case uint64:
+ opt.PositiveIntValue = proto.Uint64(val)
+ case float64:
+ opt.DoubleValue = proto.Float64(val)
+ case string:
+ opt.StringValue = []byte(val)
+ case ast.Identifier:
+ opt.IdentifierValue = proto.String(string(val))
+ default:
+ // the grammar does not allow arrays here, so the only possible case
+ // left should be []*ast.MessageFieldNode, which corresponds to an
+ // *ast.MessageLiteralNode
+ if n, ok := node.Val.(*ast.MessageLiteralNode); ok {
+ var buf bytes.Buffer
+ for i, el := range n.Elements {
+ flattenNode(r.file, el, &buf)
+ if len(n.Seps) > i && n.Seps[i] != nil {
+ buf.WriteRune(' ')
+ buf.WriteRune(n.Seps[i].Rune)
+ }
+ }
+ aggStr := buf.String()
+ opt.AggregateValue = proto.String(aggStr)
+ }
+ // TODO: add an else branch that reports an error or panics?
+ }
+ return opt
+}
+
+func flattenNode(f *ast.FileNode, n ast.Node, buf *bytes.Buffer) {
+ if cn, ok := n.(ast.CompositeNode); ok {
+ for _, ch := range cn.Children() {
+ flattenNode(f, ch, buf)
+ }
+ return
+ }
+
+ if buf.Len() > 0 {
+ buf.WriteRune(' ')
+ }
+ buf.WriteString(f.NodeInfo(n).RawText())
+}
+
+func (r *result) asUninterpretedOptionName(parts []*ast.FieldReferenceNode) []*descriptorpb.UninterpretedOption_NamePart {
+ ret := make([]*descriptorpb.UninterpretedOption_NamePart, len(parts))
+ for i, part := range parts {
+ np := &descriptorpb.UninterpretedOption_NamePart{
+ NamePart: proto.String(string(part.Name.AsIdentifier())),
+ IsExtension: proto.Bool(part.IsExtension()),
+ }
+ r.putOptionNamePartNode(np, part)
+ ret[i] = np
+ }
+ return ret
+}
+
+func (r *result) addExtensions(ext *ast.ExtendNode, flds *[]*descriptorpb.FieldDescriptorProto, msgs *[]*descriptorpb.DescriptorProto, syntax protoreflect.Syntax, handler *reporter.Handler, depth int) {
+ extendee := string(ext.Extendee.AsIdentifier())
+ count := 0
+ for _, decl := range ext.Decls {
+ switch decl := decl.(type) {
+ case *ast.FieldNode:
+ count++
+ // use higher limit since we don't know yet whether extendee is messageset wire format
+ fd := r.asFieldDescriptor(decl, internal.MaxTag, syntax, handler)
+ fd.Extendee = proto.String(extendee)
+ *flds = append(*flds, fd)
+ case *ast.GroupNode:
+ count++
+ // ditto: use higher limit right now
+ fd, md := r.asGroupDescriptors(decl, syntax, internal.MaxTag, handler, depth+1)
+ fd.Extendee = proto.String(extendee)
+ *flds = append(*flds, fd)
+ *msgs = append(*msgs, md)
+ }
+ }
+ if count == 0 {
+ nodeInfo := r.file.NodeInfo(ext)
+ _ = handler.HandleErrorf(nodeInfo, "extend sections must define at least one extension")
+ }
+}
+
+func asLabel(lbl *ast.FieldLabel) *descriptorpb.FieldDescriptorProto_Label {
+ if !lbl.IsPresent() {
+ return nil
+ }
+ switch {
+ case lbl.Repeated:
+ return descriptorpb.FieldDescriptorProto_LABEL_REPEATED.Enum()
+ case lbl.Required:
+ return descriptorpb.FieldDescriptorProto_LABEL_REQUIRED.Enum()
+ default:
+ return descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum()
+ }
+}
+
+func (r *result) asFieldDescriptor(node *ast.FieldNode, maxTag int32, syntax protoreflect.Syntax, handler *reporter.Handler) *descriptorpb.FieldDescriptorProto {
+ var tag *int32
+ if node.Tag != nil {
+ if err := r.checkTag(node.Tag, node.Tag.Val, maxTag); err != nil {
+ _ = handler.HandleError(err)
+ }
+ tag = proto.Int32(int32(node.Tag.Val))
+ }
+ fd := newFieldDescriptor(node.Name.Val, string(node.FldType.AsIdentifier()), tag, asLabel(&node.Label))
+ r.putFieldNode(fd, node)
+ if opts := node.Options.GetElements(); len(opts) > 0 {
+ fd.Options = &descriptorpb.FieldOptions{UninterpretedOption: r.asUninterpretedOptions(opts)}
+ }
+ if syntax == protoreflect.Proto3 && fd.Label != nil && fd.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL {
+ fd.Proto3Optional = proto.Bool(true)
+ }
+ return fd
+}
+
+var fieldTypes = map[string]descriptorpb.FieldDescriptorProto_Type{
+ "double": descriptorpb.FieldDescriptorProto_TYPE_DOUBLE,
+ "float": descriptorpb.FieldDescriptorProto_TYPE_FLOAT,
+ "int32": descriptorpb.FieldDescriptorProto_TYPE_INT32,
+ "int64": descriptorpb.FieldDescriptorProto_TYPE_INT64,
+ "uint32": descriptorpb.FieldDescriptorProto_TYPE_UINT32,
+ "uint64": descriptorpb.FieldDescriptorProto_TYPE_UINT64,
+ "sint32": descriptorpb.FieldDescriptorProto_TYPE_SINT32,
+ "sint64": descriptorpb.FieldDescriptorProto_TYPE_SINT64,
+ "fixed32": descriptorpb.FieldDescriptorProto_TYPE_FIXED32,
+ "fixed64": descriptorpb.FieldDescriptorProto_TYPE_FIXED64,
+ "sfixed32": descriptorpb.FieldDescriptorProto_TYPE_SFIXED32,
+ "sfixed64": descriptorpb.FieldDescriptorProto_TYPE_SFIXED64,
+ "bool": descriptorpb.FieldDescriptorProto_TYPE_BOOL,
+ "string": descriptorpb.FieldDescriptorProto_TYPE_STRING,
+ "bytes": descriptorpb.FieldDescriptorProto_TYPE_BYTES,
+}
+
+func newFieldDescriptor(name string, fieldType string, tag *int32, lbl *descriptorpb.FieldDescriptorProto_Label) *descriptorpb.FieldDescriptorProto {
+ fd := &descriptorpb.FieldDescriptorProto{
+ Name: proto.String(name),
+ JsonName: proto.String(internal.JSONName(name)),
+ Number: tag,
+ Label: lbl,
+ }
+ t, ok := fieldTypes[fieldType]
+ if ok {
+ fd.Type = t.Enum()
+ } else {
+ // NB: we don't have enough info to determine whether this is an enum
+ // or a message type, so we'll leave Type nil and set it later
+ // (during linking)
+ fd.TypeName = proto.String(fieldType)
+ }
+ return fd
+}
+
+func (r *result) asGroupDescriptors(group *ast.GroupNode, syntax protoreflect.Syntax, maxTag int32, handler *reporter.Handler, depth int) (*descriptorpb.FieldDescriptorProto, *descriptorpb.DescriptorProto) {
+ var tag *int32
+ if group.Tag != nil {
+ if err := r.checkTag(group.Tag, group.Tag.Val, maxTag); err != nil {
+ _ = handler.HandleError(err)
+ }
+ tag = proto.Int32(int32(group.Tag.Val))
+ }
+ if !unicode.IsUpper(rune(group.Name.Val[0])) {
+ nameNodeInfo := r.file.NodeInfo(group.Name)
+ _ = handler.HandleErrorf(nameNodeInfo, "group %s should have a name that starts with a capital letter", group.Name.Val)
+ }
+ fieldName := strings.ToLower(group.Name.Val)
+ fd := &descriptorpb.FieldDescriptorProto{
+ Name: proto.String(fieldName),
+ JsonName: proto.String(internal.JSONName(fieldName)),
+ Number: tag,
+ Label: asLabel(&group.Label),
+ Type: descriptorpb.FieldDescriptorProto_TYPE_GROUP.Enum(),
+ TypeName: proto.String(group.Name.Val),
+ }
+ r.putFieldNode(fd, group)
+ if opts := group.Options.GetElements(); len(opts) > 0 {
+ fd.Options = &descriptorpb.FieldOptions{UninterpretedOption: r.asUninterpretedOptions(opts)}
+ }
+ md := &descriptorpb.DescriptorProto{Name: proto.String(group.Name.Val)}
+ groupMsg := group.AsMessage()
+ r.putMessageNode(md, groupMsg)
+ // don't bother processing body if we've exceeded depth
+ if r.checkDepth(depth, groupMsg, handler) {
+ r.addMessageBody(md, &group.MessageBody, syntax, handler, depth)
+ }
+ return fd, md
+}
+
+func (r *result) asMapDescriptors(mapField *ast.MapFieldNode, syntax protoreflect.Syntax, maxTag int32, handler *reporter.Handler, depth int) (*descriptorpb.FieldDescriptorProto, *descriptorpb.DescriptorProto) {
+ var tag *int32
+ if mapField.Tag != nil {
+ if err := r.checkTag(mapField.Tag, mapField.Tag.Val, maxTag); err != nil {
+ _ = handler.HandleError(err)
+ }
+ tag = proto.Int32(int32(mapField.Tag.Val))
+ }
+ mapEntry := mapField.AsMessage()
+ r.checkDepth(depth, mapEntry, handler)
+ var lbl *descriptorpb.FieldDescriptorProto_Label
+ if syntax == protoreflect.Proto2 {
+ lbl = descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum()
+ }
+ keyFd := newFieldDescriptor("key", mapField.MapType.KeyType.Val, proto.Int32(1), lbl)
+ r.putFieldNode(keyFd, mapField.KeyField())
+ valFd := newFieldDescriptor("value", string(mapField.MapType.ValueType.AsIdentifier()), proto.Int32(2), lbl)
+ r.putFieldNode(valFd, mapField.ValueField())
+ entryName := internal.InitCap(internal.JSONName(mapField.Name.Val)) + "Entry"
+ fd := newFieldDescriptor(mapField.Name.Val, entryName, tag, descriptorpb.FieldDescriptorProto_LABEL_REPEATED.Enum())
+ if opts := mapField.Options.GetElements(); len(opts) > 0 {
+ fd.Options = &descriptorpb.FieldOptions{UninterpretedOption: r.asUninterpretedOptions(opts)}
+ }
+ r.putFieldNode(fd, mapField)
+ md := &descriptorpb.DescriptorProto{
+ Name: proto.String(entryName),
+ Options: &descriptorpb.MessageOptions{MapEntry: proto.Bool(true)},
+ Field: []*descriptorpb.FieldDescriptorProto{keyFd, valFd},
+ }
+ r.putMessageNode(md, mapEntry)
+ return fd, md
+}
+
+func (r *result) asExtensionRanges(node *ast.ExtensionRangeNode, maxTag int32, handler *reporter.Handler) []*descriptorpb.DescriptorProto_ExtensionRange {
+ opts := r.asUninterpretedOptions(node.Options.GetElements())
+ ers := make([]*descriptorpb.DescriptorProto_ExtensionRange, len(node.Ranges))
+ for i, rng := range node.Ranges {
+ start, end := r.getRangeBounds(rng, 1, maxTag, handler)
+ er := &descriptorpb.DescriptorProto_ExtensionRange{
+ Start: proto.Int32(start),
+ End: proto.Int32(end + 1),
+ }
+ if len(opts) > 0 {
+ er.Options = &descriptorpb.ExtensionRangeOptions{UninterpretedOption: opts}
+ }
+ r.putExtensionRangeNode(er, node, rng)
+ ers[i] = er
+ }
+ return ers
+}
+
+func (r *result) asEnumValue(ev *ast.EnumValueNode, handler *reporter.Handler) *descriptorpb.EnumValueDescriptorProto {
+ num, ok := ast.AsInt32(ev.Number, math.MinInt32, math.MaxInt32)
+ if !ok {
+ numberNodeInfo := r.file.NodeInfo(ev.Number)
+ _ = handler.HandleErrorf(numberNodeInfo, "value %d is out of range: should be between %d and %d", ev.Number.Value(), math.MinInt32, math.MaxInt32)
+ }
+ evd := &descriptorpb.EnumValueDescriptorProto{Name: proto.String(ev.Name.Val), Number: proto.Int32(num)}
+ r.putEnumValueNode(evd, ev)
+ if opts := ev.Options.GetElements(); len(opts) > 0 {
+ evd.Options = &descriptorpb.EnumValueOptions{UninterpretedOption: r.asUninterpretedOptions(opts)}
+ }
+ return evd
+}
+
+func (r *result) asMethodDescriptor(node *ast.RPCNode) *descriptorpb.MethodDescriptorProto {
+ md := &descriptorpb.MethodDescriptorProto{
+ Name: proto.String(node.Name.Val),
+ InputType: proto.String(string(node.Input.MessageType.AsIdentifier())),
+ OutputType: proto.String(string(node.Output.MessageType.AsIdentifier())),
+ }
+ r.putMethodNode(md, node)
+ if node.Input.Stream != nil {
+ md.ClientStreaming = proto.Bool(true)
+ }
+ if node.Output.Stream != nil {
+ md.ServerStreaming = proto.Bool(true)
+ }
+ // protoc always adds a MethodOptions if there are brackets
+ // We do the same to match protoc as closely as possible
+ // https://github.com/protocolbuffers/protobuf/blob/0c3f43a6190b77f1f68b7425d1b7e1a8257a8d0c/src/google/protobuf/compiler/parser.cc#L2152
+ if node.OpenBrace != nil {
+ md.Options = &descriptorpb.MethodOptions{}
+ for _, decl := range node.Decls {
+ if option, ok := decl.(*ast.OptionNode); ok {
+ md.Options.UninterpretedOption = append(md.Options.UninterpretedOption, r.asUninterpretedOption(option))
+ }
+ }
+ }
+ return md
+}
+
+func (r *result) asEnumDescriptor(en *ast.EnumNode, syntax protoreflect.Syntax, handler *reporter.Handler) *descriptorpb.EnumDescriptorProto {
+ ed := &descriptorpb.EnumDescriptorProto{Name: proto.String(en.Name.Val)}
+ r.putEnumNode(ed, en)
+ rsvdNames := map[string]ast.SourcePos{}
+ for _, decl := range en.Decls {
+ switch decl := decl.(type) {
+ case *ast.OptionNode:
+ if ed.Options == nil {
+ ed.Options = &descriptorpb.EnumOptions{}
+ }
+ ed.Options.UninterpretedOption = append(ed.Options.UninterpretedOption, r.asUninterpretedOption(decl))
+ case *ast.EnumValueNode:
+ ed.Value = append(ed.Value, r.asEnumValue(decl, handler))
+ case *ast.ReservedNode:
+ r.addReservedNames(&ed.ReservedName, decl, syntax, handler, rsvdNames)
+ for _, rng := range decl.Ranges {
+ ed.ReservedRange = append(ed.ReservedRange, r.asEnumReservedRange(rng, handler))
+ }
+ }
+ }
+ return ed
+}
+
+func (r *result) asEnumReservedRange(rng *ast.RangeNode, handler *reporter.Handler) *descriptorpb.EnumDescriptorProto_EnumReservedRange {
+ start, end := r.getRangeBounds(rng, math.MinInt32, math.MaxInt32, handler)
+ rr := &descriptorpb.EnumDescriptorProto_EnumReservedRange{
+ Start: proto.Int32(start),
+ End: proto.Int32(end),
+ }
+ r.putEnumReservedRangeNode(rr, rng)
+ return rr
+}
+
+func (r *result) asMessageDescriptor(node *ast.MessageNode, syntax protoreflect.Syntax, handler *reporter.Handler, depth int) *descriptorpb.DescriptorProto {
+ msgd := &descriptorpb.DescriptorProto{Name: proto.String(node.Name.Val)}
+ r.putMessageNode(msgd, node)
+ // don't bother processing body if we've exceeded depth
+ if r.checkDepth(depth, node, handler) {
+ r.addMessageBody(msgd, &node.MessageBody, syntax, handler, depth)
+ }
+ return msgd
+}
+
+func (r *result) addReservedNames(names *[]string, node *ast.ReservedNode, syntax protoreflect.Syntax, handler *reporter.Handler, alreadyReserved map[string]ast.SourcePos) {
+ if syntax == protoreflect.Editions {
+ if len(node.Names) > 0 {
+ nameNodeInfo := r.file.NodeInfo(node.Names[0])
+ _ = handler.HandleErrorf(nameNodeInfo, `must use identifiers, not string literals, to reserve names with editions`)
+ }
+ for _, n := range node.Identifiers {
+ name := string(n.AsIdentifier())
+ nameNodeInfo := r.file.NodeInfo(n)
+ if existing, ok := alreadyReserved[name]; ok {
+ _ = handler.HandleErrorf(nameNodeInfo, "name %q is already reserved at %s", name, existing)
+ continue
+ }
+ alreadyReserved[name] = nameNodeInfo.Start()
+ *names = append(*names, name)
+ }
+ return
+ }
+
+ if len(node.Identifiers) > 0 {
+ nameNodeInfo := r.file.NodeInfo(node.Identifiers[0])
+ _ = handler.HandleErrorf(nameNodeInfo, `must use string literals, not identifiers, to reserve names with proto2 and proto3`)
+ }
+ for _, n := range node.Names {
+ name := n.AsString()
+ nameNodeInfo := r.file.NodeInfo(n)
+ if existing, ok := alreadyReserved[name]; ok {
+ _ = handler.HandleErrorf(nameNodeInfo, "name %q is already reserved at %s", name, existing)
+ continue
+ }
+ alreadyReserved[name] = nameNodeInfo.Start()
+ *names = append(*names, name)
+ }
+}
+
+func (r *result) checkDepth(depth int, node ast.MessageDeclNode, handler *reporter.Handler) bool {
+ if depth < 32 {
+ return true
+ }
+ n := ast.Node(node)
+ if grp, ok := n.(*ast.SyntheticGroupMessageNode); ok {
+ // pinpoint the group keyword if the source is a group
+ n = grp.Keyword
+ }
+ _ = handler.HandleErrorf(r.file.NodeInfo(n), "message nesting depth must be less than 32")
+ return false
+}
+
+func (r *result) addMessageBody(msgd *descriptorpb.DescriptorProto, body *ast.MessageBody, syntax protoreflect.Syntax, handler *reporter.Handler, depth int) {
+ // first process any options
+ for _, decl := range body.Decls {
+ if opt, ok := decl.(*ast.OptionNode); ok {
+ if msgd.Options == nil {
+ msgd.Options = &descriptorpb.MessageOptions{}
+ }
+ msgd.Options.UninterpretedOption = append(msgd.Options.UninterpretedOption, r.asUninterpretedOption(opt))
+ }
+ }
+
+ // now that we have options, we can see if this uses messageset wire format, which
+ // impacts how we validate tag numbers in any fields in the message
+ maxTag := int32(internal.MaxNormalTag)
+ messageSetOpt, err := r.isMessageSetWireFormat("message "+msgd.GetName(), msgd, handler)
+ if err != nil {
+ return
+ } else if messageSetOpt != nil {
+ if syntax == protoreflect.Proto3 {
+ node := r.OptionNode(messageSetOpt)
+ nodeInfo := r.file.NodeInfo(node)
+ _ = handler.HandleErrorf(nodeInfo, "messages with message-set wire format are not allowed with proto3 syntax")
+ }
+ maxTag = internal.MaxTag // higher limit for messageset wire format
+ }
+
+ rsvdNames := map[string]ast.SourcePos{}
+
+ // now we can process the rest
+ for _, decl := range body.Decls {
+ switch decl := decl.(type) {
+ case *ast.EnumNode:
+ msgd.EnumType = append(msgd.EnumType, r.asEnumDescriptor(decl, syntax, handler))
+ case *ast.ExtendNode:
+ r.addExtensions(decl, &msgd.Extension, &msgd.NestedType, syntax, handler, depth)
+ case *ast.ExtensionRangeNode:
+ msgd.ExtensionRange = append(msgd.ExtensionRange, r.asExtensionRanges(decl, maxTag, handler)...)
+ case *ast.FieldNode:
+ fd := r.asFieldDescriptor(decl, maxTag, syntax, handler)
+ msgd.Field = append(msgd.Field, fd)
+ case *ast.MapFieldNode:
+ fd, md := r.asMapDescriptors(decl, syntax, maxTag, handler, depth+1)
+ msgd.Field = append(msgd.Field, fd)
+ msgd.NestedType = append(msgd.NestedType, md)
+ case *ast.GroupNode:
+ fd, md := r.asGroupDescriptors(decl, syntax, maxTag, handler, depth+1)
+ msgd.Field = append(msgd.Field, fd)
+ msgd.NestedType = append(msgd.NestedType, md)
+ case *ast.OneofNode:
+ oodIndex := len(msgd.OneofDecl)
+ ood := &descriptorpb.OneofDescriptorProto{Name: proto.String(decl.Name.Val)}
+ r.putOneofNode(ood, decl)
+ msgd.OneofDecl = append(msgd.OneofDecl, ood)
+ ooFields := 0
+ for _, oodecl := range decl.Decls {
+ switch oodecl := oodecl.(type) {
+ case *ast.OptionNode:
+ if ood.Options == nil {
+ ood.Options = &descriptorpb.OneofOptions{}
+ }
+ ood.Options.UninterpretedOption = append(ood.Options.UninterpretedOption, r.asUninterpretedOption(oodecl))
+ case *ast.FieldNode:
+ fd := r.asFieldDescriptor(oodecl, maxTag, syntax, handler)
+ fd.OneofIndex = proto.Int32(int32(oodIndex))
+ msgd.Field = append(msgd.Field, fd)
+ ooFields++
+ case *ast.GroupNode:
+ fd, md := r.asGroupDescriptors(oodecl, syntax, maxTag, handler, depth+1)
+ fd.OneofIndex = proto.Int32(int32(oodIndex))
+ msgd.Field = append(msgd.Field, fd)
+ msgd.NestedType = append(msgd.NestedType, md)
+ ooFields++
+ }
+ }
+ if ooFields == 0 {
+ declNodeInfo := r.file.NodeInfo(decl)
+ _ = handler.HandleErrorf(declNodeInfo, "oneof must contain at least one field")
+ }
+ case *ast.MessageNode:
+ msgd.NestedType = append(msgd.NestedType, r.asMessageDescriptor(decl, syntax, handler, depth+1))
+ case *ast.ReservedNode:
+ r.addReservedNames(&msgd.ReservedName, decl, syntax, handler, rsvdNames)
+ for _, rng := range decl.Ranges {
+ msgd.ReservedRange = append(msgd.ReservedRange, r.asMessageReservedRange(rng, maxTag, handler))
+ }
+ }
+ }
+
+ if messageSetOpt != nil {
+ if len(msgd.Field) > 0 {
+ node := r.FieldNode(msgd.Field[0])
+ nodeInfo := r.file.NodeInfo(node)
+ _ = handler.HandleErrorf(nodeInfo, "messages with message-set wire format cannot contain non-extension fields")
+ }
+ if len(msgd.ExtensionRange) == 0 {
+ node := r.OptionNode(messageSetOpt)
+ nodeInfo := r.file.NodeInfo(node)
+ _ = handler.HandleErrorf(nodeInfo, "messages with message-set wire format must contain at least one extension range")
+ }
+ }
+
+ // process any proto3_optional fields
+ if syntax == protoreflect.Proto3 {
+ r.processProto3OptionalFields(msgd)
+ }
+}
+
+func (r *result) isMessageSetWireFormat(scope string, md *descriptorpb.DescriptorProto, handler *reporter.Handler) (*descriptorpb.UninterpretedOption, error) {
+ uo := md.GetOptions().GetUninterpretedOption()
+ index, err := internal.FindOption(r, handler.HandleErrorf, scope, uo, "message_set_wire_format")
+ if err != nil {
+ return nil, err
+ }
+ if index == -1 {
+ // no such option
+ return nil, nil
+ }
+
+ opt := uo[index]
+
+ switch opt.GetIdentifierValue() {
+ case "true":
+ return opt, nil
+ case "false":
+ return nil, nil
+ default:
+ optNode := r.OptionNode(opt)
+ optNodeInfo := r.file.NodeInfo(optNode.GetValue())
+ return nil, handler.HandleErrorf(optNodeInfo, "%s: expecting bool value for message_set_wire_format option", scope)
+ }
+}
+
+func (r *result) asMessageReservedRange(rng *ast.RangeNode, maxTag int32, handler *reporter.Handler) *descriptorpb.DescriptorProto_ReservedRange {
+ start, end := r.getRangeBounds(rng, 1, maxTag, handler)
+ rr := &descriptorpb.DescriptorProto_ReservedRange{
+ Start: proto.Int32(start),
+ End: proto.Int32(end + 1),
+ }
+ r.putMessageReservedRangeNode(rr, rng)
+ return rr
+}
+
+func (r *result) getRangeBounds(rng *ast.RangeNode, minVal, maxVal int32, handler *reporter.Handler) (int32, int32) {
+ checkOrder := true
+ start, ok := rng.StartValueAsInt32(minVal, maxVal)
+ if !ok {
+ checkOrder = false
+ startValNodeInfo := r.file.NodeInfo(rng.StartVal)
+ _ = handler.HandleErrorf(startValNodeInfo, "range start %d is out of range: should be between %d and %d", rng.StartValue(), minVal, maxVal)
+ }
+
+ end, ok := rng.EndValueAsInt32(minVal, maxVal)
+ if !ok {
+ checkOrder = false
+ if rng.EndVal != nil {
+ endValNodeInfo := r.file.NodeInfo(rng.EndVal)
+ _ = handler.HandleErrorf(endValNodeInfo, "range end %d is out of range: should be between %d and %d", rng.EndValue(), minVal, maxVal)
+ }
+ }
+
+ if checkOrder && start > end {
+ rangeStartNodeInfo := r.file.NodeInfo(rng.RangeStart())
+ _ = handler.HandleErrorf(rangeStartNodeInfo, "range, %d to %d, is invalid: start must be <= end", start, end)
+ }
+
+ return start, end
+}
+
+func (r *result) asServiceDescriptor(svc *ast.ServiceNode) *descriptorpb.ServiceDescriptorProto {
+ sd := &descriptorpb.ServiceDescriptorProto{Name: proto.String(svc.Name.Val)}
+ r.putServiceNode(sd, svc)
+ for _, decl := range svc.Decls {
+ switch decl := decl.(type) {
+ case *ast.OptionNode:
+ if sd.Options == nil {
+ sd.Options = &descriptorpb.ServiceOptions{}
+ }
+ sd.Options.UninterpretedOption = append(sd.Options.UninterpretedOption, r.asUninterpretedOption(decl))
+ case *ast.RPCNode:
+ sd.Method = append(sd.Method, r.asMethodDescriptor(decl))
+ }
+ }
+ return sd
+}
+
+func (r *result) checkTag(n ast.Node, v uint64, maxTag int32) error {
+ switch {
+ case v < 1:
+ return reporter.Errorf(r.file.NodeInfo(n), "tag number %d must be greater than zero", v)
+ case v > uint64(maxTag):
+ return reporter.Errorf(r.file.NodeInfo(n), "tag number %d is higher than max allowed tag number (%d)", v, maxTag)
+ case v >= internal.SpecialReservedStart && v <= internal.SpecialReservedEnd:
+ return reporter.Errorf(r.file.NodeInfo(n), "tag number %d is in disallowed reserved range %d-%d", v, internal.SpecialReservedStart, internal.SpecialReservedEnd)
+ default:
+ return nil
+ }
+}
+
+// processProto3OptionalFields adds synthetic oneofs to the given message descriptor
+// for each proto3 optional field. It also updates the fields to have the correct
+// oneof index reference.
+func (r *result) processProto3OptionalFields(msgd *descriptorpb.DescriptorProto) {
+ // add synthetic oneofs to the given message descriptor for each proto3
+ // optional field, and update each field to have correct oneof index
+ var allNames map[string]struct{}
+ for _, fd := range msgd.Field {
+ if fd.GetProto3Optional() {
+ // lazy init the set of all names
+ if allNames == nil {
+ allNames = map[string]struct{}{}
+ for _, fd := range msgd.Field {
+ allNames[fd.GetName()] = struct{}{}
+ }
+ for _, od := range msgd.OneofDecl {
+ allNames[od.GetName()] = struct{}{}
+ }
+ // NB: protoc only considers names of other fields and oneofs
+ // when computing the synthetic oneof name. But that feels like
+ // a bug, since it means it could generate a name that conflicts
+ // with some other symbol defined in the message. If it's decided
+ // that's NOT a bug and is desirable, then we should remove the
+ // following four loops to mimic protoc's behavior.
+ for _, fd := range msgd.Extension {
+ allNames[fd.GetName()] = struct{}{}
+ }
+ for _, ed := range msgd.EnumType {
+ allNames[ed.GetName()] = struct{}{}
+ for _, evd := range ed.Value {
+ allNames[evd.GetName()] = struct{}{}
+ }
+ }
+ for _, fd := range msgd.NestedType {
+ allNames[fd.GetName()] = struct{}{}
+ }
+ }
+
+ // Compute a name for the synthetic oneof. This uses the same
+ // algorithm as used in protoc:
+ // https://github.com/protocolbuffers/protobuf/blob/74ad62759e0a9b5a21094f3fb9bb4ebfaa0d1ab8/src/google/protobuf/compiler/parser.cc#L785-L803
+ ooName := fd.GetName()
+ if !strings.HasPrefix(ooName, "_") {
+ ooName = "_" + ooName
+ }
+ for {
+ _, ok := allNames[ooName]
+ if !ok {
+ // found a unique name
+ allNames[ooName] = struct{}{}
+ break
+ }
+ ooName = "X" + ooName
+ }
+
+ fd.OneofIndex = proto.Int32(int32(len(msgd.OneofDecl)))
+ ood := &descriptorpb.OneofDescriptorProto{Name: proto.String(ooName)}
+ msgd.OneofDecl = append(msgd.OneofDecl, ood)
+ ooident := r.FieldNode(fd).(*ast.FieldNode) //nolint:errcheck
+ r.putOneofNode(ood, ast.NewSyntheticOneof(ooident))
+ }
+ }
+}
+
+func (r *result) Node(m proto.Message) ast.Node {
+ if r.nodes == nil {
+ return r.ifNoAST
+ }
+ return r.nodes[m]
+}
+
+func (r *result) FileNode() ast.FileDeclNode {
+ if r.nodes == nil {
+ return r.ifNoAST
+ }
+ return r.nodes[r.proto].(ast.FileDeclNode)
+}
+
+func (r *result) OptionNode(o *descriptorpb.UninterpretedOption) ast.OptionDeclNode {
+ if r.nodes == nil {
+ return r.ifNoAST
+ }
+ return r.nodes[o].(ast.OptionDeclNode)
+}
+
+func (r *result) OptionNamePartNode(o *descriptorpb.UninterpretedOption_NamePart) ast.Node {
+ if r.nodes == nil {
+ return r.ifNoAST
+ }
+ return r.nodes[o]
+}
+
+func (r *result) MessageNode(m *descriptorpb.DescriptorProto) ast.MessageDeclNode {
+ if r.nodes == nil {
+ return r.ifNoAST
+ }
+ return r.nodes[m].(ast.MessageDeclNode)
+}
+
+func (r *result) FieldNode(f *descriptorpb.FieldDescriptorProto) ast.FieldDeclNode {
+ if r.nodes == nil {
+ return r.ifNoAST
+ }
+ return r.nodes[f].(ast.FieldDeclNode)
+}
+
+func (r *result) OneofNode(o *descriptorpb.OneofDescriptorProto) ast.OneofDeclNode {
+ if r.nodes == nil {
+ return r.ifNoAST
+ }
+ return r.nodes[o].(ast.OneofDeclNode)
+}
+
+func (r *result) ExtensionsNode(e *descriptorpb.DescriptorProto_ExtensionRange) ast.NodeWithOptions {
+ if r.nodes == nil {
+ return r.ifNoAST
+ }
+ return r.nodes[asExtsNode(e)].(ast.NodeWithOptions)
+}
+
+func (r *result) ExtensionRangeNode(e *descriptorpb.DescriptorProto_ExtensionRange) ast.RangeDeclNode {
+ if r.nodes == nil {
+ return r.ifNoAST
+ }
+ return r.nodes[e].(ast.RangeDeclNode)
+}
+
+func (r *result) MessageReservedRangeNode(rr *descriptorpb.DescriptorProto_ReservedRange) ast.RangeDeclNode {
+ if r.nodes == nil {
+ return r.ifNoAST
+ }
+ return r.nodes[rr].(ast.RangeDeclNode)
+}
+
+func (r *result) EnumNode(e *descriptorpb.EnumDescriptorProto) ast.NodeWithOptions {
+ if r.nodes == nil {
+ return r.ifNoAST
+ }
+ return r.nodes[e].(ast.NodeWithOptions)
+}
+
+func (r *result) EnumValueNode(e *descriptorpb.EnumValueDescriptorProto) ast.EnumValueDeclNode {
+ if r.nodes == nil {
+ return r.ifNoAST
+ }
+ return r.nodes[e].(ast.EnumValueDeclNode)
+}
+
+func (r *result) EnumReservedRangeNode(rr *descriptorpb.EnumDescriptorProto_EnumReservedRange) ast.RangeDeclNode {
+ if r.nodes == nil {
+ return r.ifNoAST
+ }
+ return r.nodes[rr].(ast.RangeDeclNode)
+}
+
+func (r *result) ServiceNode(s *descriptorpb.ServiceDescriptorProto) ast.NodeWithOptions {
+ if r.nodes == nil {
+ return r.ifNoAST
+ }
+ return r.nodes[s].(ast.NodeWithOptions)
+}
+
+func (r *result) MethodNode(m *descriptorpb.MethodDescriptorProto) ast.RPCDeclNode {
+ if r.nodes == nil {
+ return r.ifNoAST
+ }
+ return r.nodes[m].(ast.RPCDeclNode)
+}
+
+func (r *result) putFileNode(f *descriptorpb.FileDescriptorProto, n *ast.FileNode) {
+ r.nodes[f] = n
+}
+
+func (r *result) putOptionNode(o *descriptorpb.UninterpretedOption, n *ast.OptionNode) {
+ r.nodes[o] = n
+}
+
+func (r *result) putOptionNamePartNode(o *descriptorpb.UninterpretedOption_NamePart, n *ast.FieldReferenceNode) {
+ r.nodes[o] = n
+}
+
+func (r *result) putMessageNode(m *descriptorpb.DescriptorProto, n ast.MessageDeclNode) {
+ r.nodes[m] = n
+}
+
+func (r *result) putFieldNode(f *descriptorpb.FieldDescriptorProto, n ast.FieldDeclNode) {
+ r.nodes[f] = n
+}
+
+func (r *result) putOneofNode(o *descriptorpb.OneofDescriptorProto, n ast.OneofDeclNode) {
+ r.nodes[o] = n
+}
+
+func (r *result) putExtensionRangeNode(e *descriptorpb.DescriptorProto_ExtensionRange, er *ast.ExtensionRangeNode, n *ast.RangeNode) {
+ r.nodes[asExtsNode(e)] = er
+ r.nodes[e] = n
+}
+
+func (r *result) putMessageReservedRangeNode(rr *descriptorpb.DescriptorProto_ReservedRange, n *ast.RangeNode) {
+ r.nodes[rr] = n
+}
+
+func (r *result) putEnumNode(e *descriptorpb.EnumDescriptorProto, n *ast.EnumNode) {
+ r.nodes[e] = n
+}
+
+func (r *result) putEnumValueNode(e *descriptorpb.EnumValueDescriptorProto, n *ast.EnumValueNode) {
+ r.nodes[e] = n
+}
+
+func (r *result) putEnumReservedRangeNode(rr *descriptorpb.EnumDescriptorProto_EnumReservedRange, n *ast.RangeNode) {
+ r.nodes[rr] = n
+}
+
+func (r *result) putServiceNode(s *descriptorpb.ServiceDescriptorProto, n *ast.ServiceNode) {
+ r.nodes[s] = n
+}
+
+func (r *result) putMethodNode(m *descriptorpb.MethodDescriptorProto, n *ast.RPCNode) {
+ r.nodes[m] = n
+}
+
+// NB: If we ever add other put*Node methods, to index other kinds of elements in the descriptor
+// proto hierarchy, we need to update the index recreation logic in clone.go, too.
+
+func asExtsNode(er *descriptorpb.DescriptorProto_ExtensionRange) proto.Message {
+ return extsParent{er}
+}
+
+// a simple marker type that allows us to have two distinct keys in a map for
+// the same ExtensionRange proto -- one for the range itself and another to
+// associate with the enclosing/parent AST node.
+type extsParent struct {
+ *descriptorpb.DescriptorProto_ExtensionRange
+}
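
result.go converts a parsed *ast.FileNode into a FileDescriptorProto and keeps a side index from descriptor protos back to AST nodes (the put*Node setters and the *Node lookup methods above). The fragment below is illustrative only: it assumes Parse, ResultFromAST, and the MessageNode lookup are exposed on the returned Result as they are defined in this file, and it shows how the index can map a descriptor back to its source position.

// Hypothetical usage sketch; not part of the vendored diff.
package main

import (
	"fmt"
	"strings"

	"github.com/bufbuild/protocompile/parser"
	"github.com/bufbuild/protocompile/reporter"
)

func main() {
	src := `syntax = "proto3"; message Pair { string key = 1; string value = 2; }`
	handler := reporter.NewHandler(nil) // assumption: nil reporter fails fast
	fileNode, err := parser.Parse("pair.proto", strings.NewReader(src), handler)
	if err != nil {
		panic(err)
	}
	res, err := parser.ResultFromAST(fileNode, true /* validate */, handler)
	if err != nil {
		panic(err)
	}
	fd := res.FileDescriptorProto()
	for _, md := range fd.GetMessageType() {
		// Map each descriptor back to its AST node to recover the source position.
		// Assumption: MessageNode is part of the exported Result interface.
		info := fileNode.NodeInfo(res.MessageNode(md))
		fmt.Printf("message %s declared at %v\n", md.GetName(), info.Start())
	}
}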
diff --git a/vendor/github.com/bufbuild/protocompile/parser/validate.go b/vendor/github.com/bufbuild/protocompile/parser/validate.go
new file mode 100644
index 0000000..64ebdaa
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/parser/validate.go
@@ -0,0 +1,568 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package parser
+
+import (
+ "fmt"
+ "sort"
+
+ "google.golang.org/protobuf/proto"
+ "google.golang.org/protobuf/reflect/protoreflect"
+ "google.golang.org/protobuf/types/descriptorpb"
+
+ "github.com/bufbuild/protocompile/ast"
+ "github.com/bufbuild/protocompile/internal"
+ "github.com/bufbuild/protocompile/reporter"
+ "github.com/bufbuild/protocompile/walk"
+)
+
+func validateBasic(res *result, handler *reporter.Handler) {
+ fd := res.proto
+ var syntax protoreflect.Syntax
+ switch fd.GetSyntax() {
+ case "", "proto2":
+ syntax = protoreflect.Proto2
+ case "proto3":
+ syntax = protoreflect.Proto3
+ case "editions":
+ syntax = protoreflect.Editions
+ // TODO: default: error?
+ }
+
+ if err := validateImports(res, handler); err != nil {
+ return
+ }
+
+ if err := validateNoFeatures(res, syntax, "file options", fd.Options.GetUninterpretedOption(), handler); err != nil {
+ return
+ }
+
+ _ = walk.DescriptorProtos(fd,
+ func(name protoreflect.FullName, d proto.Message) error {
+ switch d := d.(type) {
+ case *descriptorpb.DescriptorProto:
+ if err := validateMessage(res, syntax, name, d, handler); err != nil {
+ // exit func is not called when enter returns error
+ return err
+ }
+ case *descriptorpb.FieldDescriptorProto:
+ if err := validateField(res, syntax, name, d, handler); err != nil {
+ return err
+ }
+ case *descriptorpb.OneofDescriptorProto:
+ if err := validateNoFeatures(res, syntax, fmt.Sprintf("oneof %s", name), d.Options.GetUninterpretedOption(), handler); err != nil {
+ return err
+ }
+ case *descriptorpb.EnumDescriptorProto:
+ if err := validateEnum(res, syntax, name, d, handler); err != nil {
+ return err
+ }
+ case *descriptorpb.EnumValueDescriptorProto:
+ if err := validateNoFeatures(res, syntax, fmt.Sprintf("enum value %s", name), d.Options.GetUninterpretedOption(), handler); err != nil {
+ return err
+ }
+ case *descriptorpb.ServiceDescriptorProto:
+ if err := validateNoFeatures(res, syntax, fmt.Sprintf("service %s", name), d.Options.GetUninterpretedOption(), handler); err != nil {
+ return err
+ }
+ case *descriptorpb.MethodDescriptorProto:
+ if err := validateNoFeatures(res, syntax, fmt.Sprintf("method %s", name), d.Options.GetUninterpretedOption(), handler); err != nil {
+ return err
+ }
+ }
+ return nil
+ })
+}
+
+func validateImports(res *result, handler *reporter.Handler) error {
+ fileNode := res.file
+ if fileNode == nil {
+ return nil
+ }
+ imports := make(map[string]ast.SourcePos)
+ for _, decl := range fileNode.Decls {
+ imp, ok := decl.(*ast.ImportNode)
+ if !ok {
+ continue
+ }
+ info := fileNode.NodeInfo(decl)
+ name := imp.Name.AsString()
+ if prev, ok := imports[name]; ok {
+ return handler.HandleErrorf(info, "%q was already imported at %v", name, prev)
+ }
+ imports[name] = info.Start()
+ }
+ return nil
+}
+
+func validateNoFeatures(res *result, syntax protoreflect.Syntax, scope string, opts []*descriptorpb.UninterpretedOption, handler *reporter.Handler) error {
+ if syntax == protoreflect.Editions {
+ // Editions is allowed to use features
+ return nil
+ }
+ if index, err := internal.FindFirstOption(res, handler.HandleErrorf, scope, opts, "features"); err != nil {
+ return err
+ } else if index >= 0 {
+ optNode := res.OptionNode(opts[index])
+ optNameNodeInfo := res.file.NodeInfo(optNode.GetName())
+ if err := handler.HandleErrorf(optNameNodeInfo, "%s: option 'features' may only be used with editions but file uses %s syntax", scope, syntax); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func validateMessage(res *result, syntax protoreflect.Syntax, name protoreflect.FullName, md *descriptorpb.DescriptorProto, handler *reporter.Handler) error {
+ scope := fmt.Sprintf("message %s", name)
+
+ if syntax == protoreflect.Proto3 && len(md.ExtensionRange) > 0 {
+ n := res.ExtensionRangeNode(md.ExtensionRange[0])
+ nInfo := res.file.NodeInfo(n)
+ if err := handler.HandleErrorf(nInfo, "%s: extension ranges are not allowed in proto3", scope); err != nil {
+ return err
+ }
+ }
+
+ if index, err := internal.FindOption(res, handler.HandleErrorf, scope, md.Options.GetUninterpretedOption(), "map_entry"); err != nil {
+ return err
+ } else if index >= 0 {
+ optNode := res.OptionNode(md.Options.GetUninterpretedOption()[index])
+ optNameNodeInfo := res.file.NodeInfo(optNode.GetName())
+ if err := handler.HandleErrorf(optNameNodeInfo, "%s: map_entry option should not be set explicitly; use map type instead", scope); err != nil {
+ return err
+ }
+ }
+
+ if err := validateNoFeatures(res, syntax, scope, md.Options.GetUninterpretedOption(), handler); err != nil {
+ return err
+ }
+
+ // reserved ranges should not overlap
+ rsvd := make(tagRanges, len(md.ReservedRange))
+ for i, r := range md.ReservedRange {
+ n := res.MessageReservedRangeNode(r)
+ rsvd[i] = tagRange{start: r.GetStart(), end: r.GetEnd(), node: n}
+ }
+ sort.Sort(rsvd)
+ for i := 1; i < len(rsvd); i++ {
+ if rsvd[i].start < rsvd[i-1].end {
+ rangeNodeInfo := res.file.NodeInfo(rsvd[i].node)
+ if err := handler.HandleErrorf(rangeNodeInfo, "%s: reserved ranges overlap: %d to %d and %d to %d", scope, rsvd[i-1].start, rsvd[i-1].end-1, rsvd[i].start, rsvd[i].end-1); err != nil {
+ return err
+ }
+ }
+ }
+
+ // extensions ranges should not overlap
+ exts := make(tagRanges, len(md.ExtensionRange))
+ for i, r := range md.ExtensionRange {
+ if err := validateNoFeatures(res, syntax, scope, r.Options.GetUninterpretedOption(), handler); err != nil {
+ return err
+ }
+ n := res.ExtensionRangeNode(r)
+ exts[i] = tagRange{start: r.GetStart(), end: r.GetEnd(), node: n}
+ }
+ sort.Sort(exts)
+ for i := 1; i < len(exts); i++ {
+ if exts[i].start < exts[i-1].end {
+ rangeNodeInfo := res.file.NodeInfo(exts[i].node)
+ if err := handler.HandleErrorf(rangeNodeInfo, "%s: extension ranges overlap: %d to %d and %d to %d", scope, exts[i-1].start, exts[i-1].end-1, exts[i].start, exts[i].end-1); err != nil {
+ return err
+ }
+ }
+ }
+
+ // see if any extension range overlaps any reserved range
+ var i, j int // i indexes rsvd; j indexes exts
+ for i < len(rsvd) && j < len(exts) {
+ if rsvd[i].start >= exts[j].start && rsvd[i].start < exts[j].end ||
+ exts[j].start >= rsvd[i].start && exts[j].start < rsvd[i].end {
+ var span ast.SourceSpan
+ if rsvd[i].start >= exts[j].start && rsvd[i].start < exts[j].end {
+ rangeNodeInfo := res.file.NodeInfo(rsvd[i].node)
+ span = rangeNodeInfo
+ } else {
+ rangeNodeInfo := res.file.NodeInfo(exts[j].node)
+ span = rangeNodeInfo
+ }
+ // ranges overlap
+ if err := handler.HandleErrorf(span, "%s: extension range %d to %d overlaps reserved range %d to %d", scope, exts[j].start, exts[j].end-1, rsvd[i].start, rsvd[i].end-1); err != nil {
+ return err
+ }
+ }
+ if rsvd[i].start < exts[j].start {
+ i++
+ } else {
+ j++
+ }
+ }
+
+ // now, check that fields don't re-use tags and don't try to use extension
+ // or reserved ranges or reserved names
+ rsvdNames := map[string]struct{}{}
+ for _, n := range md.ReservedName {
+ // validate reserved name while we're here
+ if !isIdentifier(n) {
+ node := findMessageReservedNameNode(res.MessageNode(md), n)
+ nodeInfo := res.file.NodeInfo(node)
+ if err := handler.HandleErrorf(nodeInfo, "%s: reserved name %q is not a valid identifier", scope, n); err != nil {
+ return err
+ }
+ }
+ rsvdNames[n] = struct{}{}
+ }
+ fieldTags := map[int32]string{}
+ for _, fld := range md.Field {
+ fn := res.FieldNode(fld)
+ if _, ok := rsvdNames[fld.GetName()]; ok {
+ fieldNameNodeInfo := res.file.NodeInfo(fn.FieldName())
+ if err := handler.HandleErrorf(fieldNameNodeInfo, "%s: field %s is using a reserved name", scope, fld.GetName()); err != nil {
+ return err
+ }
+ }
+ if existing := fieldTags[fld.GetNumber()]; existing != "" {
+ fieldTagNodeInfo := res.file.NodeInfo(fn.FieldTag())
+ if err := handler.HandleErrorf(fieldTagNodeInfo, "%s: fields %s and %s both have the same tag %d", scope, existing, fld.GetName(), fld.GetNumber()); err != nil {
+ return err
+ }
+ }
+ fieldTags[fld.GetNumber()] = fld.GetName()
+ // check reserved ranges
+ r := sort.Search(len(rsvd), func(index int) bool { return rsvd[index].end > fld.GetNumber() })
+ if r < len(rsvd) && rsvd[r].start <= fld.GetNumber() {
+ fieldTagNodeInfo := res.file.NodeInfo(fn.FieldTag())
+ if err := handler.HandleErrorf(fieldTagNodeInfo, "%s: field %s is using tag %d which is in reserved range %d to %d", scope, fld.GetName(), fld.GetNumber(), rsvd[r].start, rsvd[r].end-1); err != nil {
+ return err
+ }
+ }
+ // and check extension ranges
+ e := sort.Search(len(exts), func(index int) bool { return exts[index].end > fld.GetNumber() })
+ if e < len(exts) && exts[e].start <= fld.GetNumber() {
+ fieldTagNodeInfo := res.file.NodeInfo(fn.FieldTag())
+ if err := handler.HandleErrorf(fieldTagNodeInfo, "%s: field %s is using tag %d which is in extension range %d to %d", scope, fld.GetName(), fld.GetNumber(), exts[e].start, exts[e].end-1); err != nil {
+ return err
+ }
+ }
+ }
+
+ return nil
+}
+
+func isIdentifier(s string) bool {
+ if len(s) == 0 {
+ return false
+ }
+ for i, r := range s {
+ if i == 0 && r >= '0' && r <= '9' {
+ // can't start with number
+ return false
+ }
+ // alphanumeric and underscore ok; everything else bad
+ switch {
+ case r >= '0' && r <= '9':
+ case r >= 'a' && r <= 'z':
+ case r >= 'A' && r <= 'Z':
+ case r == '_':
+ default:
+ return false
+ }
+ }
+ return true
+}
+
+func findMessageReservedNameNode(msgNode ast.MessageDeclNode, name string) ast.Node {
+ var decls []ast.MessageElement
+ switch msgNode := msgNode.(type) {
+ case *ast.MessageNode:
+ decls = msgNode.Decls
+ case *ast.SyntheticGroupMessageNode:
+ decls = msgNode.Decls
+ default:
+ // leave decls empty
+ }
+ return findReservedNameNode(msgNode, decls, name)
+}
+
+func findReservedNameNode[T ast.Node](parent ast.Node, decls []T, name string) ast.Node {
+ for _, decl := range decls {
+ // NB: We have to convert to empty interface first, before we can do a type
+ // assertion because type assertions on type parameters aren't allowed. (The
+ // compiler cannot yet know whether T is an interface type or not.)
+ rsvd, ok := any(decl).(*ast.ReservedNode)
+ if !ok {
+ continue
+ }
+ for _, rsvdName := range rsvd.Names {
+ if rsvdName.AsString() == name {
+ return rsvdName
+ }
+ }
+ }
+ // couldn't find it? Instead of failing, report the position of the parent.
+ return parent
+}
+
+func validateEnum(res *result, syntax protoreflect.Syntax, name protoreflect.FullName, ed *descriptorpb.EnumDescriptorProto, handler *reporter.Handler) error {
+ scope := fmt.Sprintf("enum %s", name)
+
+ if len(ed.Value) == 0 {
+ enNode := res.EnumNode(ed)
+ enNodeInfo := res.file.NodeInfo(enNode)
+ if err := handler.HandleErrorf(enNodeInfo, "%s: enums must define at least one value", scope); err != nil {
+ return err
+ }
+ }
+
+ if err := validateNoFeatures(res, syntax, scope, ed.Options.GetUninterpretedOption(), handler); err != nil {
+ return err
+ }
+
+ allowAlias := false
+ var allowAliasOpt *descriptorpb.UninterpretedOption
+ if index, err := internal.FindOption(res, handler.HandleErrorf, scope, ed.Options.GetUninterpretedOption(), "allow_alias"); err != nil {
+ return err
+ } else if index >= 0 {
+ allowAliasOpt = ed.Options.UninterpretedOption[index]
+ valid := false
+ if allowAliasOpt.IdentifierValue != nil {
+ if allowAliasOpt.GetIdentifierValue() == "true" {
+ allowAlias = true
+ valid = true
+ } else if allowAliasOpt.GetIdentifierValue() == "false" {
+ valid = true
+ }
+ }
+ if !valid {
+ optNode := res.OptionNode(allowAliasOpt)
+ optNodeInfo := res.file.NodeInfo(optNode.GetValue())
+ if err := handler.HandleErrorf(optNodeInfo, "%s: expecting bool value for allow_alias option", scope); err != nil {
+ return err
+ }
+ }
+ }
+
+ if syntax == protoreflect.Proto3 && len(ed.Value) > 0 && ed.Value[0].GetNumber() != 0 {
+ evNode := res.EnumValueNode(ed.Value[0])
+ evNodeInfo := res.file.NodeInfo(evNode.GetNumber())
+ if err := handler.HandleErrorf(evNodeInfo, "%s: proto3 requires that first value of enum have numeric value zero", scope); err != nil {
+ return err
+ }
+ }
+
+ // check for aliases
+ vals := map[int32]string{}
+ hasAlias := false
+ for _, evd := range ed.Value {
+ existing := vals[evd.GetNumber()]
+ if existing != "" {
+ if allowAlias {
+ hasAlias = true
+ } else {
+ evNode := res.EnumValueNode(evd)
+ evNodeInfo := res.file.NodeInfo(evNode.GetNumber())
+ if err := handler.HandleErrorf(evNodeInfo, "%s: values %s and %s both have the same numeric value %d; use allow_alias option if intentional", scope, existing, evd.GetName(), evd.GetNumber()); err != nil {
+ return err
+ }
+ }
+ }
+ vals[evd.GetNumber()] = evd.GetName()
+ }
+ if allowAlias && !hasAlias {
+ optNode := res.OptionNode(allowAliasOpt)
+ optNodeInfo := res.file.NodeInfo(optNode.GetValue())
+ if err := handler.HandleErrorf(optNodeInfo, "%s: allow_alias is true but no values are aliases", scope); err != nil {
+ return err
+ }
+ }
+
+ // reserved ranges should not overlap
+ rsvd := make(tagRanges, len(ed.ReservedRange))
+ for i, r := range ed.ReservedRange {
+ n := res.EnumReservedRangeNode(r)
+ rsvd[i] = tagRange{start: r.GetStart(), end: r.GetEnd(), node: n}
+ }
+ sort.Sort(rsvd)
+ for i := 1; i < len(rsvd); i++ {
+ if rsvd[i].start <= rsvd[i-1].end {
+ rangeNodeInfo := res.file.NodeInfo(rsvd[i].node)
+ if err := handler.HandleErrorf(rangeNodeInfo, "%s: reserved ranges overlap: %d to %d and %d to %d", scope, rsvd[i-1].start, rsvd[i-1].end, rsvd[i].start, rsvd[i].end); err != nil {
+ return err
+ }
+ }
+ }
+
+ // now, check that enum values don't use reserved names or numbers that
+ // fall in reserved ranges
+ rsvdNames := map[string]struct{}{}
+ for _, n := range ed.ReservedName {
+ // validate reserved name while we're here
+ if !isIdentifier(n) {
+ node := findEnumReservedNameNode(res.EnumNode(ed), n)
+ nodeInfo := res.file.NodeInfo(node)
+ if err := handler.HandleErrorf(nodeInfo, "%s: reserved name %q is not a valid identifier", scope, n); err != nil {
+ return err
+ }
+ }
+ rsvdNames[n] = struct{}{}
+ }
+ for _, ev := range ed.Value {
+ evn := res.EnumValueNode(ev)
+ if _, ok := rsvdNames[ev.GetName()]; ok {
+ enumValNodeInfo := res.file.NodeInfo(evn.GetName())
+ if err := handler.HandleErrorf(enumValNodeInfo, "%s: value %s is using a reserved name", scope, ev.GetName()); err != nil {
+ return err
+ }
+ }
+ // check reserved ranges
+ r := sort.Search(len(rsvd), func(index int) bool { return rsvd[index].end >= ev.GetNumber() })
+ if r < len(rsvd) && rsvd[r].start <= ev.GetNumber() {
+ enumValNodeInfo := res.file.NodeInfo(evn.GetNumber())
+ if err := handler.HandleErrorf(enumValNodeInfo, "%s: value %s is using number %d which is in reserved range %d to %d", scope, ev.GetName(), ev.GetNumber(), rsvd[r].start, rsvd[r].end); err != nil {
+ return err
+ }
+ }
+ }
+
+ return nil
+}
+
+func findEnumReservedNameNode(enumNode ast.Node, name string) ast.Node {
+ var decls []ast.EnumElement
+ if enumNode, ok := enumNode.(*ast.EnumNode); ok {
+ decls = enumNode.Decls
+ // if not the right type, we leave decls empty
+ }
+ return findReservedNameNode(enumNode, decls, name)
+}
+
+func validateField(res *result, syntax protoreflect.Syntax, name protoreflect.FullName, fld *descriptorpb.FieldDescriptorProto, handler *reporter.Handler) error {
+ var scope string
+ if fld.Extendee != nil {
+ scope = fmt.Sprintf("extension %s", name)
+ } else {
+ scope = fmt.Sprintf("field %s", name)
+ }
+
+ node := res.FieldNode(fld)
+ if fld.Number == nil {
+ fieldTagNodeInfo := res.file.NodeInfo(node)
+ if err := handler.HandleErrorf(fieldTagNodeInfo, "%s: missing field tag number", scope); err != nil {
+ return err
+ }
+ }
+ if syntax != protoreflect.Proto2 {
+ if fld.GetType() == descriptorpb.FieldDescriptorProto_TYPE_GROUP {
+ groupNodeInfo := res.file.NodeInfo(node.GetGroupKeyword())
+ if err := handler.HandleErrorf(groupNodeInfo, "%s: groups are not allowed in proto3 or editions", scope); err != nil {
+ return err
+ }
+ } else if fld.Label != nil && fld.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REQUIRED {
+ fieldLabelNodeInfo := res.file.NodeInfo(node.FieldLabel())
+ if err := handler.HandleErrorf(fieldLabelNodeInfo, "%s: label 'required' is not allowed in proto3 or editions", scope); err != nil {
+ return err
+ }
+ }
+ if syntax == protoreflect.Editions {
+ if fld.Label != nil && fld.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL {
+ fieldLabelNodeInfo := res.file.NodeInfo(node.FieldLabel())
+ if err := handler.HandleErrorf(fieldLabelNodeInfo, "%s: label 'optional' is not allowed in editions; use option features.field_presence instead", scope); err != nil {
+ return err
+ }
+ }
+ if index, err := internal.FindOption(res, handler.HandleErrorf, scope, fld.Options.GetUninterpretedOption(), "packed"); err != nil {
+ return err
+ } else if index >= 0 {
+ optNode := res.OptionNode(fld.Options.GetUninterpretedOption()[index])
+ optNameNodeInfo := res.file.NodeInfo(optNode.GetName())
+ if err := handler.HandleErrorf(optNameNodeInfo, "%s: packed option is not allowed in editions; use option features.repeated_field_encoding instead", scope); err != nil {
+ return err
+ }
+ }
+ } else if syntax == protoreflect.Proto3 {
+ if index, err := internal.FindOption(res, handler.HandleErrorf, scope, fld.Options.GetUninterpretedOption(), "default"); err != nil {
+ return err
+ } else if index >= 0 {
+ optNode := res.OptionNode(fld.Options.GetUninterpretedOption()[index])
+ optNameNodeInfo := res.file.NodeInfo(optNode.GetName())
+ if err := handler.HandleErrorf(optNameNodeInfo, "%s: default values are not allowed in proto3", scope); err != nil {
+ return err
+ }
+ }
+ }
+ } else {
+ if fld.Label == nil && fld.OneofIndex == nil {
+ fieldNameNodeInfo := res.file.NodeInfo(node.FieldName())
+ if err := handler.HandleErrorf(fieldNameNodeInfo, "%s: field has no label; proto2 requires explicit 'optional' label", scope); err != nil {
+ return err
+ }
+ }
+ if fld.GetExtendee() != "" && fld.Label != nil && fld.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REQUIRED {
+ fieldLabelNodeInfo := res.file.NodeInfo(node.FieldLabel())
+ if err := handler.HandleErrorf(fieldLabelNodeInfo, "%s: extension fields cannot be 'required'", scope); err != nil {
+ return err
+ }
+ }
+ }
+
+ return validateNoFeatures(res, syntax, scope, fld.Options.GetUninterpretedOption(), handler)
+}
+
+type tagRange struct {
+ start int32
+ end int32
+ node ast.RangeDeclNode
+}
+
+type tagRanges []tagRange
+
+func (r tagRanges) Len() int {
+ return len(r)
+}
+
+func (r tagRanges) Less(i, j int) bool {
+ return r[i].start < r[j].start ||
+ (r[i].start == r[j].start && r[i].end < r[j].end)
+}
+
+func (r tagRanges) Swap(i, j int) {
+ r[i], r[j] = r[j], r[i]
+}
+
+func fillInMissingLabels(fd *descriptorpb.FileDescriptorProto) {
+ for _, md := range fd.MessageType {
+ fillInMissingLabelsInMsg(md)
+ }
+ for _, extd := range fd.Extension {
+ fillInMissingLabel(extd)
+ }
+}
+
+func fillInMissingLabelsInMsg(md *descriptorpb.DescriptorProto) {
+ for _, fld := range md.Field {
+ fillInMissingLabel(fld)
+ }
+ for _, nmd := range md.NestedType {
+ fillInMissingLabelsInMsg(nmd)
+ }
+ for _, extd := range md.Extension {
+ fillInMissingLabel(extd)
+ }
+}
+
+func fillInMissingLabel(fld *descriptorpb.FieldDescriptorProto) {
+ if fld.Label == nil {
+ fld.Label = descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum()
+ }
+}
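
The range checks in validateMessage and validateEnum rely on a simple property: once reserved or extension ranges are sorted by start (tagRanges.Less), any overlapping configuration yields at least one overlap between adjacent entries, so a single pass over neighbors is enough to flag the error. Note that message reserved and extension ranges store an exclusive end (End = end+1) while enum reserved ranges keep an inclusive end, which is why validateMessage compares with < and validateEnum with <=. The standalone snippet below illustrates the algorithm only and is not library code; the span type and findOverlaps helper are hypothetical.

// Self-contained illustration of the adjacent-overlap check; not library code.
package main

import (
	"fmt"
	"sort"
)

type span struct{ start, end int32 } // end is exclusive, as in the message-range case

func findOverlaps(spans []span) [][2]span {
	// Sort by start, then by end, mirroring tagRanges.Less.
	sort.Slice(spans, func(i, j int) bool {
		return spans[i].start < spans[j].start ||
			(spans[i].start == spans[j].start && spans[i].end < spans[j].end)
	})
	var overlaps [][2]span
	for i := 1; i < len(spans); i++ {
		if spans[i].start < spans[i-1].end { // same comparison used for message ranges
			overlaps = append(overlaps, [2]span{spans[i-1], spans[i]})
		}
	}
	return overlaps
}

func main() {
	// reserved 5 to 9 and 8 to 12 overlap; 20 to 25 does not.
	spans := []span{{5, 10}, {8, 13}, {20, 26}}
	fmt.Println(findOverlaps(spans)) // reports the overlapping pair
}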