Diffstat (limited to 'vendor/github.com/bufbuild/protocompile/linker')
-rw-r--r--  vendor/github.com/bufbuild/protocompile/linker/descriptors.go        1884
-rw-r--r--  vendor/github.com/bufbuild/protocompile/linker/doc.go                  48
-rw-r--r--  vendor/github.com/bufbuild/protocompile/linker/files.go                366
-rw-r--r--  vendor/github.com/bufbuild/protocompile/linker/linker.go               153
-rw-r--r--  vendor/github.com/bufbuild/protocompile/linker/pathkey_no_unsafe.go     35
-rw-r--r--  vendor/github.com/bufbuild/protocompile/linker/pathkey_unsafe.go        40
-rw-r--r--  vendor/github.com/bufbuild/protocompile/linker/pool.go                 131
-rw-r--r--  vendor/github.com/bufbuild/protocompile/linker/resolve.go              835
-rw-r--r--  vendor/github.com/bufbuild/protocompile/linker/symbols.go              635
-rw-r--r--  vendor/github.com/bufbuild/protocompile/linker/validate.go            1153
10 files changed, 5280 insertions, 0 deletions
diff --git a/vendor/github.com/bufbuild/protocompile/linker/descriptors.go b/vendor/github.com/bufbuild/protocompile/linker/descriptors.go
new file mode 100644
index 0000000..cd43dcc
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/linker/descriptors.go
@@ -0,0 +1,1884 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package linker
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+ "unicode/utf8"
+
+ "google.golang.org/protobuf/proto"
+ "google.golang.org/protobuf/reflect/protodesc"
+ "google.golang.org/protobuf/reflect/protoreflect"
+ "google.golang.org/protobuf/reflect/protoregistry"
+ "google.golang.org/protobuf/types/descriptorpb"
+ "google.golang.org/protobuf/types/dynamicpb"
+
+ "github.com/bufbuild/protocompile/ast"
+ "github.com/bufbuild/protocompile/internal"
+ "github.com/bufbuild/protocompile/internal/editions"
+ "github.com/bufbuild/protocompile/parser"
+ "github.com/bufbuild/protocompile/protoutil"
+)
+
+var (
+ // These "noOp*" values are all descriptors. The protoreflect.Descriptor
+ // interface and its sub-interfaces are all marked with an unexported
+ // method so that they cannot be implemented outside of the google.golang.org/protobuf
+ // module. So, to provide implementations from this package, we must embed
+ // them. If we simply left the embedded interface field nil, then if/when
+ // new methods are added to the interfaces, it could induce panics in this
+ // package or users of this module (since trying to invoke one of these new
+ // methods would end up trying to call a method on a nil interface value).
+ //
+ // So instead of leaving the embedded interface fields nil, we embed an actual
+ // value. While new methods are unlikely to return the correct value (since
+ // the calls will be delegated to these no-op instances), it is a less
+ // dangerous latent bug than inducing a nil-dereference panic.
+
+ noOpFile protoreflect.FileDescriptor
+ noOpMessage protoreflect.MessageDescriptor
+ noOpOneof protoreflect.OneofDescriptor
+ noOpField protoreflect.FieldDescriptor
+ noOpEnum protoreflect.EnumDescriptor
+ noOpEnumValue protoreflect.EnumValueDescriptor
+ noOpExtension protoreflect.ExtensionDescriptor
+ noOpService protoreflect.ServiceDescriptor
+ noOpMethod protoreflect.MethodDescriptor
+)
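+
+// The following is an illustrative sketch (not part of the original sources)
+// of the hazard that the embedded no-op descriptors above avoid: invoking a
+// promoted method through a nil embedded interface panics at runtime, whereas
+// delegating to a real no-op descriptor merely returns a harmless value.
+//
+//	type partialMsg struct{ protoreflect.MessageDescriptor } // embedded field left nil
+//	var d protoreflect.MessageDescriptor = partialMsg{}
+//	_ = d.IsMapEntry() // panic: method call on a nil embedded interface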
+
+var (
+ fieldPresenceField = editions.FeatureSetDescriptor.Fields().ByName("field_presence")
+ repeatedFieldEncodingField = editions.FeatureSetDescriptor.Fields().ByName("repeated_field_encoding")
+ messageEncodingField = editions.FeatureSetDescriptor.Fields().ByName("message_encoding")
+ enumTypeField = editions.FeatureSetDescriptor.Fields().ByName("enum_type")
+ jsonFormatField = editions.FeatureSetDescriptor.Fields().ByName("json_format")
+)
+
+func init() {
+ noOpFile, _ = protodesc.NewFile(
+ &descriptorpb.FileDescriptorProto{
+ Name: proto.String("no-op.proto"),
+ Syntax: proto.String("proto2"),
+ Dependency: []string{"google/protobuf/descriptor.proto"},
+ MessageType: []*descriptorpb.DescriptorProto{
+ {
+ Name: proto.String("NoOpMsg"),
+ Field: []*descriptorpb.FieldDescriptorProto{
+ {
+ Name: proto.String("no_op"),
+ Type: descriptorpb.FieldDescriptorProto_TYPE_STRING.Enum(),
+ Label: descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum(),
+ Number: proto.Int32(1),
+ JsonName: proto.String("noOp"),
+ OneofIndex: proto.Int32(0),
+ },
+ },
+ OneofDecl: []*descriptorpb.OneofDescriptorProto{
+ {
+ Name: proto.String("no_op_oneof"),
+ },
+ },
+ },
+ },
+ EnumType: []*descriptorpb.EnumDescriptorProto{
+ {
+ Name: proto.String("NoOpEnum"),
+ Value: []*descriptorpb.EnumValueDescriptorProto{
+ {
+ Name: proto.String("NO_OP"),
+ Number: proto.Int32(0),
+ },
+ },
+ },
+ },
+ Extension: []*descriptorpb.FieldDescriptorProto{
+ {
+ Extendee: proto.String(".google.protobuf.FileOptions"),
+ Name: proto.String("no_op"),
+ Type: descriptorpb.FieldDescriptorProto_TYPE_STRING.Enum(),
+ Label: descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum(),
+ Number: proto.Int32(50000),
+ },
+ },
+ Service: []*descriptorpb.ServiceDescriptorProto{
+ {
+ Name: proto.String("NoOpService"),
+ Method: []*descriptorpb.MethodDescriptorProto{
+ {
+ Name: proto.String("NoOp"),
+ InputType: proto.String(".NoOpMsg"),
+ OutputType: proto.String(".NoOpMsg"),
+ },
+ },
+ },
+ },
+ },
+ protoregistry.GlobalFiles,
+ )
+ noOpMessage = noOpFile.Messages().Get(0)
+ noOpOneof = noOpMessage.Oneofs().Get(0)
+ noOpField = noOpMessage.Fields().Get(0)
+ noOpEnum = noOpFile.Enums().Get(0)
+ noOpEnumValue = noOpEnum.Values().Get(0)
+ noOpExtension = noOpFile.Extensions().Get(0)
+ noOpService = noOpFile.Services().Get(0)
+ noOpMethod = noOpService.Methods().Get(0)
+}
+
+// This file contains implementations of protoreflect.Descriptor. Note that
+// this is a hack since those interfaces have a "doNotImplement" tag
+// interface therein. We do just enough to make dynamicpb happy; constructing
+// a regular descriptor would fail because we haven't yet interpreted options
+// at the point we need these, and some validations will fail if the options
+// aren't present.
+
+type result struct {
+ protoreflect.FileDescriptor
+ parser.Result
+ prefix string
+ deps Files
+
+ // A map of all descriptors keyed by their fully-qualified name (without
+ // any leading dot).
+ descriptors map[string]protoreflect.Descriptor
+
+ // A set of imports that have been used in the course of linking and
+ // interpreting options.
+ usedImports map[string]struct{}
+
+ // A map of AST nodes that represent identifiers in ast.FieldReferenceNodes
+ // to their fully-qualified name. The identifiers are for field names in
+ // message literals (in option values) that are extension fields. These names
+ // are resolved during linking and stored here, to be used to interpret options.
+ optionQualifiedNames map[ast.IdentValueNode]string
+
+ imports fileImports
+ messages msgDescriptors
+ enums enumDescriptors
+ extensions extDescriptors
+ services svcDescriptors
+ srcLocations srcLocs
+}
+
+var _ protoreflect.FileDescriptor = (*result)(nil)
+var _ Result = (*result)(nil)
+var _ protoutil.DescriptorProtoWrapper = (*result)(nil)
+var _ editions.HasEdition = (*result)(nil)
+
+func (r *result) RemoveAST() {
+ r.Result = parser.ResultWithoutAST(r.FileDescriptorProto())
+ r.optionQualifiedNames = nil
+}
+
+func (r *result) AsProto() proto.Message {
+ return r.FileDescriptorProto()
+}
+
+func (r *result) ParentFile() protoreflect.FileDescriptor {
+ return r
+}
+
+func (r *result) Parent() protoreflect.Descriptor {
+ return nil
+}
+
+func (r *result) Index() int {
+ return 0
+}
+
+func (r *result) Syntax() protoreflect.Syntax {
+ switch r.FileDescriptorProto().GetSyntax() {
+ case "proto2", "":
+ return protoreflect.Proto2
+ case "proto3":
+ return protoreflect.Proto3
+ case "editions":
+ return protoreflect.Editions
+ default:
+ return 0 // ???
+ }
+}
+
+func (r *result) Edition() int32 {
+ switch r.Syntax() {
+ case protoreflect.Proto2:
+ return int32(descriptorpb.Edition_EDITION_PROTO2)
+ case protoreflect.Proto3:
+ return int32(descriptorpb.Edition_EDITION_PROTO3)
+ case protoreflect.Editions:
+ return int32(r.FileDescriptorProto().GetEdition())
+ default:
+ return int32(descriptorpb.Edition_EDITION_UNKNOWN) // ???
+ }
+}
+
+func (r *result) Name() protoreflect.Name {
+ return ""
+}
+
+func (r *result) FullName() protoreflect.FullName {
+ return r.Package()
+}
+
+func (r *result) IsPlaceholder() bool {
+ return false
+}
+
+func (r *result) Options() protoreflect.ProtoMessage {
+ return r.FileDescriptorProto().Options
+}
+
+func (r *result) Path() string {
+ return r.FileDescriptorProto().GetName()
+}
+
+func (r *result) Package() protoreflect.FullName {
+ return protoreflect.FullName(r.FileDescriptorProto().GetPackage())
+}
+
+func (r *result) Imports() protoreflect.FileImports {
+ return &r.imports
+}
+
+func (r *result) Enums() protoreflect.EnumDescriptors {
+ return &r.enums
+}
+
+func (r *result) Messages() protoreflect.MessageDescriptors {
+ return &r.messages
+}
+
+func (r *result) Extensions() protoreflect.ExtensionDescriptors {
+ return &r.extensions
+}
+
+func (r *result) Services() protoreflect.ServiceDescriptors {
+ return &r.services
+}
+
+func (r *result) PopulateSourceCodeInfo() {
+ srcLocProtos := asSourceLocations(r.FileDescriptorProto().GetSourceCodeInfo().GetLocation())
+ srcLocIndex := computeSourceLocIndex(srcLocProtos)
+ r.srcLocations = srcLocs{file: r, locs: srcLocProtos, index: srcLocIndex}
+}
+
+func (r *result) SourceLocations() protoreflect.SourceLocations {
+ return &r.srcLocations
+}
+
+func computeSourceLocIndex(locs []protoreflect.SourceLocation) map[interface{}]int {
+ index := map[interface{}]int{}
+ for i, loc := range locs {
+ if loc.Next == 0 {
+ index[pathKey(loc.Path)] = i
+ }
+ }
+ return index
+}
+
+func asSourceLocations(srcInfoProtos []*descriptorpb.SourceCodeInfo_Location) []protoreflect.SourceLocation {
+ locs := make([]protoreflect.SourceLocation, len(srcInfoProtos))
+ prev := map[any]*protoreflect.SourceLocation{}
+ for i, loc := range srcInfoProtos {
+ var stLin, stCol, enLin, enCol int
+ if len(loc.Span) == 3 {
+ stLin, stCol, enCol = int(loc.Span[0]), int(loc.Span[1]), int(loc.Span[2])
+ enLin = stLin
+ } else {
+ stLin, stCol, enLin, enCol = int(loc.Span[0]), int(loc.Span[1]), int(loc.Span[2]), int(loc.Span[3])
+ }
+ locs[i] = protoreflect.SourceLocation{
+ Path: loc.Path,
+ LeadingComments: loc.GetLeadingComments(),
+ LeadingDetachedComments: loc.GetLeadingDetachedComments(),
+ TrailingComments: loc.GetTrailingComments(),
+ StartLine: stLin,
+ StartColumn: stCol,
+ EndLine: enLin,
+ EndColumn: enCol,
+ }
+ str := pathKey(loc.Path)
+ pr := prev[str]
+ if pr != nil {
+ pr.Next = i
+ }
+ prev[str] = &locs[i]
+ }
+ return locs
+}
+
+type fileImports struct {
+ protoreflect.FileImports
+ files []protoreflect.FileImport
+}
+
+func (r *result) createImports() fileImports {
+ fd := r.FileDescriptorProto()
+ imps := make([]protoreflect.FileImport, len(fd.Dependency))
+ for i, dep := range fd.Dependency {
+ desc := r.deps.FindFileByPath(dep)
+ imps[i] = protoreflect.FileImport{FileDescriptor: unwrap(desc)}
+ }
+ for _, publicIndex := range fd.PublicDependency {
+ imps[int(publicIndex)].IsPublic = true
+ }
+ for _, weakIndex := range fd.WeakDependency {
+ imps[int(weakIndex)].IsWeak = true
+ }
+ return fileImports{files: imps}
+}
+
+func unwrap(descriptor protoreflect.FileDescriptor) protoreflect.FileDescriptor {
+ wrapped, ok := descriptor.(interface {
+ Unwrap() protoreflect.FileDescriptor
+ })
+ if !ok {
+ return descriptor
+ }
+ unwrapped := wrapped.Unwrap()
+ if unwrapped == nil {
+ return descriptor // shouldn't ever happen
+ }
+ return unwrapped
+}
+
+func (f *fileImports) Len() int {
+ return len(f.files)
+}
+
+func (f *fileImports) Get(i int) protoreflect.FileImport {
+ return f.files[i]
+}
+
+type srcLocs struct {
+ protoreflect.SourceLocations
+ file *result
+ locs []protoreflect.SourceLocation
+ index map[interface{}]int
+}
+
+func (s *srcLocs) Len() int {
+ return len(s.locs)
+}
+
+func (s *srcLocs) Get(i int) protoreflect.SourceLocation {
+ return s.locs[i]
+}
+
+func (s *srcLocs) ByPath(p protoreflect.SourcePath) protoreflect.SourceLocation {
+ index, ok := s.index[pathKey(p)]
+ if !ok {
+ return protoreflect.SourceLocation{}
+ }
+ return s.locs[index]
+}
+
+func (s *srcLocs) ByDescriptor(d protoreflect.Descriptor) protoreflect.SourceLocation {
+ if d.ParentFile() != s.file {
+ return protoreflect.SourceLocation{}
+ }
+ path, ok := internal.ComputePath(d)
+ if !ok {
+ return protoreflect.SourceLocation{}
+ }
+ return s.ByPath(path)
+}
+
+type msgDescriptors struct {
+ protoreflect.MessageDescriptors
+ msgs []msgDescriptor
+}
+
+func (r *result) createMessages(prefix string, parent protoreflect.Descriptor, msgProtos []*descriptorpb.DescriptorProto, pool *allocPool) msgDescriptors {
+ msgs := pool.getMessages(len(msgProtos))
+ for i, msgProto := range msgProtos {
+ r.createMessageDescriptor(&msgs[i], msgProto, parent, i, prefix+msgProto.GetName(), pool)
+ }
+ return msgDescriptors{msgs: msgs}
+}
+
+func (m *msgDescriptors) Len() int {
+ return len(m.msgs)
+}
+
+func (m *msgDescriptors) Get(i int) protoreflect.MessageDescriptor {
+ return &m.msgs[i]
+}
+
+func (m *msgDescriptors) ByName(s protoreflect.Name) protoreflect.MessageDescriptor {
+ for i := range m.msgs {
+ msg := &m.msgs[i]
+ if msg.Name() == s {
+ return msg
+ }
+ }
+ return nil
+}
+
+type msgDescriptor struct {
+ protoreflect.MessageDescriptor
+ file *result
+ parent protoreflect.Descriptor
+ index int
+ proto *descriptorpb.DescriptorProto
+ fqn string
+
+ fields fldDescriptors
+ oneofs oneofDescriptors
+ nestedMessages msgDescriptors
+ nestedEnums enumDescriptors
+ nestedExtensions extDescriptors
+
+ extRanges fieldRanges
+ rsvdRanges fieldRanges
+ rsvdNames names
+}
+
+var _ protoreflect.MessageDescriptor = (*msgDescriptor)(nil)
+var _ protoutil.DescriptorProtoWrapper = (*msgDescriptor)(nil)
+
+func (r *result) createMessageDescriptor(ret *msgDescriptor, md *descriptorpb.DescriptorProto, parent protoreflect.Descriptor, index int, fqn string, pool *allocPool) {
+ r.descriptors[fqn] = ret
+
+ ret.MessageDescriptor = noOpMessage
+ ret.file = r
+ ret.parent = parent
+ ret.index = index
+ ret.proto = md
+ ret.fqn = fqn
+
+ prefix := fqn + "."
+ // NB: We MUST create fields before oneofs so that we can populate the
+ // set of fields that belong to the oneof
+ ret.fields = r.createFields(prefix, ret, md.Field, pool)
+ ret.oneofs = r.createOneofs(prefix, ret, md.OneofDecl, pool)
+ ret.nestedMessages = r.createMessages(prefix, ret, md.NestedType, pool)
+ ret.nestedEnums = r.createEnums(prefix, ret, md.EnumType, pool)
+ ret.nestedExtensions = r.createExtensions(prefix, ret, md.Extension, pool)
+ ret.extRanges = createFieldRanges(md.ExtensionRange)
+ ret.rsvdRanges = createFieldRanges(md.ReservedRange)
+ ret.rsvdNames = names{s: md.ReservedName}
+}
+
+func (m *msgDescriptor) MessageDescriptorProto() *descriptorpb.DescriptorProto {
+ return m.proto
+}
+
+func (m *msgDescriptor) AsProto() proto.Message {
+ return m.proto
+}
+
+func (m *msgDescriptor) ParentFile() protoreflect.FileDescriptor {
+ return m.file
+}
+
+func (m *msgDescriptor) Parent() protoreflect.Descriptor {
+ return m.parent
+}
+
+func (m *msgDescriptor) Index() int {
+ return m.index
+}
+
+func (m *msgDescriptor) Syntax() protoreflect.Syntax {
+ return m.file.Syntax()
+}
+
+func (m *msgDescriptor) Name() protoreflect.Name {
+ return protoreflect.Name(m.proto.GetName())
+}
+
+func (m *msgDescriptor) FullName() protoreflect.FullName {
+ return protoreflect.FullName(m.fqn)
+}
+
+func (m *msgDescriptor) IsPlaceholder() bool {
+ return false
+}
+
+func (m *msgDescriptor) Options() protoreflect.ProtoMessage {
+ return m.proto.Options
+}
+
+func (m *msgDescriptor) IsMapEntry() bool {
+ return m.proto.Options.GetMapEntry()
+}
+
+func (m *msgDescriptor) Fields() protoreflect.FieldDescriptors {
+ return &m.fields
+}
+
+func (m *msgDescriptor) Oneofs() protoreflect.OneofDescriptors {
+ return &m.oneofs
+}
+
+func (m *msgDescriptor) ReservedNames() protoreflect.Names {
+ return m.rsvdNames
+}
+
+func (m *msgDescriptor) ReservedRanges() protoreflect.FieldRanges {
+ return m.rsvdRanges
+}
+
+func (m *msgDescriptor) RequiredNumbers() protoreflect.FieldNumbers {
+ var indexes fieldNums
+ for _, fld := range m.proto.Field {
+ if fld.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REQUIRED {
+ indexes.s = append(indexes.s, fld.GetNumber())
+ }
+ }
+ return indexes
+}
+
+func (m *msgDescriptor) ExtensionRanges() protoreflect.FieldRanges {
+ return m.extRanges
+}
+
+func (m *msgDescriptor) ExtensionRangeOptions(i int) protoreflect.ProtoMessage {
+ return m.proto.ExtensionRange[i].Options
+}
+
+func (m *msgDescriptor) Enums() protoreflect.EnumDescriptors {
+ return &m.nestedEnums
+}
+
+func (m *msgDescriptor) Messages() protoreflect.MessageDescriptors {
+ return &m.nestedMessages
+}
+
+func (m *msgDescriptor) Extensions() protoreflect.ExtensionDescriptors {
+ return &m.nestedExtensions
+}
+
+type names struct {
+ protoreflect.Names
+ s []string
+}
+
+func (n names) Len() int {
+ return len(n.s)
+}
+
+func (n names) Get(i int) protoreflect.Name {
+ return protoreflect.Name(n.s[i])
+}
+
+func (n names) Has(s protoreflect.Name) bool {
+ for _, name := range n.s {
+ if name == string(s) {
+ return true
+ }
+ }
+ return false
+}
+
+type fieldNums struct {
+ protoreflect.FieldNumbers
+ s []int32
+}
+
+func (n fieldNums) Len() int {
+ return len(n.s)
+}
+
+func (n fieldNums) Get(i int) protoreflect.FieldNumber {
+ return protoreflect.FieldNumber(n.s[i])
+}
+
+func (n fieldNums) Has(s protoreflect.FieldNumber) bool {
+ for _, num := range n.s {
+ if num == int32(s) {
+ return true
+ }
+ }
+ return false
+}
+
+type fieldRanges struct {
+ protoreflect.FieldRanges
+ ranges [][2]protoreflect.FieldNumber
+}
+
+type fieldRange interface {
+ GetStart() int32
+ GetEnd() int32
+}
+
+func createFieldRanges[T fieldRange](rangeProtos []T) fieldRanges {
+ ranges := make([][2]protoreflect.FieldNumber, len(rangeProtos))
+ for i, r := range rangeProtos {
+ ranges[i] = [2]protoreflect.FieldNumber{
+ protoreflect.FieldNumber(r.GetStart()),
+ protoreflect.FieldNumber(r.GetEnd()),
+ }
+ }
+ return fieldRanges{ranges: ranges}
+}
+
+func (f fieldRanges) Len() int {
+ return len(f.ranges)
+}
+
+func (f fieldRanges) Get(i int) [2]protoreflect.FieldNumber {
+ return f.ranges[i]
+}
+
+func (f fieldRanges) Has(n protoreflect.FieldNumber) bool {
+ for _, r := range f.ranges {
+ if r[0] <= n && r[1] > n {
+ return true
+ }
+ }
+ return false
+}
+
+type enumDescriptors struct {
+ protoreflect.EnumDescriptors
+ enums []enumDescriptor
+}
+
+func (r *result) createEnums(prefix string, parent protoreflect.Descriptor, enumProtos []*descriptorpb.EnumDescriptorProto, pool *allocPool) enumDescriptors {
+ enums := pool.getEnums(len(enumProtos))
+ for i, enumProto := range enumProtos {
+ r.createEnumDescriptor(&enums[i], enumProto, parent, i, prefix+enumProto.GetName(), pool)
+ }
+ return enumDescriptors{enums: enums}
+}
+
+func (e *enumDescriptors) Len() int {
+ return len(e.enums)
+}
+
+func (e *enumDescriptors) Get(i int) protoreflect.EnumDescriptor {
+ return &e.enums[i]
+}
+
+func (e *enumDescriptors) ByName(s protoreflect.Name) protoreflect.EnumDescriptor {
+ for i := range e.enums {
+ enum := &e.enums[i]
+ if enum.Name() == s {
+ return enum
+ }
+ }
+ return nil
+}
+
+type enumDescriptor struct {
+ protoreflect.EnumDescriptor
+ file *result
+ parent protoreflect.Descriptor
+ index int
+ proto *descriptorpb.EnumDescriptorProto
+ fqn string
+
+ values enValDescriptors
+
+ rsvdRanges enumRanges
+ rsvdNames names
+}
+
+var _ protoreflect.EnumDescriptor = (*enumDescriptor)(nil)
+var _ protoutil.DescriptorProtoWrapper = (*enumDescriptor)(nil)
+
+func (r *result) createEnumDescriptor(ret *enumDescriptor, ed *descriptorpb.EnumDescriptorProto, parent protoreflect.Descriptor, index int, fqn string, pool *allocPool) {
+ r.descriptors[fqn] = ret
+
+ ret.EnumDescriptor = noOpEnum
+ ret.file = r
+ ret.parent = parent
+ ret.index = index
+ ret.proto = ed
+ ret.fqn = fqn
+
+ // Unlike all other elements, the fully-qualified names of enum values
+ // are NOT scoped to their parent element (the enum), but rather to
+ // the enum's parent element. This follows C++ scoping rules for
+ // enum values.
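+ // For example (illustrative): a value FOO declared in enum pkg.MyEnum gets
+ // the fully-qualified name pkg.FOO, not pkg.MyEnum.FOO.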
+ prefix := strings.TrimSuffix(fqn, ed.GetName())
+ ret.values = r.createEnumValues(prefix, ret, ed.Value, pool)
+ ret.rsvdRanges = createEnumRanges(ed.ReservedRange)
+ ret.rsvdNames = names{s: ed.ReservedName}
+}
+
+func (e *enumDescriptor) EnumDescriptorProto() *descriptorpb.EnumDescriptorProto {
+ return e.proto
+}
+
+func (e *enumDescriptor) AsProto() proto.Message {
+ return e.proto
+}
+
+func (e *enumDescriptor) ParentFile() protoreflect.FileDescriptor {
+ return e.file
+}
+
+func (e *enumDescriptor) Parent() protoreflect.Descriptor {
+ return e.parent
+}
+
+func (e *enumDescriptor) Index() int {
+ return e.index
+}
+
+func (e *enumDescriptor) Syntax() protoreflect.Syntax {
+ return e.file.Syntax()
+}
+
+func (e *enumDescriptor) Name() protoreflect.Name {
+ return protoreflect.Name(e.proto.GetName())
+}
+
+func (e *enumDescriptor) FullName() protoreflect.FullName {
+ return protoreflect.FullName(e.fqn)
+}
+
+func (e *enumDescriptor) IsPlaceholder() bool {
+ return false
+}
+
+func (e *enumDescriptor) Options() protoreflect.ProtoMessage {
+ return e.proto.Options
+}
+
+func (e *enumDescriptor) Values() protoreflect.EnumValueDescriptors {
+ return &e.values
+}
+
+func (e *enumDescriptor) ReservedNames() protoreflect.Names {
+ return e.rsvdNames
+}
+
+func (e *enumDescriptor) ReservedRanges() protoreflect.EnumRanges {
+ return e.rsvdRanges
+}
+
+func (e *enumDescriptor) IsClosed() bool {
+ enumType := resolveFeature(e, enumTypeField)
+ return descriptorpb.FeatureSet_EnumType(enumType.Enum()) == descriptorpb.FeatureSet_CLOSED
+}
+
+type enumRanges struct {
+ protoreflect.EnumRanges
+ ranges [][2]protoreflect.EnumNumber
+}
+
+func createEnumRanges(rangeProtos []*descriptorpb.EnumDescriptorProto_EnumReservedRange) enumRanges {
+ ranges := make([][2]protoreflect.EnumNumber, len(rangeProtos))
+ for i, r := range rangeProtos {
+ ranges[i] = [2]protoreflect.EnumNumber{
+ protoreflect.EnumNumber(r.GetStart()),
+ protoreflect.EnumNumber(r.GetEnd()),
+ }
+ }
+ return enumRanges{ranges: ranges}
+}
+
+func (e enumRanges) Len() int {
+ return len(e.ranges)
+}
+
+func (e enumRanges) Get(i int) [2]protoreflect.EnumNumber {
+ return e.ranges[i]
+}
+
+func (e enumRanges) Has(n protoreflect.EnumNumber) bool {
+ for _, r := range e.ranges {
+ if r[0] <= n && r[1] >= n {
+ return true
+ }
+ }
+ return false
+}
+
+type enValDescriptors struct {
+ protoreflect.EnumValueDescriptors
+ vals []enValDescriptor
+}
+
+func (r *result) createEnumValues(prefix string, parent *enumDescriptor, enValProtos []*descriptorpb.EnumValueDescriptorProto, pool *allocPool) enValDescriptors {
+ vals := pool.getEnumValues(len(enValProtos))
+ for i, enValProto := range enValProtos {
+ r.createEnumValueDescriptor(&vals[i], enValProto, parent, i, prefix+enValProto.GetName())
+ }
+ return enValDescriptors{vals: vals}
+}
+
+func (e *enValDescriptors) Len() int {
+ return len(e.vals)
+}
+
+func (e *enValDescriptors) Get(i int) protoreflect.EnumValueDescriptor {
+ return &e.vals[i]
+}
+
+func (e *enValDescriptors) ByName(s protoreflect.Name) protoreflect.EnumValueDescriptor {
+ for i := range e.vals {
+ val := &e.vals[i]
+ if val.Name() == s {
+ return val
+ }
+ }
+ return nil
+}
+
+func (e *enValDescriptors) ByNumber(n protoreflect.EnumNumber) protoreflect.EnumValueDescriptor {
+ for i := range e.vals {
+ val := &e.vals[i]
+ if val.Number() == n {
+ return val
+ }
+ }
+ return nil
+}
+
+type enValDescriptor struct {
+ protoreflect.EnumValueDescriptor
+ file *result
+ parent *enumDescriptor
+ index int
+ proto *descriptorpb.EnumValueDescriptorProto
+ fqn string
+}
+
+var _ protoreflect.EnumValueDescriptor = (*enValDescriptor)(nil)
+var _ protoutil.DescriptorProtoWrapper = (*enValDescriptor)(nil)
+
+func (r *result) createEnumValueDescriptor(ret *enValDescriptor, ed *descriptorpb.EnumValueDescriptorProto, parent *enumDescriptor, index int, fqn string) {
+ r.descriptors[fqn] = ret
+ ret.EnumValueDescriptor = noOpEnumValue
+ ret.file = r
+ ret.parent = parent
+ ret.index = index
+ ret.proto = ed
+ ret.fqn = fqn
+}
+
+func (e *enValDescriptor) EnumValueDescriptorProto() *descriptorpb.EnumValueDescriptorProto {
+ return e.proto
+}
+
+func (e *enValDescriptor) AsProto() proto.Message {
+ return e.proto
+}
+
+func (e *enValDescriptor) ParentFile() protoreflect.FileDescriptor {
+ return e.file
+}
+
+func (e *enValDescriptor) Parent() protoreflect.Descriptor {
+ return e.parent
+}
+
+func (e *enValDescriptor) Index() int {
+ return e.index
+}
+
+func (e *enValDescriptor) Syntax() protoreflect.Syntax {
+ return e.file.Syntax()
+}
+
+func (e *enValDescriptor) Name() protoreflect.Name {
+ return protoreflect.Name(e.proto.GetName())
+}
+
+func (e *enValDescriptor) FullName() protoreflect.FullName {
+ return protoreflect.FullName(e.fqn)
+}
+
+func (e *enValDescriptor) IsPlaceholder() bool {
+ return false
+}
+
+func (e *enValDescriptor) Options() protoreflect.ProtoMessage {
+ return e.proto.Options
+}
+
+func (e *enValDescriptor) Number() protoreflect.EnumNumber {
+ return protoreflect.EnumNumber(e.proto.GetNumber())
+}
+
+type extDescriptors struct {
+ protoreflect.ExtensionDescriptors
+ exts []extTypeDescriptor
+}
+
+func (r *result) createExtensions(prefix string, parent protoreflect.Descriptor, extProtos []*descriptorpb.FieldDescriptorProto, pool *allocPool) extDescriptors {
+ exts := pool.getExtensions(len(extProtos))
+ for i, extProto := range extProtos {
+ r.createExtTypeDescriptor(&exts[i], extProto, parent, i, prefix+extProto.GetName())
+ }
+ return extDescriptors{exts: exts}
+}
+
+func (e *extDescriptors) Len() int {
+ return len(e.exts)
+}
+
+func (e *extDescriptors) Get(i int) protoreflect.ExtensionDescriptor {
+ return &e.exts[i]
+}
+
+func (e *extDescriptors) ByName(s protoreflect.Name) protoreflect.ExtensionDescriptor {
+ for i := range e.exts {
+ ext := &e.exts[i]
+ if ext.Name() == s {
+ return ext
+ }
+ }
+ return nil
+}
+
+type extTypeDescriptor struct {
+ protoreflect.ExtensionTypeDescriptor
+ field fldDescriptor
+}
+
+var _ protoutil.DescriptorProtoWrapper = &extTypeDescriptor{}
+
+func (r *result) createExtTypeDescriptor(ret *extTypeDescriptor, fd *descriptorpb.FieldDescriptorProto, parent protoreflect.Descriptor, index int, fqn string) {
+ r.descriptors[fqn] = ret
+ ret.field = fldDescriptor{FieldDescriptor: noOpExtension, file: r, parent: parent, index: index, proto: fd, fqn: fqn}
+ ret.ExtensionTypeDescriptor = dynamicpb.NewExtensionType(&ret.field).TypeDescriptor()
+}
+
+func (e *extTypeDescriptor) FieldDescriptorProto() *descriptorpb.FieldDescriptorProto {
+ return e.field.proto
+}
+
+func (e *extTypeDescriptor) AsProto() proto.Message {
+ return e.field.proto
+}
+
+type fldDescriptors struct {
+ protoreflect.FieldDescriptors
+ // We use pointers here, instead of a flattened slice, because oneofs
+ // also have fields, but those need to point to values in the parent
+ // message's fields. Even though they are pointers, in the containing
+ // message we always allocate a flattened slice and then point into
+ // that, so we're still doing fewer allocations (two per set of fields
+ // instead of one per field).
+ fields []*fldDescriptor
+}
+
+func (r *result) createFields(prefix string, parent *msgDescriptor, fldProtos []*descriptorpb.FieldDescriptorProto, pool *allocPool) fldDescriptors {
+ fields := pool.getFields(len(fldProtos))
+ fieldPtrs := make([]*fldDescriptor, len(fldProtos))
+ for i, fldProto := range fldProtos {
+ r.createFieldDescriptor(&fields[i], fldProto, parent, i, prefix+fldProto.GetName())
+ fieldPtrs[i] = &fields[i]
+ }
+ return fldDescriptors{fields: fieldPtrs}
+}
+
+func (f *fldDescriptors) Len() int {
+ return len(f.fields)
+}
+
+func (f *fldDescriptors) Get(i int) protoreflect.FieldDescriptor {
+ return f.fields[i]
+}
+
+func (f *fldDescriptors) ByName(s protoreflect.Name) protoreflect.FieldDescriptor {
+ for _, fld := range f.fields {
+ if fld.Name() == s {
+ return fld
+ }
+ }
+ return nil
+}
+
+func (f *fldDescriptors) ByJSONName(s string) protoreflect.FieldDescriptor {
+ for _, fld := range f.fields {
+ if fld.JSONName() == s {
+ return fld
+ }
+ }
+ return nil
+}
+
+func (f *fldDescriptors) ByTextName(s string) protoreflect.FieldDescriptor {
+ fld := f.ByName(protoreflect.Name(s))
+ if fld != nil {
+ return fld
+ }
+ // Groups use the type name instead, so we fall back to a slow search
+ for _, fld := range f.fields {
+ if fld.TextName() == s {
+ return fld
+ }
+ }
+ return nil
+}
+
+func (f *fldDescriptors) ByNumber(n protoreflect.FieldNumber) protoreflect.FieldDescriptor {
+ for _, fld := range f.fields {
+ if fld.Number() == n {
+ return fld
+ }
+ }
+ return nil
+}
+
+type fldDescriptor struct {
+ protoreflect.FieldDescriptor
+ file *result
+ parent protoreflect.Descriptor
+ index int
+ proto *descriptorpb.FieldDescriptorProto
+ fqn string
+
+ msgType protoreflect.MessageDescriptor
+ extendee protoreflect.MessageDescriptor
+ enumType protoreflect.EnumDescriptor
+ oneof protoreflect.OneofDescriptor
+}
+
+var _ protoreflect.FieldDescriptor = (*fldDescriptor)(nil)
+var _ protoutil.DescriptorProtoWrapper = (*fldDescriptor)(nil)
+
+func (r *result) createFieldDescriptor(ret *fldDescriptor, fd *descriptorpb.FieldDescriptorProto, parent *msgDescriptor, index int, fqn string) {
+ r.descriptors[fqn] = ret
+ ret.FieldDescriptor = noOpField
+ ret.file = r
+ ret.parent = parent
+ ret.index = index
+ ret.proto = fd
+ ret.fqn = fqn
+}
+
+func (f *fldDescriptor) FieldDescriptorProto() *descriptorpb.FieldDescriptorProto {
+ return f.proto
+}
+
+func (f *fldDescriptor) AsProto() proto.Message {
+ return f.proto
+}
+
+func (f *fldDescriptor) ParentFile() protoreflect.FileDescriptor {
+ return f.file
+}
+
+func (f *fldDescriptor) Parent() protoreflect.Descriptor {
+ return f.parent
+}
+
+func (f *fldDescriptor) Index() int {
+ return f.index
+}
+
+func (f *fldDescriptor) Syntax() protoreflect.Syntax {
+ return f.file.Syntax()
+}
+
+func (f *fldDescriptor) Name() protoreflect.Name {
+ return protoreflect.Name(f.proto.GetName())
+}
+
+func (f *fldDescriptor) FullName() protoreflect.FullName {
+ return protoreflect.FullName(f.fqn)
+}
+
+func (f *fldDescriptor) IsPlaceholder() bool {
+ return false
+}
+
+func (f *fldDescriptor) Options() protoreflect.ProtoMessage {
+ return f.proto.Options
+}
+
+func (f *fldDescriptor) Number() protoreflect.FieldNumber {
+ return protoreflect.FieldNumber(f.proto.GetNumber())
+}
+
+func (f *fldDescriptor) Cardinality() protoreflect.Cardinality {
+ switch f.proto.GetLabel() {
+ case descriptorpb.FieldDescriptorProto_LABEL_REPEATED:
+ return protoreflect.Repeated
+ case descriptorpb.FieldDescriptorProto_LABEL_REQUIRED:
+ return protoreflect.Required
+ case descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL:
+ if f.Syntax() == protoreflect.Editions {
+ // Editions does not use label to indicate required. It instead
+ // uses a feature, and label is always optional.
+ fieldPresence := descriptorpb.FeatureSet_FieldPresence(resolveFeature(f, fieldPresenceField).Enum())
+ if fieldPresence == descriptorpb.FeatureSet_LEGACY_REQUIRED {
+ return protoreflect.Required
+ }
+ }
+ return protoreflect.Optional
+ default:
+ return 0
+ }
+}
+
+func (f *fldDescriptor) Kind() protoreflect.Kind {
+ if f.proto.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE && f.Syntax() == protoreflect.Editions &&
+ !f.IsMap() && !f.parentIsMap() {
+ // In editions, "group encoding" (aka "delimited encoding") is toggled
+ // via a feature. So we report group kind when that feature is enabled.
+ messageEncoding := resolveFeature(f, messageEncodingField)
+ if descriptorpb.FeatureSet_MessageEncoding(messageEncoding.Enum()) == descriptorpb.FeatureSet_DELIMITED {
+ return protoreflect.GroupKind
+ }
+ }
+ return protoreflect.Kind(f.proto.GetType())
+}
+
+func (f *fldDescriptor) HasJSONName() bool {
+ return f.proto.JsonName != nil
+}
+
+func (f *fldDescriptor) JSONName() string {
+ if f.IsExtension() {
+ return f.TextName()
+ }
+ return f.proto.GetJsonName()
+}
+
+func (f *fldDescriptor) TextName() string {
+ if f.IsExtension() {
+ return fmt.Sprintf("[%s]", f.FullName())
+ }
+ if f.looksLikeGroup() {
+ // groups use the type name
+ return string(protoreflect.FullName(f.proto.GetTypeName()).Name())
+ }
+ return string(f.Name())
+}
+
+func (f *fldDescriptor) looksLikeGroup() bool {
+ // It looks like a group if it uses group/delimited encoding (checked via f.Kind)
+ // and the message type is a sibling whose name is a mixed-case version of the field name.
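+ // For example (illustrative): a proto2 field declared as `optional group Foo = 1`
+ // is named "foo", and its message type is a sibling message named "Foo".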
+ return f.Kind() == protoreflect.GroupKind &&
+ f.Message().FullName().Parent() == f.FullName().Parent() &&
+ string(f.Name()) == strings.ToLower(string(f.Message().Name()))
+}
+
+func (f *fldDescriptor) HasPresence() bool {
+ if f.proto.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REPEATED {
+ return false
+ }
+ if f.IsExtension() ||
+ f.Kind() == protoreflect.MessageKind || f.Kind() == protoreflect.GroupKind ||
+ f.proto.OneofIndex != nil {
+ return true
+ }
+ fieldPresence := descriptorpb.FeatureSet_FieldPresence(resolveFeature(f, fieldPresenceField).Enum())
+ return fieldPresence == descriptorpb.FeatureSet_EXPLICIT || fieldPresence == descriptorpb.FeatureSet_LEGACY_REQUIRED
+}
+
+func (f *fldDescriptor) IsExtension() bool {
+ return f.proto.GetExtendee() != ""
+}
+
+func (f *fldDescriptor) HasOptionalKeyword() bool {
+ if f.proto.GetLabel() != descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL {
+ return false
+ }
+ if f.proto.GetProto3Optional() {
+ // NB: This smells weird to return false here. If the proto3_optional field
+ // is set, it's because the keyword WAS present. However, the Go runtime
+ // returns false for this case, so we mirror that behavior.
+ return !f.IsExtension()
+ }
+ // If it's optional, but not a proto3 optional, then the keyword is only
+ // present for proto2 files, for fields that are not part of a oneof.
+ return f.file.Syntax() == protoreflect.Proto2 && f.proto.OneofIndex == nil
+}
+
+func (f *fldDescriptor) IsWeak() bool {
+ return f.proto.Options.GetWeak()
+}
+
+func (f *fldDescriptor) IsPacked() bool {
+ if f.Cardinality() != protoreflect.Repeated || !internal.CanPack(f.Kind()) {
+ return false
+ }
+ opts := f.proto.GetOptions()
+ if opts != nil && opts.Packed != nil {
+ // packed option is set explicitly
+ return *opts.Packed
+ }
+ fieldEncoding := resolveFeature(f, repeatedFieldEncodingField)
+ return descriptorpb.FeatureSet_RepeatedFieldEncoding(fieldEncoding.Enum()) == descriptorpb.FeatureSet_PACKED
+}
+
+func (f *fldDescriptor) IsList() bool {
+ if f.proto.GetLabel() != descriptorpb.FieldDescriptorProto_LABEL_REPEATED {
+ return false
+ }
+ return !f.isMapEntry()
+}
+
+func (f *fldDescriptor) IsMap() bool {
+ if f.proto.GetLabel() != descriptorpb.FieldDescriptorProto_LABEL_REPEATED {
+ return false
+ }
+ if f.IsExtension() {
+ return false
+ }
+ return f.isMapEntry()
+}
+
+func (f *fldDescriptor) isMapEntry() bool {
+ if f.proto.GetType() != descriptorpb.FieldDescriptorProto_TYPE_MESSAGE {
+ return false
+ }
+ return f.Message().IsMapEntry()
+}
+
+func (f *fldDescriptor) parentIsMap() bool {
+ parent, ok := f.parent.(protoreflect.MessageDescriptor)
+ return ok && parent.IsMapEntry()
+}
+
+func (f *fldDescriptor) MapKey() protoreflect.FieldDescriptor {
+ if !f.IsMap() {
+ return nil
+ }
+ return f.Message().Fields().ByNumber(1)
+}
+
+func (f *fldDescriptor) MapValue() protoreflect.FieldDescriptor {
+ if !f.IsMap() {
+ return nil
+ }
+ return f.Message().Fields().ByNumber(2)
+}
+
+func (f *fldDescriptor) HasDefault() bool {
+ return f.proto.DefaultValue != nil
+}
+
+func (f *fldDescriptor) Default() protoreflect.Value {
+ // We only return a valid value for scalar fields
+ if f.proto.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REPEATED ||
+ f.Kind() == protoreflect.GroupKind || f.Kind() == protoreflect.MessageKind {
+ return protoreflect.Value{}
+ }
+
+ if f.proto.DefaultValue != nil {
+ defVal := f.parseDefaultValue(f.proto.GetDefaultValue())
+ if defVal.IsValid() {
+ return defVal
+ }
+ // if we cannot parse a valid value, fall back to zero value below
+ }
+
+ // No custom default value, so return the zero value for the type
+ switch f.Kind() {
+ case protoreflect.Int32Kind, protoreflect.Sint32Kind, protoreflect.Sfixed32Kind:
+ return protoreflect.ValueOfInt32(0)
+ case protoreflect.Int64Kind, protoreflect.Sint64Kind, protoreflect.Sfixed64Kind:
+ return protoreflect.ValueOfInt64(0)
+ case protoreflect.Uint32Kind, protoreflect.Fixed32Kind:
+ return protoreflect.ValueOfUint32(0)
+ case protoreflect.Uint64Kind, protoreflect.Fixed64Kind:
+ return protoreflect.ValueOfUint64(0)
+ case protoreflect.FloatKind:
+ return protoreflect.ValueOfFloat32(0)
+ case protoreflect.DoubleKind:
+ return protoreflect.ValueOfFloat64(0)
+ case protoreflect.BoolKind:
+ return protoreflect.ValueOfBool(false)
+ case protoreflect.BytesKind:
+ return protoreflect.ValueOfBytes(nil)
+ case protoreflect.StringKind:
+ return protoreflect.ValueOfString("")
+ case protoreflect.EnumKind:
+ return protoreflect.ValueOfEnum(f.Enum().Values().Get(0).Number())
+ case protoreflect.GroupKind, protoreflect.MessageKind:
+ return protoreflect.ValueOfMessage(dynamicpb.NewMessage(f.Message()))
+ default:
+ panic(fmt.Sprintf("unknown kind: %v", f.Kind()))
+ }
+}
+
+func (f *fldDescriptor) parseDefaultValue(val string) protoreflect.Value {
+ switch f.Kind() {
+ case protoreflect.EnumKind:
+ vd := f.Enum().Values().ByName(protoreflect.Name(val))
+ if vd != nil {
+ return protoreflect.ValueOfEnum(vd.Number())
+ }
+ return protoreflect.Value{}
+ case protoreflect.BoolKind:
+ switch val {
+ case "true":
+ return protoreflect.ValueOfBool(true)
+ case "false":
+ return protoreflect.ValueOfBool(false)
+ default:
+ return protoreflect.Value{}
+ }
+ case protoreflect.BytesKind:
+ return protoreflect.ValueOfBytes([]byte(unescape(val)))
+ case protoreflect.StringKind:
+ return protoreflect.ValueOfString(val)
+ case protoreflect.FloatKind:
+ if f, err := strconv.ParseFloat(val, 32); err == nil {
+ return protoreflect.ValueOfFloat32(float32(f))
+ }
+ return protoreflect.Value{}
+ case protoreflect.DoubleKind:
+ if f, err := strconv.ParseFloat(val, 64); err == nil {
+ return protoreflect.ValueOfFloat64(f)
+ }
+ return protoreflect.Value{}
+ case protoreflect.Int32Kind, protoreflect.Sint32Kind, protoreflect.Sfixed32Kind:
+ if i, err := strconv.ParseInt(val, 10, 32); err == nil {
+ return protoreflect.ValueOfInt32(int32(i))
+ }
+ return protoreflect.Value{}
+ case protoreflect.Uint32Kind, protoreflect.Fixed32Kind:
+ if i, err := strconv.ParseUint(val, 10, 32); err == nil {
+ return protoreflect.ValueOfUint32(uint32(i))
+ }
+ return protoreflect.Value{}
+ case protoreflect.Int64Kind, protoreflect.Sint64Kind, protoreflect.Sfixed64Kind:
+ if i, err := strconv.ParseInt(val, 10, 64); err == nil {
+ return protoreflect.ValueOfInt64(i)
+ }
+ return protoreflect.Value{}
+ case protoreflect.Uint64Kind, protoreflect.Fixed64Kind:
+ if i, err := strconv.ParseUint(val, 10, 64); err == nil {
+ return protoreflect.ValueOfUint64(i)
+ }
+ return protoreflect.Value{}
+ default:
+ return protoreflect.Value{}
+ }
+}
+
+func unescape(s string) string {
+ // protoc encodes default values for 'bytes' fields using C escaping,
+ // so this function reverses that escaping
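+ // For example (illustrative): the input \x41\101\n decodes to "AA\n", since
+ // the hex escape \x41 and the octal escape \101 both yield 'A'.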
+ out := make([]byte, 0, len(s))
+ var buf [4]byte
+ for len(s) > 0 {
+ if s[0] != '\\' || len(s) < 2 {
+ // not escape sequence, or too short to be well-formed escape
+ out = append(out, s[0])
+ s = s[1:]
+ continue
+ }
+ nextIndex := 2 // by default, skip '\' + escaped character
+ switch s[1] {
+ case 'x', 'X':
+ n := matchPrefix(s[2:], 2, isHex)
+ if n == 0 {
+ // bad escape
+ out = append(out, s[:2]...)
+ } else {
+ c, err := strconv.ParseUint(s[2:2+n], 16, 8)
+ if err != nil {
+ // shouldn't really happen...
+ out = append(out, s[:2+n]...)
+ } else {
+ out = append(out, byte(c))
+ }
+ nextIndex = 2 + n
+ }
+ case '0', '1', '2', '3', '4', '5', '6', '7':
+ n := 1 + matchPrefix(s[2:], 2, isOctal)
+ c, err := strconv.ParseUint(s[1:1+n], 8, 8)
+ if err != nil || c > 0xff {
+ out = append(out, s[:1+n]...)
+ } else {
+ out = append(out, byte(c))
+ }
+ nextIndex = 1 + n
+ case 'u':
+ if len(s) < 6 {
+ // bad escape
+ out = append(out, s...)
+ nextIndex = len(s)
+ } else {
+ c, err := strconv.ParseUint(s[2:6], 16, 16)
+ if err != nil {
+ // bad escape
+ out = append(out, s[:6]...)
+ } else {
+ w := utf8.EncodeRune(buf[:], rune(c))
+ out = append(out, buf[:w]...)
+ }
+ nextIndex = 6
+ }
+ case 'U':
+ if len(s) < 10 {
+ // bad escape
+ out = append(out, s...)
+ nextIndex = len(s)
+ } else {
+ c, err := strconv.ParseUint(s[2:10], 16, 32)
+ if err != nil || c > 0x10ffff {
+ // bad escape
+ out = append(out, s[:10]...)
+ } else {
+ w := utf8.EncodeRune(buf[:], rune(c))
+ out = append(out, buf[:w]...)
+ }
+ nextIndex = 10
+ }
+ case 'a':
+ out = append(out, '\a')
+ case 'b':
+ out = append(out, '\b')
+ case 'f':
+ out = append(out, '\f')
+ case 'n':
+ out = append(out, '\n')
+ case 'r':
+ out = append(out, '\r')
+ case 't':
+ out = append(out, '\t')
+ case 'v':
+ out = append(out, '\v')
+ case '\\', '\'', '"', '?':
+ out = append(out, s[1])
+ default:
+ // invalid escape, just copy it as-is
+ out = append(out, s[:2]...)
+ }
+ s = s[nextIndex:]
+ }
+ return string(out)
+}
+
+func isOctal(b byte) bool { return b >= '0' && b <= '7' }
+func isHex(b byte) bool {
+ return (b >= '0' && b <= '9') || (b >= 'a' && b <= 'f') || (b >= 'A' && b <= 'F')
+}
+func matchPrefix(s string, limit int, fn func(byte) bool) int {
+ l := len(s)
+ if l > limit {
+ l = limit
+ }
+ i := 0
+ for ; i < l; i++ {
+ if !fn(s[i]) {
+ return i
+ }
+ }
+ return i
+}
+
+func (f *fldDescriptor) DefaultEnumValue() protoreflect.EnumValueDescriptor {
+ ed := f.Enum()
+ if ed == nil {
+ return nil
+ }
+ if f.proto.DefaultValue != nil {
+ if val := ed.Values().ByName(protoreflect.Name(f.proto.GetDefaultValue())); val != nil {
+ return val
+ }
+ }
+ // if no default specified in source, return nil
+ return nil
+}
+
+func (f *fldDescriptor) ContainingOneof() protoreflect.OneofDescriptor {
+ return f.oneof
+}
+
+func (f *fldDescriptor) ContainingMessage() protoreflect.MessageDescriptor {
+ if f.extendee != nil {
+ return f.extendee
+ }
+ return f.parent.(protoreflect.MessageDescriptor)
+}
+
+func (f *fldDescriptor) Enum() protoreflect.EnumDescriptor {
+ return f.enumType
+}
+
+func (f *fldDescriptor) Message() protoreflect.MessageDescriptor {
+ return f.msgType
+}
+
+type oneofDescriptors struct {
+ protoreflect.OneofDescriptors
+ oneofs []oneofDescriptor
+}
+
+func (r *result) createOneofs(prefix string, parent *msgDescriptor, ooProtos []*descriptorpb.OneofDescriptorProto, pool *allocPool) oneofDescriptors {
+ oos := pool.getOneofs(len(ooProtos))
+ for i, fldProto := range ooProtos {
+ r.createOneofDescriptor(&oos[i], fldProto, parent, i, prefix+fldProto.GetName())
+ }
+ return oneofDescriptors{oneofs: oos}
+}
+
+func (o *oneofDescriptors) Len() int {
+ return len(o.oneofs)
+}
+
+func (o *oneofDescriptors) Get(i int) protoreflect.OneofDescriptor {
+ return &o.oneofs[i]
+}
+
+func (o *oneofDescriptors) ByName(s protoreflect.Name) protoreflect.OneofDescriptor {
+ for i := range o.oneofs {
+ oo := &o.oneofs[i]
+ if oo.Name() == s {
+ return oo
+ }
+ }
+ return nil
+}
+
+type oneofDescriptor struct {
+ protoreflect.OneofDescriptor
+ file *result
+ parent *msgDescriptor
+ index int
+ proto *descriptorpb.OneofDescriptorProto
+ fqn string
+
+ fields fldDescriptors
+}
+
+var _ protoreflect.OneofDescriptor = (*oneofDescriptor)(nil)
+var _ protoutil.DescriptorProtoWrapper = (*oneofDescriptor)(nil)
+
+func (r *result) createOneofDescriptor(ret *oneofDescriptor, ood *descriptorpb.OneofDescriptorProto, parent *msgDescriptor, index int, fqn string) {
+ r.descriptors[fqn] = ret
+ ret.OneofDescriptor = noOpOneof
+ ret.file = r
+ ret.parent = parent
+ ret.index = index
+ ret.proto = ood
+ ret.fqn = fqn
+
+ var fields []*fldDescriptor
+ for _, fld := range parent.fields.fields {
+ if fld.proto.OneofIndex != nil && int(fld.proto.GetOneofIndex()) == index {
+ fields = append(fields, fld)
+ }
+ }
+ ret.fields = fldDescriptors{fields: fields}
+}
+
+func (o *oneofDescriptor) OneofDescriptorProto() *descriptorpb.OneofDescriptorProto {
+ return o.proto
+}
+
+func (o *oneofDescriptor) AsProto() proto.Message {
+ return o.proto
+}
+
+func (o *oneofDescriptor) ParentFile() protoreflect.FileDescriptor {
+ return o.file
+}
+
+func (o *oneofDescriptor) Parent() protoreflect.Descriptor {
+ return o.parent
+}
+
+func (o *oneofDescriptor) Index() int {
+ return o.index
+}
+
+func (o *oneofDescriptor) Syntax() protoreflect.Syntax {
+ return o.file.Syntax()
+}
+
+func (o *oneofDescriptor) Name() protoreflect.Name {
+ return protoreflect.Name(o.proto.GetName())
+}
+
+func (o *oneofDescriptor) FullName() protoreflect.FullName {
+ return protoreflect.FullName(o.fqn)
+}
+
+func (o *oneofDescriptor) IsPlaceholder() bool {
+ return false
+}
+
+func (o *oneofDescriptor) Options() protoreflect.ProtoMessage {
+ return o.proto.Options
+}
+
+func (o *oneofDescriptor) IsSynthetic() bool {
+ for _, fld := range o.parent.proto.GetField() {
+ if fld.OneofIndex != nil && int(fld.GetOneofIndex()) == o.index {
+ return fld.GetProto3Optional()
+ }
+ }
+ return false // NB: we should never get here
+}
+
+func (o *oneofDescriptor) Fields() protoreflect.FieldDescriptors {
+ return &o.fields
+}
+
+type svcDescriptors struct {
+ protoreflect.ServiceDescriptors
+ svcs []svcDescriptor
+}
+
+func (r *result) createServices(prefix string, svcProtos []*descriptorpb.ServiceDescriptorProto, pool *allocPool) svcDescriptors {
+ svcs := pool.getServices(len(svcProtos))
+ for i, svcProto := range svcProtos {
+ r.createServiceDescriptor(&svcs[i], svcProto, i, prefix+svcProto.GetName(), pool)
+ }
+ return svcDescriptors{svcs: svcs}
+}
+
+func (s *svcDescriptors) Len() int {
+ return len(s.svcs)
+}
+
+func (s *svcDescriptors) Get(i int) protoreflect.ServiceDescriptor {
+ return &s.svcs[i]
+}
+
+func (s *svcDescriptors) ByName(n protoreflect.Name) protoreflect.ServiceDescriptor {
+ for i := range s.svcs {
+ svc := &s.svcs[i]
+ if svc.Name() == n {
+ return svc
+ }
+ }
+ return nil
+}
+
+type svcDescriptor struct {
+ protoreflect.ServiceDescriptor
+ file *result
+ index int
+ proto *descriptorpb.ServiceDescriptorProto
+ fqn string
+
+ methods mtdDescriptors
+}
+
+var _ protoreflect.ServiceDescriptor = (*svcDescriptor)(nil)
+var _ protoutil.DescriptorProtoWrapper = (*svcDescriptor)(nil)
+
+func (r *result) createServiceDescriptor(ret *svcDescriptor, sd *descriptorpb.ServiceDescriptorProto, index int, fqn string, pool *allocPool) {
+ r.descriptors[fqn] = ret
+ ret.ServiceDescriptor = noOpService
+ ret.file = r
+ ret.index = index
+ ret.proto = sd
+ ret.fqn = fqn
+
+ prefix := fqn + "."
+ ret.methods = r.createMethods(prefix, ret, sd.Method, pool)
+}
+
+func (s *svcDescriptor) ServiceDescriptorProto() *descriptorpb.ServiceDescriptorProto {
+ return s.proto
+}
+
+func (s *svcDescriptor) AsProto() proto.Message {
+ return s.proto
+}
+
+func (s *svcDescriptor) ParentFile() protoreflect.FileDescriptor {
+ return s.file
+}
+
+func (s *svcDescriptor) Parent() protoreflect.Descriptor {
+ return s.file
+}
+
+func (s *svcDescriptor) Index() int {
+ return s.index
+}
+
+func (s *svcDescriptor) Syntax() protoreflect.Syntax {
+ return s.file.Syntax()
+}
+
+func (s *svcDescriptor) Name() protoreflect.Name {
+ return protoreflect.Name(s.proto.GetName())
+}
+
+func (s *svcDescriptor) FullName() protoreflect.FullName {
+ return protoreflect.FullName(s.fqn)
+}
+
+func (s *svcDescriptor) IsPlaceholder() bool {
+ return false
+}
+
+func (s *svcDescriptor) Options() protoreflect.ProtoMessage {
+ return s.proto.Options
+}
+
+func (s *svcDescriptor) Methods() protoreflect.MethodDescriptors {
+ return &s.methods
+}
+
+type mtdDescriptors struct {
+ protoreflect.MethodDescriptors
+ mtds []mtdDescriptor
+}
+
+func (r *result) createMethods(prefix string, parent *svcDescriptor, mtdProtos []*descriptorpb.MethodDescriptorProto, pool *allocPool) mtdDescriptors {
+ mtds := pool.getMethods(len(mtdProtos))
+ for i, mtdProto := range mtdProtos {
+ r.createMethodDescriptor(&mtds[i], mtdProto, parent, i, prefix+mtdProto.GetName())
+ }
+ return mtdDescriptors{mtds: mtds}
+}
+
+func (m *mtdDescriptors) Len() int {
+ return len(m.mtds)
+}
+
+func (m *mtdDescriptors) Get(i int) protoreflect.MethodDescriptor {
+ return &m.mtds[i]
+}
+
+func (m *mtdDescriptors) ByName(n protoreflect.Name) protoreflect.MethodDescriptor {
+ for i := range m.mtds {
+ mtd := &m.mtds[i]
+ if mtd.Name() == n {
+ return mtd
+ }
+ }
+ return nil
+}
+
+type mtdDescriptor struct {
+ protoreflect.MethodDescriptor
+ file *result
+ parent *svcDescriptor
+ index int
+ proto *descriptorpb.MethodDescriptorProto
+ fqn string
+
+ inputType, outputType protoreflect.MessageDescriptor
+}
+
+var _ protoreflect.MethodDescriptor = (*mtdDescriptor)(nil)
+var _ protoutil.DescriptorProtoWrapper = (*mtdDescriptor)(nil)
+
+func (r *result) createMethodDescriptor(ret *mtdDescriptor, mtd *descriptorpb.MethodDescriptorProto, parent *svcDescriptor, index int, fqn string) {
+ r.descriptors[fqn] = ret
+ ret.MethodDescriptor = noOpMethod
+ ret.file = r
+ ret.parent = parent
+ ret.index = index
+ ret.proto = mtd
+ ret.fqn = fqn
+}
+
+func (m *mtdDescriptor) MethodDescriptorProto() *descriptorpb.MethodDescriptorProto {
+ return m.proto
+}
+
+func (m *mtdDescriptor) AsProto() proto.Message {
+ return m.proto
+}
+
+func (m *mtdDescriptor) ParentFile() protoreflect.FileDescriptor {
+ return m.file
+}
+
+func (m *mtdDescriptor) Parent() protoreflect.Descriptor {
+ return m.parent
+}
+
+func (m *mtdDescriptor) Index() int {
+ return m.index
+}
+
+func (m *mtdDescriptor) Syntax() protoreflect.Syntax {
+ return m.file.Syntax()
+}
+
+func (m *mtdDescriptor) Name() protoreflect.Name {
+ return protoreflect.Name(m.proto.GetName())
+}
+
+func (m *mtdDescriptor) FullName() protoreflect.FullName {
+ return protoreflect.FullName(m.fqn)
+}
+
+func (m *mtdDescriptor) IsPlaceholder() bool {
+ return false
+}
+
+func (m *mtdDescriptor) Options() protoreflect.ProtoMessage {
+ return m.proto.Options
+}
+
+func (m *mtdDescriptor) Input() protoreflect.MessageDescriptor {
+ return m.inputType
+}
+
+func (m *mtdDescriptor) Output() protoreflect.MessageDescriptor {
+ return m.outputType
+}
+
+func (m *mtdDescriptor) IsStreamingClient() bool {
+ return m.proto.GetClientStreaming()
+}
+
+func (m *mtdDescriptor) IsStreamingServer() bool {
+ return m.proto.GetServerStreaming()
+}
+
+func (r *result) FindImportByPath(path string) File {
+ return r.deps.FindFileByPath(path)
+}
+
+func (r *result) FindExtensionByNumber(msg protoreflect.FullName, tag protoreflect.FieldNumber) protoreflect.ExtensionTypeDescriptor {
+ return findExtension(r, msg, tag)
+}
+
+func (r *result) FindDescriptorByName(name protoreflect.FullName) protoreflect.Descriptor {
+ fqn := strings.TrimPrefix(string(name), ".")
+ return r.descriptors[fqn]
+}
+
+func (r *result) hasSource() bool {
+ n := r.FileNode()
+ _, ok := n.(*ast.FileNode)
+ return ok
+}
+
+// resolveFeature resolves a feature for the given descriptor. If the given element
+// is in a proto2 or proto3 syntax file, this skips resolution and just returns the
+// relevant default (since such files are not allowed to override features).
+//
+// If neither the given element nor any of its ancestors override the given feature,
+// the relevant default is returned.
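+//
+// For example (illustrative): for any field in a proto3 file, resolving the
+// field_presence feature short-circuits to the proto3 edition defaults and
+// yields IMPLICIT, because proto2 and proto3 files cannot override features.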
+func resolveFeature(element protoreflect.Descriptor, feature protoreflect.FieldDescriptor) protoreflect.Value {
+ edition := editions.GetEdition(element)
+ if edition == descriptorpb.Edition_EDITION_PROTO2 || edition == descriptorpb.Edition_EDITION_PROTO3 {
+ // these syntax levels can't specify features, so we can short-circuit the search
+ // through the descriptor hierarchy for feature overrides
+ defaults := editions.GetEditionDefaults(edition)
+ return defaults.ProtoReflect().Get(feature) // returns default value if field is not present
+ }
+ val, err := editions.ResolveFeature(element, feature)
+ if err == nil && val.IsValid() {
+ return val
+ }
+ defaults := editions.GetEditionDefaults(edition)
+ return defaults.ProtoReflect().Get(feature)
+}
+
+func isJSONCompliant(d protoreflect.Descriptor) bool {
+ jsonFormat := resolveFeature(d, jsonFormatField)
+ return descriptorpb.FeatureSet_JsonFormat(jsonFormat.Enum()) == descriptorpb.FeatureSet_ALLOW
+}
diff --git a/vendor/github.com/bufbuild/protocompile/linker/doc.go b/vendor/github.com/bufbuild/protocompile/linker/doc.go
new file mode 100644
index 0000000..93e68d9
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/linker/doc.go
@@ -0,0 +1,48 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package linker contains logic and APIs related to linking a protobuf file.
+// The process of linking involves resolving all symbol references to the
+// referenced descriptor. The result of linking is a "rich" descriptor that
+// is more useful than just a descriptor proto since the links allow easy
+// traversal of a protobuf type schema and the relationships between elements.
+//
+// # Files
+//
+// This package uses an augmentation to protoreflect.FileDescriptor instances
+// in the form of the File interface. There are also factory functions for
+// promoting a FileDescriptor into a linker.File. This new interface provides
+// additional methods for resolving symbols in the file.
+//
+// This interface is both the result of linking and an input to the linking
+// process, as all dependencies of a file to be linked must be provided in this
+// form. The actual result of the Link function, a Result, is an even broader
+// interface than File: The linker.Result interface provides even more functions,
+// which are needed for subsequent compilation steps: interpreting options and
+// generating source code info.
+//
+// # Symbols
+//
+// This package has a type named Symbols which represents a symbol table. This
+// is usually an internal detail when linking, but callers can provide an
+// instance so that symbols across multiple compile/link operations all have
+// access to the same table. This allows for detection of cases where multiple
+// files try to declare elements with conflicting fully-qualified names or
+// declare extensions for a particular extendable message that have conflicting
+// tag numbers.
+//
+// The calling code simply uses the same Symbols instance across all compile
+// operations and if any files processed have such conflicts, they can be
+// reported.
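+//
+// # Example
+//
+// The following is an illustrative sketch (not taken from upstream docs) of
+// promoting an already-built descriptor into a linker.File and resolving a
+// symbol through it; error handling is elided:
+//
+//	fd, _ := protoregistry.GlobalFiles.FindFileByPath("google/protobuf/descriptor.proto")
+//	f, err := linker.NewFileRecursive(fd)
+//	if err != nil {
+//		// handle error
+//	}
+//	d := f.FindDescriptorByName("google.protobuf.FileOptions")
+//	_ = d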
+package linker
diff --git a/vendor/github.com/bufbuild/protocompile/linker/files.go b/vendor/github.com/bufbuild/protocompile/linker/files.go
new file mode 100644
index 0000000..51ce3a8
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/linker/files.go
@@ -0,0 +1,366 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package linker
+
+import (
+ "fmt"
+ "strings"
+
+ "google.golang.org/protobuf/reflect/protodesc"
+ "google.golang.org/protobuf/reflect/protoreflect"
+ "google.golang.org/protobuf/reflect/protoregistry"
+ "google.golang.org/protobuf/types/dynamicpb"
+
+ "github.com/bufbuild/protocompile/walk"
+)
+
+// File is like a super-powered protoreflect.FileDescriptor. It includes helpful
+// methods for looking up elements in the descriptor and can be used to create a
+// resolver for the entire transitive closure of the file's dependencies. (See
+// ResolverFromFile.)
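+//
+// An illustrative sketch of the lookup methods (the names and import path used
+// here are hypothetical):
+//
+//	d := f.FindDescriptorByName("foo.bar.Baz")             // nil if not defined in f
+//	imp := f.FindImportByPath("google/protobuf/any.proto") // nil if f doesn't import it
+//	ext := f.FindExtensionByNumber("foo.bar.Baz", 100)     // nil if f defines no such extension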
+type File interface {
+ protoreflect.FileDescriptor
+ // FindDescriptorByName returns the given named element that is defined in
+ // this file. If no such element exists, nil is returned.
+ FindDescriptorByName(name protoreflect.FullName) protoreflect.Descriptor
+ // FindImportByPath returns the File corresponding to the given import path.
+ // If this file does not import the given path, nil is returned.
+ FindImportByPath(path string) File
+ // FindExtensionByNumber returns the extension descriptor for the given tag
+ // that extends the given message name. If no such extension is defined in this
+ // file, nil is returned.
+ FindExtensionByNumber(message protoreflect.FullName, tag protoreflect.FieldNumber) protoreflect.ExtensionTypeDescriptor
+}
+
+// NewFile converts a protoreflect.FileDescriptor to a File. The given deps must
+// contain all dependencies/imports of f. Also see NewFileRecursive.
+func NewFile(f protoreflect.FileDescriptor, deps Files) (File, error) {
+ if asFile, ok := f.(File); ok {
+ return asFile, nil
+ }
+ checkedDeps := make(Files, f.Imports().Len())
+ for i := 0; i < f.Imports().Len(); i++ {
+ imprt := f.Imports().Get(i)
+ dep := deps.FindFileByPath(imprt.Path())
+ if dep == nil {
+ return nil, fmt.Errorf("cannot create File for %q: missing dependency for %q", f.Path(), imprt.Path())
+ }
+ checkedDeps[i] = dep
+ }
+ return newFile(f, checkedDeps)
+}
+
+func newFile(f protoreflect.FileDescriptor, deps Files) (File, error) {
+ descs := map[protoreflect.FullName]protoreflect.Descriptor{}
+ err := walk.Descriptors(f, func(d protoreflect.Descriptor) error {
+ if _, ok := descs[d.FullName()]; ok {
+ return fmt.Errorf("file %q contains multiple elements with the name %s", f.Path(), d.FullName())
+ }
+ descs[d.FullName()] = d
+ return nil
+ })
+ if err != nil {
+ return nil, err
+ }
+ return &file{
+ FileDescriptor: f,
+ descs: descs,
+ deps: deps,
+ }, nil
+}
+
+// NewFileRecursive recursively converts a protoreflect.FileDescriptor to a File.
+// If f has any dependencies/imports, they are converted, too, including any and
+// all transitive dependencies.
+//
+// If f already implements File, it is returned unchanged.
+func NewFileRecursive(f protoreflect.FileDescriptor) (File, error) {
+ if asFile, ok := f.(File); ok {
+ return asFile, nil
+ }
+ return newFileRecursive(f, map[protoreflect.FileDescriptor]File{})
+}
+
+func newFileRecursive(fd protoreflect.FileDescriptor, seen map[protoreflect.FileDescriptor]File) (File, error) {
+ if res, ok := seen[fd]; ok {
+ if res == nil {
+ return nil, fmt.Errorf("import cycle encountered: file %s transitively imports itself", fd.Path())
+ }
+ return res, nil
+ }
+
+ if f, ok := fd.(File); ok {
+ seen[fd] = f
+ return f, nil
+ }
+
+ seen[fd] = nil
+ deps := make([]File, fd.Imports().Len())
+ for i := 0; i < fd.Imports().Len(); i++ {
+ imprt := fd.Imports().Get(i)
+ dep, err := newFileRecursive(imprt, seen)
+ if err != nil {
+ return nil, err
+ }
+ deps[i] = dep
+ }
+
+ f, err := newFile(fd, deps)
+ if err != nil {
+ return nil, err
+ }
+ seen[fd] = f
+ return f, nil
+}
+
+type file struct {
+ protoreflect.FileDescriptor
+ descs map[protoreflect.FullName]protoreflect.Descriptor
+ deps Files
+}
+
+var _ File = (*file)(nil)
+
+func (f *file) FindDescriptorByName(name protoreflect.FullName) protoreflect.Descriptor {
+ return f.descs[name]
+}
+
+func (f *file) FindImportByPath(path string) File {
+ return f.deps.FindFileByPath(path)
+}
+
+func (f *file) FindExtensionByNumber(msg protoreflect.FullName, tag protoreflect.FieldNumber) protoreflect.ExtensionTypeDescriptor {
+ return findExtension(f, msg, tag)
+}
+
+func (f *file) Unwrap() protoreflect.FileDescriptor {
+ return f.FileDescriptor
+}
+
+// Files represents a set of protobuf files. It is a slice of File values, but
+// also provides a method for easily looking up files by path and name.
+type Files []File
+
+// FindFileByPath finds the file in f that has the given path. If f contains
+// no such file, nil is returned.
+func (f Files) FindFileByPath(path string) File {
+ for _, file := range f {
+ if file.Path() == path {
+ return file
+ }
+ }
+ return nil
+}
+
+// AsResolver returns a Resolver that uses f as the source of descriptors. If
+// a given query cannot be answered with the files in f, the query will fail
+// with a protoregistry.NotFound error. The implementation just delegates calls
+// to each file until a result is found.
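+//
+// For example (a sketch; fs is a hypothetical Files value), the returned
+// Resolver satisfies the standard resolver interfaces, so it can be used with
+// other protobuf APIs:
+//
+//	unmarshalOpts := proto.UnmarshalOptions{Resolver: fs.AsResolver()}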
+//
+// Also see ResolverFromFile.
+func (f Files) AsResolver() Resolver {
+ return filesResolver(f)
+}
+
+// Resolver is an interface that can resolve various kinds of queries about
+// descriptors. It satisfies the resolver interfaces defined in protodesc
+// and protoregistry packages.
+type Resolver interface {
+ protodesc.Resolver
+ protoregistry.MessageTypeResolver
+ protoregistry.ExtensionTypeResolver
+}
+
+// ResolverFromFile returns a Resolver that can resolve any element that is
+// visible to the given file. It will search the given file, its imports, and
+// any transitive public imports.
+//
+// Note that this function does not compute any additional indexes for efficient
+// search, so queries generally take linear time, O(n) where n is the number of
+// files whose elements are visible to the given file. Queries for an extension
+// by number have runtime complexity that is linear with the number of messages
+// and extensions defined across those files.
+func ResolverFromFile(f File) Resolver {
+ return fileResolver{f: f}
+}
+
+type fileResolver struct {
+ f File
+}
+
+func (r fileResolver) FindFileByPath(path string) (protoreflect.FileDescriptor, error) {
+ return resolveInFile(r.f, false, nil, func(f File) (protoreflect.FileDescriptor, error) {
+ if f.Path() == path {
+ return f, nil
+ }
+ return nil, protoregistry.NotFound
+ })
+}
+
+func (r fileResolver) FindDescriptorByName(name protoreflect.FullName) (protoreflect.Descriptor, error) {
+ return resolveInFile(r.f, false, nil, func(f File) (protoreflect.Descriptor, error) {
+ if d := f.FindDescriptorByName(name); d != nil {
+ return d, nil
+ }
+ return nil, protoregistry.NotFound
+ })
+}
+
+func (r fileResolver) FindMessageByName(message protoreflect.FullName) (protoreflect.MessageType, error) {
+ return resolveInFile(r.f, false, nil, func(f File) (protoreflect.MessageType, error) {
+ d := f.FindDescriptorByName(message)
+ if d != nil {
+ md, ok := d.(protoreflect.MessageDescriptor)
+ if !ok {
+ return nil, fmt.Errorf("%q is %s, not a message", message, descriptorTypeWithArticle(d))
+ }
+ return dynamicpb.NewMessageType(md), nil
+ }
+ return nil, protoregistry.NotFound
+ })
+}
+
+func (r fileResolver) FindMessageByURL(url string) (protoreflect.MessageType, error) {
+ fullName := messageNameFromURL(url)
+ return r.FindMessageByName(protoreflect.FullName(fullName))
+}
+
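+// messageNameFromURL extracts the fully-qualified message name from a type URL
+// by taking everything after the last '/'. For example, both
+// "type.googleapis.com/foo.bar.Baz" and "foo.bar.Baz" yield "foo.bar.Baz".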
+func messageNameFromURL(url string) string {
+ lastSlash := strings.LastIndexByte(url, '/')
+ return url[lastSlash+1:]
+}
+
+func (r fileResolver) FindExtensionByName(field protoreflect.FullName) (protoreflect.ExtensionType, error) {
+ return resolveInFile(r.f, false, nil, func(f File) (protoreflect.ExtensionType, error) {
+ d := f.FindDescriptorByName(field)
+ if d != nil {
+ fld, ok := d.(protoreflect.FieldDescriptor)
+ if !ok || !fld.IsExtension() {
+ return nil, fmt.Errorf("%q is %s, not an extension", field, descriptorTypeWithArticle(d))
+ }
+ if extd, ok := fld.(protoreflect.ExtensionTypeDescriptor); ok {
+ return extd.Type(), nil
+ }
+ return dynamicpb.NewExtensionType(fld), nil
+ }
+ return nil, protoregistry.NotFound
+ })
+}
+
+func (r fileResolver) FindExtensionByNumber(message protoreflect.FullName, field protoreflect.FieldNumber) (protoreflect.ExtensionType, error) {
+ return resolveInFile(r.f, false, nil, func(f File) (protoreflect.ExtensionType, error) {
+ ext := findExtension(f, message, field)
+ if ext != nil {
+ return ext.Type(), nil
+ }
+ return nil, protoregistry.NotFound
+ })
+}
+
+type filesResolver []File
+
+func (r filesResolver) FindFileByPath(path string) (protoreflect.FileDescriptor, error) {
+ for _, f := range r {
+ if f.Path() == path {
+ return f, nil
+ }
+ }
+ return nil, protoregistry.NotFound
+}
+
+func (r filesResolver) FindDescriptorByName(name protoreflect.FullName) (protoreflect.Descriptor, error) {
+ for _, f := range r {
+ result := f.FindDescriptorByName(name)
+ if result != nil {
+ return result, nil
+ }
+ }
+ return nil, protoregistry.NotFound
+}
+
+func (r filesResolver) FindMessageByName(message protoreflect.FullName) (protoreflect.MessageType, error) {
+ for _, f := range r {
+ d := f.FindDescriptorByName(message)
+ if d != nil {
+ if md, ok := d.(protoreflect.MessageDescriptor); ok {
+ return dynamicpb.NewMessageType(md), nil
+ }
+ return nil, protoregistry.NotFound
+ }
+ }
+ return nil, protoregistry.NotFound
+}
+
+func (r filesResolver) FindMessageByURL(url string) (protoreflect.MessageType, error) {
+ name := messageNameFromURL(url)
+ return r.FindMessageByName(protoreflect.FullName(name))
+}
+
+func (r filesResolver) FindExtensionByName(field protoreflect.FullName) (protoreflect.ExtensionType, error) {
+ for _, f := range r {
+ d := f.FindDescriptorByName(field)
+ if d != nil {
+ if extd, ok := d.(protoreflect.ExtensionTypeDescriptor); ok {
+ return extd.Type(), nil
+ }
+ if fld, ok := d.(protoreflect.FieldDescriptor); ok && fld.IsExtension() {
+ return dynamicpb.NewExtensionType(fld), nil
+ }
+ return nil, protoregistry.NotFound
+ }
+ }
+ return nil, protoregistry.NotFound
+}
+
+func (r filesResolver) FindExtensionByNumber(message protoreflect.FullName, field protoreflect.FieldNumber) (protoreflect.ExtensionType, error) {
+ for _, f := range r {
+ ext := findExtension(f, message, field)
+ if ext != nil {
+ return ext.Type(), nil
+ }
+ }
+ return nil, protoregistry.NotFound
+}
+
+type hasExtensionsAndMessages interface {
+ Messages() protoreflect.MessageDescriptors
+ Extensions() protoreflect.ExtensionDescriptors
+}
+
+func findExtension(d hasExtensionsAndMessages, message protoreflect.FullName, field protoreflect.FieldNumber) protoreflect.ExtensionTypeDescriptor {
+ for i := 0; i < d.Extensions().Len(); i++ {
+ if extType := isExtensionMatch(d.Extensions().Get(i), message, field); extType != nil {
+ return extType
+ }
+ }
+
+ for i := 0; i < d.Messages().Len(); i++ {
+ if extType := findExtension(d.Messages().Get(i), message, field); extType != nil {
+ return extType
+ }
+ }
+
+ return nil // could not be found
+}
+
+func isExtensionMatch(ext protoreflect.ExtensionDescriptor, message protoreflect.FullName, field protoreflect.FieldNumber) protoreflect.ExtensionTypeDescriptor {
+ if ext.Number() != field || ext.ContainingMessage().FullName() != message {
+ return nil
+ }
+ if extType, ok := ext.(protoreflect.ExtensionTypeDescriptor); ok {
+ return extType
+ }
+ return dynamicpb.NewExtensionType(ext).TypeDescriptor()
+}
diff --git a/vendor/github.com/bufbuild/protocompile/linker/linker.go b/vendor/github.com/bufbuild/protocompile/linker/linker.go
new file mode 100644
index 0000000..6d87883
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/linker/linker.go
@@ -0,0 +1,153 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package linker
+
+import (
+ "fmt"
+
+ "google.golang.org/protobuf/reflect/protoreflect"
+
+ "github.com/bufbuild/protocompile/ast"
+ "github.com/bufbuild/protocompile/parser"
+ "github.com/bufbuild/protocompile/reporter"
+)
+
+// Link handles linking a parsed descriptor proto into a fully-linked descriptor.
+// If the given parser.Result has imports, they must all be present in the given
+// dependencies.
+//
+// The symbols value is optional and may be nil. If it is not nil, it must be the
+// same instance used to create and link all of the given result's dependencies
+// (or otherwise already have all dependencies imported). Otherwise, linking may
+// fail with spurious errors resolving symbols.
+//
+// The handler value is used to report any link errors. If any such errors are
+// reported, this function returns a non-nil error. The Result value returned
+// also implements protoreflect.FileDescriptor.
+//
+// Note that linking does NOT interpret options. So options messages in the
+// returned value have all values stored in UninterpretedOptions fields.
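+//
+// A typical call looks roughly like the following sketch (the parsed result,
+// its already-linked dependencies, and the handler are produced by the parser
+// and reporter packages and by prior Link calls, not shown here):
+//
+//	var parsed parser.Result      // from the parser package
+//	var deps linker.Files         // already-linked dependencies of parsed
+//	var handler *reporter.Handler // from the reporter package
+//	linked, err := linker.Link(parsed, deps, &linker.Symbols{}, handler)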
+func Link(parsed parser.Result, dependencies Files, symbols *Symbols, handler *reporter.Handler) (Result, error) {
+ if symbols == nil {
+ symbols = &Symbols{}
+ }
+ prefix := parsed.FileDescriptorProto().GetPackage()
+ if prefix != "" {
+ prefix += "."
+ }
+
+ for _, imp := range parsed.FileDescriptorProto().Dependency {
+ dep := dependencies.FindFileByPath(imp)
+ if dep == nil {
+ return nil, fmt.Errorf("dependencies is missing import %q", imp)
+ }
+ if err := symbols.Import(dep, handler); err != nil {
+ return nil, err
+ }
+ }
+
+ r := &result{
+ FileDescriptor: noOpFile,
+ Result: parsed,
+ deps: dependencies,
+ descriptors: map[string]protoreflect.Descriptor{},
+ usedImports: map[string]struct{}{},
+ prefix: prefix,
+ optionQualifiedNames: map[ast.IdentValueNode]string{},
+ }
+ // First, we create the hierarchy of descendant descriptors.
+ r.createDescendants()
+
+ // Then we can put all symbols into a single pool, which lets us ensure there
+ // are no duplicate symbols and will also let us resolve and revise all type
+ // references in next step.
+ if err := symbols.importResult(r, handler); err != nil {
+ return nil, err
+ }
+
+ // After we've populated the pool, we can now try to resolve all type
+ // references. All references must be checked for the correct type, any fields
+ // with enum types must be corrected (we parse them as if they were message
+ // references because we can't tell message from enum until link time), and
+ // references will be re-written to be fully-qualified references (i.e. they
+ // start with a dot ".").
+ if err := r.resolveReferences(handler, symbols); err != nil {
+ return nil, err
+ }
+
+ return r, handler.Error()
+}
+
+// Result is the result of linking. This is a protoreflect.FileDescriptor, but
+// with some additional methods for exposing additional information, such as
+// accessing the input AST or the underlying file descriptor proto.
+//
+// It also provides Resolve* methods, for looking up enums, messages, and
+// extensions that are available to the protobuf source file this result
+// represents. An element is "available" if it meets any of the following
+// criteria:
+// 1. The element is defined in this file itself.
+// 2. The element is defined in a file that is directly imported by this file.
+// 3. The element is "available" to a file that is directly imported by this
+// file as a public import.
+//
+// Other elements, even if in the transitive closure of this file, are not
+// available and thus won't be returned by these methods.
+type Result interface {
+ File
+ parser.Result
+
+ // ResolveMessageLiteralExtensionName returns the fully-qualified name for an
+ // identifier that is used as an extension field name inside a message literal.
+ ResolveMessageLiteralExtensionName(ast.IdentValueNode) string
+ // ValidateOptions runs some validation checks on the descriptor that can only
+ // be done after options are interpreted. Any errors or warnings encountered
+ // will be reported via the given handler. If any error is reported, this
+ // function returns a non-nil error.
+ ValidateOptions(handler *reporter.Handler, symbols *Symbols) error
+ // CheckForUnusedImports is used to report warnings for unused imports. This
+ // should be called after options have been interpreted. Otherwise, the logic
+ // could incorrectly report imports as unused if the only symbol used were a
+ // custom option.
+ CheckForUnusedImports(handler *reporter.Handler)
+ // PopulateSourceCodeInfo is used to populate source code info for the file
+ // descriptor. This step requires that the underlying descriptor proto have
+ // its `source_code_info` field populated. This is typically a post-process
+ // step separate from linking, because computing source code info requires
+ // interpreting options (which is done after linking).
+ PopulateSourceCodeInfo()
+
+ // RemoveAST drops the AST information from this result.
+ RemoveAST()
+}
+
+// ErrorUnusedImport may be passed to a warning reporter when an unused
+// import is detected. The error the reporter receives will be wrapped
+// with source position that indicates the file and line where the import
+// statement appeared.
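+//
+// A caller's warning reporter can check for this error type, for example
+// (a sketch; this assumes the positional wrapper supports errors.As unwrapping):
+//
+//	var unused linker.ErrorUnusedImport
+//	if errors.As(err, &unused) {
+//		log.Printf("import %q is unused", unused.UnusedImport())
+//	}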
+type ErrorUnusedImport interface {
+ error
+ UnusedImport() string
+}
+
+type errUnusedImport string
+
+func (e errUnusedImport) Error() string {
+ return fmt.Sprintf("import %q not used", string(e))
+}
+
+func (e errUnusedImport) UnusedImport() string {
+ return string(e)
+}
diff --git a/vendor/github.com/bufbuild/protocompile/linker/pathkey_no_unsafe.go b/vendor/github.com/bufbuild/protocompile/linker/pathkey_no_unsafe.go
new file mode 100644
index 0000000..e00debc
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/linker/pathkey_no_unsafe.go
@@ -0,0 +1,35 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build appengine || gopherjs || purego
+// +build appengine gopherjs purego
+
+// NB: other environments where unsafe is inappropriate should use "purego" build tag
+// https://github.com/golang/go/issues/23172
+
+package linker
+
+import (
+ "reflect"
+
+ "google.golang.org/protobuf/reflect/protoreflect"
+)
+
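+// pathKey converts a source path (a slice, which is not comparable and so
+// cannot be used as a map key) into a value whose dynamic type is an array of
+// the same length, which is comparable and can serve as a map key.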
+func pathKey(p protoreflect.SourcePath) interface{} {
+ rv := reflect.ValueOf(p)
+ arrayType := reflect.ArrayOf(rv.Len(), rv.Type().Elem())
+ array := reflect.New(arrayType).Elem()
+ reflect.Copy(array, rv)
+ return array.Interface()
+}
diff --git a/vendor/github.com/bufbuild/protocompile/linker/pathkey_unsafe.go b/vendor/github.com/bufbuild/protocompile/linker/pathkey_unsafe.go
new file mode 100644
index 0000000..aa33e74
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/linker/pathkey_unsafe.go
@@ -0,0 +1,40 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build !appengine && !gopherjs && !purego
+// +build !appengine,!gopherjs,!purego
+
+// NB: other environments where unsafe is inappropriate should use "purego" build tag
+// https://github.com/golang/go/issues/23172
+
+package linker
+
+import (
+ "reflect"
+ "unsafe"
+
+ "google.golang.org/protobuf/reflect/protoreflect"
+)
+
+var pathElementType = reflect.TypeOf(protoreflect.SourcePath{}).Elem()
+
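+// pathKey converts a source path into a comparable array value usable as a map
+// key, like the purego variant, but avoids copying by reinterpreting the
+// slice's backing array via unsafe.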
+func pathKey(p protoreflect.SourcePath) interface{} {
+ if p == nil {
+ // Reflection code below doesn't work with nil slices
+ return [0]int32{}
+ }
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(reflect.ValueOf(&p).Pointer()))
+ array := reflect.NewAt(reflect.ArrayOf(hdr.Len, pathElementType), unsafe.Pointer(hdr.Data))
+ return array.Elem().Interface()
+}
diff --git a/vendor/github.com/bufbuild/protocompile/linker/pool.go b/vendor/github.com/bufbuild/protocompile/linker/pool.go
new file mode 100644
index 0000000..3609edc
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/linker/pool.go
@@ -0,0 +1,131 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package linker
+
+import "google.golang.org/protobuf/types/descriptorpb"
+
+// allocPool helps allocate descriptor instances. Instead of allocating
+// them one at a time, we allocate a pool -- a large, flat slice to hold
+// all descriptors of a particular kind for a file. We then use capacity
+// in the pool when we need space for individual descriptors.
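+//
+// For example, a message with three fields calls getFields(3) and receives a
+// sub-slice of the single pre-allocated fields slice; the pool then advances
+// past those three elements so subsequent callers get fresh, contiguous space.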
+type allocPool struct {
+ numMessages int
+ numFields int
+ numOneofs int
+ numEnums int
+ numEnumValues int
+ numExtensions int
+ numServices int
+ numMethods int
+
+ messages []msgDescriptor
+ fields []fldDescriptor
+ oneofs []oneofDescriptor
+ enums []enumDescriptor
+ enumVals []enValDescriptor
+ extensions []extTypeDescriptor
+ services []svcDescriptor
+ methods []mtdDescriptor
+}
+
+func newAllocPool(file *descriptorpb.FileDescriptorProto) *allocPool {
+ var pool allocPool
+ pool.countElements(file)
+ pool.messages = make([]msgDescriptor, pool.numMessages)
+ pool.fields = make([]fldDescriptor, pool.numFields)
+ pool.oneofs = make([]oneofDescriptor, pool.numOneofs)
+ pool.enums = make([]enumDescriptor, pool.numEnums)
+ pool.enumVals = make([]enValDescriptor, pool.numEnumValues)
+ pool.extensions = make([]extTypeDescriptor, pool.numExtensions)
+ pool.services = make([]svcDescriptor, pool.numServices)
+ pool.methods = make([]mtdDescriptor, pool.numMethods)
+ return &pool
+}
+
+func (p *allocPool) getMessages(count int) []msgDescriptor {
+ allocated := p.messages[:count]
+ p.messages = p.messages[count:]
+ return allocated
+}
+
+func (p *allocPool) getFields(count int) []fldDescriptor {
+ allocated := p.fields[:count]
+ p.fields = p.fields[count:]
+ return allocated
+}
+
+func (p *allocPool) getOneofs(count int) []oneofDescriptor {
+ allocated := p.oneofs[:count]
+ p.oneofs = p.oneofs[count:]
+ return allocated
+}
+
+func (p *allocPool) getEnums(count int) []enumDescriptor {
+ allocated := p.enums[:count]
+ p.enums = p.enums[count:]
+ return allocated
+}
+
+func (p *allocPool) getEnumValues(count int) []enValDescriptor {
+ allocated := p.enumVals[:count]
+ p.enumVals = p.enumVals[count:]
+ return allocated
+}
+
+func (p *allocPool) getExtensions(count int) []extTypeDescriptor {
+ allocated := p.extensions[:count]
+ p.extensions = p.extensions[count:]
+ return allocated
+}
+
+func (p *allocPool) getServices(count int) []svcDescriptor {
+ allocated := p.services[:count]
+ p.services = p.services[count:]
+ return allocated
+}
+
+func (p *allocPool) getMethods(count int) []mtdDescriptor {
+ allocated := p.methods[:count]
+ p.methods = p.methods[count:]
+ return allocated
+}
+
+func (p *allocPool) countElements(file *descriptorpb.FileDescriptorProto) {
+ p.countElementsInMessages(file.MessageType)
+ p.countElementsInEnums(file.EnumType)
+ p.numExtensions += len(file.Extension)
+ p.numServices += len(file.Service)
+ for _, svc := range file.Service {
+ p.numMethods += len(svc.Method)
+ }
+}
+
+func (p *allocPool) countElementsInMessages(msgs []*descriptorpb.DescriptorProto) {
+ p.numMessages += len(msgs)
+ for _, msg := range msgs {
+ p.numFields += len(msg.Field)
+ p.numOneofs += len(msg.OneofDecl)
+ p.countElementsInMessages(msg.NestedType)
+ p.countElementsInEnums(msg.EnumType)
+ p.numExtensions += len(msg.Extension)
+ }
+}
+
+func (p *allocPool) countElementsInEnums(enums []*descriptorpb.EnumDescriptorProto) {
+ p.numEnums += len(enums)
+ for _, enum := range enums {
+ p.numEnumValues += len(enum.Value)
+ }
+}
diff --git a/vendor/github.com/bufbuild/protocompile/linker/resolve.go b/vendor/github.com/bufbuild/protocompile/linker/resolve.go
new file mode 100644
index 0000000..cf30148
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/linker/resolve.go
@@ -0,0 +1,835 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package linker
+
+import (
+ "errors"
+ "fmt"
+ "strings"
+
+ "google.golang.org/protobuf/proto"
+ "google.golang.org/protobuf/reflect/protoreflect"
+ "google.golang.org/protobuf/reflect/protoregistry"
+ "google.golang.org/protobuf/types/descriptorpb"
+
+ "github.com/bufbuild/protocompile/ast"
+ "github.com/bufbuild/protocompile/internal"
+ "github.com/bufbuild/protocompile/reporter"
+ "github.com/bufbuild/protocompile/walk"
+)
+
+func (r *result) ResolveMessageLiteralExtensionName(node ast.IdentValueNode) string {
+ return r.optionQualifiedNames[node]
+}
+
+func (r *result) resolveElement(name protoreflect.FullName, checkedCache []string) protoreflect.Descriptor {
+ if len(name) > 0 && name[0] == '.' {
+ name = name[1:]
+ }
+ res, _ := resolveInFile(r, false, checkedCache[:0], func(f File) (protoreflect.Descriptor, error) {
+ d := resolveElementInFile(name, f)
+ if d != nil {
+ return d, nil
+ }
+ return nil, protoregistry.NotFound
+ })
+ return res
+}
+
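+// resolveInFile invokes fn on f and then, depth-first, on f's imports until a
+// result is found. The checked slice prevents re-visiting the same file. Below
+// the top level, only public imports are searched, which mirrors protobuf
+// visibility rules; when a result is found through a direct, non-public import,
+// that import is marked as used.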
+func resolveInFile[T any](f File, publicImportsOnly bool, checked []string, fn func(File) (T, error)) (T, error) {
+ var zero T
+ path := f.Path()
+ for _, str := range checked {
+ if str == path {
+ // already checked
+ return zero, protoregistry.NotFound
+ }
+ }
+ checked = append(checked, path)
+
+ res, err := fn(f)
+ if err == nil {
+ // found it
+ return res, nil
+ }
+ if !errors.Is(err, protoregistry.NotFound) {
+ return zero, err
+ }
+
+ imports := f.Imports()
+ for i, l := 0, imports.Len(); i < l; i++ {
+ imp := imports.Get(i)
+ if publicImportsOnly && !imp.IsPublic {
+ continue
+ }
+ res, err := resolveInFile(f.FindImportByPath(imp.Path()), true, checked, fn)
+ if errors.Is(err, protoregistry.NotFound) {
+ continue
+ }
+ if err != nil {
+ return zero, err
+ }
+ if !imp.IsPublic {
+ if r, ok := f.(*result); ok {
+ r.markUsed(imp.Path())
+ }
+ }
+ return res, nil
+ }
+ return zero, err
+}
+
+func (r *result) markUsed(importPath string) {
+ r.usedImports[importPath] = struct{}{}
+}
+
+func (r *result) CheckForUnusedImports(handler *reporter.Handler) {
+ fd := r.FileDescriptorProto()
+ file, _ := r.FileNode().(*ast.FileNode)
+ for i, dep := range fd.Dependency {
+ if _, ok := r.usedImports[dep]; !ok {
+ isPublic := false
+ // it's fine if it's a public import
+ for _, j := range fd.PublicDependency {
+ if i == int(j) {
+ isPublic = true
+ break
+ }
+ }
+ if isPublic {
+ continue
+ }
+ span := ast.UnknownSpan(fd.GetName())
+ if file != nil {
+ for _, decl := range file.Decls {
+ imp, ok := decl.(*ast.ImportNode)
+ if ok && imp.Name.AsString() == dep {
+ span = file.NodeInfo(imp)
+ }
+ }
+ }
+ handler.HandleWarningWithPos(span, errUnusedImport(dep))
+ }
+ }
+}
+
+func descriptorTypeWithArticle(d protoreflect.Descriptor) string {
+ switch d := d.(type) {
+ case protoreflect.MessageDescriptor:
+ return "a message"
+ case protoreflect.FieldDescriptor:
+ if d.IsExtension() {
+ return "an extension"
+ }
+ return "a field"
+ case protoreflect.OneofDescriptor:
+ return "a oneof"
+ case protoreflect.EnumDescriptor:
+ return "an enum"
+ case protoreflect.EnumValueDescriptor:
+ return "an enum value"
+ case protoreflect.ServiceDescriptor:
+ return "a service"
+ case protoreflect.MethodDescriptor:
+ return "a method"
+ case protoreflect.FileDescriptor:
+ return "a file"
+ default:
+ // shouldn't be possible
+ return fmt.Sprintf("a %T", d)
+ }
+}
+
+func (r *result) createDescendants() {
+ fd := r.FileDescriptorProto()
+ pool := newAllocPool(fd)
+ prefix := ""
+ if fd.GetPackage() != "" {
+ prefix = fd.GetPackage() + "."
+ }
+ r.imports = r.createImports()
+ r.messages = r.createMessages(prefix, r, fd.MessageType, pool)
+ r.enums = r.createEnums(prefix, r, fd.EnumType, pool)
+ r.extensions = r.createExtensions(prefix, r, fd.Extension, pool)
+ r.services = r.createServices(prefix, fd.Service, pool)
+}
+
+func (r *result) resolveReferences(handler *reporter.Handler, s *Symbols) error {
+ fd := r.FileDescriptorProto()
+ checkedCache := make([]string, 0, 16)
+ scopes := []scope{fileScope(r, checkedCache)}
+ if fd.Options != nil {
+ if err := r.resolveOptions(handler, "file", protoreflect.FullName(fd.GetName()), fd.Options.UninterpretedOption, scopes, checkedCache); err != nil {
+ return err
+ }
+ }
+
+ // This is to de-dupe extendee-related error messages when the same
+ // extendee is referenced from multiple extension field definitions.
+ // We leave it nil if there's no AST.
+ var extendeeNodes map[ast.Node]struct{}
+
+ return walk.DescriptorsEnterAndExit(r,
+ func(d protoreflect.Descriptor) error {
+ fqn := d.FullName()
+ switch d := d.(type) {
+ case *msgDescriptor:
+ // Strangely, when protoc resolves extension names, it uses the *enclosing* scope
+ // instead of the message's scope. So if the message contains an extension named "i",
+ // an option cannot refer to it as simply "i" but must qualify it (at a minimum "Msg.i").
+ // So we don't add this message's scope to our scopes slice until *after* we do options.
+ if d.proto.Options != nil {
+ if err := r.resolveOptions(handler, "message", fqn, d.proto.Options.UninterpretedOption, scopes, checkedCache); err != nil {
+ return err
+ }
+ }
+ scopes = append(scopes, messageScope(r, fqn)) // push new scope on entry
+ // walk only visits descriptors, so we need to loop over extension ranges ourselves
+ for _, er := range d.proto.ExtensionRange {
+ if er.Options != nil {
+ erName := protoreflect.FullName(fmt.Sprintf("%s:%d-%d", fqn, er.GetStart(), er.GetEnd()-1))
+ if err := r.resolveOptions(handler, "extension range", erName, er.Options.UninterpretedOption, scopes, checkedCache); err != nil {
+ return err
+ }
+ }
+ }
+ case *extTypeDescriptor:
+ if d.field.proto.Options != nil {
+ if err := r.resolveOptions(handler, "extension", fqn, d.field.proto.Options.UninterpretedOption, scopes, checkedCache); err != nil {
+ return err
+ }
+ }
+ if extendeeNodes == nil && r.AST() != nil {
+ extendeeNodes = map[ast.Node]struct{}{}
+ }
+ if err := resolveFieldTypes(&d.field, handler, extendeeNodes, s, scopes, checkedCache); err != nil {
+ return err
+ }
+ if r.Syntax() == protoreflect.Proto3 && !allowedProto3Extendee(d.field.proto.GetExtendee()) {
+ file := r.FileNode()
+ node := r.FieldNode(d.field.proto).FieldExtendee()
+ if err := handler.HandleErrorf(file.NodeInfo(node), "extend blocks in proto3 can only be used to define custom options"); err != nil {
+ return err
+ }
+ }
+ case *fldDescriptor:
+ if d.proto.Options != nil {
+ if err := r.resolveOptions(handler, "field", fqn, d.proto.Options.UninterpretedOption, scopes, checkedCache); err != nil {
+ return err
+ }
+ }
+ if err := resolveFieldTypes(d, handler, nil, s, scopes, checkedCache); err != nil {
+ return err
+ }
+ case *oneofDescriptor:
+ if d.proto.Options != nil {
+ if err := r.resolveOptions(handler, "oneof", fqn, d.proto.Options.UninterpretedOption, scopes, checkedCache); err != nil {
+ return err
+ }
+ }
+ case *enumDescriptor:
+ if d.proto.Options != nil {
+ if err := r.resolveOptions(handler, "enum", fqn, d.proto.Options.UninterpretedOption, scopes, checkedCache); err != nil {
+ return err
+ }
+ }
+ case *enValDescriptor:
+ if d.proto.Options != nil {
+ if err := r.resolveOptions(handler, "enum value", fqn, d.proto.Options.UninterpretedOption, scopes, checkedCache); err != nil {
+ return err
+ }
+ }
+ case *svcDescriptor:
+ if d.proto.Options != nil {
+ if err := r.resolveOptions(handler, "service", fqn, d.proto.Options.UninterpretedOption, scopes, checkedCache); err != nil {
+ return err
+ }
+ }
+ // not a message, but same scoping rules for nested elements as if it were
+ scopes = append(scopes, messageScope(r, fqn)) // push new scope on entry
+ case *mtdDescriptor:
+ if d.proto.Options != nil {
+ if err := r.resolveOptions(handler, "method", fqn, d.proto.Options.UninterpretedOption, scopes, checkedCache); err != nil {
+ return err
+ }
+ }
+ if err := resolveMethodTypes(d, handler, scopes, checkedCache); err != nil {
+ return err
+ }
+ }
+ return nil
+ },
+ func(d protoreflect.Descriptor) error {
+ switch d.(type) {
+ case protoreflect.MessageDescriptor, protoreflect.ServiceDescriptor:
+ // pop message scope on exit
+ scopes = scopes[:len(scopes)-1]
+ }
+ return nil
+ })
+}
+
+var allowedProto3Extendees = map[string]struct{}{
+ ".google.protobuf.FileOptions": {},
+ ".google.protobuf.MessageOptions": {},
+ ".google.protobuf.FieldOptions": {},
+ ".google.protobuf.OneofOptions": {},
+ ".google.protobuf.ExtensionRangeOptions": {},
+ ".google.protobuf.EnumOptions": {},
+ ".google.protobuf.EnumValueOptions": {},
+ ".google.protobuf.ServiceOptions": {},
+ ".google.protobuf.MethodOptions": {},
+}
+
+func allowedProto3Extendee(n string) bool {
+ if n == "" {
+ // not an extension, allowed
+ return true
+ }
+ _, ok := allowedProto3Extendees[n]
+ return ok
+}
+
+func resolveFieldTypes(f *fldDescriptor, handler *reporter.Handler, extendees map[ast.Node]struct{}, s *Symbols, scopes []scope, checkedCache []string) error {
+ r := f.file
+ fld := f.proto
+ file := r.FileNode()
+ node := r.FieldNode(fld)
+ kind := "field"
+ if fld.GetExtendee() != "" {
+ kind = "extension"
+ var alreadyReported bool
+ if extendees != nil {
+ _, alreadyReported = extendees[node.FieldExtendee()]
+ if !alreadyReported {
+ extendees[node.FieldExtendee()] = struct{}{}
+ }
+ }
+ dsc := r.resolve(fld.GetExtendee(), false, scopes, checkedCache)
+ if dsc == nil {
+ if alreadyReported {
+ return nil
+ }
+ var extendeePrefix string
+ if extendees == nil {
+ extendeePrefix = kind + " " + f.fqn + ": "
+ }
+ return handler.HandleErrorf(file.NodeInfo(node.FieldExtendee()), "%sunknown extendee type %s", extendeePrefix, fld.GetExtendee())
+ }
+ if isSentinelDescriptor(dsc) {
+ if alreadyReported {
+ return nil
+ }
+ var extendeePrefix string
+ if extendees == nil {
+ extendeePrefix = kind + " " + f.fqn + ": "
+ }
+ return handler.HandleErrorf(file.NodeInfo(node.FieldExtendee()), "%sunknown extendee type %s; resolved to %s which is not defined; consider using a leading dot", extendeePrefix, fld.GetExtendee(), dsc.FullName())
+ }
+ extd, ok := dsc.(protoreflect.MessageDescriptor)
+ if !ok {
+ if alreadyReported {
+ return nil
+ }
+ var extendeePrefix string
+ if extendees == nil {
+ extendeePrefix = kind + " " + f.fqn + ": "
+ }
+ return handler.HandleErrorf(file.NodeInfo(node.FieldExtendee()), "%sextendee is invalid: %s is %s, not a message", extendeePrefix, dsc.FullName(), descriptorTypeWithArticle(dsc))
+ }
+
+ f.extendee = extd
+ extendeeName := "." + string(dsc.FullName())
+ if fld.GetExtendee() != extendeeName {
+ fld.Extendee = proto.String(extendeeName)
+ }
+ // make sure the tag number is in range
+ found := false
+ tag := protoreflect.FieldNumber(fld.GetNumber())
+ for i := 0; i < extd.ExtensionRanges().Len(); i++ {
+ rng := extd.ExtensionRanges().Get(i)
+ if tag >= rng[0] && tag < rng[1] {
+ found = true
+ break
+ }
+ }
+ if !found {
+ if err := handler.HandleErrorf(file.NodeInfo(node.FieldTag()), "%s %s: tag %d is not in valid range for extended type %s", kind, f.fqn, tag, dsc.FullName()); err != nil {
+ return err
+ }
+ } else {
+ // make sure tag is not a duplicate
+ if err := s.AddExtension(packageFor(dsc), dsc.FullName(), tag, file.NodeInfo(node.FieldTag()), handler); err != nil {
+ return err
+ }
+ }
+ } else if f.proto.OneofIndex != nil {
+ parent := f.parent.(protoreflect.MessageDescriptor) //nolint:errcheck
+ index := int(f.proto.GetOneofIndex())
+ f.oneof = parent.Oneofs().Get(index)
+ }
+
+ if fld.GetTypeName() == "" {
+ // scalar type; no further resolution required
+ return nil
+ }
+
+ dsc := r.resolve(fld.GetTypeName(), true, scopes, checkedCache)
+ if dsc == nil {
+ return handler.HandleErrorf(file.NodeInfo(node.FieldType()), "%s %s: unknown type %s", kind, f.fqn, fld.GetTypeName())
+ }
+ if isSentinelDescriptor(dsc) {
+ return handler.HandleErrorf(file.NodeInfo(node.FieldType()), "%s %s: unknown type %s; resolved to %s which is not defined; consider using a leading dot", kind, f.fqn, fld.GetTypeName(), dsc.FullName())
+ }
+ switch dsc := dsc.(type) {
+ case protoreflect.MessageDescriptor:
+ if dsc.IsMapEntry() {
+ isValid := false
+ switch node.(type) {
+ case *ast.MapFieldNode:
+ // We have an AST for this file and can see this field is from a map declaration
+ isValid = true
+ case *ast.NoSourceNode:
+ // We don't have an AST for the file (it came from a provided descriptor). So we
+ // need to validate that it's not an illegal reference. To be valid, the field
+ // must be repeated and the entry type must be nested in the same enclosing
+ // message as the field.
+ isValid = isValidMap(f, dsc)
+ if isValid && f.index > 0 {
+ // also make sure there are no earlier fields that are valid for this map entry
+ flds := f.Parent().(protoreflect.MessageDescriptor).Fields()
+ for i := 0; i < f.index; i++ {
+ if isValidMap(flds.Get(i), dsc) {
+ isValid = false
+ break
+ }
+ }
+ }
+ }
+ if !isValid {
+ return handler.HandleErrorf(file.NodeInfo(node.FieldType()), "%s %s: %s is a synthetic map entry and may not be referenced explicitly", kind, f.fqn, dsc.FullName())
+ }
+ }
+ typeName := "." + string(dsc.FullName())
+ if fld.GetTypeName() != typeName {
+ fld.TypeName = proto.String(typeName)
+ }
+ if fld.Type == nil {
+ // if type was tentatively unset, we now know it's actually a message
+ fld.Type = descriptorpb.FieldDescriptorProto_TYPE_MESSAGE.Enum()
+ } else if fld.GetType() != descriptorpb.FieldDescriptorProto_TYPE_MESSAGE && fld.GetType() != descriptorpb.FieldDescriptorProto_TYPE_GROUP {
+ return handler.HandleErrorf(file.NodeInfo(node.FieldType()), "%s %s: descriptor proto indicates type %v but should be %v", kind, f.fqn, fld.GetType(), descriptorpb.FieldDescriptorProto_TYPE_MESSAGE)
+ }
+ f.msgType = dsc
+ case protoreflect.EnumDescriptor:
+ typeName := "." + string(dsc.FullName())
+ if fld.GetTypeName() != typeName {
+ fld.TypeName = proto.String(typeName)
+ }
+ if fld.Type == nil {
+ // the type was tentatively unset, but now we know it's actually an enum
+ fld.Type = descriptorpb.FieldDescriptorProto_TYPE_ENUM.Enum()
+ } else if fld.GetType() != descriptorpb.FieldDescriptorProto_TYPE_ENUM {
+ return handler.HandleErrorf(file.NodeInfo(node.FieldType()), "%s %s: descriptor proto indicates type %v but should be %v", kind, f.fqn, fld.GetType(), descriptorpb.FieldDescriptorProto_TYPE_ENUM)
+ }
+ f.enumType = dsc
+ default:
+ return handler.HandleErrorf(file.NodeInfo(node.FieldType()), "%s %s: invalid type: %s is %s, not a message or enum", kind, f.fqn, dsc.FullName(), descriptorTypeWithArticle(dsc))
+ }
+ return nil
+}
+
+func packageFor(dsc protoreflect.Descriptor) protoreflect.FullName {
+ if dsc.ParentFile() != nil {
+ return dsc.ParentFile().Package()
+ }
+ // Can't access package? Make a best effort guess.
+ return dsc.FullName().Parent()
+}
+
+func isValidMap(mapField protoreflect.FieldDescriptor, mapEntry protoreflect.MessageDescriptor) bool {
+ return !mapField.IsExtension() &&
+ mapEntry.Parent() == mapField.ContainingMessage() &&
+ mapField.Cardinality() == protoreflect.Repeated &&
+ string(mapEntry.Name()) == internal.InitCap(internal.JSONName(string(mapField.Name())))+"Entry"
+}
+
+func resolveMethodTypes(m *mtdDescriptor, handler *reporter.Handler, scopes []scope, checkedCache []string) error {
+ scope := "method " + m.fqn
+ r := m.file
+ mtd := m.proto
+ file := r.FileNode()
+ node := r.MethodNode(mtd)
+ dsc := r.resolve(mtd.GetInputType(), false, scopes, checkedCache)
+ if dsc == nil {
+ if err := handler.HandleErrorf(file.NodeInfo(node.GetInputType()), "%s: unknown request type %s", scope, mtd.GetInputType()); err != nil {
+ return err
+ }
+ } else if isSentinelDescriptor(dsc) {
+ if err := handler.HandleErrorf(file.NodeInfo(node.GetInputType()), "%s: unknown request type %s; resolved to %s which is not defined; consider using a leading dot", scope, mtd.GetInputType(), dsc.FullName()); err != nil {
+ return err
+ }
+ } else if msg, ok := dsc.(protoreflect.MessageDescriptor); !ok {
+ if err := handler.HandleErrorf(file.NodeInfo(node.GetInputType()), "%s: invalid request type: %s is %s, not a message", scope, dsc.FullName(), descriptorTypeWithArticle(dsc)); err != nil {
+ return err
+ }
+ } else {
+ typeName := "." + string(dsc.FullName())
+ if mtd.GetInputType() != typeName {
+ mtd.InputType = proto.String(typeName)
+ }
+ m.inputType = msg
+ }
+
+ // TODO: make input and output type resolution more DRY
+ dsc = r.resolve(mtd.GetOutputType(), false, scopes, checkedCache)
+ if dsc == nil {
+ if err := handler.HandleErrorf(file.NodeInfo(node.GetOutputType()), "%s: unknown response type %s", scope, mtd.GetOutputType()); err != nil {
+ return err
+ }
+ } else if isSentinelDescriptor(dsc) {
+ if err := handler.HandleErrorf(file.NodeInfo(node.GetOutputType()), "%s: unknown response type %s; resolved to %s which is not defined; consider using a leading dot", scope, mtd.GetOutputType(), dsc.FullName()); err != nil {
+ return err
+ }
+ } else if msg, ok := dsc.(protoreflect.MessageDescriptor); !ok {
+ if err := handler.HandleErrorf(file.NodeInfo(node.GetOutputType()), "%s: invalid response type: %s is %s, not a message", scope, dsc.FullName(), descriptorTypeWithArticle(dsc)); err != nil {
+ return err
+ }
+ } else {
+ typeName := "." + string(dsc.FullName())
+ if mtd.GetOutputType() != typeName {
+ mtd.OutputType = proto.String(typeName)
+ }
+ m.outputType = msg
+ }
+
+ return nil
+}
+
+func (r *result) resolveOptions(handler *reporter.Handler, elemType string, elemName protoreflect.FullName, opts []*descriptorpb.UninterpretedOption, scopes []scope, checkedCache []string) error {
+ mc := &internal.MessageContext{
+ File: r,
+ ElementName: string(elemName),
+ ElementType: elemType,
+ }
+ file := r.FileNode()
+opts:
+ for _, opt := range opts {
+ // resolve any extension names found in option names
+ for _, nm := range opt.Name {
+ if nm.GetIsExtension() {
+ node := r.OptionNamePartNode(nm)
+ fqn, err := r.resolveExtensionName(nm.GetNamePart(), scopes, checkedCache)
+ if err != nil {
+ if err := handler.HandleErrorf(file.NodeInfo(node), "%v%v", mc, err); err != nil {
+ return err
+ }
+ continue opts
+ }
+ nm.NamePart = proto.String(fqn)
+ }
+ }
+ // also resolve any extension names found inside message literals in option values
+ mc.Option = opt
+ optVal := r.OptionNode(opt).GetValue()
+ if err := r.resolveOptionValue(handler, mc, optVal, scopes, checkedCache); err != nil {
+ return err
+ }
+ mc.Option = nil
+ }
+ return nil
+}
+
+func (r *result) resolveOptionValue(handler *reporter.Handler, mc *internal.MessageContext, val ast.ValueNode, scopes []scope, checkedCache []string) error {
+ optVal := val.Value()
+ switch optVal := optVal.(type) {
+ case []ast.ValueNode:
+ origPath := mc.OptAggPath
+ defer func() {
+ mc.OptAggPath = origPath
+ }()
+ for i, v := range optVal {
+ mc.OptAggPath = fmt.Sprintf("%s[%d]", origPath, i)
+ if err := r.resolveOptionValue(handler, mc, v, scopes, checkedCache); err != nil {
+ return err
+ }
+ }
+ case []*ast.MessageFieldNode:
+ origPath := mc.OptAggPath
+ defer func() {
+ mc.OptAggPath = origPath
+ }()
+ for _, fld := range optVal {
+ // check for extension name
+ if fld.Name.IsExtension() {
+ // Confusingly, an extension reference inside a message literal cannot refer to
+ // elements in the same enclosing message without a qualifier. Basically, we
+ // treat this as if there were no message scopes, so only the package name is
+ // used for resolving relative references. (This protoc behavior is inconsistent,
+ // but likely stems from how it re-uses the C++ text format implementation, and
+ // normal text format doesn't expect that kind of relative reference.)
+ scopes := scopes[:1] // first scope is file, the rest are enclosing messages
+ fqn, err := r.resolveExtensionName(string(fld.Name.Name.AsIdentifier()), scopes, checkedCache)
+ if err != nil {
+ if err := handler.HandleErrorf(r.FileNode().NodeInfo(fld.Name.Name), "%v%v", mc, err); err != nil {
+ return err
+ }
+ } else {
+ r.optionQualifiedNames[fld.Name.Name] = fqn
+ }
+ }
+
+ // recurse into value
+ mc.OptAggPath = origPath
+ if origPath != "" {
+ mc.OptAggPath += "."
+ }
+ if fld.Name.IsExtension() {
+ mc.OptAggPath = fmt.Sprintf("%s[%s]", mc.OptAggPath, string(fld.Name.Name.AsIdentifier()))
+ } else {
+ mc.OptAggPath = fmt.Sprintf("%s%s", mc.OptAggPath, string(fld.Name.Name.AsIdentifier()))
+ }
+
+ if err := r.resolveOptionValue(handler, mc, fld.Val, scopes, checkedCache); err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
+
+func (r *result) resolveExtensionName(name string, scopes []scope, checkedCache []string) (string, error) {
+ dsc := r.resolve(name, false, scopes, checkedCache)
+ if dsc == nil {
+ return "", fmt.Errorf("unknown extension %s", name)
+ }
+ if isSentinelDescriptor(dsc) {
+ return "", fmt.Errorf("unknown extension %s; resolved to %s which is not defined; consider using a leading dot", name, dsc.FullName())
+ }
+ if ext, ok := dsc.(protoreflect.FieldDescriptor); !ok {
+ return "", fmt.Errorf("invalid extension: %s is %s, not an extension", name, descriptorTypeWithArticle(dsc))
+ } else if !ext.IsExtension() {
+ return "", fmt.Errorf("invalid extension: %s is a field but not an extension", name)
+ }
+ return string("." + dsc.FullName()), nil
+}
+
+func (r *result) resolve(name string, onlyTypes bool, scopes []scope, checkedCache []string) protoreflect.Descriptor {
+ if strings.HasPrefix(name, ".") {
+ // already fully-qualified
+ return r.resolveElement(protoreflect.FullName(name[1:]), checkedCache)
+ }
+ // unqualified, so we look in the enclosing (last) scope first and move
+ // towards outermost (first) scope, trying to resolve the symbol
+ pos := strings.IndexByte(name, '.')
+ firstName := name
+ if pos > 0 {
+ firstName = name[:pos]
+ }
+ var bestGuess protoreflect.Descriptor
+ for i := len(scopes) - 1; i >= 0; i-- {
+ d := scopes[i](firstName, name)
+ if d != nil {
+ // In `protoc`, it will skip a match of the wrong type and move on
+ // to the next scope, but only if the reference is unqualified. So
+ // we mirror that behavior here. When we skip and move on, we go
+ // ahead and save the match of the wrong type so we can at least use
+ // it to construct a better error in the event that we don't find
+ // any match of the right type.
+ if !onlyTypes || isType(d) || firstName != name {
+ return d
+ }
+ if bestGuess == nil {
+ bestGuess = d
+ }
+ }
+ }
+ // we return best guess, even though it was not an allowed kind of
+ // descriptor, so caller can print a better error message (e.g.
+ // indicating that the name was found but that it's the wrong type)
+ return bestGuess
+}
+
+func isType(d protoreflect.Descriptor) bool {
+ switch d.(type) {
+ case protoreflect.MessageDescriptor, protoreflect.EnumDescriptor:
+ return true
+ }
+ return false
+}
+
+// scope represents a lexical scope in a proto file in which messages and enums
+// can be declared.
+type scope func(firstName, fullName string) protoreflect.Descriptor
+
+func fileScope(r *result, checkedCache []string) scope {
+ // we search symbols in this file, but also symbols in other files that have
+ // the same package as this file or a "parent" package (in protobuf,
+ // packages are a hierarchy like C++ namespaces)
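+ // For example, if this file's package is "foo.bar", an unqualified reference
+ // to "Baz" is tried as "foo.bar.Baz", then "foo.Baz", then just "Baz".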
+ prefixes := internal.CreatePrefixList(r.FileDescriptorProto().GetPackage())
+ querySymbol := func(n string) protoreflect.Descriptor {
+ return r.resolveElement(protoreflect.FullName(n), checkedCache)
+ }
+ return func(firstName, fullName string) protoreflect.Descriptor {
+ for _, prefix := range prefixes {
+ var n1, n string
+ if prefix == "" {
+ // exhausted all prefixes, so it must be in this one
+ n1, n = fullName, fullName
+ } else {
+ n = prefix + "." + fullName
+ n1 = prefix + "." + firstName
+ }
+ d := resolveElementRelative(n1, n, querySymbol)
+ if d != nil {
+ return d
+ }
+ }
+ return nil
+ }
+}
+
+func messageScope(r *result, messageName protoreflect.FullName) scope {
+ querySymbol := func(n string) protoreflect.Descriptor {
+ return resolveElementInFile(protoreflect.FullName(n), r)
+ }
+ return func(firstName, fullName string) protoreflect.Descriptor {
+ n1 := string(messageName) + "." + firstName
+ n := string(messageName) + "." + fullName
+ return resolveElementRelative(n1, n, querySymbol)
+ }
+}
+
+func resolveElementRelative(firstName, fullName string, query func(name string) protoreflect.Descriptor) protoreflect.Descriptor {
+ d := query(firstName)
+ if d == nil {
+ return nil
+ }
+ if firstName == fullName {
+ return d
+ }
+ if !isAggregateDescriptor(d) {
+ // can't possibly find the rest of full name if
+ // the first name indicated a leaf descriptor
+ return nil
+ }
+ d = query(fullName)
+ if d == nil {
+ return newSentinelDescriptor(fullName)
+ }
+ return d
+}
+
+func resolveElementInFile(name protoreflect.FullName, f File) protoreflect.Descriptor {
+ d := f.FindDescriptorByName(name)
+ if d != nil {
+ return d
+ }
+
+ if matchesPkgNamespace(name, f.Package()) {
+ // this sentinel means the name is a valid namespace but
+ // does not refer to a descriptor
+ return newSentinelDescriptor(string(name))
+ }
+ return nil
+}
+
+func matchesPkgNamespace(fqn, pkg protoreflect.FullName) bool {
+ if pkg == "" {
+ return false
+ }
+ if fqn == pkg {
+ return true
+ }
+ if len(pkg) > len(fqn) && strings.HasPrefix(string(pkg), string(fqn)) {
+ // if char after fqn is a dot, then fqn is a namespace
+ if pkg[len(fqn)] == '.' {
+ return true
+ }
+ }
+ return false
+}
+
+func isAggregateDescriptor(d protoreflect.Descriptor) bool {
+ if isSentinelDescriptor(d) {
+ // this indicates the name matched a package, not a
+ // descriptor, but a package is an aggregate, so
+ // we return true
+ return true
+ }
+ switch d.(type) {
+ case protoreflect.MessageDescriptor, protoreflect.EnumDescriptor, protoreflect.ServiceDescriptor:
+ return true
+ default:
+ return false
+ }
+}
+
+func isSentinelDescriptor(d protoreflect.Descriptor) bool {
+ _, ok := d.(*sentinelDescriptor)
+ return ok
+}
+
+func newSentinelDescriptor(name string) protoreflect.Descriptor {
+ return &sentinelDescriptor{name: name}
+}
+
+// sentinelDescriptor is a placeholder descriptor. It is used instead of nil to
+// distinguish between two situations:
+// 1. The given name could not be found.
+// 2. The given name *cannot* be a valid result so stop searching.
+//
+// In these cases, attempts to resolve an element name will return nil for the
+// first case and will return a sentinelDescriptor in the second. The sentinel
+// contains the fully-qualified name which caused the search to stop (which may
+// be a prefix of the actual name being resolved).
+type sentinelDescriptor struct {
+ protoreflect.Descriptor
+ name string
+}
+
+func (p *sentinelDescriptor) ParentFile() protoreflect.FileDescriptor {
+ return nil
+}
+
+func (p *sentinelDescriptor) Parent() protoreflect.Descriptor {
+ return nil
+}
+
+func (p *sentinelDescriptor) Index() int {
+ return 0
+}
+
+func (p *sentinelDescriptor) Syntax() protoreflect.Syntax {
+ return 0
+}
+
+func (p *sentinelDescriptor) Name() protoreflect.Name {
+ return protoreflect.Name(p.name)
+}
+
+func (p *sentinelDescriptor) FullName() protoreflect.FullName {
+ return protoreflect.FullName(p.name)
+}
+
+func (p *sentinelDescriptor) IsPlaceholder() bool {
+ return false
+}
+
+func (p *sentinelDescriptor) Options() protoreflect.ProtoMessage {
+ return nil
+}
+
+var _ protoreflect.Descriptor = (*sentinelDescriptor)(nil)
diff --git a/vendor/github.com/bufbuild/protocompile/linker/symbols.go b/vendor/github.com/bufbuild/protocompile/linker/symbols.go
new file mode 100644
index 0000000..c8db762
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/linker/symbols.go
@@ -0,0 +1,635 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package linker
+
+import (
+ "strings"
+ "sync"
+
+ "google.golang.org/protobuf/reflect/protoreflect"
+
+ "github.com/bufbuild/protocompile/ast"
+ "github.com/bufbuild/protocompile/internal"
+ "github.com/bufbuild/protocompile/protoutil"
+ "github.com/bufbuild/protocompile/reporter"
+ "github.com/bufbuild/protocompile/walk"
+)
+
+const unknownFilePath = "<unknown file>"
+
+// Symbols is a symbol table that maps names for all program elements to their
+// location in source. It also tracks extension tag numbers. This can be used
+// to enforce uniqueness for symbol names and tag numbers across many files and
+// many link operations.
+//
+// This type is thread-safe.
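+//
+// For example, a caller that links many files in separate Link calls can pass
+// the same *Symbols to each call so that conflicts across those files are
+// detected (a sketch; the parsed results, dependencies, and handler are
+// produced elsewhere):
+//
+//	symbols := &linker.Symbols{}
+//	resA, errA := linker.Link(parsedA, depsA, symbols, handler)
+//	resB, errB := linker.Link(parsedB, depsB, symbols, handler)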
+type Symbols struct {
+ pkgTrie packageSymbols
+
+ // We don't know the packages for these symbols, so we can't
+ // keep them in the pkgTrie. In the vast majority of cases, this
+ // will be empty/unused. When used, it ensures that
+ // multiple extension declarations don't refer to the same
+ // extension.
+ extDeclsMu sync.Mutex
+ extDecls map[protoreflect.FullName]extDecl
+}
+
+type packageSymbols struct {
+ mu sync.RWMutex
+ children map[protoreflect.FullName]*packageSymbols
+ files map[protoreflect.FileDescriptor]struct{}
+ symbols map[protoreflect.FullName]symbolEntry
+ exts map[extNumber]ast.SourceSpan
+}
+
+type extNumber struct {
+ extendee protoreflect.FullName
+ tag protoreflect.FieldNumber
+}
+
+type symbolEntry struct {
+ span ast.SourceSpan
+ isEnumValue bool
+ isPackage bool
+}
+
+type extDecl struct {
+ span ast.SourceSpan
+ extendee protoreflect.FullName
+ tag protoreflect.FieldNumber
+}
+
+// Import populates the symbol table with all symbols/elements and extension
+// tags present in the given file descriptor. If s is nil or if fd has already
+// been imported into s, this returns immediately without doing anything. If any
+// collisions in symbol names or extension tags are identified, an error will be
+// returned and the symbol table will not be updated.
+func (s *Symbols) Import(fd protoreflect.FileDescriptor, handler *reporter.Handler) error {
+ if s == nil {
+ return nil
+ }
+
+ if f, ok := fd.(protoreflect.FileImport); ok {
+ // unwrap any import instance
+ fd = f.FileDescriptor
+ }
+ if f, ok := fd.(*file); ok {
+ // unwrap any file instance
+ fd = f.FileDescriptor
+ }
+
+ var pkgSpan ast.SourceSpan
+ if res, ok := fd.(*result); ok {
+ pkgSpan = packageNameSpan(res)
+ } else {
+ pkgSpan = sourceSpanForPackage(fd)
+ }
+ pkg, err := s.importPackages(pkgSpan, fd.Package(), handler)
+ if err != nil || pkg == nil {
+ return err
+ }
+
+ pkg.mu.RLock()
+ _, alreadyImported := pkg.files[fd]
+ pkg.mu.RUnlock()
+
+ if alreadyImported {
+ return nil
+ }
+
+ for i := 0; i < fd.Imports().Len(); i++ {
+ if err := s.Import(fd.Imports().Get(i).FileDescriptor, handler); err != nil {
+ return err
+ }
+ }
+
+ if res, ok := fd.(*result); ok && res.hasSource() {
+ return s.importResultWithExtensions(pkg, res, handler)
+ }
+
+ return s.importFileWithExtensions(pkg, fd, handler)
+}
+
+func (s *Symbols) importFileWithExtensions(pkg *packageSymbols, fd protoreflect.FileDescriptor, handler *reporter.Handler) error {
+ imported, err := pkg.importFile(fd, handler)
+ if err != nil {
+ return err
+ }
+ if !imported {
+ // nothing else to do
+ return nil
+ }
+
+ return walk.Descriptors(fd, func(d protoreflect.Descriptor) error {
+ fld, ok := d.(protoreflect.FieldDescriptor)
+ if !ok || !fld.IsExtension() {
+ return nil
+ }
+ span := sourceSpanForNumber(fld)
+ extendee := fld.ContainingMessage()
+ return s.AddExtension(packageFor(extendee), extendee.FullName(), fld.Number(), span, handler)
+ })
+}
+
+func (s *packageSymbols) importFile(fd protoreflect.FileDescriptor, handler *reporter.Handler) (bool, error) {
+ s.mu.Lock()
+ defer s.mu.Unlock()
+
+ if _, ok := s.files[fd]; ok {
+ // have to double-check whether it's already imported, in case
+ // it was added after the read-locked check above
+ return false, nil
+ }
+
+ // first pass: check for conflicts
+ if err := s.checkFileLocked(fd, handler); err != nil {
+ return false, err
+ }
+ if err := handler.Error(); err != nil {
+ return false, err
+ }
+
+ // second pass: commit all symbols
+ s.commitFileLocked(fd)
+
+ return true, nil
+}
+
+func (s *Symbols) importPackages(pkgSpan ast.SourceSpan, pkg protoreflect.FullName, handler *reporter.Handler) (*packageSymbols, error) {
+ if pkg == "" {
+ return &s.pkgTrie, nil
+ }
+
+ cur := &s.pkgTrie
+ enumerator := nameEnumerator{name: pkg}
+ for {
+ p, ok := enumerator.next()
+ if !ok {
+ return cur, nil
+ }
+ var err error
+ cur, err = cur.importPackage(pkgSpan, p, handler)
+ if err != nil {
+ return nil, err
+ }
+ if cur == nil {
+ return nil, nil
+ }
+ }
+}
+
+func (s *packageSymbols) importPackage(pkgSpan ast.SourceSpan, pkg protoreflect.FullName, handler *reporter.Handler) (*packageSymbols, error) {
+ s.mu.RLock()
+ existing, ok := s.symbols[pkg]
+ var child *packageSymbols
+ if ok && existing.isPackage {
+ child = s.children[pkg]
+ }
+ s.mu.RUnlock()
+
+ if ok && existing.isPackage {
+ // package already exists
+ return child, nil
+ } else if ok {
+ return nil, reportSymbolCollision(pkgSpan, pkg, false, existing, handler)
+ }
+
+ s.mu.Lock()
+ defer s.mu.Unlock()
+ // have to double-check in case it was added while upgrading to write lock
+ existing, ok = s.symbols[pkg]
+ if ok && existing.isPackage {
+ // package already exists
+ return s.children[pkg], nil
+ } else if ok {
+ return nil, reportSymbolCollision(pkgSpan, pkg, false, existing, handler)
+ }
+ if s.symbols == nil {
+ s.symbols = map[protoreflect.FullName]symbolEntry{}
+ }
+ s.symbols[pkg] = symbolEntry{span: pkgSpan, isPackage: true}
+ child = &packageSymbols{}
+ if s.children == nil {
+ s.children = map[protoreflect.FullName]*packageSymbols{}
+ }
+ s.children[pkg] = child
+ return child, nil
+}
+
+func (s *Symbols) getPackage(pkg protoreflect.FullName, exact bool) *packageSymbols {
+ if pkg == "" {
+ return &s.pkgTrie
+ }
+ cur := &s.pkgTrie
+ enumerator := nameEnumerator{name: pkg}
+ for {
+ p, ok := enumerator.next()
+ if !ok {
+ return cur
+ }
+ cur.mu.RLock()
+ next := cur.children[p]
+ cur.mu.RUnlock()
+
+ if next == nil {
+ if exact {
+ return nil
+ }
+ return cur
+ }
+ cur = next
+ }
+}
+
+func reportSymbolCollision(span ast.SourceSpan, fqn protoreflect.FullName, additionIsEnumVal bool, existing symbolEntry, handler *reporter.Handler) error {
+ // because of the weird scoping rules for enum values, provide more context
+ // in the error message if this conflict involves an enum value
+ var isPkg, suffix string
+ if additionIsEnumVal || existing.isEnumValue {
+ suffix = "; protobuf uses C++ scoping rules for enum values, so they exist in the scope enclosing the enum"
+ }
+ if existing.isPackage {
+ isPkg = " as a package"
+ }
+ orig := existing.span
+ conflict := span
+ if posLess(conflict.Start(), orig.Start()) {
+ orig, conflict = conflict, orig
+ }
+ return handler.HandleErrorf(conflict, "symbol %q already defined%s at %v%s", fqn, isPkg, orig.Start(), suffix)
+}
+
+func posLess(a, b ast.SourcePos) bool {
+ if a.Filename == b.Filename {
+ if a.Line == b.Line {
+ return a.Col < b.Col
+ }
+ return a.Line < b.Line
+ }
+ return false
+}
+
+func (s *packageSymbols) checkFileLocked(f protoreflect.FileDescriptor, handler *reporter.Handler) error {
+ return walk.Descriptors(f, func(d protoreflect.Descriptor) error {
+ span := sourceSpanFor(d)
+ if existing, ok := s.symbols[d.FullName()]; ok {
+ _, isEnumVal := d.(protoreflect.EnumValueDescriptor)
+ if err := reportSymbolCollision(span, d.FullName(), isEnumVal, existing, handler); err != nil {
+ return err
+ }
+ }
+ return nil
+ })
+}
+
+func sourceSpanForPackage(fd protoreflect.FileDescriptor) ast.SourceSpan {
+ loc := fd.SourceLocations().ByPath([]int32{internal.FilePackageTag})
+ if internal.IsZeroLocation(loc) {
+ return ast.UnknownSpan(fd.Path())
+ }
+ return ast.NewSourceSpan(
+ ast.SourcePos{
+ Filename: fd.Path(),
+ Line: loc.StartLine,
+ Col: loc.StartColumn,
+ },
+ ast.SourcePos{
+ Filename: fd.Path(),
+ Line: loc.EndLine,
+ Col: loc.EndColumn,
+ },
+ )
+}
+
+func sourceSpanFor(d protoreflect.Descriptor) ast.SourceSpan {
+ file := d.ParentFile()
+ if file == nil {
+ return ast.UnknownSpan(unknownFilePath)
+ }
+ if result, ok := file.(*result); ok {
+ return nameSpan(result.FileNode(), result.Node(protoutil.ProtoFromDescriptor(d)))
+ }
+ path, ok := internal.ComputePath(d)
+ if !ok {
+ return ast.UnknownSpan(file.Path())
+ }
+ namePath := path
+ switch d.(type) {
+ case protoreflect.FieldDescriptor:
+ namePath = append(namePath, internal.FieldNameTag)
+ case protoreflect.MessageDescriptor:
+ namePath = append(namePath, internal.MessageNameTag)
+ case protoreflect.OneofDescriptor:
+ namePath = append(namePath, internal.OneofNameTag)
+ case protoreflect.EnumDescriptor:
+ namePath = append(namePath, internal.EnumNameTag)
+ case protoreflect.EnumValueDescriptor:
+ namePath = append(namePath, internal.EnumValNameTag)
+ case protoreflect.ServiceDescriptor:
+ namePath = append(namePath, internal.ServiceNameTag)
+ case protoreflect.MethodDescriptor:
+ namePath = append(namePath, internal.MethodNameTag)
+ default:
+ // NB: shouldn't really happen, but just in case fall back to path to
+ // descriptor, sans name field
+ }
+ loc := file.SourceLocations().ByPath(namePath)
+ if internal.IsZeroLocation(loc) {
+ loc = file.SourceLocations().ByPath(path)
+ if internal.IsZeroLocation(loc) {
+ return ast.UnknownSpan(file.Path())
+ }
+ }
+
+ return ast.NewSourceSpan(
+ ast.SourcePos{
+ Filename: file.Path(),
+ Line: loc.StartLine,
+ Col: loc.StartColumn,
+ },
+ ast.SourcePos{
+ Filename: file.Path(),
+ Line: loc.EndLine,
+ Col: loc.EndColumn,
+ },
+ )
+}
+
+func sourceSpanForNumber(fd protoreflect.FieldDescriptor) ast.SourceSpan {
+ file := fd.ParentFile()
+ if file == nil {
+ return ast.UnknownSpan(unknownFilePath)
+ }
+ path, ok := internal.ComputePath(fd)
+ if !ok {
+ return ast.UnknownSpan(file.Path())
+ }
+ numberPath := path
+ numberPath = append(numberPath, internal.FieldNumberTag)
+ loc := file.SourceLocations().ByPath(numberPath)
+ if internal.IsZeroLocation(loc) {
+ loc = file.SourceLocations().ByPath(path)
+ if internal.IsZeroLocation(loc) {
+ return ast.UnknownSpan(file.Path())
+ }
+ }
+ return ast.NewSourceSpan(
+ ast.SourcePos{
+ Filename: file.Path(),
+ Line: loc.StartLine,
+ Col: loc.StartColumn,
+ },
+ ast.SourcePos{
+ Filename: file.Path(),
+ Line: loc.EndLine,
+ Col: loc.EndColumn,
+ },
+ )
+}
+
+func (s *packageSymbols) commitFileLocked(f protoreflect.FileDescriptor) {
+ if s.symbols == nil {
+ s.symbols = map[protoreflect.FullName]symbolEntry{}
+ }
+ if s.exts == nil {
+ s.exts = map[extNumber]ast.SourceSpan{}
+ }
+ _ = walk.Descriptors(f, func(d protoreflect.Descriptor) error {
+ span := sourceSpanFor(d)
+ name := d.FullName()
+ _, isEnumValue := d.(protoreflect.EnumValueDescriptor)
+ s.symbols[name] = symbolEntry{span: span, isEnumValue: isEnumValue}
+ return nil
+ })
+
+ if s.files == nil {
+ s.files = map[protoreflect.FileDescriptor]struct{}{}
+ }
+ s.files[f] = struct{}{}
+}
+
+func (s *Symbols) importResultWithExtensions(pkg *packageSymbols, r *result, handler *reporter.Handler) error {
+ imported, err := pkg.importResult(r, handler)
+ if err != nil {
+ return err
+ }
+ if !imported {
+ // nothing else to do
+ return nil
+ }
+
+ return walk.Descriptors(r, func(d protoreflect.Descriptor) error {
+ fd, ok := d.(*extTypeDescriptor)
+ if !ok {
+ return nil
+ }
+ file := r.FileNode()
+ node := r.FieldNode(fd.FieldDescriptorProto())
+ info := file.NodeInfo(node.FieldTag())
+ extendee := fd.ContainingMessage()
+ return s.AddExtension(packageFor(extendee), extendee.FullName(), fd.Number(), info, handler)
+ })
+}
+
+func (s *Symbols) importResult(r *result, handler *reporter.Handler) error {
+ pkg, err := s.importPackages(packageNameSpan(r), r.Package(), handler)
+ if err != nil || pkg == nil {
+ return err
+ }
+ _, err = pkg.importResult(r, handler)
+ return err
+}
+
+func (s *packageSymbols) importResult(r *result, handler *reporter.Handler) (bool, error) {
+ s.mu.Lock()
+ defer s.mu.Unlock()
+
+ if _, ok := s.files[r]; ok {
+ // already imported
+ return false, nil
+ }
+
+ // first pass: check for conflicts
+ if err := s.checkResultLocked(r, handler); err != nil {
+ return false, err
+ }
+ if err := handler.Error(); err != nil {
+ return false, err
+ }
+
+ // second pass: commit all symbols
+ s.commitFileLocked(r)
+
+ return true, nil
+}
+
+func (s *packageSymbols) checkResultLocked(r *result, handler *reporter.Handler) error {
+ resultSyms := map[protoreflect.FullName]symbolEntry{}
+ return walk.Descriptors(r, func(d protoreflect.Descriptor) error {
+ _, isEnumVal := d.(protoreflect.EnumValueDescriptor)
+ file := r.FileNode()
+ name := d.FullName()
+ node := r.Node(protoutil.ProtoFromDescriptor(d))
+ span := nameSpan(file, node)
+ // check symbols already in this symbol table
+ if existing, ok := s.symbols[name]; ok {
+ if err := reportSymbolCollision(span, name, isEnumVal, existing, handler); err != nil {
+ return err
+ }
+ }
+
+ // also check symbols from this result (that are not yet in symbol table)
+ if existing, ok := resultSyms[name]; ok {
+ if err := reportSymbolCollision(span, name, isEnumVal, existing, handler); err != nil {
+ return err
+ }
+ }
+ resultSyms[name] = symbolEntry{
+ span: span,
+ isEnumValue: isEnumVal,
+ }
+
+ return nil
+ })
+}
+
+func packageNameSpan(r *result) ast.SourceSpan {
+ if node, ok := r.FileNode().(*ast.FileNode); ok {
+ for _, decl := range node.Decls {
+ if pkgNode, ok := decl.(*ast.PackageNode); ok {
+ return r.FileNode().NodeInfo(pkgNode.Name)
+ }
+ }
+ }
+ return ast.UnknownSpan(r.Path())
+}
+
+func nameSpan(file ast.FileDeclNode, n ast.Node) ast.SourceSpan {
+ // TODO: maybe ast package needs a NamedNode interface to simplify this?
+ switch n := n.(type) {
+ case ast.FieldDeclNode:
+ return file.NodeInfo(n.FieldName())
+ case ast.MessageDeclNode:
+ return file.NodeInfo(n.MessageName())
+ case ast.OneofDeclNode:
+ return file.NodeInfo(n.OneofName())
+ case ast.EnumValueDeclNode:
+ return file.NodeInfo(n.GetName())
+ case *ast.EnumNode:
+ return file.NodeInfo(n.Name)
+ case *ast.ServiceNode:
+ return file.NodeInfo(n.Name)
+ case ast.RPCDeclNode:
+ return file.NodeInfo(n.GetName())
+ default:
+ return file.NodeInfo(n)
+ }
+}
+
+// AddExtension records the given extension, which is used to ensure that no two files
+// attempt to extend the same message using the same tag. The given pkg should be the
+// package that defines extendee.
+func (s *Symbols) AddExtension(pkg, extendee protoreflect.FullName, tag protoreflect.FieldNumber, span ast.SourceSpan, handler *reporter.Handler) error {
+ if pkg != "" {
+ if !strings.HasPrefix(string(extendee), string(pkg)+".") {
+ return handler.HandleErrorf(span, "could not register extension: extendee %q does not match package %q", extendee, pkg)
+ }
+ }
+ pkgSyms := s.getPackage(pkg, true)
+ if pkgSyms == nil {
+ // should never happen
+ return handler.HandleErrorf(span, "could not register extension: missing package symbols for %q", pkg)
+ }
+ return pkgSyms.addExtension(extendee, tag, span, handler)
+}
+
+func (s *packageSymbols) addExtension(extendee protoreflect.FullName, tag protoreflect.FieldNumber, span ast.SourceSpan, handler *reporter.Handler) error {
+ s.mu.Lock()
+ defer s.mu.Unlock()
+
+ extNum := extNumber{extendee: extendee, tag: tag}
+ if existing, ok := s.exts[extNum]; ok {
+ return handler.HandleErrorf(span, "extension with tag %d for message %s already defined at %v", tag, extendee, existing.Start())
+ }
+
+ if s.exts == nil {
+ s.exts = map[extNumber]ast.SourceSpan{}
+ }
+ s.exts[extNum] = span
+ return nil
+}
+
+// AddExtensionDeclaration records the given extension declaration, which is used to
+// ensure that no two declarations refer to the same extension.
+func (s *Symbols) AddExtensionDeclaration(extension, extendee protoreflect.FullName, tag protoreflect.FieldNumber, span ast.SourceSpan, handler *reporter.Handler) error {
+ s.extDeclsMu.Lock()
+ defer s.extDeclsMu.Unlock()
+ existing, ok := s.extDecls[extension]
+ if ok {
+ if existing.extendee == extendee && existing.tag == tag {
+ // This is a declaration that has already been added. Ignore.
+ return nil
+ }
+ return handler.HandleErrorf(span, "extension %s already declared as extending %s with tag %d at %v", extension, existing.extendee, existing.tag, existing.span.Start())
+ }
+ if s.extDecls == nil {
+ s.extDecls = map[protoreflect.FullName]extDecl{}
+ }
+ s.extDecls[extension] = extDecl{
+ span: span,
+ extendee: extendee,
+ tag: tag,
+ }
+ return nil
+}
+
+// Lookup finds the registered location of the given name. If the given name has
+// not been seen/registered, nil is returned.
+func (s *Symbols) Lookup(name protoreflect.FullName) ast.SourceSpan {
+ // note: getPackage never returns nil when exact=false
+ pkgSyms := s.getPackage(name, false)
+ if entry, ok := pkgSyms.symbols[name]; ok {
+ return entry.span
+ }
+ return nil
+}
+
+// LookupExtension finds the registered location of the given extension. If the given
+// extension has not been seen/registered, nil is returned.
+func (s *Symbols) LookupExtension(messageName protoreflect.FullName, extensionNumber protoreflect.FieldNumber) ast.SourceSpan {
+ // note: getPackage never returns nil when exact=false
+ pkgSyms := s.getPackage(messageName, false)
+ return pkgSyms.exts[extNumber{messageName, extensionNumber}]
+}
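+
+// For example (illustrative only; syms, the message name, and the tag number
+// below are hypothetical), after importing a file that defines an extension,
+// the location of an already-used tag can be looked up:
+//
+//    if span := syms.LookupExtension("foo.bar.Extendee", 100); span != nil {
+//        // tag 100 of foo.bar.Extendee is taken; span says where
+//    }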
+
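+// nameEnumerator walks a dotted full name and yields each successively longer
+// enclosing name. For example, enumerating "foo.bar.baz" yields "foo", then
+// "foo.bar", then "foo.bar.baz"; importPackages relies on this order to
+// register each level of the package trie.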
+type nameEnumerator struct {
+ name protoreflect.FullName
+ start int
+}
+
+func (e *nameEnumerator) next() (protoreflect.FullName, bool) {
+ if e.start < 0 {
+ return "", false
+ }
+ pos := strings.IndexByte(string(e.name[e.start:]), '.')
+ if pos == -1 {
+ e.start = -1
+ return e.name, true
+ }
+ pos += e.start
+ e.start = pos + 1
+ return e.name[:pos], true
+}
diff --git a/vendor/github.com/bufbuild/protocompile/linker/validate.go b/vendor/github.com/bufbuild/protocompile/linker/validate.go
new file mode 100644
index 0000000..6633a9f
--- /dev/null
+++ b/vendor/github.com/bufbuild/protocompile/linker/validate.go
@@ -0,0 +1,1153 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package linker
+
+import (
+ "fmt"
+ "math"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+
+ "google.golang.org/protobuf/reflect/protoreflect"
+ "google.golang.org/protobuf/types/descriptorpb"
+
+ "github.com/bufbuild/protocompile/ast"
+ "github.com/bufbuild/protocompile/internal"
+ "github.com/bufbuild/protocompile/protoutil"
+ "github.com/bufbuild/protocompile/reporter"
+ "github.com/bufbuild/protocompile/walk"
+)
+
+// ValidateOptions runs some validation checks on the result that can only
+// be done after options are interpreted.
+func (r *result) ValidateOptions(handler *reporter.Handler, symbols *Symbols) error {
+ if err := r.validateFile(handler); err != nil {
+ return err
+ }
+ return walk.Descriptors(r, func(d protoreflect.Descriptor) error {
+ switch d := d.(type) {
+ case protoreflect.FieldDescriptor:
+ if err := r.validateField(d, handler); err != nil {
+ return err
+ }
+ case protoreflect.MessageDescriptor:
+ if symbols == nil {
+ symbols = &Symbols{}
+ }
+ if err := r.validateMessage(d, handler, symbols); err != nil {
+ return err
+ }
+ case protoreflect.EnumDescriptor:
+ if err := r.validateEnum(d, handler); err != nil {
+ return err
+ }
+ }
+ return nil
+ })
+}
+
+func (r *result) validateFile(handler *reporter.Handler) error {
+ opts := r.FileDescriptorProto().GetOptions()
+ if opts.GetOptimizeFor() != descriptorpb.FileOptions_LITE_RUNTIME {
+ // Non-lite files may not import lite files.
+ imports := r.Imports()
+ for i, length := 0, imports.Len(); i < length; i++ {
+ dep := imports.Get(i)
+ depOpts, ok := dep.Options().(*descriptorpb.FileOptions)
+ if !ok {
+ continue // what else to do?
+ }
+ if depOpts.GetOptimizeFor() == descriptorpb.FileOptions_LITE_RUNTIME {
+ err := handler.HandleErrorf(r.getImportLocation(dep.Path()), "a file that does not use optimize_for=LITE_RUNTIME may not import file %q that does", dep.Path())
+ if err != nil {
+ return err
+ }
+ }
+ }
+ }
+ if isEditions(r) {
+ // Validate features
+ if opts.GetFeatures().GetFieldPresence() == descriptorpb.FeatureSet_LEGACY_REQUIRED {
+ span := r.findOptionSpan(r, internal.FileOptionsFeaturesTag, internal.FeatureSetFieldPresenceTag)
+ err := handler.HandleErrorf(span, "LEGACY_REQUIRED field presence cannot be set as the default for a file")
+ if err != nil {
+ return err
+ }
+ }
+ if opts != nil && opts.JavaStringCheckUtf8 != nil {
+ span := r.findOptionSpan(r, internal.FileOptionsJavaStringCheckUTF8Tag)
+ err := handler.HandleErrorf(span, `file option java_string_check_utf8 is not allowed with editions; import "google/protobuf/java_features.proto" and use (pb.java).utf8_validation instead`)
+ if err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
+
+func (r *result) validateField(fld protoreflect.FieldDescriptor, handler *reporter.Handler) error {
+ if xtd, ok := fld.(protoreflect.ExtensionTypeDescriptor); ok {
+ fld = xtd.Descriptor()
+ }
+ fd, ok := fld.(*fldDescriptor)
+ if !ok {
+ // should not be possible
+ return fmt.Errorf("field descriptor is wrong type: expecting %T, got %T", (*fldDescriptor)(nil), fld)
+ }
+
+ if err := r.validatePacked(fd, handler); err != nil {
+ return err
+ }
+ if fd.Kind() == protoreflect.EnumKind {
+ requiresOpen := !fd.IsList() && !fd.HasPresence()
+ if requiresOpen && fd.Enum().IsClosed() {
+ // Fields in a proto3 message cannot refer to proto2 enums.
+ // In editions, this translates to implicit presence fields
+ // not being able to refer to closed enums.
+ // TODO: This really should be based solely on whether the enum's first
+ // value is zero, NOT based on if it's open vs closed.
+ // https://github.com/protocolbuffers/protobuf/issues/16249
+ file := r.FileNode()
+ info := file.NodeInfo(r.FieldNode(fd.proto).FieldType())
+ if err := handler.HandleErrorf(info, "cannot use closed enum %s in a field with implicit presence", fd.Enum().FullName()); err != nil {
+ return err
+ }
+ }
+ }
+ if fd.HasDefault() && !fd.HasPresence() {
+ span := r.findScalarOptionSpan(r.FieldNode(fd.proto), "default")
+ err := handler.HandleErrorf(span, "default value is not allowed on fields with implicit presence")
+ if err != nil {
+ return err
+ }
+ }
+ if fd.proto.Options != nil && fd.proto.Options.Ctype != nil {
+ if descriptorpb.Edition(r.Edition()) >= descriptorpb.Edition_EDITION_2024 {
+ // We don't support edition 2024 yet, but we went ahead and mimicked this check
+ // from protoc, which currently has experimental support for 2024.
+ span := r.findOptionSpan(fd, internal.FieldOptionsCTypeTag)
+ if err := handler.HandleErrorf(span, "ctype option cannot be used as of edition 2024; use features.string_type instead"); err != nil {
+ return err
+ }
+ } else if descriptorpb.Edition(r.Edition()) == descriptorpb.Edition_EDITION_2023 {
+ if fld.Kind() != protoreflect.StringKind && fld.Kind() != protoreflect.BytesKind {
+ span := r.findOptionSpan(fd, internal.FieldOptionsCTypeTag)
+ if err := handler.HandleErrorf(span, "ctype option can only be used on string and bytes fields"); err != nil {
+ return err
+ }
+ }
+ if fd.proto.Options.GetCtype() == descriptorpb.FieldOptions_CORD && fd.IsExtension() {
+ span := r.findOptionSpan(fd, internal.FieldOptionsCTypeTag)
+ if err := handler.HandleErrorf(span, "ctype option cannot be CORD for extension fields"); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ if (fd.proto.Options.GetLazy() || fd.proto.Options.GetUnverifiedLazy()) && fd.Kind() != protoreflect.MessageKind {
+ var span ast.SourceSpan
+ var optionName string
+ if fd.proto.Options.GetLazy() {
+ span = r.findOptionSpan(fd, internal.FieldOptionsLazyTag)
+ optionName = "lazy"
+ } else {
+ span = r.findOptionSpan(fd, internal.FieldOptionsUnverifiedLazyTag)
+ optionName = "unverified_lazy"
+ }
+ var suffix string
+ if fd.Kind() == protoreflect.GroupKind {
+ if isEditions(r) {
+ suffix = " that use length-prefixed encoding"
+ } else {
+ suffix = ", not groups"
+ }
+ }
+ if err := handler.HandleErrorf(span, "%s option can only be used with message fields%s", optionName, suffix); err != nil {
+ return err
+ }
+ }
+ if fd.proto.Options.GetJstype() != descriptorpb.FieldOptions_JS_NORMAL {
+ switch fd.Kind() {
+ case protoreflect.Int64Kind, protoreflect.Uint64Kind, protoreflect.Sint64Kind,
+ protoreflect.Fixed64Kind, protoreflect.Sfixed64Kind:
+ // allowed only for 64-bit integer types
+ default:
+ span := r.findOptionSpan(fd, internal.FieldOptionsJSTypeTag)
+ err := handler.HandleErrorf(span, "only 64-bit integer fields (int64, uint64, sint64, fixed64, and sfixed64) can specify a jstype other than JS_NORMAL")
+ if err != nil {
+ return err
+ }
+ }
+ }
+ if isEditions(r) {
+ if err := r.validateFieldFeatures(fd, handler); err != nil {
+ return err
+ }
+ }
+
+ if fld.IsExtension() {
+ // More checks if this is an extension field.
+ if err := r.validateExtension(fd, handler); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (r *result) validateExtension(fd *fldDescriptor, handler *reporter.Handler) error {
+ // NB: It's a little gross that we don't enforce these in validateBasic().
+ // But it requires linking to resolve the extendee, so we can interrogate
+ // its descriptor.
+ msg := fd.ContainingMessage()
+ if msg.Options().(*descriptorpb.MessageOptions).GetMessageSetWireFormat() {
+ // Message set wire format requires that all extensions be messages
+ // themselves (no scalar extensions)
+ if fd.Kind() != protoreflect.MessageKind {
+ file := r.FileNode()
+ info := file.NodeInfo(r.FieldNode(fd.proto).FieldType())
+ err := handler.HandleErrorf(info, "messages with message-set wire format cannot contain scalar extensions, only messages")
+ if err != nil {
+ return err
+ }
+ }
+ if fd.Cardinality() == protoreflect.Repeated {
+ file := r.FileNode()
+ info := file.NodeInfo(r.FieldNode(fd.proto).FieldLabel())
+ err := handler.HandleErrorf(info, "messages with message-set wire format cannot contain repeated extensions, only optional")
+ if err != nil {
+ return err
+ }
+ }
+ } else if fd.Number() > internal.MaxNormalTag {
+ // In validateBasic() we just made sure these were within bounds for any message. But
+ // now that things are linked, we can check if the extendee is messageset wire format
+ // and, if not, enforce tighter limit.
+ file := r.FileNode()
+ info := file.NodeInfo(r.FieldNode(fd.proto).FieldTag())
+ err := handler.HandleErrorf(info, "tag number %d is higher than max allowed tag number (%d)", fd.Number(), internal.MaxNormalTag)
+ if err != nil {
+ return err
+ }
+ }
+
+ fileOpts := r.FileDescriptorProto().GetOptions()
+ if fileOpts.GetOptimizeFor() == descriptorpb.FileOptions_LITE_RUNTIME {
+ extendeeFileOpts, _ := msg.ParentFile().Options().(*descriptorpb.FileOptions)
+ if extendeeFileOpts.GetOptimizeFor() != descriptorpb.FileOptions_LITE_RUNTIME {
+ file := r.FileNode()
+ info := file.NodeInfo(r.FieldNode(fd.proto))
+ err := handler.HandleErrorf(info, "extensions in a file that uses optimize_for=LITE_RUNTIME may not extend messages in file %q which does not", msg.ParentFile().Path())
+ if err != nil {
+ return err
+ }
+ }
+ }
+
+ // If the extendee uses extension declarations, make sure this extension matches.
+ md := protoutil.ProtoFromMessageDescriptor(msg)
+ for i, extRange := range md.ExtensionRange {
+ if int32(fd.Number()) < extRange.GetStart() || int32(fd.Number()) >= extRange.GetEnd() {
+ continue
+ }
+ extRangeOpts := extRange.GetOptions()
+ if extRangeOpts == nil {
+ break
+ }
+ if len(extRangeOpts.Declaration) == 0 && extRangeOpts.GetVerification() != descriptorpb.ExtensionRangeOptions_DECLARATION {
+ break
+ }
+ var found bool
+ for j, extDecl := range extRangeOpts.Declaration {
+ if extDecl.GetNumber() != int32(fd.Number()) {
+ continue
+ }
+ found = true
+ if extDecl.GetReserved() {
+ file := r.FileNode()
+ info := file.NodeInfo(r.FieldNode(fd.proto).FieldTag())
+ span, _ := findExtensionRangeOptionSpan(msg.ParentFile(), msg, i, extRange,
+ internal.ExtensionRangeOptionsDeclarationTag, int32(j), internal.ExtensionRangeOptionsDeclarationReservedTag)
+ err := handler.HandleErrorf(info, "cannot use field number %d for an extension because it is reserved in declaration at %v",
+ fd.Number(), span.Start())
+ if err != nil {
+ return err
+ }
+ break
+ }
+ if extDecl.GetFullName() != "."+string(fd.FullName()) {
+ file := r.FileNode()
+ info := file.NodeInfo(r.FieldNode(fd.proto).FieldName())
+ span, _ := findExtensionRangeOptionSpan(msg.ParentFile(), msg, i, extRange,
+ internal.ExtensionRangeOptionsDeclarationTag, int32(j), internal.ExtensionRangeOptionsDeclarationFullNameTag)
+ err := handler.HandleErrorf(info, "expected extension with number %d to be named %s, not %s, per declaration at %v",
+ fd.Number(), strings.TrimPrefix(extDecl.GetFullName(), "."), fd.FullName(), span.Start())
+ if err != nil {
+ return err
+ }
+ }
+ if extDecl.GetType() != getTypeName(fd) {
+ file := r.FileNode()
+ info := file.NodeInfo(r.FieldNode(fd.proto).FieldType())
+ span, _ := findExtensionRangeOptionSpan(msg.ParentFile(), msg, i, extRange,
+ internal.ExtensionRangeOptionsDeclarationTag, int32(j), internal.ExtensionRangeOptionsDeclarationTypeTag)
+ err := handler.HandleErrorf(info, "expected extension with number %d to have type %s, not %s, per declaration at %v",
+ fd.Number(), strings.TrimPrefix(extDecl.GetType(), "."), getTypeName(fd), span.Start())
+ if err != nil {
+ return err
+ }
+ }
+ if extDecl.GetRepeated() != (fd.Cardinality() == protoreflect.Repeated) {
+ expected, actual := "repeated", "optional"
+ if !extDecl.GetRepeated() {
+ expected, actual = actual, expected
+ }
+ file := r.FileNode()
+ info := file.NodeInfo(r.FieldNode(fd.proto).FieldLabel())
+ span, _ := findExtensionRangeOptionSpan(msg.ParentFile(), msg, i, extRange,
+ internal.ExtensionRangeOptionsDeclarationTag, int32(j), internal.ExtensionRangeOptionsDeclarationRepeatedTag)
+ err := handler.HandleErrorf(info, "expected extension with number %d to be %s, not %s, per declaration at %v",
+ fd.Number(), expected, actual, span.Start())
+ if err != nil {
+ return err
+ }
+ }
+ break
+ }
+ if !found {
+ file := r.FileNode()
+ info := file.NodeInfo(r.FieldNode(fd.proto).FieldTag())
+ span, _ := findExtensionRangeOptionSpan(fd.ParentFile(), msg, i, extRange,
+ internal.ExtensionRangeOptionsVerificationTag)
+ err := handler.HandleErrorf(info, "expected extension with number %d to be declared in type %s, but no declaration found at %v",
+ fd.Number(), fd.ContainingMessage().FullName(), span.Start())
+ if err != nil {
+ return err
+ }
+ }
+ }
+
+ return nil
+}
+
+func (r *result) validatePacked(fd *fldDescriptor, handler *reporter.Handler) error {
+ if fd.proto.Options != nil && fd.proto.Options.Packed != nil && isEditions(r) {
+ span := r.findOptionSpan(fd, internal.FieldOptionsPackedTag)
+ err := handler.HandleErrorf(span, "packed option cannot be used with editions; use features.repeated_field_encoding=PACKED instead")
+ if err != nil {
+ return err
+ }
+ }
+ if !fd.proto.GetOptions().GetPacked() {
+ // if packed isn't true, nothing to validate
+ return nil
+ }
+ if fd.proto.GetLabel() != descriptorpb.FieldDescriptorProto_LABEL_REPEATED {
+ file := r.FileNode()
+ info := file.NodeInfo(r.FieldNode(fd.proto).FieldLabel())
+ err := handler.HandleErrorf(info, "packed option is only allowed on repeated fields")
+ if err != nil {
+ return err
+ }
+ }
+ switch fd.proto.GetType() {
+ case descriptorpb.FieldDescriptorProto_TYPE_STRING, descriptorpb.FieldDescriptorProto_TYPE_BYTES,
+ descriptorpb.FieldDescriptorProto_TYPE_MESSAGE, descriptorpb.FieldDescriptorProto_TYPE_GROUP:
+ file := r.FileNode()
+ info := file.NodeInfo(r.FieldNode(fd.proto).FieldType())
+ err := handler.HandleErrorf(info, "packed option is only allowed on numeric, boolean, and enum fields")
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (r *result) validateFieldFeatures(fld *fldDescriptor, handler *reporter.Handler) error {
+ if msg, ok := fld.Parent().(*msgDescriptor); ok && msg.proto.GetOptions().GetMapEntry() {
+ // Skip validating features on fields of synthetic map entry messages.
+ // We blindly propagate them from the map field's features, but some may
+ // really only apply to the map field and not to a key or value entry field.
+ return nil
+ }
+ features := fld.proto.GetOptions().GetFeatures()
+ if features == nil {
+ // No features to validate.
+ return nil
+ }
+ if features.FieldPresence != nil {
+ switch {
+ case fld.proto.OneofIndex != nil:
+ span := r.findOptionSpan(fld, internal.FieldOptionsFeaturesTag, internal.FeatureSetFieldPresenceTag)
+ if err := handler.HandleErrorf(span, "oneof fields may not specify field presence"); err != nil {
+ return err
+ }
+ case fld.Cardinality() == protoreflect.Repeated:
+ span := r.findOptionSpan(fld, internal.FieldOptionsFeaturesTag, internal.FeatureSetFieldPresenceTag)
+ if err := handler.HandleErrorf(span, "repeated fields may not specify field presence"); err != nil {
+ return err
+ }
+ case fld.IsExtension():
+ span := r.findOptionSpan(fld, internal.FieldOptionsFeaturesTag, internal.FeatureSetFieldPresenceTag)
+ if err := handler.HandleErrorf(span, "extension fields may not specify field presence"); err != nil {
+ return err
+ }
+ case fld.Message() != nil && features.GetFieldPresence() == descriptorpb.FeatureSet_IMPLICIT:
+ span := r.findOptionSpan(fld, internal.FieldOptionsFeaturesTag, internal.FeatureSetFieldPresenceTag)
+ if err := handler.HandleErrorf(span, "message fields may not specify implicit presence"); err != nil {
+ return err
+ }
+ }
+ }
+ if features.RepeatedFieldEncoding != nil {
+ if fld.Cardinality() != protoreflect.Repeated {
+ span := r.findOptionSpan(fld, internal.FieldOptionsFeaturesTag, internal.FeatureSetRepeatedFieldEncodingTag)
+ if err := handler.HandleErrorf(span, "only repeated fields may specify repeated field encoding"); err != nil {
+ return err
+ }
+ } else if !internal.CanPack(fld.Kind()) && features.GetRepeatedFieldEncoding() == descriptorpb.FeatureSet_PACKED {
+ span := r.findOptionSpan(fld, internal.FieldOptionsFeaturesTag, internal.FeatureSetRepeatedFieldEncodingTag)
+ if err := handler.HandleErrorf(span, "only repeated primitive fields may specify packed encoding"); err != nil {
+ return err
+ }
+ }
+ }
+ if features.Utf8Validation != nil {
+ isMap := fld.IsMap()
+ if (!isMap && fld.Kind() != protoreflect.StringKind) ||
+ (isMap &&
+ fld.MapKey().Kind() != protoreflect.StringKind &&
+ fld.MapValue().Kind() != protoreflect.StringKind) {
+ span := r.findOptionSpan(fld, internal.FieldOptionsFeaturesTag, internal.FeatureSetUTF8ValidationTag)
+ if err := handler.HandleErrorf(span, "only string fields may specify UTF8 validation"); err != nil {
+ return err
+ }
+ }
+ }
+ if features.MessageEncoding != nil {
+ if fld.Message() == nil || fld.IsMap() {
+ span := r.findOptionSpan(fld, internal.FieldOptionsFeaturesTag, internal.FeatureSetMessageEncodingTag)
+ if err := handler.HandleErrorf(span, "only message fields may specify message encoding"); err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
+
+func (r *result) validateMessage(d protoreflect.MessageDescriptor, handler *reporter.Handler, symbols *Symbols) error {
+ md, ok := d.(*msgDescriptor)
+ if !ok {
+ // should not be possible
+ return fmt.Errorf("message descriptor is wrong type: expecting %T, got %T", (*msgDescriptor)(nil), d)
+ }
+
+ if err := r.validateJSONNamesInMessage(md, handler); err != nil {
+ return err
+ }
+
+ return r.validateExtensionDeclarations(md, handler, symbols)
+}
+
+func (r *result) validateJSONNamesInMessage(md *msgDescriptor, handler *reporter.Handler) error {
+ if err := r.validateFieldJSONNames(md, false, handler); err != nil {
+ return err
+ }
+ if err := r.validateFieldJSONNames(md, true, handler); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (r *result) validateEnum(d protoreflect.EnumDescriptor, handler *reporter.Handler) error {
+ ed, ok := d.(*enumDescriptor)
+ if !ok {
+ // should not be possible
+ return fmt.Errorf("enum descriptor is wrong type: expecting %T, got %T", (*enumDescriptor)(nil), d)
+ }
+
+ firstValue := ed.Values().Get(0)
+ if !ed.IsClosed() && firstValue.Number() != 0 {
+ // TODO: This check doesn't really belong here. Whether the
+ // first value is zero should be orthogonal to whether the
+ // allowed values are open or closed.
+ // https://github.com/protocolbuffers/protobuf/issues/16249
+ file := r.FileNode()
+ evd, ok := firstValue.(*enValDescriptor)
+ if !ok {
+ // should not be possible
+ return fmt.Errorf("enum value descriptor is wrong type: expecting %T, got %T", (*enValDescriptor)(nil), firstValue)
+ }
+ info := file.NodeInfo(r.EnumValueNode(evd.proto).GetNumber())
+ if err := handler.HandleErrorf(info, "first value of open enum %s must have numeric value zero", ed.FullName()); err != nil {
+ return err
+ }
+ }
+
+ if err := r.validateJSONNamesInEnum(ed, handler); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func (r *result) validateJSONNamesInEnum(ed *enumDescriptor, handler *reporter.Handler) error {
+ seen := map[string]*descriptorpb.EnumValueDescriptorProto{}
+ for _, evd := range ed.proto.GetValue() {
+ scope := "enum value " + ed.proto.GetName() + "." + evd.GetName()
+
+ name := canonicalEnumValueName(evd.GetName(), ed.proto.GetName())
+ if existing, ok := seen[name]; ok && evd.GetNumber() != existing.GetNumber() {
+ fldNode := r.EnumValueNode(evd)
+ existingNode := r.EnumValueNode(existing)
+ conflictErr := fmt.Errorf("%s: camel-case name (with optional enum name prefix removed) %q conflicts with camel-case name of enum value %s, defined at %v",
+ scope, name, existing.GetName(), r.FileNode().NodeInfo(existingNode).Start())
+
+ // Since proto2 did not originally have a JSON format, we report conflicts as just warnings.
+ // With editions, not fully supporting JSON is allowed via feature: json_format == BEST_EFFORT
+ if !isJSONCompliant(ed) {
+ handler.HandleWarningWithPos(r.FileNode().NodeInfo(fldNode), conflictErr)
+ } else if err := handler.HandleErrorWithPos(r.FileNode().NodeInfo(fldNode), conflictErr); err != nil {
+ return err
+ }
+ } else {
+ seen[name] = evd
+ }
+ }
+ return nil
+}
+
+func (r *result) validateFieldJSONNames(md *msgDescriptor, useCustom bool, handler *reporter.Handler) error {
+ type jsonName struct {
+ source *descriptorpb.FieldDescriptorProto
+ // true if orig is a custom JSON name (vs. the field's default JSON name)
+ custom bool
+ }
+ seen := map[string]jsonName{}
+
+ for _, fd := range md.proto.GetField() {
+ scope := "field " + md.proto.GetName() + "." + fd.GetName()
+ defaultName := internal.JSONName(fd.GetName())
+ name := defaultName
+ custom := false
+ if useCustom {
+ n := fd.GetJsonName()
+ if n != defaultName || r.hasCustomJSONName(fd) {
+ name = n
+ custom = true
+ }
+ }
+ if existing, ok := seen[name]; ok {
+ // When useCustom is true, we'll only report an issue when a conflict is
+ // due to a custom name. That way, we don't double report conflicts on
+ // non-custom names.
+ if !useCustom || custom || existing.custom {
+ fldNode := r.FieldNode(fd)
+ customStr, srcCustomStr := "custom", "custom"
+ if !custom {
+ customStr = "default"
+ }
+ if !existing.custom {
+ srcCustomStr = "default"
+ }
+ info := r.FileNode().NodeInfo(fldNode)
+ conflictErr := reporter.Errorf(info, "%s: %s JSON name %q conflicts with %s JSON name of field %s, defined at %v",
+ scope, customStr, name, srcCustomStr, existing.source.GetName(), r.FileNode().NodeInfo(r.FieldNode(existing.source)).Start())
+
+ // Since proto2 did not originally have default JSON names, we report conflicts
+ // between default names (neither is a custom name) as just warnings.
+ // With editions, not fully supporting JSON is allowed via feature: json_format == BEST_EFFORT
+ if !isJSONCompliant(md) && !custom && !existing.custom {
+ handler.HandleWarning(conflictErr)
+ } else if err := handler.HandleError(conflictErr); err != nil {
+ return err
+ }
+ }
+ } else {
+ seen[name] = jsonName{source: fd, custom: custom}
+ }
+ }
+ return nil
+}
+
+func (r *result) validateExtensionDeclarations(md *msgDescriptor, handler *reporter.Handler, symbols *Symbols) error {
+ for i, extRange := range md.proto.ExtensionRange {
+ opts := extRange.GetOptions()
+ if len(opts.GetDeclaration()) == 0 {
+ // nothing to check
+ continue
+ }
+ // If any declarations are present, verification is assumed to be
+ // DECLARATION. It's an error for declarations to be present while the
+ // verification field is explicitly set to something other than that.
+ if opts.Verification != nil && opts.GetVerification() != descriptorpb.ExtensionRangeOptions_DECLARATION {
+ span, ok := findExtensionRangeOptionSpan(r, md, i, extRange, internal.ExtensionRangeOptionsVerificationTag)
+ if !ok {
+ span, _ = findExtensionRangeOptionSpan(r, md, i, extRange, internal.ExtensionRangeOptionsDeclarationTag, 0)
+ }
+ if err := handler.HandleErrorf(span, "extension range cannot have declarations and have verification of %s", opts.GetVerification()); err != nil {
+ return err
+ }
+ }
+ declsByTag := map[int32]ast.SourcePos{}
+ for i, extDecl := range extRange.GetOptions().GetDeclaration() {
+ if extDecl.Number == nil {
+ span, _ := findExtensionRangeOptionSpan(r, md, i, extRange, internal.ExtensionRangeOptionsDeclarationTag, int32(i))
+ if err := handler.HandleErrorf(span, "extension declaration is missing required field number"); err != nil {
+ return err
+ }
+ } else {
+ extensionNumberSpan, _ := findExtensionRangeOptionSpan(r, md, i, extRange,
+ internal.ExtensionRangeOptionsDeclarationTag, int32(i), internal.ExtensionRangeOptionsDeclarationNumberTag)
+ if extDecl.GetNumber() < extRange.GetStart() || extDecl.GetNumber() >= extRange.GetEnd() {
+ // Number is out of range.
+ // See if one of the other ranges on the same extends statement includes the number,
+ // so we can provide a helpful message.
+ var suffix string
+ if extRange, ok := r.ExtensionsNode(extRange).(*ast.ExtensionRangeNode); ok {
+ for _, rng := range extRange.Ranges {
+ start, _ := rng.StartVal.AsInt64()
+ var end int64
+ switch {
+ case rng.Max != nil:
+ end = math.MaxInt64
+ case rng.EndVal != nil:
+ end, _ = rng.EndVal.AsInt64()
+ default:
+ end = start
+ }
+ if int64(extDecl.GetNumber()) >= start && int64(extDecl.GetNumber()) <= end {
+ // Found another range that matches
+ suffix = "; when using declarations, extends statements should indicate only a single span of field numbers"
+ break
+ }
+ }
+ }
+ err := handler.HandleErrorf(extensionNumberSpan, "extension declaration has number outside the range: %d not in [%d,%d]%s",
+ extDecl.GetNumber(), extRange.GetStart(), extRange.GetEnd()-1, suffix)
+ if err != nil {
+ return err
+ }
+ } else {
+ // Valid number; make sure it's not a duplicate
+ if existing, ok := declsByTag[extDecl.GetNumber()]; ok {
+ err := handler.HandleErrorf(extensionNumberSpan, "extension for tag number %d already declared at %v",
+ extDecl.GetNumber(), existing)
+ if err != nil {
+ return err
+ }
+ } else {
+ declsByTag[extDecl.GetNumber()] = extensionNumberSpan.Start()
+ }
+ }
+ }
+
+ if extDecl.FullName == nil && !extDecl.GetReserved() {
+ span, _ := findExtensionRangeOptionSpan(r, md, i, extRange, internal.ExtensionRangeOptionsDeclarationTag, int32(i))
+ if err := handler.HandleErrorf(span, "extension declaration that is not marked reserved must have a full_name"); err != nil {
+ return err
+ }
+ } else if extDecl.FullName != nil {
+ var extensionFullName protoreflect.FullName
+ extensionNameSpan, _ := findExtensionRangeOptionSpan(r, md, i, extRange,
+ internal.ExtensionRangeOptionsDeclarationTag, int32(i), internal.ExtensionRangeOptionsDeclarationFullNameTag)
+ if !strings.HasPrefix(extDecl.GetFullName(), ".") {
+ if err := handler.HandleErrorf(extensionNameSpan, "extension declaration full name %q should start with a leading dot (.)", extDecl.GetFullName()); err != nil {
+ return err
+ }
+ extensionFullName = protoreflect.FullName(extDecl.GetFullName())
+ } else {
+ extensionFullName = protoreflect.FullName(extDecl.GetFullName()[1:])
+ }
+ if !extensionFullName.IsValid() {
+ if err := handler.HandleErrorf(extensionNameSpan, "extension declaration full name %q is not a valid qualified name", extDecl.GetFullName()); err != nil {
+ return err
+ }
+ }
+ if err := symbols.AddExtensionDeclaration(extensionFullName, md.FullName(), protoreflect.FieldNumber(extDecl.GetNumber()), extensionNameSpan, handler); err != nil {
+ return err
+ }
+ }
+
+ if extDecl.Type == nil && !extDecl.GetReserved() {
+ span, _ := findExtensionRangeOptionSpan(r, md, i, extRange, internal.ExtensionRangeOptionsDeclarationTag, int32(i))
+ if err := handler.HandleErrorf(span, "extension declaration that is not marked reserved must have a type"); err != nil {
+ return err
+ }
+ } else if extDecl.Type != nil {
+ if strings.HasPrefix(extDecl.GetType(), ".") {
+ if !protoreflect.FullName(extDecl.GetType()[1:]).IsValid() {
+ span, _ := findExtensionRangeOptionSpan(r, md, i, extRange,
+ internal.ExtensionRangeOptionsDeclarationTag, int32(i), internal.ExtensionRangeOptionsDeclarationTypeTag)
+ if err := handler.HandleErrorf(span, "extension declaration type %q is not a valid qualified name", extDecl.GetType()); err != nil {
+ return err
+ }
+ }
+ } else if !isBuiltinTypeName(extDecl.GetType()) {
+ span, _ := findExtensionRangeOptionSpan(r, md, i, extRange,
+ internal.ExtensionRangeOptionsDeclarationTag, int32(i), internal.ExtensionRangeOptionsDeclarationTypeTag)
+ if err := handler.HandleErrorf(span, "extension declaration type %q must be a builtin type or start with a leading dot (.)", extDecl.GetType()); err != nil {
+ return err
+ }
+ }
+ }
+
+ if extDecl.GetReserved() && (extDecl.FullName == nil) != (extDecl.Type == nil) {
+ var fieldTag int32
+ if extDecl.FullName != nil {
+ fieldTag = internal.ExtensionRangeOptionsDeclarationFullNameTag
+ } else {
+ fieldTag = internal.ExtensionRangeOptionsDeclarationTypeTag
+ }
+ span, _ := findExtensionRangeOptionSpan(r, md, i, extRange,
+ internal.ExtensionRangeOptionsDeclarationTag, int32(i), fieldTag)
+ if err := handler.HandleErrorf(span, "extension declarations that are reserved should specify both full_name and type or neither"); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ return nil
+}
+
+func (r *result) hasCustomJSONName(fdProto *descriptorpb.FieldDescriptorProto) bool {
+ // if we have the AST, we can more precisely determine whether a custom
+ // JSON name was defined, even if it is explicitly configured to be the same
+ // as the default JSON name for the field.
+ opts := r.FieldNode(fdProto).GetOptions()
+ if opts == nil {
+ return false
+ }
+ for _, opt := range opts.Options {
+ if len(opt.Name.Parts) == 1 &&
+ opt.Name.Parts[0].Name.AsIdentifier() == "json_name" &&
+ !opt.Name.Parts[0].IsExtension() {
+ return true
+ }
+ }
+ return false
+}
+
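+// canonicalEnumValueName computes the camel-case conflict key used by
+// validateJSONNamesInEnum: the enum-name prefix (if present) is stripped from
+// the value name and the remainder is converted to upper camel case. For
+// example, with enum name "MyEnum", both "MY_ENUM_FOO" and "FOO" canonicalize
+// to "Foo", so validateJSONNamesInEnum treats them as the same name (and
+// reports a conflict if their numbers differ).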
+func canonicalEnumValueName(enumValueName, enumName string) string {
+ return enumValCamelCase(removePrefix(enumValueName, enumName))
+}
+
+// removePrefix is used to remove the given prefix from the given str. It does not require
+// an exact match and ignores case and underscores. If all non-underscore characters
+// would be removed from str, str is returned unchanged. If str does not have the given
+// prefix (even with the very lenient matching, with regard to case and underscores), then
+// str is returned unchanged.
+//
+// The algorithm is adapted from the protoc source:
+//
+// https://github.com/protocolbuffers/protobuf/blob/v21.3/src/google/protobuf/descriptor.cc#L922
+func removePrefix(str, prefix string) string {
+ j := 0
+ for i, r := range str {
+ if r == '_' {
+ // skip underscores in the input
+ continue
+ }
+
+ p, sz := utf8.DecodeRuneInString(prefix[j:])
+ for p == '_' {
+ j += sz // consume/skip underscore
+ p, sz = utf8.DecodeRuneInString(prefix[j:])
+ }
+
+ if j == len(prefix) {
+ // matched entire prefix; return rest of str
+ // but skipping any leading underscores
+ result := strings.TrimLeft(str[i:], "_")
+ if len(result) == 0 {
+ // result can't be empty string
+ return str
+ }
+ return result
+ }
+ if unicode.ToLower(r) != unicode.ToLower(p) {
+ // does not match prefix
+ return str
+ }
+ j += sz // consume matched rune of prefix
+ }
+ return str
+}
+
+// enumValCamelCase converts the given string to upper-camel-case.
+//
+// The algorithm is adapted from the protoc source:
+//
+// https://github.com/protocolbuffers/protobuf/blob/v21.3/src/google/protobuf/descriptor.cc#L887
+func enumValCamelCase(name string) string {
+ var js []rune
+ nextUpper := true
+ for _, r := range name {
+ if r == '_' {
+ nextUpper = true
+ continue
+ }
+ if nextUpper {
+ nextUpper = false
+ js = append(js, unicode.ToUpper(r))
+ } else {
+ js = append(js, unicode.ToLower(r))
+ }
+ }
+ return string(js)
+}
+
+func isBuiltinTypeName(typeName string) bool {
+ switch typeName {
+ case "int32", "int64", "uint32", "uint64", "sint32", "sint64",
+ "fixed32", "fixed64", "sfixed32", "sfixed64",
+ "bool", "double", "float", "string", "bytes":
+ return true
+ default:
+ return false
+ }
+}
+
+func getTypeName(fd protoreflect.FieldDescriptor) string {
+ switch fd.Kind() {
+ case protoreflect.MessageKind, protoreflect.GroupKind:
+ return "." + string(fd.Message().FullName())
+ case protoreflect.EnumKind:
+ return "." + string(fd.Enum().FullName())
+ default:
+ return fd.Kind().String()
+ }
+}
+
+func findExtensionRangeOptionSpan(
+ file protoreflect.FileDescriptor,
+ extended protoreflect.MessageDescriptor,
+ extRangeIndex int,
+ extRange *descriptorpb.DescriptorProto_ExtensionRange,
+ path ...int32,
+) (ast.SourceSpan, bool) {
+ // NB: Typically, we have an AST for a file and NOT source code info, because the
+ // compiler validates options before computing source code info. However, we might
+ // be validating an extension (whose source/AST we have), but whose extendee (and
+ // thus extension range options for declarations) could be in some other file, which
+ // could be provided to the compiler as an already-compiled descriptor. So this
+ // function can fall back to using source code info if an AST is not available.
+
+ if r, ok := file.(Result); ok && r.AST() != nil {
+ // Find the location using the AST, which will generally be higher fidelity
+ // than what we might find in a file descriptor's source code info.
+ exts := r.ExtensionsNode(extRange)
+ return findOptionSpan(r.FileNode(), exts, extRange.Options.ProtoReflect().Descriptor(), path...)
+ }
+
+ srcLocs := file.SourceLocations()
+ if srcLocs.Len() == 0 {
+ // no source code info, can't do any better than the filename. We
+ // return true as the boolean so the caller doesn't try again with
+ // an alternate path, since we won't be able to do any better.
+ return ast.UnknownSpan(file.Path()), true
+ }
+ msgPath, ok := internal.ComputePath(extended)
+ if !ok {
+ // Same as above: return true since no subsequent query can do better.
+ return ast.UnknownSpan(file.Path()), true
+ }
+
+ //nolint:gocritic // intentionally assigning to different slice variables
+ extRangePath := append(msgPath, internal.MessageExtensionRangesTag, int32(extRangeIndex))
+ optsPath := append(extRangePath, internal.ExtensionRangeOptionsTag) //nolint:gocritic
+ fullPath := append(optsPath, path...) //nolint:gocritic
+ srcLoc := srcLocs.ByPath(fullPath)
+ if srcLoc.Path != nil {
+ // found it
+ return asSpan(file.Path(), srcLoc), true
+ }
+
+ // Slow path to find closest match :/
+ // We look for longest matching path that is at least len(extRangePath)
+ // long. If we find a path that is longer (meaning a path that points INSIDE
+ // the request element), accept the first such location.
+ var bestMatch protoreflect.SourceLocation
+ var bestMatchPathLen int
+ for i, length := 0, srcLocs.Len(); i < length; i++ {
+ srcLoc := srcLocs.Get(i)
+ if len(srcLoc.Path) >= len(extRangePath) &&
+ isDescendantPath(fullPath, srcLoc.Path) &&
+ len(srcLoc.Path) > bestMatchPathLen {
+ bestMatch = srcLoc
+ bestMatchPathLen = len(srcLoc.Path)
+ } else if isDescendantPath(srcLoc.Path, path) {
+ return asSpan(file.Path(), srcLoc), false
+ }
+ }
+ if bestMatchPathLen > 0 {
+ return asSpan(file.Path(), bestMatch), false
+ }
+ return ast.UnknownSpan(file.Path()), false
+}
+
+func (r *result) findScalarOptionSpan(
+ root ast.NodeWithOptions,
+ name string,
+) ast.SourceSpan {
+ match := ast.Node(root)
+ root.RangeOptions(func(n *ast.OptionNode) bool {
+ if len(n.Name.Parts) == 1 && !n.Name.Parts[0].IsExtension() &&
+ string(n.Name.Parts[0].Name.AsIdentifier()) == name {
+ match = n
+ return false
+ }
+ return true
+ })
+ return r.FileNode().NodeInfo(match)
+}
+
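+// findOptionSpan locates the AST node that best corresponds to the option
+// identified by path, where path is a sequence of field numbers relative to
+// the descriptor's options message. For example, the path
+// {FieldOptionsFeaturesTag, FeatureSetFieldPresenceTag} identifies
+// options.features.field_presence. If no matching option node is found, the
+// span of the descriptor's own node is returned.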
+func (r *result) findOptionSpan(
+ d protoutil.DescriptorProtoWrapper,
+ path ...int32,
+) ast.SourceSpan {
+ node := r.Node(d.AsProto())
+ nodeWithOpts, ok := node.(ast.NodeWithOptions)
+ if !ok {
+ return r.FileNode().NodeInfo(node)
+ }
+ span, _ := findOptionSpan(r.FileNode(), nodeWithOpts, d.Options().ProtoReflect().Descriptor(), path...)
+ return span
+}
+
+func findOptionSpan(
+ file ast.FileDeclNode,
+ root ast.NodeWithOptions,
+ md protoreflect.MessageDescriptor,
+ path ...int32,
+) (ast.SourceSpan, bool) {
+ bestMatch := ast.Node(root)
+ var bestMatchLen int
+ var repeatedIndices []int
+ root.RangeOptions(func(n *ast.OptionNode) bool {
+ desc := md
+ limit := len(n.Name.Parts)
+ if limit > len(path) {
+ limit = len(path)
+ }
+ var nextIsIndex bool
+ for i := 0; i < limit; i++ {
+ if desc == nil || nextIsIndex {
+ // Can't match anymore. Try next option.
+ return true
+ }
+ wantField := desc.Fields().ByNumber(protoreflect.FieldNumber(path[i]))
+ if wantField == nil {
+ // Should not be possible... next option won't fare any better since
+ // it's a disagreement between the given path and the given descriptor, so bail.
+ return false
+ }
+ if n.Name.Parts[i].Open != nil ||
+ string(n.Name.Parts[i].Name.AsIdentifier()) != string(wantField.Name()) {
+ // This is an extension/custom option or indicates the wrong name.
+ // Try the next one.
+ return true
+ }
+ desc = wantField.Message()
+ nextIsIndex = wantField.Cardinality() == protoreflect.Repeated
+ }
+ // If we made it this far, we've matched everything so far.
+ if len(n.Name.Parts) >= len(path) {
+ // Either an exact match (if equal) or this option points *inside* the
+ // item we care about (if greater). Either way, the first such result
+ // is a keeper.
+ bestMatch = n.Name.Parts[len(path)-1]
+ bestMatchLen = len(n.Name.Parts)
+ return false
+ }
+ // We've got more path elements to try to match with the value.
+ match, matchLen := findMatchingValueNode(
+ desc,
+ path[len(n.Name.Parts):],
+ nextIsIndex,
+ 0,
+ &repeatedIndices,
+ n,
+ n.Val)
+ if match != nil {
+ totalMatchLen := matchLen + len(n.Name.Parts)
+ if totalMatchLen > bestMatchLen {
+ bestMatch, bestMatchLen = match, totalMatchLen
+ }
+ }
+ return bestMatchLen != len(path) // no exact match, so keep looking
+ })
+ return file.NodeInfo(bestMatch), bestMatchLen == len(path)
+}
+
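+// findMatchingValueNode recursively descends into an option's value (message
+// and array literals), trying to consume the remaining path elements. It
+// returns the deepest node it could match along with how many path elements
+// were consumed, or (nil, 0) if the value cannot match the path at all.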
+func findMatchingValueNode(
+ md protoreflect.MessageDescriptor,
+ path protoreflect.SourcePath,
+ currIsRepeated bool,
+ repeatedCount int,
+ repeatedIndices *[]int,
+ node ast.Node,
+ val ast.ValueNode,
+) (ast.Node, int) {
+ var matchLen int
+ var index int
+ if currIsRepeated {
+ // Compute the index of the current value (or, if an array literal, the
+ // index of the first value in the array).
+ if len(*repeatedIndices) > repeatedCount {
+ (*repeatedIndices)[repeatedCount]++
+ index = (*repeatedIndices)[repeatedCount]
+ } else {
+ *repeatedIndices = append(*repeatedIndices, 0)
+ index = 0
+ }
+ repeatedCount++
+ }
+
+ if arrayVal, ok := val.(*ast.ArrayLiteralNode); ok {
+ if !currIsRepeated {
+ // This should not happen.
+ return nil, 0
+ }
+ offset := int(path[0]) - index
+ if offset >= len(arrayVal.Elements) {
+ // The index we are looking for is not in this array.
+ return nil, 0
+ }
+ elem := arrayVal.Elements[offset]
+ // We've matched the index!
+ matchLen++
+ path = path[1:]
+ // Recurse into array element.
+ nextMatch, nextMatchLen := findMatchingValueNode(
+ md,
+ path,
+ false,
+ repeatedCount,
+ repeatedIndices,
+ elem,
+ elem,
+ )
+ return nextMatch, nextMatchLen + matchLen
+ }
+
+ if currIsRepeated {
+ if index != int(path[0]) {
+ // Not a match!
+ return nil, 0
+ }
+ // We've matched the index!
+ matchLen++
+ path = path[1:]
+ if len(path) == 0 {
+ // We're done matching!
+ return node, matchLen
+ }
+ }
+
+ msgValue, ok := val.(*ast.MessageLiteralNode)
+ if !ok {
+ // We can't go any further
+ return node, matchLen
+ }
+
+ var wantField protoreflect.FieldDescriptor
+ if md != nil {
+ wantField = md.Fields().ByNumber(protoreflect.FieldNumber(path[0]))
+ }
+ if wantField == nil {
+ // Should not be possible... next option won't fare any better since
+ // it's a disagreement between the given path and the given descriptor, so bail.
+ return nil, 0
+ }
+ for _, field := range msgValue.Elements {
+ if field.Name.Open != nil ||
+ string(field.Name.Name.AsIdentifier()) != string(wantField.Name()) {
+ // This is an extension/custom option or indicates the wrong name.
+ // Try the next one.
+ continue
+ }
+ // We've matched this field.
+ matchLen++
+ path = path[1:]
+ if len(path) == 0 {
+ // Perfect match!
+ return field, matchLen
+ }
+ nextMatch, nextMatchLen := findMatchingValueNode(
+ wantField.Message(),
+ path,
+ wantField.Cardinality() == protoreflect.Repeated,
+ repeatedCount,
+ repeatedIndices,
+ field,
+ field.Val,
+ )
+ return nextMatch, nextMatchLen + matchLen
+ }
+
+ // If we didn't find the right field, just return what we have so far.
+ return node, matchLen
+}
+
+func isDescendantPath(descendant, ancestor protoreflect.SourcePath) bool {
+ if len(descendant) < len(ancestor) {
+ return false
+ }
+ for i := range ancestor {
+ if descendant[i] != ancestor[i] {
+ return false
+ }
+ }
+ return true
+}
+
+func asSpan(file string, srcLoc protoreflect.SourceLocation) ast.SourceSpan {
+ return ast.NewSourceSpan(
+ ast.SourcePos{
+ Filename: file,
+ Line: srcLoc.StartLine + 1,
+ Col: srcLoc.StartColumn + 1,
+ },
+ ast.SourcePos{
+ Filename: file,
+ Line: srcLoc.EndLine + 1,
+ Col: srcLoc.EndColumn + 1,
+ },
+ )
+}
+
+func (r *result) getImportLocation(path string) ast.SourceSpan {
+ node, ok := r.FileNode().(*ast.FileNode)
+ if !ok {
+ return ast.UnknownSpan(path)
+ }
+ for _, decl := range node.Decls {
+ imp, ok := decl.(*ast.ImportNode)
+ if !ok {
+ continue
+ }
+ if imp.Name.AsString() == path {
+ return node.NodeInfo(imp.Name)
+ }
+ }
+ // Couldn't find it? Should never happen...
+ return ast.UnknownSpan(path)
+}
+
+func isEditions(r *result) bool {
+ return descriptorpb.Edition(r.Edition()) >= descriptorpb.Edition_EDITION_2023
+}