diff --git a/Cargo.lock b/Cargo.lock index ca04a83dce..957971fc87 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1499,6 +1499,15 @@ dependencies = [ "zeroize", ] +[[package]] +name = "email_address" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e079f19b08ca6239f47f8ba8509c11cf3ea30095831f7fed61441475edd8c449" +dependencies = [ + "serde", +] + [[package]] name = "encode_unicode" version = "0.3.6" @@ -1560,6 +1569,7 @@ dependencies = [ "chrono", "colored", "duct", + "email_address", "env_logger 0.10.1", "flate2", "form_urlencoded", @@ -1598,6 +1608,7 @@ dependencies = [ "quickcheck", "radix_fmt", "rand 0.8.5", + "regex", "reqwest 0.12.4", "rsa", "serde", @@ -1670,7 +1681,7 @@ dependencies = [ "handlebars", "indexmap 2.2.6", "insta", - "itertools 0.12.1", + "itertools 0.13.0", "junction", "litparser", "litparser-derive", @@ -2042,8 +2053,8 @@ dependencies = [ "aho-corasick", "bstr 1.8.0", "log", - "regex-automata 0.4.3", - "regex-syntax 0.8.2", + "regex-automata 0.4.9", + "regex-syntax 0.8.5", ] [[package]] @@ -2615,7 +2626,7 @@ dependencies = [ "globset", "log", "memchr", - "regex-automata 0.4.3", + "regex-automata 0.4.9", "same-file", "walkdir", "winapi-util", @@ -2713,9 +2724,9 @@ dependencies = [ [[package]] name = "itertools" -version = "0.12.1" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" dependencies = [ "either", ] @@ -4221,14 +4232,14 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.2" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.3", - 
"regex-syntax 0.8.2", + "regex-automata 0.4.9", + "regex-syntax 0.8.5", ] [[package]] @@ -4242,13 +4253,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.3" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.2", + "regex-syntax 0.8.5", ] [[package]] @@ -4265,9 +4276,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.8.2" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "remove_dir_all" diff --git a/pkg/encorebuild/jsruntimebuild.go b/pkg/encorebuild/jsruntimebuild.go index 771b76a0d5..82db5fc545 100644 --- a/pkg/encorebuild/jsruntimebuild.go +++ b/pkg/encorebuild/jsruntimebuild.go @@ -108,7 +108,8 @@ func (b *JSRuntimeBuilder) makeDistFolder() { b.log.Info().Msg("creating dist folder") // Sanity-check the runtime dir configuration so we don't delete the wrong thing. 
base := filepath.Base(b.cfg.RepoDir) - if b.cfg.RepoDir == "" || (base != "encore" && base != "encr.dev") { + parentBase := filepath.Base(filepath.Dir(b.cfg.RepoDir)) + if b.cfg.RepoDir == "" || (base != "encore" && base != "encr.dev" && parentBase != "encore.worktrees") { Bailf("invalid repo directory %q, aborting", b.cfg.RepoDir) } diff --git a/proto/encore/parser/schema/v1/schema.pb.go b/proto/encore/parser/schema/v1/schema.pb.go index b0b0ad0f32..e586a35399 100644 --- a/proto/encore/parser/schema/v1/schema.pb.go +++ b/proto/encore/parser/schema/v1/schema.pb.go @@ -124,6 +124,55 @@ func (Builtin) EnumDescriptor() ([]byte, []int) { return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{0} } +type ValidationRule_Is int32 + +const ( + ValidationRule_UNKNOWN ValidationRule_Is = 0 + ValidationRule_EMAIL ValidationRule_Is = 1 + ValidationRule_URL ValidationRule_Is = 2 +) + +// Enum value maps for ValidationRule_Is. +var ( + ValidationRule_Is_name = map[int32]string{ + 0: "UNKNOWN", + 1: "EMAIL", + 2: "URL", + } + ValidationRule_Is_value = map[string]int32{ + "UNKNOWN": 0, + "EMAIL": 1, + "URL": 2, + } +) + +func (x ValidationRule_Is) Enum() *ValidationRule_Is { + p := new(ValidationRule_Is) + *p = x + return p +} + +func (x ValidationRule_Is) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ValidationRule_Is) Descriptor() protoreflect.EnumDescriptor { + return file_encore_parser_schema_v1_schema_proto_enumTypes[1].Descriptor() +} + +func (ValidationRule_Is) Type() protoreflect.EnumType { + return &file_encore_parser_schema_v1_schema_proto_enumTypes[1] +} + +func (x ValidationRule_Is) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ValidationRule_Is.Descriptor instead. 
+func (ValidationRule_Is) EnumDescriptor() ([]byte, []int) { + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{1, 0} +} + // Type represents the base of our schema on which everything else is built on-top of. It has to be one, and only one, // thing from our list of meta types. // @@ -146,7 +195,8 @@ type Type struct { // *Type_Literal // *Type_TypeParameter // *Type_Config - Typ isType_Typ `protobuf_oneof:"typ"` + Typ isType_Typ `protobuf_oneof:"typ"` + Validation *ValidationExpr `protobuf:"bytes,15,opt,name=validation,proto3,oneof" json:"validation,omitempty"` // The validation expression for this type } func (x *Type) Reset() { @@ -258,6 +308,13 @@ func (x *Type) GetConfig() *ConfigValue { return nil } +func (x *Type) GetValidation() *ValidationExpr { + if x != nil { + return x.Validation + } + return nil +} + type isType_Typ interface { isType_Typ() } @@ -325,6 +382,266 @@ func (*Type_TypeParameter) isType_Typ() {} func (*Type_Config) isType_Typ() {} +type ValidationRule struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Rule: + // + // *ValidationRule_MinLen + // *ValidationRule_MaxLen + // *ValidationRule_MinVal + // *ValidationRule_MaxVal + // *ValidationRule_StartsWith + // *ValidationRule_EndsWith + // *ValidationRule_MatchesRegexp + // *ValidationRule_Is_ + Rule isValidationRule_Rule `protobuf_oneof:"rule"` +} + +func (x *ValidationRule) Reset() { + *x = ValidationRule{} + if protoimpl.UnsafeEnabled { + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ValidationRule) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ValidationRule) ProtoMessage() {} + +func (x *ValidationRule) ProtoReflect() protoreflect.Message { + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[1] + if protoimpl.UnsafeEnabled 
&& x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ValidationRule.ProtoReflect.Descriptor instead. +func (*ValidationRule) Descriptor() ([]byte, []int) { + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{1} +} + +func (m *ValidationRule) GetRule() isValidationRule_Rule { + if m != nil { + return m.Rule + } + return nil +} + +func (x *ValidationRule) GetMinLen() uint64 { + if x, ok := x.GetRule().(*ValidationRule_MinLen); ok { + return x.MinLen + } + return 0 +} + +func (x *ValidationRule) GetMaxLen() uint64 { + if x, ok := x.GetRule().(*ValidationRule_MaxLen); ok { + return x.MaxLen + } + return 0 +} + +func (x *ValidationRule) GetMinVal() float64 { + if x, ok := x.GetRule().(*ValidationRule_MinVal); ok { + return x.MinVal + } + return 0 +} + +func (x *ValidationRule) GetMaxVal() float64 { + if x, ok := x.GetRule().(*ValidationRule_MaxVal); ok { + return x.MaxVal + } + return 0 +} + +func (x *ValidationRule) GetStartsWith() string { + if x, ok := x.GetRule().(*ValidationRule_StartsWith); ok { + return x.StartsWith + } + return "" +} + +func (x *ValidationRule) GetEndsWith() string { + if x, ok := x.GetRule().(*ValidationRule_EndsWith); ok { + return x.EndsWith + } + return "" +} + +func (x *ValidationRule) GetMatchesRegexp() string { + if x, ok := x.GetRule().(*ValidationRule_MatchesRegexp); ok { + return x.MatchesRegexp + } + return "" +} + +func (x *ValidationRule) GetIs() ValidationRule_Is { + if x, ok := x.GetRule().(*ValidationRule_Is_); ok { + return x.Is + } + return ValidationRule_UNKNOWN +} + +type isValidationRule_Rule interface { + isValidationRule_Rule() +} + +type ValidationRule_MinLen struct { + MinLen uint64 `protobuf:"varint,1,opt,name=min_len,json=minLen,proto3,oneof"` +} + +type ValidationRule_MaxLen struct { + MaxLen uint64 
`protobuf:"varint,2,opt,name=max_len,json=maxLen,proto3,oneof"` +} + +type ValidationRule_MinVal struct { + MinVal float64 `protobuf:"fixed64,3,opt,name=min_val,json=minVal,proto3,oneof"` +} + +type ValidationRule_MaxVal struct { + MaxVal float64 `protobuf:"fixed64,4,opt,name=max_val,json=maxVal,proto3,oneof"` +} + +type ValidationRule_StartsWith struct { + StartsWith string `protobuf:"bytes,5,opt,name=starts_with,json=startsWith,proto3,oneof"` +} + +type ValidationRule_EndsWith struct { + EndsWith string `protobuf:"bytes,6,opt,name=ends_with,json=endsWith,proto3,oneof"` +} + +type ValidationRule_MatchesRegexp struct { + MatchesRegexp string `protobuf:"bytes,7,opt,name=matches_regexp,json=matchesRegexp,proto3,oneof"` +} + +type ValidationRule_Is_ struct { + Is ValidationRule_Is `protobuf:"varint,8,opt,name=is,proto3,enum=encore.parser.schema.v1.ValidationRule_Is,oneof"` +} + +func (*ValidationRule_MinLen) isValidationRule_Rule() {} + +func (*ValidationRule_MaxLen) isValidationRule_Rule() {} + +func (*ValidationRule_MinVal) isValidationRule_Rule() {} + +func (*ValidationRule_MaxVal) isValidationRule_Rule() {} + +func (*ValidationRule_StartsWith) isValidationRule_Rule() {} + +func (*ValidationRule_EndsWith) isValidationRule_Rule() {} + +func (*ValidationRule_MatchesRegexp) isValidationRule_Rule() {} + +func (*ValidationRule_Is_) isValidationRule_Rule() {} + +type ValidationExpr struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Expr: + // + // *ValidationExpr_Rule + // *ValidationExpr_And_ + // *ValidationExpr_Or_ + Expr isValidationExpr_Expr `protobuf_oneof:"expr"` +} + +func (x *ValidationExpr) Reset() { + *x = ValidationExpr{} + if protoimpl.UnsafeEnabled { + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ValidationExpr) String() string { + return 
protoimpl.X.MessageStringOf(x) +} + +func (*ValidationExpr) ProtoMessage() {} + +func (x *ValidationExpr) ProtoReflect() protoreflect.Message { + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ValidationExpr.ProtoReflect.Descriptor instead. +func (*ValidationExpr) Descriptor() ([]byte, []int) { + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{2} +} + +func (m *ValidationExpr) GetExpr() isValidationExpr_Expr { + if m != nil { + return m.Expr + } + return nil +} + +func (x *ValidationExpr) GetRule() *ValidationRule { + if x, ok := x.GetExpr().(*ValidationExpr_Rule); ok { + return x.Rule + } + return nil +} + +func (x *ValidationExpr) GetAnd() *ValidationExpr_And { + if x, ok := x.GetExpr().(*ValidationExpr_And_); ok { + return x.And + } + return nil +} + +func (x *ValidationExpr) GetOr() *ValidationExpr_Or { + if x, ok := x.GetExpr().(*ValidationExpr_Or_); ok { + return x.Or + } + return nil +} + +type isValidationExpr_Expr interface { + isValidationExpr_Expr() +} + +type ValidationExpr_Rule struct { + Rule *ValidationRule `protobuf:"bytes,1,opt,name=rule,proto3,oneof"` +} + +type ValidationExpr_And_ struct { + And *ValidationExpr_And `protobuf:"bytes,2,opt,name=and,proto3,oneof"` +} + +type ValidationExpr_Or_ struct { + Or *ValidationExpr_Or `protobuf:"bytes,3,opt,name=or,proto3,oneof"` +} + +func (*ValidationExpr_Rule) isValidationExpr_Expr() {} + +func (*ValidationExpr_And_) isValidationExpr_Expr() {} + +func (*ValidationExpr_Or_) isValidationExpr_Expr() {} + // TypeParameterRef is a reference to a `TypeParameter` within a declaration block type TypeParameterRef struct { state protoimpl.MessageState @@ -338,7 +655,7 @@ type TypeParameterRef struct { func (x *TypeParameterRef) Reset() { *x = 
TypeParameterRef{} if protoimpl.UnsafeEnabled { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[1] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -351,7 +668,7 @@ func (x *TypeParameterRef) String() string { func (*TypeParameterRef) ProtoMessage() {} func (x *TypeParameterRef) ProtoReflect() protoreflect.Message { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[1] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[3] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -364,7 +681,7 @@ func (x *TypeParameterRef) ProtoReflect() protoreflect.Message { // Deprecated: Use TypeParameterRef.ProtoReflect.Descriptor instead. func (*TypeParameterRef) Descriptor() ([]byte, []int) { - return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{1} + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{3} } func (x *TypeParameterRef) GetDeclId() uint32 { @@ -424,7 +741,7 @@ type Decl struct { func (x *Decl) Reset() { *x = Decl{} if protoimpl.UnsafeEnabled { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[2] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -437,7 +754,7 @@ func (x *Decl) String() string { func (*Decl) ProtoMessage() {} func (x *Decl) ProtoReflect() protoreflect.Message { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[2] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -450,7 +767,7 @@ func (x *Decl) ProtoReflect() protoreflect.Message { // Deprecated: Use Decl.ProtoReflect.Descriptor instead. 
func (*Decl) Descriptor() ([]byte, []int) { - return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{2} + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{4} } func (x *Decl) GetId() uint32 { @@ -508,7 +825,7 @@ type TypeParameter struct { func (x *TypeParameter) Reset() { *x = TypeParameter{} if protoimpl.UnsafeEnabled { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[3] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -521,7 +838,7 @@ func (x *TypeParameter) String() string { func (*TypeParameter) ProtoMessage() {} func (x *TypeParameter) ProtoReflect() protoreflect.Message { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[3] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -534,7 +851,7 @@ func (x *TypeParameter) ProtoReflect() protoreflect.Message { // Deprecated: Use TypeParameter.ProtoReflect.Descriptor instead. 
func (*TypeParameter) Descriptor() ([]byte, []int) { - return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{3} + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{5} } func (x *TypeParameter) GetName() string { @@ -564,7 +881,7 @@ type Loc struct { func (x *Loc) Reset() { *x = Loc{} if protoimpl.UnsafeEnabled { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[4] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -577,7 +894,7 @@ func (x *Loc) String() string { func (*Loc) ProtoMessage() {} func (x *Loc) ProtoReflect() protoreflect.Message { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[4] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -590,7 +907,7 @@ func (x *Loc) ProtoReflect() protoreflect.Message { // Deprecated: Use Loc.ProtoReflect.Descriptor instead. 
func (*Loc) Descriptor() ([]byte, []int) { - return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{4} + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{6} } func (x *Loc) GetPkgPath() string { @@ -669,7 +986,7 @@ type Named struct { func (x *Named) Reset() { *x = Named{} if protoimpl.UnsafeEnabled { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[5] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -682,7 +999,7 @@ func (x *Named) String() string { func (*Named) ProtoMessage() {} func (x *Named) ProtoReflect() protoreflect.Message { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[5] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -695,7 +1012,7 @@ func (x *Named) ProtoReflect() protoreflect.Message { // Deprecated: Use Named.ProtoReflect.Descriptor instead. 
func (*Named) Descriptor() ([]byte, []int) { - return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{5} + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{7} } func (x *Named) GetId() uint32 { @@ -724,7 +1041,7 @@ type Struct struct { func (x *Struct) Reset() { *x = Struct{} if protoimpl.UnsafeEnabled { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[6] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -737,7 +1054,7 @@ func (x *Struct) String() string { func (*Struct) ProtoMessage() {} func (x *Struct) ProtoReflect() protoreflect.Message { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[6] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -750,7 +1067,7 @@ func (x *Struct) ProtoReflect() protoreflect.Message { // Deprecated: Use Struct.ProtoReflect.Descriptor instead. 
func (*Struct) Descriptor() ([]byte, []int) { - return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{6} + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{8} } func (x *Struct) GetFields() []*Field { @@ -780,7 +1097,7 @@ type Field struct { func (x *Field) Reset() { *x = Field{} if protoimpl.UnsafeEnabled { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[7] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -793,7 +1110,7 @@ func (x *Field) String() string { func (*Field) ProtoMessage() {} func (x *Field) ProtoReflect() protoreflect.Message { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[7] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -806,7 +1123,7 @@ func (x *Field) ProtoReflect() protoreflect.Message { // Deprecated: Use Field.ProtoReflect.Descriptor instead. 
func (*Field) Descriptor() ([]byte, []int) { - return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{7} + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{9} } func (x *Field) GetTyp() *Type { @@ -888,7 +1205,7 @@ type WireSpec struct { func (x *WireSpec) Reset() { *x = WireSpec{} if protoimpl.UnsafeEnabled { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[8] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -901,7 +1218,7 @@ func (x *WireSpec) String() string { func (*WireSpec) ProtoMessage() {} func (x *WireSpec) ProtoReflect() protoreflect.Message { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[8] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -914,7 +1231,7 @@ func (x *WireSpec) ProtoReflect() protoreflect.Message { // Deprecated: Use WireSpec.ProtoReflect.Descriptor instead. 
func (*WireSpec) Descriptor() ([]byte, []int) { - return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{8} + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{10} } func (m *WireSpec) GetLocation() isWireSpec_Location { @@ -967,7 +1284,7 @@ type Tag struct { func (x *Tag) Reset() { *x = Tag{} if protoimpl.UnsafeEnabled { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[9] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[11] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -980,7 +1297,7 @@ func (x *Tag) String() string { func (*Tag) ProtoMessage() {} func (x *Tag) ProtoReflect() protoreflect.Message { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[9] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[11] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -993,7 +1310,7 @@ func (x *Tag) ProtoReflect() protoreflect.Message { // Deprecated: Use Tag.ProtoReflect.Descriptor instead. 
func (*Tag) Descriptor() ([]byte, []int) { - return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{9} + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{11} } func (x *Tag) GetKey() string { @@ -1030,7 +1347,7 @@ type Map struct { func (x *Map) Reset() { *x = Map{} if protoimpl.UnsafeEnabled { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[10] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1043,7 +1360,7 @@ func (x *Map) String() string { func (*Map) ProtoMessage() {} func (x *Map) ProtoReflect() protoreflect.Message { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[10] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1056,7 +1373,7 @@ func (x *Map) ProtoReflect() protoreflect.Message { // Deprecated: Use Map.ProtoReflect.Descriptor instead. 
func (*Map) Descriptor() ([]byte, []int) { - return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{10} + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{12} } func (x *Map) GetKey() *Type { @@ -1085,7 +1402,7 @@ type List struct { func (x *List) Reset() { *x = List{} if protoimpl.UnsafeEnabled { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[11] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1098,7 +1415,7 @@ func (x *List) String() string { func (*List) ProtoMessage() {} func (x *List) ProtoReflect() protoreflect.Message { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[11] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1111,7 +1428,7 @@ func (x *List) ProtoReflect() protoreflect.Message { // Deprecated: Use List.ProtoReflect.Descriptor instead. 
func (*List) Descriptor() ([]byte, []int) { - return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{11} + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{13} } func (x *List) GetElem() *Type { @@ -1133,7 +1450,7 @@ type Pointer struct { func (x *Pointer) Reset() { *x = Pointer{} if protoimpl.UnsafeEnabled { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[12] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1146,7 +1463,7 @@ func (x *Pointer) String() string { func (*Pointer) ProtoMessage() {} func (x *Pointer) ProtoReflect() protoreflect.Message { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[12] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[14] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1159,7 +1476,7 @@ func (x *Pointer) ProtoReflect() protoreflect.Message { // Deprecated: Use Pointer.ProtoReflect.Descriptor instead. 
func (*Pointer) Descriptor() ([]byte, []int) { - return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{12} + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{14} } func (x *Pointer) GetBase() *Type { @@ -1181,7 +1498,7 @@ type Union struct { func (x *Union) Reset() { *x = Union{} if protoimpl.UnsafeEnabled { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[13] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1194,7 +1511,7 @@ func (x *Union) String() string { func (*Union) ProtoMessage() {} func (x *Union) ProtoReflect() protoreflect.Message { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[13] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1207,7 +1524,7 @@ func (x *Union) ProtoReflect() protoreflect.Message { // Deprecated: Use Union.ProtoReflect.Descriptor instead. 
func (*Union) Descriptor() ([]byte, []int) { - return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{13} + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{15} } func (x *Union) GetTypes() []*Type { @@ -1236,7 +1553,7 @@ type Literal struct { func (x *Literal) Reset() { *x = Literal{} if protoimpl.UnsafeEnabled { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[14] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1249,7 +1566,7 @@ func (x *Literal) String() string { func (*Literal) ProtoMessage() {} func (x *Literal) ProtoReflect() protoreflect.Message { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[14] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1262,7 +1579,7 @@ func (x *Literal) ProtoReflect() protoreflect.Message { // Deprecated: Use Literal.ProtoReflect.Descriptor instead. 
func (*Literal) Descriptor() ([]byte, []int) { - return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{14} + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{16} } func (m *Literal) GetValue() isLiteral_Value { @@ -1354,7 +1671,7 @@ type ConfigValue struct { func (x *ConfigValue) Reset() { *x = ConfigValue{} if protoimpl.UnsafeEnabled { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[15] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[17] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1367,7 +1684,7 @@ func (x *ConfigValue) String() string { func (*ConfigValue) ProtoMessage() {} func (x *ConfigValue) ProtoReflect() protoreflect.Message { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[15] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[17] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1380,7 +1697,7 @@ func (x *ConfigValue) ProtoReflect() protoreflect.Message { // Deprecated: Use ConfigValue.ProtoReflect.Descriptor instead. 
func (*ConfigValue) Descriptor() ([]byte, []int) { - return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{15} + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{17} } func (x *ConfigValue) GetElem() *Type { @@ -1397,6 +1714,100 @@ func (x *ConfigValue) GetIsValuesList() bool { return false } +type ValidationExpr_And struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Exprs []*ValidationExpr `protobuf:"bytes,1,rep,name=exprs,proto3" json:"exprs,omitempty"` +} + +func (x *ValidationExpr_And) Reset() { + *x = ValidationExpr_And{} + if protoimpl.UnsafeEnabled { + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ValidationExpr_And) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ValidationExpr_And) ProtoMessage() {} + +func (x *ValidationExpr_And) ProtoReflect() protoreflect.Message { + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ValidationExpr_And.ProtoReflect.Descriptor instead. 
+func (*ValidationExpr_And) Descriptor() ([]byte, []int) { + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{2, 0} +} + +func (x *ValidationExpr_And) GetExprs() []*ValidationExpr { + if x != nil { + return x.Exprs + } + return nil +} + +type ValidationExpr_Or struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Exprs []*ValidationExpr `protobuf:"bytes,1,rep,name=exprs,proto3" json:"exprs,omitempty"` +} + +func (x *ValidationExpr_Or) Reset() { + *x = ValidationExpr_Or{} + if protoimpl.UnsafeEnabled { + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ValidationExpr_Or) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ValidationExpr_Or) ProtoMessage() {} + +func (x *ValidationExpr_Or) ProtoReflect() protoreflect.Message { + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[19] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ValidationExpr_Or.ProtoReflect.Descriptor instead. 
+func (*ValidationExpr_Or) Descriptor() ([]byte, []int) { + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{2, 1} +} + +func (x *ValidationExpr_Or) GetExprs() []*ValidationExpr { + if x != nil { + return x.Exprs + } + return nil +} + type WireSpec_Header struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -1410,7 +1821,7 @@ type WireSpec_Header struct { func (x *WireSpec_Header) Reset() { *x = WireSpec_Header{} if protoimpl.UnsafeEnabled { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[16] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[20] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1423,7 +1834,7 @@ func (x *WireSpec_Header) String() string { func (*WireSpec_Header) ProtoMessage() {} func (x *WireSpec_Header) ProtoReflect() protoreflect.Message { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[16] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[20] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1436,7 +1847,7 @@ func (x *WireSpec_Header) ProtoReflect() protoreflect.Message { // Deprecated: Use WireSpec_Header.ProtoReflect.Descriptor instead. 
func (*WireSpec_Header) Descriptor() ([]byte, []int) { - return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{8, 0} + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{10, 0} } func (x *WireSpec_Header) GetName() string { @@ -1459,7 +1870,7 @@ type WireSpec_Query struct { func (x *WireSpec_Query) Reset() { *x = WireSpec_Query{} if protoimpl.UnsafeEnabled { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[17] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[21] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1472,7 +1883,7 @@ func (x *WireSpec_Query) String() string { func (*WireSpec_Query) ProtoMessage() {} func (x *WireSpec_Query) ProtoReflect() protoreflect.Message { - mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[17] + mi := &file_encore_parser_schema_v1_schema_proto_msgTypes[21] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1485,7 +1896,7 @@ func (x *WireSpec_Query) ProtoReflect() protoreflect.Message { // Deprecated: Use WireSpec_Query.ProtoReflect.Descriptor instead. 
func (*WireSpec_Query) Descriptor() ([]byte, []int) { - return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{8, 1} + return file_encore_parser_schema_v1_schema_proto_rawDescGZIP(), []int{10, 1} } func (x *WireSpec_Query) GetName() string { @@ -1502,7 +1913,7 @@ var file_encore_parser_schema_v1_schema_proto_rawDesc = []byte{ 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x17, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x22, - 0xed, 0x04, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, 0x36, 0x0a, 0x05, 0x6e, 0x61, 0x6d, 0x65, + 0xca, 0x05, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, 0x36, 0x0a, 0x05, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x48, 0x00, 0x52, 0x05, 0x6e, 0x61, 0x6d, 0x65, 0x64, @@ -1540,149 +1951,198 @@ var file_encore_parser_schema_v1_schema_proto_rawDesc = []byte{ 0x69, 0x67, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x48, 0x00, - 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x42, 0x05, 0x0a, 0x03, 0x74, 0x79, 0x70, 0x22, - 0x48, 0x0a, 0x10, 0x54, 0x79, 0x70, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x52, 0x65, 0x66, 0x12, 0x17, 0x0a, 0x07, 0x64, 0x65, 0x63, 0x6c, 0x5f, 0x69, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x64, 0x65, 0x63, 0x6c, 0x49, 0x64, 0x12, 0x1b, 0x0a, 0x09, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x5f, 0x69, 0x64, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, - 0x08, 0x70, 
0x61, 0x72, 0x61, 0x6d, 0x49, 0x64, 0x78, 0x22, 0xe8, 0x01, 0x0a, 0x04, 0x44, 0x65, - 0x63, 0x6c, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, - 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x31, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, - 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x54, - 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x47, 0x0a, 0x0b, 0x74, 0x79, 0x70, - 0x65, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, - 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x74, 0x79, 0x70, 0x65, 0x50, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x12, 0x10, 0x0a, 0x03, 0x64, 0x6f, 0x63, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x64, 0x6f, 0x63, 0x12, 0x2e, 0x0a, 0x03, 0x6c, 0x6f, 0x63, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1c, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, - 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x6f, 0x63, 0x52, - 0x03, 0x6c, 0x6f, 0x63, 0x22, 0x23, 0x0a, 0x0d, 0x54, 0x79, 0x70, 0x65, 0x50, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x99, 0x02, 0x0a, 0x03, 0x4c, 0x6f, - 0x63, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x6b, 0x67, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x6b, 0x67, 0x50, 0x61, 0x74, 0x68, 0x12, 0x19, 0x0a, 0x08, - 0x70, 0x6b, 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 
0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, - 0x70, 0x6b, 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x6e, - 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x6e, - 0x61, 0x6d, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x70, 0x6f, 0x73, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x50, 0x6f, 0x73, - 0x12, 0x17, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x5f, 0x70, 0x6f, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x05, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x50, 0x6f, 0x73, 0x12, 0x24, 0x0a, 0x0e, 0x73, 0x72, 0x63, - 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, - 0x05, 0x52, 0x0c, 0x73, 0x72, 0x63, 0x4c, 0x69, 0x6e, 0x65, 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, - 0x20, 0x0a, 0x0c, 0x73, 0x72, 0x63, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x65, 0x6e, 0x64, 0x18, - 0x07, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x73, 0x72, 0x63, 0x4c, 0x69, 0x6e, 0x65, 0x45, 0x6e, - 0x64, 0x12, 0x22, 0x0a, 0x0d, 0x73, 0x72, 0x63, 0x5f, 0x63, 0x6f, 0x6c, 0x5f, 0x73, 0x74, 0x61, - 0x72, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0b, 0x73, 0x72, 0x63, 0x43, 0x6f, 0x6c, - 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x1e, 0x0a, 0x0b, 0x73, 0x72, 0x63, 0x5f, 0x63, 0x6f, 0x6c, - 0x5f, 0x65, 0x6e, 0x64, 0x18, 0x09, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x73, 0x72, 0x63, 0x43, - 0x6f, 0x6c, 0x45, 0x6e, 0x64, 0x22, 0x5d, 0x0a, 0x05, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x12, 0x0e, - 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x44, - 0x0a, 0x0e, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, - 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, - 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, - 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0d, 0x74, 0x79, 0x70, 0x65, 0x41, 0x72, 0x67, 
0x75, 0x6d, - 0x65, 0x6e, 0x74, 0x73, 0x22, 0x40, 0x0a, 0x06, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x12, 0x36, - 0x0a, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, - 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x06, - 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x22, 0xd3, 0x02, 0x0a, 0x05, 0x46, 0x69, 0x65, 0x6c, 0x64, - 0x12, 0x2f, 0x0a, 0x03, 0x74, 0x79, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, - 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, - 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x03, 0x74, 0x79, - 0x70, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x64, 0x6f, 0x63, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x64, 0x6f, 0x63, 0x12, 0x1b, 0x0a, 0x09, 0x6a, 0x73, 0x6f, 0x6e, 0x5f, - 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6a, 0x73, 0x6f, 0x6e, - 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, - 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, - 0x12, 0x2a, 0x0a, 0x11, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, - 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x71, 0x75, 0x65, - 0x72, 0x79, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x17, 0x0a, 0x07, - 0x72, 0x61, 0x77, 0x5f, 0x74, 0x61, 0x67, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x72, - 0x61, 0x77, 0x54, 0x61, 0x67, 0x12, 0x30, 0x0a, 0x04, 0x74, 0x61, 0x67, 0x73, 0x18, 0x08, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, - 0x73, 0x65, 0x72, 0x2e, 
0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x61, - 0x67, 0x52, 0x04, 0x74, 0x61, 0x67, 0x73, 0x12, 0x3a, 0x0a, 0x04, 0x77, 0x69, 0x72, 0x65, 0x18, - 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, - 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, - 0x57, 0x69, 0x72, 0x65, 0x53, 0x70, 0x65, 0x63, 0x48, 0x00, 0x52, 0x04, 0x77, 0x69, 0x72, 0x65, - 0x88, 0x01, 0x01, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x77, 0x69, 0x72, 0x65, 0x22, 0xf2, 0x01, 0x0a, - 0x08, 0x57, 0x69, 0x72, 0x65, 0x53, 0x70, 0x65, 0x63, 0x12, 0x42, 0x0a, 0x06, 0x68, 0x65, 0x61, - 0x64, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x6e, 0x63, 0x6f, - 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x2e, 0x76, 0x31, 0x2e, 0x57, 0x69, 0x72, 0x65, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x48, 0x65, 0x61, - 0x64, 0x65, 0x72, 0x48, 0x00, 0x52, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x3f, 0x0a, - 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, + 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x4c, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x69, + 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x57, 0x69, 0x72, 0x65, 0x53, 0x70, 0x65, 0x63, 0x2e, - 0x51, 0x75, 0x65, 0x72, 0x79, 0x48, 0x00, 0x52, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x1a, 0x2a, - 0x0a, 0x06, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x17, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x88, 0x01, - 0x01, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x1a, 0x29, 0x0a, 0x05, 0x51, 0x75, - 0x65, 0x72, 0x79, 0x12, 0x17, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 
0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x48, 0x00, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x42, 0x07, 0x0a, 0x05, - 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x42, 0x0a, 0x0a, 0x08, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x22, 0x45, 0x0a, 0x03, 0x54, 0x61, 0x67, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, - 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, - 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x6b, 0x0a, 0x03, 0x4d, 0x61, 0x70, 0x12, - 0x2f, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x65, + 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x45, 0x78, 0x70, 0x72, 0x48, 0x01, 0x52, 0x0a, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x42, 0x05, 0x0a, 0x03, 0x74, 0x79, 0x70, 0x42, 0x0d, 0x0a, + 0x0b, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xd4, 0x02, 0x0a, + 0x0e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x75, 0x6c, 0x65, 0x12, + 0x19, 0x0a, 0x07, 0x6d, 0x69, 0x6e, 0x5f, 0x6c, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, + 0x48, 0x00, 0x52, 0x06, 0x6d, 0x69, 0x6e, 0x4c, 0x65, 0x6e, 0x12, 0x19, 0x0a, 0x07, 0x6d, 0x61, + 0x78, 0x5f, 0x6c, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x48, 0x00, 0x52, 0x06, 0x6d, + 0x61, 0x78, 0x4c, 0x65, 0x6e, 0x12, 0x19, 0x0a, 0x07, 0x6d, 0x69, 0x6e, 0x5f, 0x76, 0x61, 0x6c, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x48, 0x00, 0x52, 0x06, 0x6d, 0x69, 0x6e, 0x56, 0x61, 0x6c, + 0x12, 0x19, 0x0a, 0x07, 0x6d, 0x61, 0x78, 0x5f, 0x76, 0x61, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x01, 0x48, 0x00, 0x52, 0x06, 0x6d, 0x61, 0x78, 0x56, 0x61, 0x6c, 0x12, 0x21, 0x0a, 0x0b, 0x73, + 0x74, 
0x61, 0x72, 0x74, 0x73, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, + 0x48, 0x00, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x73, 0x57, 0x69, 0x74, 0x68, 0x12, 0x1d, + 0x0a, 0x09, 0x65, 0x6e, 0x64, 0x73, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x18, 0x06, 0x20, 0x01, 0x28, + 0x09, 0x48, 0x00, 0x52, 0x08, 0x65, 0x6e, 0x64, 0x73, 0x57, 0x69, 0x74, 0x68, 0x12, 0x27, 0x0a, + 0x0e, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x5f, 0x72, 0x65, 0x67, 0x65, 0x78, 0x70, 0x18, + 0x07, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0d, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, + 0x52, 0x65, 0x67, 0x65, 0x78, 0x70, 0x12, 0x3c, 0x0a, 0x02, 0x69, 0x73, 0x18, 0x08, 0x20, 0x01, + 0x28, 0x0e, 0x32, 0x2a, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, + 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, + 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x75, 0x6c, 0x65, 0x2e, 0x49, 0x73, 0x48, 0x00, + 0x52, 0x02, 0x69, 0x73, 0x22, 0x25, 0x0a, 0x02, 0x49, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, + 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x4d, 0x41, 0x49, 0x4c, + 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03, 0x55, 0x52, 0x4c, 0x10, 0x02, 0x42, 0x06, 0x0a, 0x04, 0x72, + 0x75, 0x6c, 0x65, 0x22, 0xe1, 0x02, 0x0a, 0x0e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x45, 0x78, 0x70, 0x72, 0x12, 0x3d, 0x0a, 0x04, 0x72, 0x75, 0x6c, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, + 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x56, + 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x75, 0x6c, 0x65, 0x48, 0x00, 0x52, + 0x04, 0x72, 0x75, 0x6c, 0x65, 0x12, 0x3f, 0x0a, 0x03, 0x61, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, + 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 
0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, + 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x78, 0x70, 0x72, 0x2e, 0x41, 0x6e, 0x64, 0x48, + 0x00, 0x52, 0x03, 0x61, 0x6e, 0x64, 0x12, 0x3c, 0x0a, 0x02, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, + 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, + 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x78, 0x70, 0x72, 0x2e, 0x4f, 0x72, 0x48, 0x00, + 0x52, 0x02, 0x6f, 0x72, 0x1a, 0x44, 0x0a, 0x03, 0x41, 0x6e, 0x64, 0x12, 0x3d, 0x0a, 0x05, 0x65, + 0x78, 0x70, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x6e, 0x63, + 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, + 0x78, 0x70, 0x72, 0x52, 0x05, 0x65, 0x78, 0x70, 0x72, 0x73, 0x1a, 0x43, 0x0a, 0x02, 0x4f, 0x72, + 0x12, 0x3d, 0x0a, 0x05, 0x65, 0x78, 0x70, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x27, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, + 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x78, 0x70, 0x72, 0x52, 0x05, 0x65, 0x78, 0x70, 0x72, 0x73, 0x42, + 0x06, 0x0a, 0x04, 0x65, 0x78, 0x70, 0x72, 0x22, 0x48, 0x0a, 0x10, 0x54, 0x79, 0x70, 0x65, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x65, 0x66, 0x12, 0x17, 0x0a, 0x07, 0x64, + 0x65, 0x63, 0x6c, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x64, 0x65, + 0x63, 0x6c, 0x49, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x5f, 0x69, 0x64, + 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x08, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x49, 0x64, + 0x78, 0x22, 0xe8, 0x01, 0x0a, 0x04, 0x44, 0x65, 0x63, 0x6c, 0x12, 0x0e, 0x0a, 
0x02, 0x69, 0x64, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x31, + 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x03, 0x6b, 0x65, 0x79, - 0x12, 0x33, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x1d, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, - 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x39, 0x0a, 0x04, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x31, 0x0a, - 0x04, 0x65, 0x6c, 0x65, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x65, 0x6e, - 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x65, 0x6c, 0x65, 0x6d, - 0x22, 0x3c, 0x0a, 0x07, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x12, 0x31, 0x0a, 0x04, 0x62, - 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x65, 0x6e, 0x63, 0x6f, + 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, + 0x65, 0x12, 0x47, 0x0a, 0x0b, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, + 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, + 0x2e, 0x54, 0x79, 0x70, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, + 0x74, 0x79, 0x70, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x12, 0x10, 0x0a, 0x03, 0x64, 0x6f, + 0x63, 0x18, 0x04, 
0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x64, 0x6f, 0x63, 0x12, 0x2e, 0x0a, 0x03, + 0x6c, 0x6f, 0x63, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x22, 0x3c, - 0x0a, 0x05, 0x55, 0x6e, 0x69, 0x6f, 0x6e, 0x12, 0x33, 0x0a, 0x05, 0x74, 0x79, 0x70, 0x65, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, + 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x6f, 0x63, 0x52, 0x03, 0x6c, 0x6f, 0x63, 0x22, 0x23, 0x0a, 0x0d, + 0x54, 0x79, 0x70, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, 0x12, 0x0a, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x22, 0x99, 0x02, 0x0a, 0x03, 0x4c, 0x6f, 0x63, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x6b, 0x67, + 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x6b, 0x67, + 0x50, 0x61, 0x74, 0x68, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x6b, 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x6b, 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x12, + 0x1a, 0x0a, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x73, + 0x74, 0x61, 0x72, 0x74, 0x5f, 0x70, 0x6f, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, + 0x73, 0x74, 0x61, 0x72, 0x74, 0x50, 0x6f, 0x73, 0x12, 0x17, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x5f, + 0x70, 0x6f, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x50, 0x6f, + 0x73, 0x12, 0x24, 0x0a, 0x0e, 0x73, 0x72, 0x63, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, 0x74, + 0x61, 0x72, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0c, 0x73, 0x72, 0x63, 0x4c, 0x69, + 0x6e, 0x65, 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x20, 0x0a, 
0x0c, 0x73, 0x72, 0x63, 0x5f, 0x6c, + 0x69, 0x6e, 0x65, 0x5f, 0x65, 0x6e, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x73, + 0x72, 0x63, 0x4c, 0x69, 0x6e, 0x65, 0x45, 0x6e, 0x64, 0x12, 0x22, 0x0a, 0x0d, 0x73, 0x72, 0x63, + 0x5f, 0x63, 0x6f, 0x6c, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x05, + 0x52, 0x0b, 0x73, 0x72, 0x63, 0x43, 0x6f, 0x6c, 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x1e, 0x0a, + 0x0b, 0x73, 0x72, 0x63, 0x5f, 0x63, 0x6f, 0x6c, 0x5f, 0x65, 0x6e, 0x64, 0x18, 0x09, 0x20, 0x01, + 0x28, 0x05, 0x52, 0x09, 0x73, 0x72, 0x63, 0x43, 0x6f, 0x6c, 0x45, 0x6e, 0x64, 0x22, 0x5d, 0x0a, + 0x05, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x44, 0x0a, 0x0e, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x61, + 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, + 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0d, 0x74, + 0x79, 0x70, 0x65, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x40, 0x0a, 0x06, + 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x12, 0x36, 0x0a, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, + 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, - 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x05, 0x74, 0x79, 0x70, 0x65, 0x73, 0x22, 0x84, 0x01, 0x0a, - 0x07, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x12, 0x12, 0x0a, 0x03, 0x73, 0x74, 0x72, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x03, 0x73, 0x74, 0x72, 0x12, 0x1a, 0x0a, 0x07, - 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, - 0x07, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x12, 0x12, 0x0a, 0x03, 0x69, 0x6e, 0x74, 0x18, - 
0x03, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x03, 0x69, 0x6e, 0x74, 0x12, 0x16, 0x0a, 0x05, - 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x01, 0x48, 0x00, 0x52, 0x05, 0x66, - 0x6c, 0x6f, 0x61, 0x74, 0x12, 0x14, 0x0a, 0x04, 0x6e, 0x75, 0x6c, 0x6c, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x08, 0x48, 0x00, 0x52, 0x04, 0x6e, 0x75, 0x6c, 0x6c, 0x42, 0x07, 0x0a, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x22, 0x64, 0x0a, 0x0b, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x12, 0x31, 0x0a, 0x04, 0x65, 0x6c, 0x65, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x1d, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, - 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, - 0x04, 0x65, 0x6c, 0x65, 0x6d, 0x12, 0x22, 0x0a, 0x0c, 0x49, 0x73, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x73, 0x4c, 0x69, 0x73, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0c, 0x49, 0x73, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x2a, 0xe5, 0x01, 0x0a, 0x07, 0x42, 0x75, - 0x69, 0x6c, 0x74, 0x69, 0x6e, 0x12, 0x07, 0x0a, 0x03, 0x41, 0x4e, 0x59, 0x10, 0x00, 0x12, 0x08, - 0x0a, 0x04, 0x42, 0x4f, 0x4f, 0x4c, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x49, 0x4e, 0x54, 0x38, - 0x10, 0x02, 0x12, 0x09, 0x0a, 0x05, 0x49, 0x4e, 0x54, 0x31, 0x36, 0x10, 0x03, 0x12, 0x09, 0x0a, - 0x05, 0x49, 0x4e, 0x54, 0x33, 0x32, 0x10, 0x04, 0x12, 0x09, 0x0a, 0x05, 0x49, 0x4e, 0x54, 0x36, - 0x34, 0x10, 0x05, 0x12, 0x09, 0x0a, 0x05, 0x55, 0x49, 0x4e, 0x54, 0x38, 0x10, 0x06, 0x12, 0x0a, - 0x0a, 0x06, 0x55, 0x49, 0x4e, 0x54, 0x31, 0x36, 0x10, 0x07, 0x12, 0x0a, 0x0a, 0x06, 0x55, 0x49, - 0x4e, 0x54, 0x33, 0x32, 0x10, 0x08, 0x12, 0x0a, 0x0a, 0x06, 0x55, 0x49, 0x4e, 0x54, 0x36, 0x34, - 0x10, 0x09, 0x12, 0x0b, 0x0a, 0x07, 0x46, 0x4c, 0x4f, 0x41, 0x54, 0x33, 0x32, 0x10, 0x0a, 0x12, - 0x0b, 0x0a, 0x07, 0x46, 0x4c, 0x4f, 0x41, 0x54, 0x36, 0x34, 0x10, 0x0b, 0x12, 0x0a, 0x0a, 0x06, - 0x53, 0x54, 0x52, 0x49, 0x4e, 0x47, 
0x10, 0x0c, 0x12, 0x09, 0x0a, 0x05, 0x42, 0x59, 0x54, 0x45, - 0x53, 0x10, 0x0d, 0x12, 0x08, 0x0a, 0x04, 0x54, 0x49, 0x4d, 0x45, 0x10, 0x0e, 0x12, 0x08, 0x0a, - 0x04, 0x55, 0x55, 0x49, 0x44, 0x10, 0x0f, 0x12, 0x08, 0x0a, 0x04, 0x4a, 0x53, 0x4f, 0x4e, 0x10, - 0x10, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x53, 0x45, 0x52, 0x5f, 0x49, 0x44, 0x10, 0x11, 0x12, 0x07, - 0x0a, 0x03, 0x49, 0x4e, 0x54, 0x10, 0x12, 0x12, 0x08, 0x0a, 0x04, 0x55, 0x49, 0x4e, 0x54, 0x10, - 0x13, 0x42, 0x28, 0x5a, 0x26, 0x65, 0x6e, 0x63, 0x72, 0x2e, 0x64, 0x65, 0x76, 0x2f, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x61, 0x72, 0x73, 0x65, - 0x72, 0x2f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2f, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x33, + 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x22, 0xd3, + 0x02, 0x0a, 0x05, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x2f, 0x0a, 0x03, 0x74, 0x79, 0x70, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, + 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, + 0x54, 0x79, 0x70, 0x65, 0x52, 0x03, 0x74, 0x79, 0x70, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, + 0x03, 0x64, 0x6f, 0x63, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x64, 0x6f, 0x63, 0x12, + 0x1b, 0x0a, 0x09, 0x6a, 0x73, 0x6f, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x08, 0x6a, 0x73, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1a, 0x0a, 0x08, + 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, + 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x12, 0x2a, 0x0a, 0x11, 0x71, 0x75, 0x65, 0x72, + 0x79, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x06, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0f, 0x71, 0x75, 0x65, 0x72, 
0x79, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, + 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x72, 0x61, 0x77, 0x5f, 0x74, 0x61, 0x67, 0x18, + 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x72, 0x61, 0x77, 0x54, 0x61, 0x67, 0x12, 0x30, 0x0a, + 0x04, 0x74, 0x61, 0x67, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x65, 0x6e, + 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x61, 0x67, 0x52, 0x04, 0x74, 0x61, 0x67, 0x73, 0x12, + 0x3a, 0x0a, 0x04, 0x77, 0x69, 0x72, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, + 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x57, 0x69, 0x72, 0x65, 0x53, 0x70, 0x65, 0x63, + 0x48, 0x00, 0x52, 0x04, 0x77, 0x69, 0x72, 0x65, 0x88, 0x01, 0x01, 0x42, 0x07, 0x0a, 0x05, 0x5f, + 0x77, 0x69, 0x72, 0x65, 0x22, 0xf2, 0x01, 0x0a, 0x08, 0x57, 0x69, 0x72, 0x65, 0x53, 0x70, 0x65, + 0x63, 0x12, 0x42, 0x0a, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x28, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, + 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x57, 0x69, 0x72, 0x65, + 0x53, 0x70, 0x65, 0x63, 0x2e, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x48, 0x00, 0x52, 0x06, 0x68, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x3f, 0x0a, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, + 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x57, + 0x69, 0x72, 0x65, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x48, 0x00, 0x52, + 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x1a, 0x2a, 0x0a, 0x06, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, + 0x12, 0x17, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 
0x00, + 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x6e, 0x61, + 0x6d, 0x65, 0x1a, 0x29, 0x0a, 0x05, 0x51, 0x75, 0x65, 0x72, 0x79, 0x12, 0x17, 0x0a, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x88, 0x01, 0x01, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x42, 0x0a, 0x0a, + 0x08, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x45, 0x0a, 0x03, 0x54, 0x61, 0x67, + 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, + 0x65, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x22, 0x6b, 0x0a, 0x03, 0x4d, 0x61, 0x70, 0x12, 0x2f, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, + 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x54, + 0x79, 0x70, 0x65, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x33, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, + 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, + 0x31, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x39, 0x0a, + 0x04, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x04, 0x65, 0x6c, 0x65, 0x6d, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, + 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x79, + 0x70, 0x65, 0x52, 0x04, 0x65, 0x6c, 0x65, 0x6d, 0x22, 0x3c, 0x0a, 0x07, 0x50, 0x6f, 0x69, 0x6e, + 0x74, 0x65, 0x72, 0x12, 0x31, 
0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1d, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, + 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x79, 0x70, 0x65, + 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x22, 0x3c, 0x0a, 0x05, 0x55, 0x6e, 0x69, 0x6f, 0x6e, 0x12, + 0x33, 0x0a, 0x05, 0x74, 0x79, 0x70, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, + 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x05, 0x74, + 0x79, 0x70, 0x65, 0x73, 0x22, 0x84, 0x01, 0x0a, 0x07, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, + 0x12, 0x12, 0x0a, 0x03, 0x73, 0x74, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, + 0x03, 0x73, 0x74, 0x72, 0x12, 0x1a, 0x0a, 0x07, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x07, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, + 0x12, 0x12, 0x0a, 0x03, 0x69, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, + 0x03, 0x69, 0x6e, 0x74, 0x12, 0x16, 0x0a, 0x05, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x01, 0x48, 0x00, 0x52, 0x05, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x12, 0x14, 0x0a, 0x04, + 0x6e, 0x75, 0x6c, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x04, 0x6e, 0x75, + 0x6c, 0x6c, 0x42, 0x07, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x64, 0x0a, 0x0b, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x31, 0x0a, 0x04, 0x65, 0x6c, + 0x65, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x65, 0x6e, 0x63, 0x6f, 0x72, + 0x65, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, + 0x76, 0x31, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x65, 0x6c, 0x65, 0x6d, 0x12, 0x22, 0x0a, + 0x0c, 0x49, 0x73, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x4c, 0x69, 
0x73, 0x74, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x08, 0x52, 0x0c, 0x49, 0x73, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x4c, 0x69, 0x73, + 0x74, 0x2a, 0xe5, 0x01, 0x0a, 0x07, 0x42, 0x75, 0x69, 0x6c, 0x74, 0x69, 0x6e, 0x12, 0x07, 0x0a, + 0x03, 0x41, 0x4e, 0x59, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x42, 0x4f, 0x4f, 0x4c, 0x10, 0x01, + 0x12, 0x08, 0x0a, 0x04, 0x49, 0x4e, 0x54, 0x38, 0x10, 0x02, 0x12, 0x09, 0x0a, 0x05, 0x49, 0x4e, + 0x54, 0x31, 0x36, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, 0x49, 0x4e, 0x54, 0x33, 0x32, 0x10, 0x04, + 0x12, 0x09, 0x0a, 0x05, 0x49, 0x4e, 0x54, 0x36, 0x34, 0x10, 0x05, 0x12, 0x09, 0x0a, 0x05, 0x55, + 0x49, 0x4e, 0x54, 0x38, 0x10, 0x06, 0x12, 0x0a, 0x0a, 0x06, 0x55, 0x49, 0x4e, 0x54, 0x31, 0x36, + 0x10, 0x07, 0x12, 0x0a, 0x0a, 0x06, 0x55, 0x49, 0x4e, 0x54, 0x33, 0x32, 0x10, 0x08, 0x12, 0x0a, + 0x0a, 0x06, 0x55, 0x49, 0x4e, 0x54, 0x36, 0x34, 0x10, 0x09, 0x12, 0x0b, 0x0a, 0x07, 0x46, 0x4c, + 0x4f, 0x41, 0x54, 0x33, 0x32, 0x10, 0x0a, 0x12, 0x0b, 0x0a, 0x07, 0x46, 0x4c, 0x4f, 0x41, 0x54, + 0x36, 0x34, 0x10, 0x0b, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x54, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x0c, + 0x12, 0x09, 0x0a, 0x05, 0x42, 0x59, 0x54, 0x45, 0x53, 0x10, 0x0d, 0x12, 0x08, 0x0a, 0x04, 0x54, + 0x49, 0x4d, 0x45, 0x10, 0x0e, 0x12, 0x08, 0x0a, 0x04, 0x55, 0x55, 0x49, 0x44, 0x10, 0x0f, 0x12, + 0x08, 0x0a, 0x04, 0x4a, 0x53, 0x4f, 0x4e, 0x10, 0x10, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x53, 0x45, + 0x52, 0x5f, 0x49, 0x44, 0x10, 0x11, 0x12, 0x07, 0x0a, 0x03, 0x49, 0x4e, 0x54, 0x10, 0x12, 0x12, + 0x08, 0x0a, 0x04, 0x55, 0x49, 0x4e, 0x54, 0x10, 0x13, 0x42, 0x28, 0x5a, 0x26, 0x65, 0x6e, 0x63, + 0x72, 0x2e, 0x64, 0x65, 0x76, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x65, 0x6e, 0x63, 0x6f, + 0x72, 0x65, 0x2f, 0x70, 0x61, 0x72, 0x73, 0x65, 0x72, 0x2f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x2f, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1697,61 +2157,73 @@ func file_encore_parser_schema_v1_schema_proto_rawDescGZIP() []byte { return 
file_encore_parser_schema_v1_schema_proto_rawDescData } -var file_encore_parser_schema_v1_schema_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_encore_parser_schema_v1_schema_proto_msgTypes = make([]protoimpl.MessageInfo, 18) +var file_encore_parser_schema_v1_schema_proto_enumTypes = make([]protoimpl.EnumInfo, 2) +var file_encore_parser_schema_v1_schema_proto_msgTypes = make([]protoimpl.MessageInfo, 22) var file_encore_parser_schema_v1_schema_proto_goTypes = []interface{}{ - (Builtin)(0), // 0: encore.parser.schema.v1.Builtin - (*Type)(nil), // 1: encore.parser.schema.v1.Type - (*TypeParameterRef)(nil), // 2: encore.parser.schema.v1.TypeParameterRef - (*Decl)(nil), // 3: encore.parser.schema.v1.Decl - (*TypeParameter)(nil), // 4: encore.parser.schema.v1.TypeParameter - (*Loc)(nil), // 5: encore.parser.schema.v1.Loc - (*Named)(nil), // 6: encore.parser.schema.v1.Named - (*Struct)(nil), // 7: encore.parser.schema.v1.Struct - (*Field)(nil), // 8: encore.parser.schema.v1.Field - (*WireSpec)(nil), // 9: encore.parser.schema.v1.WireSpec - (*Tag)(nil), // 10: encore.parser.schema.v1.Tag - (*Map)(nil), // 11: encore.parser.schema.v1.Map - (*List)(nil), // 12: encore.parser.schema.v1.List - (*Pointer)(nil), // 13: encore.parser.schema.v1.Pointer - (*Union)(nil), // 14: encore.parser.schema.v1.Union - (*Literal)(nil), // 15: encore.parser.schema.v1.Literal - (*ConfigValue)(nil), // 16: encore.parser.schema.v1.ConfigValue - (*WireSpec_Header)(nil), // 17: encore.parser.schema.v1.WireSpec.Header - (*WireSpec_Query)(nil), // 18: encore.parser.schema.v1.WireSpec.Query + (Builtin)(0), // 0: encore.parser.schema.v1.Builtin + (ValidationRule_Is)(0), // 1: encore.parser.schema.v1.ValidationRule.Is + (*Type)(nil), // 2: encore.parser.schema.v1.Type + (*ValidationRule)(nil), // 3: encore.parser.schema.v1.ValidationRule + (*ValidationExpr)(nil), // 4: encore.parser.schema.v1.ValidationExpr + (*TypeParameterRef)(nil), // 5: encore.parser.schema.v1.TypeParameterRef + 
(*Decl)(nil), // 6: encore.parser.schema.v1.Decl + (*TypeParameter)(nil), // 7: encore.parser.schema.v1.TypeParameter + (*Loc)(nil), // 8: encore.parser.schema.v1.Loc + (*Named)(nil), // 9: encore.parser.schema.v1.Named + (*Struct)(nil), // 10: encore.parser.schema.v1.Struct + (*Field)(nil), // 11: encore.parser.schema.v1.Field + (*WireSpec)(nil), // 12: encore.parser.schema.v1.WireSpec + (*Tag)(nil), // 13: encore.parser.schema.v1.Tag + (*Map)(nil), // 14: encore.parser.schema.v1.Map + (*List)(nil), // 15: encore.parser.schema.v1.List + (*Pointer)(nil), // 16: encore.parser.schema.v1.Pointer + (*Union)(nil), // 17: encore.parser.schema.v1.Union + (*Literal)(nil), // 18: encore.parser.schema.v1.Literal + (*ConfigValue)(nil), // 19: encore.parser.schema.v1.ConfigValue + (*ValidationExpr_And)(nil), // 20: encore.parser.schema.v1.ValidationExpr.And + (*ValidationExpr_Or)(nil), // 21: encore.parser.schema.v1.ValidationExpr.Or + (*WireSpec_Header)(nil), // 22: encore.parser.schema.v1.WireSpec.Header + (*WireSpec_Query)(nil), // 23: encore.parser.schema.v1.WireSpec.Query } var file_encore_parser_schema_v1_schema_proto_depIdxs = []int32{ - 6, // 0: encore.parser.schema.v1.Type.named:type_name -> encore.parser.schema.v1.Named - 7, // 1: encore.parser.schema.v1.Type.struct:type_name -> encore.parser.schema.v1.Struct - 11, // 2: encore.parser.schema.v1.Type.map:type_name -> encore.parser.schema.v1.Map - 12, // 3: encore.parser.schema.v1.Type.list:type_name -> encore.parser.schema.v1.List + 9, // 0: encore.parser.schema.v1.Type.named:type_name -> encore.parser.schema.v1.Named + 10, // 1: encore.parser.schema.v1.Type.struct:type_name -> encore.parser.schema.v1.Struct + 14, // 2: encore.parser.schema.v1.Type.map:type_name -> encore.parser.schema.v1.Map + 15, // 3: encore.parser.schema.v1.Type.list:type_name -> encore.parser.schema.v1.List 0, // 4: encore.parser.schema.v1.Type.builtin:type_name -> encore.parser.schema.v1.Builtin - 13, // 5: 
encore.parser.schema.v1.Type.pointer:type_name -> encore.parser.schema.v1.Pointer - 14, // 6: encore.parser.schema.v1.Type.union:type_name -> encore.parser.schema.v1.Union - 15, // 7: encore.parser.schema.v1.Type.literal:type_name -> encore.parser.schema.v1.Literal - 2, // 8: encore.parser.schema.v1.Type.type_parameter:type_name -> encore.parser.schema.v1.TypeParameterRef - 16, // 9: encore.parser.schema.v1.Type.config:type_name -> encore.parser.schema.v1.ConfigValue - 1, // 10: encore.parser.schema.v1.Decl.type:type_name -> encore.parser.schema.v1.Type - 4, // 11: encore.parser.schema.v1.Decl.type_params:type_name -> encore.parser.schema.v1.TypeParameter - 5, // 12: encore.parser.schema.v1.Decl.loc:type_name -> encore.parser.schema.v1.Loc - 1, // 13: encore.parser.schema.v1.Named.type_arguments:type_name -> encore.parser.schema.v1.Type - 8, // 14: encore.parser.schema.v1.Struct.fields:type_name -> encore.parser.schema.v1.Field - 1, // 15: encore.parser.schema.v1.Field.typ:type_name -> encore.parser.schema.v1.Type - 10, // 16: encore.parser.schema.v1.Field.tags:type_name -> encore.parser.schema.v1.Tag - 9, // 17: encore.parser.schema.v1.Field.wire:type_name -> encore.parser.schema.v1.WireSpec - 17, // 18: encore.parser.schema.v1.WireSpec.header:type_name -> encore.parser.schema.v1.WireSpec.Header - 18, // 19: encore.parser.schema.v1.WireSpec.query:type_name -> encore.parser.schema.v1.WireSpec.Query - 1, // 20: encore.parser.schema.v1.Map.key:type_name -> encore.parser.schema.v1.Type - 1, // 21: encore.parser.schema.v1.Map.value:type_name -> encore.parser.schema.v1.Type - 1, // 22: encore.parser.schema.v1.List.elem:type_name -> encore.parser.schema.v1.Type - 1, // 23: encore.parser.schema.v1.Pointer.base:type_name -> encore.parser.schema.v1.Type - 1, // 24: encore.parser.schema.v1.Union.types:type_name -> encore.parser.schema.v1.Type - 1, // 25: encore.parser.schema.v1.ConfigValue.elem:type_name -> encore.parser.schema.v1.Type - 26, // [26:26] is the sub-list for 
method output_type - 26, // [26:26] is the sub-list for method input_type - 26, // [26:26] is the sub-list for extension type_name - 26, // [26:26] is the sub-list for extension extendee - 0, // [0:26] is the sub-list for field type_name + 16, // 5: encore.parser.schema.v1.Type.pointer:type_name -> encore.parser.schema.v1.Pointer + 17, // 6: encore.parser.schema.v1.Type.union:type_name -> encore.parser.schema.v1.Union + 18, // 7: encore.parser.schema.v1.Type.literal:type_name -> encore.parser.schema.v1.Literal + 5, // 8: encore.parser.schema.v1.Type.type_parameter:type_name -> encore.parser.schema.v1.TypeParameterRef + 19, // 9: encore.parser.schema.v1.Type.config:type_name -> encore.parser.schema.v1.ConfigValue + 4, // 10: encore.parser.schema.v1.Type.validation:type_name -> encore.parser.schema.v1.ValidationExpr + 1, // 11: encore.parser.schema.v1.ValidationRule.is:type_name -> encore.parser.schema.v1.ValidationRule.Is + 3, // 12: encore.parser.schema.v1.ValidationExpr.rule:type_name -> encore.parser.schema.v1.ValidationRule + 20, // 13: encore.parser.schema.v1.ValidationExpr.and:type_name -> encore.parser.schema.v1.ValidationExpr.And + 21, // 14: encore.parser.schema.v1.ValidationExpr.or:type_name -> encore.parser.schema.v1.ValidationExpr.Or + 2, // 15: encore.parser.schema.v1.Decl.type:type_name -> encore.parser.schema.v1.Type + 7, // 16: encore.parser.schema.v1.Decl.type_params:type_name -> encore.parser.schema.v1.TypeParameter + 8, // 17: encore.parser.schema.v1.Decl.loc:type_name -> encore.parser.schema.v1.Loc + 2, // 18: encore.parser.schema.v1.Named.type_arguments:type_name -> encore.parser.schema.v1.Type + 11, // 19: encore.parser.schema.v1.Struct.fields:type_name -> encore.parser.schema.v1.Field + 2, // 20: encore.parser.schema.v1.Field.typ:type_name -> encore.parser.schema.v1.Type + 13, // 21: encore.parser.schema.v1.Field.tags:type_name -> encore.parser.schema.v1.Tag + 12, // 22: encore.parser.schema.v1.Field.wire:type_name -> 
encore.parser.schema.v1.WireSpec + 22, // 23: encore.parser.schema.v1.WireSpec.header:type_name -> encore.parser.schema.v1.WireSpec.Header + 23, // 24: encore.parser.schema.v1.WireSpec.query:type_name -> encore.parser.schema.v1.WireSpec.Query + 2, // 25: encore.parser.schema.v1.Map.key:type_name -> encore.parser.schema.v1.Type + 2, // 26: encore.parser.schema.v1.Map.value:type_name -> encore.parser.schema.v1.Type + 2, // 27: encore.parser.schema.v1.List.elem:type_name -> encore.parser.schema.v1.Type + 2, // 28: encore.parser.schema.v1.Pointer.base:type_name -> encore.parser.schema.v1.Type + 2, // 29: encore.parser.schema.v1.Union.types:type_name -> encore.parser.schema.v1.Type + 2, // 30: encore.parser.schema.v1.ConfigValue.elem:type_name -> encore.parser.schema.v1.Type + 4, // 31: encore.parser.schema.v1.ValidationExpr.And.exprs:type_name -> encore.parser.schema.v1.ValidationExpr + 4, // 32: encore.parser.schema.v1.ValidationExpr.Or.exprs:type_name -> encore.parser.schema.v1.ValidationExpr + 33, // [33:33] is the sub-list for method output_type + 33, // [33:33] is the sub-list for method input_type + 33, // [33:33] is the sub-list for extension type_name + 33, // [33:33] is the sub-list for extension extendee + 0, // [0:33] is the sub-list for field type_name } func init() { file_encore_parser_schema_v1_schema_proto_init() } @@ -1773,7 +2245,7 @@ func file_encore_parser_schema_v1_schema_proto_init() { } } file_encore_parser_schema_v1_schema_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TypeParameterRef); i { + switch v := v.(*ValidationRule); i { case 0: return &v.state case 1: @@ -1785,7 +2257,7 @@ func file_encore_parser_schema_v1_schema_proto_init() { } } file_encore_parser_schema_v1_schema_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Decl); i { + switch v := v.(*ValidationExpr); i { case 0: return &v.state case 1: @@ -1797,7 +2269,7 @@ func 
file_encore_parser_schema_v1_schema_proto_init() { } } file_encore_parser_schema_v1_schema_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TypeParameter); i { + switch v := v.(*TypeParameterRef); i { case 0: return &v.state case 1: @@ -1809,7 +2281,7 @@ func file_encore_parser_schema_v1_schema_proto_init() { } } file_encore_parser_schema_v1_schema_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Loc); i { + switch v := v.(*Decl); i { case 0: return &v.state case 1: @@ -1821,7 +2293,7 @@ func file_encore_parser_schema_v1_schema_proto_init() { } } file_encore_parser_schema_v1_schema_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Named); i { + switch v := v.(*TypeParameter); i { case 0: return &v.state case 1: @@ -1833,7 +2305,7 @@ func file_encore_parser_schema_v1_schema_proto_init() { } } file_encore_parser_schema_v1_schema_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Struct); i { + switch v := v.(*Loc); i { case 0: return &v.state case 1: @@ -1845,7 +2317,7 @@ func file_encore_parser_schema_v1_schema_proto_init() { } } file_encore_parser_schema_v1_schema_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Field); i { + switch v := v.(*Named); i { case 0: return &v.state case 1: @@ -1857,7 +2329,7 @@ func file_encore_parser_schema_v1_schema_proto_init() { } } file_encore_parser_schema_v1_schema_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*WireSpec); i { + switch v := v.(*Struct); i { case 0: return &v.state case 1: @@ -1869,7 +2341,7 @@ func file_encore_parser_schema_v1_schema_proto_init() { } } file_encore_parser_schema_v1_schema_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Tag); i { + switch v := v.(*Field); i { case 0: return &v.state case 1: @@ -1881,7 +2353,7 @@ func 
file_encore_parser_schema_v1_schema_proto_init() { } } file_encore_parser_schema_v1_schema_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Map); i { + switch v := v.(*WireSpec); i { case 0: return &v.state case 1: @@ -1893,7 +2365,7 @@ func file_encore_parser_schema_v1_schema_proto_init() { } } file_encore_parser_schema_v1_schema_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*List); i { + switch v := v.(*Tag); i { case 0: return &v.state case 1: @@ -1905,7 +2377,7 @@ func file_encore_parser_schema_v1_schema_proto_init() { } } file_encore_parser_schema_v1_schema_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Pointer); i { + switch v := v.(*Map); i { case 0: return &v.state case 1: @@ -1917,7 +2389,7 @@ func file_encore_parser_schema_v1_schema_proto_init() { } } file_encore_parser_schema_v1_schema_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Union); i { + switch v := v.(*List); i { case 0: return &v.state case 1: @@ -1929,7 +2401,7 @@ func file_encore_parser_schema_v1_schema_proto_init() { } } file_encore_parser_schema_v1_schema_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Literal); i { + switch v := v.(*Pointer); i { case 0: return &v.state case 1: @@ -1941,7 +2413,7 @@ func file_encore_parser_schema_v1_schema_proto_init() { } } file_encore_parser_schema_v1_schema_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ConfigValue); i { + switch v := v.(*Union); i { case 0: return &v.state case 1: @@ -1953,7 +2425,7 @@ func file_encore_parser_schema_v1_schema_proto_init() { } } file_encore_parser_schema_v1_schema_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*WireSpec_Header); i { + switch v := v.(*Literal); i { case 0: return &v.state case 1: @@ -1965,6 +2437,54 @@ func 
file_encore_parser_schema_v1_schema_proto_init() { } } file_encore_parser_schema_v1_schema_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ConfigValue); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_encore_parser_schema_v1_schema_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ValidationExpr_And); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_encore_parser_schema_v1_schema_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ValidationExpr_Or); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_encore_parser_schema_v1_schema_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*WireSpec_Header); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_encore_parser_schema_v1_schema_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*WireSpec_Query); i { case 0: return &v.state @@ -1989,27 +2509,42 @@ func file_encore_parser_schema_v1_schema_proto_init() { (*Type_TypeParameter)(nil), (*Type_Config)(nil), } - file_encore_parser_schema_v1_schema_proto_msgTypes[7].OneofWrappers = []interface{}{} - file_encore_parser_schema_v1_schema_proto_msgTypes[8].OneofWrappers = []interface{}{ + file_encore_parser_schema_v1_schema_proto_msgTypes[1].OneofWrappers = []interface{}{ + (*ValidationRule_MinLen)(nil), + (*ValidationRule_MaxLen)(nil), + (*ValidationRule_MinVal)(nil), + (*ValidationRule_MaxVal)(nil), + (*ValidationRule_StartsWith)(nil), + (*ValidationRule_EndsWith)(nil), + (*ValidationRule_MatchesRegexp)(nil), + (*ValidationRule_Is_)(nil), 
+ } + file_encore_parser_schema_v1_schema_proto_msgTypes[2].OneofWrappers = []interface{}{ + (*ValidationExpr_Rule)(nil), + (*ValidationExpr_And_)(nil), + (*ValidationExpr_Or_)(nil), + } + file_encore_parser_schema_v1_schema_proto_msgTypes[9].OneofWrappers = []interface{}{} + file_encore_parser_schema_v1_schema_proto_msgTypes[10].OneofWrappers = []interface{}{ (*WireSpec_Header_)(nil), (*WireSpec_Query_)(nil), } - file_encore_parser_schema_v1_schema_proto_msgTypes[14].OneofWrappers = []interface{}{ + file_encore_parser_schema_v1_schema_proto_msgTypes[16].OneofWrappers = []interface{}{ (*Literal_Str)(nil), (*Literal_Boolean)(nil), (*Literal_Int)(nil), (*Literal_Float)(nil), (*Literal_Null)(nil), } - file_encore_parser_schema_v1_schema_proto_msgTypes[16].OneofWrappers = []interface{}{} - file_encore_parser_schema_v1_schema_proto_msgTypes[17].OneofWrappers = []interface{}{} + file_encore_parser_schema_v1_schema_proto_msgTypes[20].OneofWrappers = []interface{}{} + file_encore_parser_schema_v1_schema_proto_msgTypes[21].OneofWrappers = []interface{}{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_encore_parser_schema_v1_schema_proto_rawDesc, - NumEnums: 1, - NumMessages: 18, + NumEnums: 2, + NumMessages: 22, NumExtensions: 0, NumServices: 0, }, diff --git a/proto/encore/parser/schema/v1/schema.proto b/proto/encore/parser/schema/v1/schema.proto index 5be373d1fc..d4117430b3 100644 --- a/proto/encore/parser/schema/v1/schema.proto +++ b/proto/encore/parser/schema/v1/schema.proto @@ -27,6 +27,43 @@ message Type { /* Encore Special Types */ ConfigValue config = 7; // This value is a config value } + + optional ValidationExpr validation = 15; // The validation expression for this type +} + +message ValidationRule { + oneof rule { + uint64 min_len = 1; + uint64 max_len = 2; + double min_val = 3; + double max_val = 4; + string starts_with = 5; + string ends_with = 6; + string 
matches_regexp = 7; + Is is = 8; + } + + enum Is { + UNKNOWN = 0; + EMAIL = 1; + URL = 2; + } +} + +message ValidationExpr { + oneof expr { + ValidationRule rule = 1; + And and = 2; + Or or = 3; + } + + message And { + repeated ValidationExpr exprs = 1; + } + + message Or { + repeated ValidationExpr exprs = 1; + } } // TypeParameterRef is a reference to a `TypeParameter` within a declaration block diff --git a/runtimes/core/Cargo.toml b/runtimes/core/Cargo.toml index 5fe5e8c426..3b11570de7 100644 --- a/runtimes/core/Cargo.toml +++ b/runtimes/core/Cargo.toml @@ -94,6 +94,8 @@ aws-sdk-s3 = "1.58.0" aws-smithy-types = { version = "1.2.8", features = ["byte-stream-poll-next", "rt-tokio"] } percent-encoding = "2.3.1" aws-credential-types = "1.2.1" +regex = "1.11.1" +email_address = "0.2.9" [build-dependencies] prost-build = "0.12.3" diff --git a/runtimes/core/src/api/call.rs b/runtimes/core/src/api/call.rs index 5f5ba3888d..6ad29db293 100644 --- a/runtimes/core/src/api/call.rs +++ b/runtimes/core/src/api/call.rs @@ -48,7 +48,6 @@ impl ServiceRegistry { deploy_id: String, http_client: reqwest::Client, tracer: Tracer, - is_worker: bool, ) -> anyhow::Result { let mut base_urls = HashMap::with_capacity(sd.services.len()); let mut service_auth = HashMap::with_capacity(sd.services.len()); @@ -80,7 +79,7 @@ impl ServiceRegistry { service_auth.insert(svc, auth_method); } } - } else if !hosted_services.is_empty() && !is_worker { + } else if !hosted_services.is_empty() { // This shouldn't happen if things are configured correctly. 
::log::error!( "internal encore error: cannot host services without provided own address" diff --git a/runtimes/core/src/api/jsonschema/de.rs b/runtimes/core/src/api/jsonschema/de.rs index c84a209496..fc3fc4ce12 100644 --- a/runtimes/core/src/api/jsonschema/de.rs +++ b/runtimes/core/src/api/jsonschema/de.rs @@ -7,7 +7,7 @@ use std::marker::PhantomData; use serde::de::{DeserializeSeed, MapAccess, SeqAccess, Unexpected, Visitor}; use serde::Deserializer; -use crate::api::jsonschema::Registry; +use crate::api::jsonschema::{validation::Validation, Registry}; use crate::api::{self, PValue, PValues}; use serde_json::Number as JSONNumber; @@ -36,8 +36,11 @@ pub enum Value { /// Consume a single value, one of a union of possible types. Union(Vec), - // Reference to another value. + /// Reference to another value. Ref(usize), + + /// A value with additional value-based validation. + Validation(Validation), } #[derive(Debug, Clone, Default)] @@ -98,6 +101,7 @@ impl Value { Value::Option(bov) => bov.expecting(reg), Value::Ref(idx) => reg.get(*idx).expecting(reg), Value::Struct { .. } => Cow::Borrowed("a JSON object"), + Value::Validation(v) => v.bov.expecting(reg), Value::Union(types) => { let mut s = String::new(); let num = types.len(); @@ -266,6 +270,26 @@ macro_rules! recurse { }}; } +macro_rules! validate_pval { + ($self:ident, $v:ident, $method:ident, $value:expr) => {{ + let inner = recurse!($self, &$v.bov, $method, $value)?; + match $v.validate_pval(&inner) { + Ok(()) => Ok(inner), + Err(err) => Err(serde::de::Error::custom(err)), + } + }}; +} + +macro_rules! validate_jval { + ($self:ident, $v:ident, $method:ident, $value:expr) => {{ + recurse!($self, &$v.bov, $method, $value)?; + match $v.validate_jval($value) { + Ok(()) => Ok(()), + Err(err) => Err(serde::de::Error::custom(err)), + } + }}; +} + macro_rules! 
recurse0 { ($self:ident, $bov:expr, $method:ident) => {{ match $bov { @@ -330,6 +354,7 @@ impl<'de> Visitor<'de> for DecodeValue<'_> { Value::Option(_) => formatter.write_str("any valid JSON value or null"), Value::Struct { .. } => formatter.write_str("a JSON object"), Value::Ref(_) => formatter.write_str("a JSON value"), + Value::Validation(v) => formatter.write_str(v.bov.expecting(self.reg).as_ref()), } } @@ -345,6 +370,7 @@ impl<'de> Visitor<'de> for DecodeValue<'_> { recurse!(self, val, visit_bool, value) } Value::Literal(Literal::Bool(bool)) if *bool == value => Ok(PValue::Bool(value)), + Value::Validation(v) => validate_pval!(self, v, visit_bool, value), Value::Union(types) => { for typ in types { let res: Result<_, E> = recurse!(self, typ, visit_bool, value); @@ -376,6 +402,7 @@ impl<'de> Visitor<'de> for DecodeValue<'_> { recurse!(self, val, visit_i64, value) } Value::Literal(Literal::Int(val)) if *val == value => Ok(PValue::Number(value.into())), + Value::Validation(v) => validate_pval!(self, v, visit_i64, value), Value::Union(types) => { for typ in types { let res: Result<_, E> = recurse!(self, typ, visit_i64, value); @@ -409,6 +436,7 @@ impl<'de> Visitor<'de> for DecodeValue<'_> { Value::Literal(Literal::Int(val)) if *val == value as i64 => { Ok(PValue::Number(value.into())) } + Value::Validation(v) => validate_pval!(self, v, visit_u64, value), Value::Union(types) => { for typ in types { let res: Result<_, E> = recurse!(self, typ, visit_u64, value); @@ -451,6 +479,7 @@ impl<'de> Visitor<'de> for DecodeValue<'_> { ))) } } + Value::Validation(v) => validate_pval!(self, v, visit_f64, value), Value::Union(types) => { for typ in types { let res: Result<_, E> = recurse!(self, typ, visit_f64, value); @@ -479,6 +508,10 @@ impl<'de> Visitor<'de> for DecodeValue<'_> { } #[inline] + #[cfg_attr( + feature = "rttrace", + tracing::instrument(skip(self), ret, level = "trace") + )] fn visit_string(self, value: String) -> Result where E: serde::de::Error, @@ -577,6 
+610,7 @@ impl<'de> Visitor<'de> for DecodeValue<'_> { value, ))), }, + Value::Validation(v) => validate_pval!(self, v, visit_string, value), Value::Union(types) => { for typ in types { @@ -666,6 +700,7 @@ impl<'de> Visitor<'de> for DecodeValue<'_> { }, Value::Ref(idx) => recurse_ref!(self, idx, visit_seq, seq), Value::Option(bov) => recurse!(self, bov, visit_seq, seq), + Value::Validation(v) => validate_pval!(self, v, visit_seq, seq), Value::Union(candidates) => { let mut vec: Vec = Vec::new(); while let Some(val) = seq.next_element()? { @@ -821,6 +856,7 @@ impl<'de> Visitor<'de> for DecodeValue<'_> { Value::Ref(idx) => recurse_ref!(self, idx, visit_map, map), Value::Option(bov) => recurse!(self, bov, visit_map, map), + Value::Validation(v) => validate_pval!(self, v, visit_map, map), Value::Union(candidates) => { let mut values = serde_json::Map::new(); while let Some((key, value)) = map.next_entry()? { @@ -900,6 +936,7 @@ impl DecodeValue<'_> { Value::Basic(Basic::Any | Basic::Null) => Ok(()), Value::Option(_) => Ok(()), Value::Ref(idx) => recurse_ref!(self, idx, validate, value), + Value::Validation(v) => validate_jval!(self, v, validate, value), Value::Union(types) => { for typ in types { let res: Result<_, E> = recurse!(self, typ, validate, value); @@ -925,6 +962,7 @@ impl DecodeValue<'_> { bool, ))), }, + Value::Validation(v) => validate_jval!(self, v, validate, value), Value::Union(types) => { for typ in types { let res: Result<_, E> = recurse!(self, typ, validate, value); @@ -957,6 +995,7 @@ impl DecodeValue<'_> { num, ))), }, + Value::Validation(v) => validate_jval!(self, v, validate, value), Value::Union(types) => { for typ in types { let res: Result<_, E> = recurse!(self, typ, validate, value); @@ -994,6 +1033,7 @@ impl DecodeValue<'_> { string, ))), }, + Value::Validation(v) => validate_jval!(self, v, validate, value), Value::Union(types) => { for typ in types { let res: Result<_, E> = recurse!(self, typ, validate, value); @@ -1046,6 +1086,7 @@ impl 
DecodeValue<'_> { } Ok(()) } + Value::Validation(v) => validate_jval!(self, v, validate, value), Value::Union(types) => { for typ in types { let res: Result<_, E> = recurse!(self, typ, validate, value); @@ -1165,6 +1206,7 @@ impl DecodeValue<'_> { Ok(()) } + Value::Validation(v) => validate_jval!(self, v, validate, value), _ => Err(serde::de::Error::invalid_type(Unexpected::Map, self)), }, @@ -1186,6 +1228,9 @@ impl DecodeValue<'_> { JVal::Array(vals) => match self.value { Value::Ref(idx) => return recurse_ref!(self, idx, transform, JVal::Array(vals)), Value::Option(bov) => return recurse!(self, bov, transform, JVal::Array(vals)), + Value::Validation(v) => { + return recurse!(self, &v.bov, transform, JVal::Array(vals)) + } Value::Basic(Basic::Any) => { let mut new_vals = Vec::with_capacity(vals.len()); for val in vals { @@ -1249,6 +1294,9 @@ impl DecodeValue<'_> { JVal::Object(obj) => match self.value { Value::Ref(idx) => return recurse_ref!(self, idx, transform, JVal::Object(obj)), Value::Option(bov) => return recurse!(self, bov, transform, JVal::Object(obj)), + Value::Validation(v) => { + return recurse!(self, &v.bov, transform, JVal::Object(obj)) + } Value::Basic(Basic::Any) => { let mut new_obj = BTreeMap::new(); for (key, val) in obj { @@ -1328,6 +1376,9 @@ impl DecodeValue<'_> { JVal::String(str) => match self.value { Value::Ref(idx) => return recurse_ref!(self, idx, transform, JVal::String(str)), Value::Option(bov) => return recurse!(self, bov, transform, JVal::String(str)), + Value::Validation(v) => { + return recurse!(self, &v.bov, transform, JVal::String(str)) + } Value::Basic(Basic::DateTime) => api::DateTime::parse_from_rfc3339(&str) .map(PValue::DateTime) .map_err(|e| { diff --git a/runtimes/core/src/api/jsonschema/meta.rs b/runtimes/core/src/api/jsonschema/meta.rs index 8d57dc7ded..52b0f586fb 100644 --- a/runtimes/core/src/api/jsonschema/meta.rs +++ b/runtimes/core/src/api/jsonschema/meta.rs @@ -10,6 +10,8 @@ use crate::encore::parser::meta::v1 as 
meta; use crate::encore::parser::schema::v1 as schema; use crate::encore::parser::schema::v1::r#type::Typ; +use super::validation; + impl Registry { pub fn schema(self: &Arc, id: usize) -> JSONSchema { JSONSchema { @@ -110,9 +112,9 @@ impl BuilderCtx<'_, '_> { /// Computes the JSONSchema value for the given type. #[inline] fn typ(&mut self, typ: T) -> Result { - let typ = typ.tt()?; + let tt = typ.tt()?; - match typ { + let val = match tt { Typ::Named(named) => self.named(named), Typ::Builtin(builtin) => { let builtin = schema::Builtin::try_from(*builtin).context("invalid builtin")?; @@ -133,6 +135,16 @@ impl BuilderCtx<'_, '_> { .ok_or_else(|| anyhow::anyhow!("missing type argument"))?; Ok(Value::Ref(*idx)) } + }?; + + if let Some(expr) = typ.validation() { + let bov = self.bov(val); + Ok(Value::Validation(validation::Validation { + expr: expr.try_into()?, + bov, + })) + } else { + Ok(val) } } @@ -250,13 +262,13 @@ impl BuilderCtx<'_, '_> { // Note: JSON doesn't support anything but string keys, // so we don't actually track the key type for the purpose // of JSON schemas. Ignore it here. 
- let value = self.typ(map.value.tt()?)?; + let value = self.typ(&map.value)?; Ok(Value::Map(self.bov(value))) } #[inline] fn list(&mut self, list: &schema::List) -> Result { - let value = self.typ(list.elem.tt()?)?; + let value = self.typ(&list.elem)?; Ok(Value::Array(self.bov(value))) } @@ -298,8 +310,9 @@ impl BuilderCtx<'_, '_> { } } -trait ToType { +trait ToType: std::fmt::Debug { fn tt(&self) -> Result<&Typ>; + fn validation(&self) -> Option<&schema::ValidationExpr>; } impl ToType for Option @@ -309,6 +322,10 @@ where fn tt(&self) -> Result<&Typ> { self.as_ref().context("missing type")?.tt() } + + fn validation(&self) -> Option<&schema::ValidationExpr> { + self.as_ref().and_then(|t| t.validation()) + } } impl ToType for Box @@ -318,17 +335,19 @@ where fn tt(&self) -> Result<&Typ> { self.deref().tt() } + + fn validation(&self) -> Option<&schema::ValidationExpr> { + self.deref().validation() + } } impl ToType for schema::Type { fn tt(&self) -> Result<&Typ> { self.typ.as_ref().context("missing type") } -} -impl ToType for Typ { - fn tt(&self) -> Result<&Typ> { - Ok(self) + fn validation(&self) -> Option<&schema::ValidationExpr> { + self.validation.as_ref() } } @@ -336,4 +355,8 @@ impl ToType for &T { fn tt(&self) -> Result<&Typ> { (*self).tt() } + + fn validation(&self) -> Option<&schema::ValidationExpr> { + (*self).validation() + } } diff --git a/runtimes/core/src/api/jsonschema/mod.rs b/runtimes/core/src/api/jsonschema/mod.rs index 8c3e7e94ca..1f8a58c6b0 100644 --- a/runtimes/core/src/api/jsonschema/mod.rs +++ b/runtimes/core/src/api/jsonschema/mod.rs @@ -12,6 +12,7 @@ mod de; mod meta; mod parse; mod ser; +mod validation; use crate::api::jsonschema::parse::ParseWithSchema; use crate::api::APIResult; @@ -128,6 +129,11 @@ impl Value { Some(v) => v.write_debug(reg, f), None => write!(f, "Ref({})", idx), }, + Value::Validation(v) => f + .debug_struct("Validation") + .field("bov", &v.bov) + .field("expr", &v.expr) + .finish(), } } } diff --git 
a/runtimes/core/src/api/jsonschema/parse.rs b/runtimes/core/src/api/jsonschema/parse.rs index cd75d52d93..690b6ac718 100644 --- a/runtimes/core/src/api/jsonschema/parse.rs +++ b/runtimes/core/src/api/jsonschema/parse.rs @@ -102,6 +102,23 @@ fn parse_header_value(header: &str, reg: &Registry, schema: &Value) -> APIResult // Recurse Value::Ref(idx) => parse_header_value(header, reg, ®.values[*idx]), + Value::Validation(v) => { + let inner = match &v.bov { + BasicOrValue::Basic(basic) => parse_basic_str(basic, header), + BasicOrValue::Value(idx) => parse_header_value(header, reg, ®.values[*idx]), + }?; + match v.validate_pval(&inner) { + Ok(()) => Ok(inner), + Err(err) => Err(api::Error { + code: api::ErrCode::InvalidArgument, + message: "invalid header value".to_string(), + internal_message: Some(format!("invalid header value: {}", err)), + stack: None, + details: None, + }), + } + } + // If we have an empty header for an option, that's fine. Value::Option(_) if header.is_empty() => Ok(PValue::Null), @@ -201,11 +218,32 @@ impl ParseWithSchema for PValue { } } +#[cfg_attr( + feature = "rttrace", + tracing::instrument(skip(reg), ret, level = "trace") +)] fn parse_json_value(this: PValue, reg: &Registry, schema: &Value) -> APIResult { match schema { // Recurse Value::Ref(idx) => parse_json_value(this, reg, ®.values[*idx]), + Value::Validation(v) => { + let inner = match &v.bov { + BasicOrValue::Basic(basic) => parse_basic_json(reg, basic, this), + BasicOrValue::Value(idx) => parse_json_value(this, reg, ®.values[*idx]), + }?; + match v.validate_pval(&inner) { + Ok(()) => Ok(inner), + Err(err) => Err(api::Error { + code: api::ErrCode::InvalidArgument, + message: err.to_string(), + internal_message: None, + stack: None, + details: None, + }), + } + } + // If we have a null value for an option, that's fine. 
Value::Option(_) if this.is_null() => Ok(PValue::Null), diff --git a/runtimes/core/src/api/jsonschema/validation.rs b/runtimes/core/src/api/jsonschema/validation.rs new file mode 100644 index 0000000000..6cd2d8a0d5 --- /dev/null +++ b/runtimes/core/src/api/jsonschema/validation.rs @@ -0,0 +1,533 @@ +use crate::{api::PValue, encore::parser::schema::v1 as schema}; +use thiserror::Error; + +use super::BasicOrValue; + +#[derive(Debug, Clone)] +pub struct Validation { + pub bov: BasicOrValue, + pub expr: Expr, +} + +impl Validation { + pub fn validate_pval<'a>(&'a self, val: &'a PValue) -> Result<(), Error<'a>> { + self.expr.validate_pval(val) + } + + pub fn validate_jval<'a>(&'a self, val: &'a serde_json::Value) -> Result<(), Error<'a>> { + self.expr.validate_jval(val) + } +} + +#[derive(Debug, Clone)] +pub enum Expr { + Rule(Rule), + And(Vec), + Or(Vec), +} + +macro_rules! impl_validate { + ($method:ident, $typ:ty) => { + pub fn $method<'a>(&'a self, val: &'a $typ) -> Result<(), Error<'a>> { + match self { + Expr::Rule(rule) => rule.$method(val), + Expr::And(exprs) => { + for expr in exprs { + expr.$method(val)?; + } + Ok(()) + } + Expr::Or(exprs) => { + let mut first_err = None; + for expr in exprs { + match expr.$method(val) { + Ok(()) => return Ok(()), + Err(err) => { + if first_err.is_none() { + first_err = Some(err); + } + } + } + } + match first_err { + Some(err) => Err(err), + None => Ok(()), + } + } + } + } + }; +} + +impl Expr { + impl_validate!(validate_pval, PValue); + impl_validate!(validate_jval, serde_json::Value); +} + +#[derive(Debug, Clone)] +pub enum Rule { + MinLen(u64), + MaxLen(u64), + MinVal(f64), + MaxVal(f64), + StartsWith(String), + EndsWith(String), + MatchesRegexp(regex::Regex), + Is(Is), +} + +#[derive(Debug, Clone)] +pub enum Is { + Email, + Url, +} + +#[derive(Error, Debug)] +pub enum Error<'a> { + #[error("length too short (got {got}, expected at least {min})")] + MinLen { got: usize, min: usize }, + #[error("length too long (got {got}, 
expected at most {max})")] + MaxLen { got: usize, max: usize }, + + #[error("value must be at least {min} (got {got})")] + MinVal { + got: &'a serde_json::Number, + min: f64, + }, + #[error("value must be at most {max} (got {got})")] + MaxVal { + got: &'a serde_json::Number, + max: f64, + }, + + #[error("value does not match the regexp {regexp:#?}")] + MatchesRegexp { regexp: &'a str }, + + #[error("value does not start with {prefix:#?}")] + StartsWith { prefix: &'a str }, + + #[error("value does not end with {suffix:#?}")] + EndsWith { suffix: &'a str }, + + #[error("value is not {expected}")] + Is { expected: &'a str }, + + #[error("unexpected type (expected {want})")] + UnexpectedType { want: &'a str }, +} + +impl Rule { + #[cfg_attr( + feature = "rttrace", + tracing::instrument(skip(self), ret, level = "trace") + )] + pub fn validate_pval<'a>(&'a self, val: &'a PValue) -> Result<(), Error<'a>> { + match self { + Rule::MinLen(min_len) => match val { + PValue::Array(arr) => { + if arr.len() < *min_len as usize { + Err(Error::MinLen { + got: arr.len(), + min: *min_len as usize, + }) + } else { + Ok(()) + } + } + PValue::String(str) => { + if str.len() < *min_len as usize { + Err(Error::MinLen { + got: str.len(), + min: *min_len as usize, + }) + } else { + Ok(()) + } + } + + _ => Err(Error::UnexpectedType { + want: "string or array", + }), + }, + + Rule::MaxLen(max_len) => match val { + PValue::Array(arr) => { + if arr.len() > *max_len as usize { + Err(Error::MaxLen { + got: arr.len(), + max: *max_len as usize, + }) + } else { + Ok(()) + } + } + PValue::String(str) => { + if str.len() > *max_len as usize { + Err(Error::MaxLen { + got: str.len(), + max: *max_len as usize, + }) + } else { + Ok(()) + } + } + + _ => Err(Error::UnexpectedType { + want: "string or array", + }), + }, + + Rule::MinVal(min_val) => match val { + PValue::Number(num) => { + let bad = if num.is_i64() { + num.as_i64().unwrap() < *min_val as i64 + } else if num.is_u64() { + num.as_u64().unwrap() 
< *min_val as u64 + } else if num.is_f64() { + num.as_f64().unwrap() < *min_val + } else { + return Err(Error::UnexpectedType { want: "number" }); + }; + if bad { + Err(Error::MinVal { + got: num, + min: *min_val, + }) + } else { + Ok(()) + } + } + + _ => Err(Error::UnexpectedType { want: "number" }), + }, + + Rule::MaxVal(max_val) => match val { + PValue::Number(num) => { + let bad = if num.is_i64() { + num.as_i64().unwrap() > *max_val as i64 + } else if num.is_u64() { + num.as_u64().unwrap() > *max_val as u64 + } else if num.is_f64() { + num.as_f64().unwrap() > *max_val + } else { + return Err(Error::UnexpectedType { want: "number" }); + }; + if bad { + Err(Error::MaxVal { + got: num, + max: *max_val, + }) + } else { + Ok(()) + } + } + + _ => Err(Error::UnexpectedType { want: "number" }), + }, + + Rule::StartsWith(prefix) => match val { + PValue::String(str) => { + if str.starts_with(prefix) { + Ok(()) + } else { + Err(Error::StartsWith { prefix }) + } + } + + _ => Err(Error::UnexpectedType { want: "string" }), + }, + + Rule::EndsWith(suffix) => match val { + PValue::String(str) => { + if str.ends_with(suffix) { + Ok(()) + } else { + Err(Error::EndsWith { suffix }) + } + } + + _ => Err(Error::UnexpectedType { want: "string" }), + }, + + Rule::MatchesRegexp(re) => match val { + PValue::String(str) => { + if re.is_match(str) { + Ok(()) + } else { + Err(Error::MatchesRegexp { + regexp: re.as_str(), + }) + } + } + + _ => Err(Error::UnexpectedType { want: "string" }), + }, + + Rule::Is(Is::Email) => match val { + PValue::String(str) => { + let email = email_address::EmailAddress::parse_with_options( + str, + email_address::Options::default().without_display_text(), + ); + match email { + Ok(_) => Ok(()), + Err(_) => Err(Error::Is { + expected: "an email", + }), + } + } + + _ => Err(Error::UnexpectedType { want: "string" }), + }, + + Rule::Is(Is::Url) => match val { + PValue::String(str) => { + let u = url::Url::parse(str); + match u { + Ok(_) => Ok(()), + Err(_) => 
Err(Error::Is { expected: "a url" }), + } + } + + _ => Err(Error::UnexpectedType { want: "string" }), + }, + } + } + + #[cfg_attr( + feature = "rttrace", + tracing::instrument(skip(self), ret, level = "trace") + )] + pub fn validate_jval<'a>(&'a self, val: &'a serde_json::Value) -> Result<(), Error<'a>> { + use serde_json::Value as JVal; + + match self { + Rule::MinLen(min_len) => match val { + JVal::Array(arr) => { + if arr.len() < *min_len as usize { + Err(Error::MinLen { + got: arr.len(), + min: *min_len as usize, + }) + } else { + Ok(()) + } + } + JVal::String(str) => { + if str.len() < *min_len as usize { + Err(Error::MinLen { + got: str.len(), + min: *min_len as usize, + }) + } else { + Ok(()) + } + } + + _ => Err(Error::UnexpectedType { + want: "string or array", + }), + }, + + Rule::MaxLen(max_len) => match val { + JVal::Array(arr) => { + if arr.len() > *max_len as usize { + Err(Error::MaxLen { + got: arr.len(), + max: *max_len as usize, + }) + } else { + Ok(()) + } + } + JVal::String(str) => { + if str.len() > *max_len as usize { + Err(Error::MaxLen { + got: str.len(), + max: *max_len as usize, + }) + } else { + Ok(()) + } + } + + _ => Err(Error::UnexpectedType { + want: "string or array", + }), + }, + + Rule::MinVal(min_val) => match val { + JVal::Number(num) => { + let bad = if num.is_i64() { + num.as_i64().unwrap() < *min_val as i64 + } else if num.is_u64() { + num.as_u64().unwrap() < *min_val as u64 + } else if num.is_f64() { + num.as_f64().unwrap() < *min_val + } else { + return Err(Error::UnexpectedType { want: "number" }); + }; + if bad { + Err(Error::MinVal { + got: num, + min: *min_val, + }) + } else { + Ok(()) + } + } + + _ => Err(Error::UnexpectedType { want: "number" }), + }, + + Rule::MaxVal(max_val) => match val { + JVal::Number(num) => { + let bad = if num.is_i64() { + num.as_i64().unwrap() > *max_val as i64 + } else if num.is_u64() { + num.as_u64().unwrap() > *max_val as u64 + } else if num.is_f64() { + num.as_f64().unwrap() > *max_val + } 
else { + return Err(Error::UnexpectedType { want: "number" }); + }; + if bad { + Err(Error::MaxVal { + got: num, + max: *max_val, + }) + } else { + Ok(()) + } + } + + _ => Err(Error::UnexpectedType { want: "number" }), + }, + + Rule::StartsWith(want) => match val { + JVal::String(got) => { + if got.starts_with(want) { + Ok(()) + } else { + Err(Error::StartsWith { prefix: want }) + } + } + + _ => Err(Error::UnexpectedType { want: "string" }), + }, + + Rule::EndsWith(want) => match val { + JVal::String(got) => { + if got.ends_with(want) { + Ok(()) + } else { + Err(Error::EndsWith { suffix: want }) + } + } + + _ => Err(Error::UnexpectedType { want: "string" }), + }, + + Rule::MatchesRegexp(re) => match val { + JVal::String(str) => { + if re.is_match(str) { + Ok(()) + } else { + Err(Error::MatchesRegexp { + regexp: re.as_str(), + }) + } + } + + _ => Err(Error::UnexpectedType { want: "string" }), + }, + + Rule::Is(Is::Email) => match val { + JVal::String(str) => { + let email = email_address::EmailAddress::parse_with_options( + str, + email_address::Options::default().without_display_text(), + ); + match email { + Ok(_) => Ok(()), + Err(_) => Err(Error::Is { expected: "email" }), + } + } + + _ => Err(Error::UnexpectedType { want: "string" }), + }, + + Rule::Is(Is::Url) => match val { + JVal::String(str) => { + let u = url::Url::parse(str); + match u { + Ok(_) => Ok(()), + Err(_) => Err(Error::Is { expected: "url" }), + } + } + + _ => Err(Error::UnexpectedType { want: "string" }), + }, + } + } +} + +impl TryFrom<&schema::ValidationExpr> for Expr { + type Error = anyhow::Error; + + fn try_from(expr: &schema::ValidationExpr) -> Result { + let Some(expr) = &expr.expr else { + return Err(anyhow::anyhow!("missing expr")); + }; + + use schema::validation_expr::Expr as PbExpr; + + match expr { + PbExpr::Rule(rule) => Ok(Expr::Rule(rule.try_into()?)), + PbExpr::And(expr) => { + let mut and = Vec::new(); + for expr in &expr.exprs { + and.push(expr.try_into()?); + } + 
Ok(Expr::And(and)) + } + PbExpr::Or(expr) => { + let mut or = Vec::new(); + for expr in &expr.exprs { + or.push(expr.try_into()?); + } + Ok(Expr::Or(or)) + } + } + } +} + +impl TryFrom<&schema::ValidationRule> for Rule { + type Error = anyhow::Error; + + fn try_from(rule: &schema::ValidationRule) -> Result { + let Some(rule) = &rule.rule else { + return Err(anyhow::anyhow!("missing validation rule")); + }; + + use schema::validation_rule::Is as PbIs; + use schema::validation_rule::Rule as PbRule; + match rule { + PbRule::MinLen(val) => Ok(Rule::MinLen(*val)), + PbRule::MaxLen(val) => Ok(Rule::MaxLen(*val)), + PbRule::MinVal(val) => Ok(Rule::MinVal(*val)), + PbRule::MaxVal(val) => Ok(Rule::MaxVal(*val)), + PbRule::StartsWith(val) => Ok(Rule::StartsWith(val.clone())), + PbRule::EndsWith(val) => Ok(Rule::EndsWith(val.clone())), + PbRule::MatchesRegexp(val) => { + let re = regex::Regex::new(val)?; + Ok(Rule::MatchesRegexp(re)) + } + PbRule::Is(is) => Ok(Rule::Is(match PbIs::try_from(*is)? { + PbIs::Unknown => anyhow::bail!("unknown 'is' rule"), + PbIs::Email => Is::Email, + PbIs::Url => Is::Url, + })), + } + } +} diff --git a/runtimes/core/src/api/manager.rs b/runtimes/core/src/api/manager.rs index 2786b13940..0bb7f43606 100644 --- a/runtimes/core/src/api/manager.rs +++ b/runtimes/core/src/api/manager.rs @@ -41,7 +41,6 @@ pub struct ManagerConfig<'a> { pub platform_validator: Arc, pub pubsub_push_registry: pubsub::PushHandlerRegistry, pub runtime: tokio::runtime::Handle, - pub is_worker: bool, pub testing: bool, pub proxied_push_subs: HashMap, } @@ -62,14 +61,14 @@ pub struct Manager { impl ManagerConfig<'_> { pub fn build(mut self) -> anyhow::Result { - let gateway_listen_addr = if !self.hosted_gateway_rids.is_empty() && !self.is_worker { + let gateway_listen_addr = if !self.hosted_gateway_rids.is_empty() { // We have a gateway. Have the gateway listen on the provided listen_addr. 
Some(listen_addr()) } else { None }; - let api_listener = if !self.hosted_services.is_empty() && !self.is_worker { + let api_listener = if !self.hosted_services.is_empty() { // If we already have a gateway, it's listening on the externally provided listen addr. // Use a random local port in that case. let addr = if gateway_listen_addr.is_some() { @@ -134,7 +133,6 @@ impl ManagerConfig<'_> { self.deploy_id.clone(), self.http_client.clone(), self.tracer.clone(), - self.is_worker, ) .context("unable to create service registry")?; let service_registry = Arc::new(service_registry); @@ -157,9 +155,6 @@ impl ManagerConfig<'_> { let mut auth_data_schemas = HashMap::new(); for gw in &self.meta.gateways { - if self.is_worker { - continue; - } let Some(gw_cfg) = hosted_gateways.get(gw.encore_name.as_str()) else { continue; }; @@ -201,7 +196,7 @@ impl ManagerConfig<'_> { ); } - let api_server = if !hosted_services.is_empty() && !self.is_worker { + let api_server = if !hosted_services.is_empty() { let server = server::Server::new( endpoints.clone(), hosted_endpoints, diff --git a/runtimes/core/src/api/schema/encoding.rs b/runtimes/core/src/api/schema/encoding.rs index b5fff97e3d..4d92c7e16c 100644 --- a/runtimes/core/src/api/schema/encoding.rs +++ b/runtimes/core/src/api/schema/encoding.rs @@ -239,17 +239,18 @@ impl<'a> TypeArgResolver<'a> { Typ::Struct(strukt) => { let mut cows = Vec::with_capacity(strukt.fields.len()); for field in &strukt.fields { - let typ = field.typ.as_ref().context("field without type")?; - let typ = typ.typ.as_ref().context("type without type")?; + let t = field.typ.as_ref().context("field without type")?; + let typ = t.typ.as_ref().context("type without type")?; let resolved = self.resolve(typ)?; - cows.push(resolved); + cows.push((resolved, t.validation.as_ref())); } let mut fields = Vec::with_capacity(strukt.fields.len()); - for (field, typ) in strukt.fields.iter().zip(cows) { + for (field, (typ, v)) in strukt.fields.iter().zip(cows) { 
fields.push(schema::Field { typ: Some(schema::Type { typ: Some(typ.into_owned()), + validation: v.cloned(), }), ..field.clone() }); @@ -259,21 +260,23 @@ impl<'a> TypeArgResolver<'a> { Typ::Map(map) => { let key = map.key.as_ref().context("map without key")?; - let key = key.typ.as_ref().context("key without type")?; + let key_typ = key.typ.as_ref().context("key without type")?; let value = map.value.as_ref().context("map without value")?; - let value = value.typ.as_ref().context("value without type")?; - let key = self.resolve(key)?; - let value = self.resolve(value)?; + let val_typ = value.typ.as_ref().context("value without type")?; + let key_typ = self.resolve(key_typ)?; + let val_typ = self.resolve(val_typ)?; - if matches!((&key, &value), (Cow::Borrowed(_), Cow::Borrowed(_))) { + if matches!((&key_typ, &val_typ), (Cow::Borrowed(_), Cow::Borrowed(_))) { Ok(Cow::Borrowed(typ)) } else { Ok(Cow::Owned(Typ::Map(Box::new(schema::Map { key: Some(Box::new(schema::Type { - typ: Some(key.into_owned()), + typ: Some(key_typ.into_owned()), + validation: key.validation.clone(), })), value: Some(Box::new(schema::Type { - typ: Some(value.into_owned()), + typ: Some(val_typ.into_owned()), + validation: value.validation.clone(), })), })))) } @@ -281,14 +284,15 @@ impl<'a> TypeArgResolver<'a> { Typ::List(list) => { let elem = list.elem.as_ref().context("list without elem")?; - let elem = elem.typ.as_ref().context("elem without type")?; - let elem = self.resolve(elem)?; - if matches!(elem, Cow::Borrowed(_)) { + let elem_typ = elem.typ.as_ref().context("elem without type")?; + let elem_typ = self.resolve(elem_typ)?; + if matches!(elem_typ, Cow::Borrowed(_)) { Ok(Cow::Borrowed(typ)) } else { Ok(Cow::Owned(Typ::List(Box::new(schema::List { elem: Some(Box::new(schema::Type { - typ: Some(elem.into_owned()), + typ: Some(elem_typ.into_owned()), + validation: elem.validation.clone(), })), })))) } @@ -298,8 +302,10 @@ impl<'a> TypeArgResolver<'a> { let types = 
self.resolve_types(&union.types)?; let types = types .into_iter() - .map(|t| schema::Type { - typ: Some(t.into_owned()), + .zip(&union.types) + .map(|(typ, t)| schema::Type { + typ: Some(typ.into_owned()), + validation: t.validation.clone(), }) .collect::>(); diff --git a/runtimes/core/src/api/server.rs b/runtimes/core/src/api/server.rs index ffe519bd7b..6de3d9d351 100644 --- a/runtimes/core/src/api/server.rs +++ b/runtimes/core/src/api/server.rs @@ -4,6 +4,8 @@ use std::future::Future; use std::pin::Pin; use std::sync::{Arc, Mutex, RwLock}; +use rand::Rng; + use crate::api::endpoint::{EndpointHandler, SharedEndpointData}; use crate::api::paths::Pather; use crate::api::reqauth::svcauth; @@ -143,7 +145,12 @@ impl Server { endpoint_name: EndpointName, handler: Arc, ) -> anyhow::Result<()> { - match self.hosted_endpoints.lock().unwrap().remove(&endpoint_name) { + match self + .hosted_endpoints + .lock() + .unwrap() + .get_mut(&endpoint_name) + { None => Ok(()), // anyhow::bail!("no handler found for endpoint: {}", endpoint_name), Some(h) => { let endpoint = self.endpoints.get(&endpoint_name).unwrap().to_owned(); @@ -154,7 +161,7 @@ impl Server { shared: self.shared.clone(), }; - h.set(handler); + h.add(handler); Ok(()) } } @@ -185,7 +192,7 @@ impl Pather for EndpointPathResolver { #[derive(Clone)] struct ReplaceableHandler { /// Underlying handler. The RwLock is used to be able to inject the underlying handler. - handler: Arc>>, + handler: Arc>>, } impl Debug for ReplaceableHandler { @@ -203,13 +210,18 @@ impl Default for ReplaceableHandler { impl ReplaceableHandler { pub fn new() -> Self { Self { - handler: Arc::new(RwLock::new(None)), + handler: Arc::new(RwLock::new(vec![])), } } /// Set sets the handler. pub fn set(&self, handler: H) { - *self.handler.write().unwrap() = Some(handler); + *self.handler.write().unwrap() = vec![handler]; + } + + /// Set sets the handler. 
+ pub fn add(&self, handler: H) { + self.handler.write().unwrap().push(handler); } } @@ -220,11 +232,24 @@ where type Future = MaybeHandlerFuture; fn call(self, req: axum::extract::Request, state: ()) -> Self::Future { - match self.handler.read().unwrap().as_ref() { - None => MaybeHandlerFuture { fut: None }, - Some(handler) => MaybeHandlerFuture { - fut: Some(Box::pin(handler.clone().call(req, state))), - }, + let handlers = self.handler.read().unwrap(); + match handlers.len() { + 0 => MaybeHandlerFuture { fut: None }, + 1 => { + let handler = handlers[0].clone(); + MaybeHandlerFuture { + fut: Some(Box::pin(handler.call(req, state))), + } + } + n => { + // Get a random handler between 0 and n. + let idx = rand::thread_rng().gen_range(0..n); + let handler = handlers[idx].clone(); + + MaybeHandlerFuture { + fut: Some(Box::pin(handler.call(req, state))), + } + } } } } diff --git a/runtimes/core/src/lib.rs b/runtimes/core/src/lib.rs index 1f15ae3e75..df644a1c1c 100644 --- a/runtimes/core/src/lib.rs +++ b/runtimes/core/src/lib.rs @@ -195,7 +195,7 @@ impl RuntimeBuilder { if let Some(proc_config) = self.proc_cfg { proc_config.apply(&mut cfg)?; } - Runtime::new(cfg, md, self.test_mode, self.is_worker) + Runtime::new(cfg, md, self.test_mode) } } @@ -219,7 +219,6 @@ impl Runtime { mut cfg: runtimepb::RuntimeConfig, md: metapb::Data, testing: bool, - is_worker: bool, ) -> anyhow::Result { // Initialize OpenSSL system root certificates, so that libraries can find them. 
openssl_probe::init_ssl_cert_env_vars(); @@ -359,7 +358,6 @@ impl Runtime { platform_validator, pubsub_push_registry: pubsub.push_registry(), runtime: tokio_rt.handle().clone(), - is_worker, testing, proxied_push_subs, } diff --git a/runtimes/js/encore.dev/internal/runtime/mod.ts b/runtimes/js/encore.dev/internal/runtime/mod.ts index f65159dae7..95fca068c6 100644 --- a/runtimes/js/encore.dev/internal/runtime/mod.ts +++ b/runtimes/js/encore.dev/internal/runtime/mod.ts @@ -1,4 +1,3 @@ -import { isMainThread } from "node:worker_threads"; import { Runtime } from "./napi/napi.cjs"; export * from "./napi/napi.cjs"; @@ -7,5 +6,4 @@ const testMode = process.env.NODE_ENV === "test"; export const RT = new Runtime({ testMode, - isWorker: !isMainThread && !testMode, }); diff --git a/runtimes/js/encore.dev/package.json b/runtimes/js/encore.dev/package.json index 7da031e527..982839e3fd 100644 --- a/runtimes/js/encore.dev/package.json +++ b/runtimes/js/encore.dev/package.json @@ -69,6 +69,11 @@ "bun": "./storage/objects/mod.ts", "default": "./dist/storage/objects/mod.js" }, + "./validate": { + "types": "./validate/mod.ts", + "bun": "./validate/mod.ts", + "default": "./dist/validate/mod.js" + }, "./internal/codegen/*": { "types": "./internal/codegen/*.ts", "bun": "./internal/codegen/*.ts", diff --git a/runtimes/js/encore.dev/validate/mod.ts b/runtimes/js/encore.dev/validate/mod.ts new file mode 100644 index 0000000000..25a4d07d49 --- /dev/null +++ b/runtimes/js/encore.dev/validate/mod.ts @@ -0,0 +1,55 @@ +declare const __validate: unique symbol; + +export type Min = { + [__validate]: { + minValue: N; + }; +}; + +export type Max = { + [__validate]: { + maxValue: N; + }; +}; + +export type MinLen = { + [__validate]: { + minLen: N; + }; +}; + +export type MaxLen = { + [__validate]: { + maxLen: N; + }; +}; + +export type MatchesRegexp = { + [__validate]: { + matchesRegexp: S; + }; +}; + +export type StartsWith = { + [__validate]: { + startsWith: S; + }; +}; + +export type EndsWith = { 
+ [__validate]: { + endsWith: S; + }; +}; + +export type IsEmail = { + [__validate]: { + isEmail: true; + }; +}; + +export type IsURL = { + [__validate]: { + isURL: true; + }; +}; diff --git a/runtimes/js/src/runtime.rs b/runtimes/js/src/runtime.rs index 5173e76182..f80d43fb32 100644 --- a/runtimes/js/src/runtime.rs +++ b/runtimes/js/src/runtime.rs @@ -13,9 +13,12 @@ use napi::{bindgen_prelude::*, JsObject}; use napi::{Error, JsUnknown, Status}; use napi_derive::napi; use std::future::Future; -use std::sync::Arc; +use std::sync::{Arc, OnceLock}; use std::thread; +// TODO: remove storing of result after `get_or_try_init` is stabilized +static RUNTIME: OnceLock>> = OnceLock::new(); + #[napi] pub struct Runtime { pub(crate) runtime: Arc, @@ -25,7 +28,23 @@ pub struct Runtime { #[derive(Default)] pub struct RuntimeOptions { pub test_mode: Option, - pub is_worker: Option, +} + +fn init_runtime(test_mode: bool) -> napi::Result { + // Initialize logging. + encore_runtime_core::log::init(); + + encore_runtime_core::Runtime::builder() + .with_test_mode(test_mode) + .with_meta_autodetect() + .with_runtime_config_from_env() + .build() + .map_err(|e| { + Error::new( + Status::GenericFailure, + format!("failed to initialize runtime: {:?}", e), + ) + }) } #[napi] @@ -33,36 +52,28 @@ impl Runtime { #[napi(constructor)] pub fn new(options: Option) -> napi::Result { let options = options.unwrap_or_default(); - // Initialize logging. 
- encore_runtime_core::log::init(); - let test_mode = options .test_mode .unwrap_or(std::env::var("NODE_ENV").is_ok_and(|val| val == "test")); - let is_worker = options.is_worker.unwrap_or(false); - let runtime = encore_runtime_core::Runtime::builder() - .with_test_mode(test_mode) - .with_meta_autodetect() - .with_runtime_config_from_env() - .with_worker(is_worker) - .build() - .map_err(|e| { - Error::new( - Status::GenericFailure, - format!("failed to initialize runtime: {:?}", e), - ) - })?; - let runtime = Arc::new(runtime); - // If we're running tests, there's no specific entrypoint so - // start the runtime in the background immediately. if test_mode { - let runtime = runtime.clone(); - thread::spawn(move || { - runtime.run_blocking(); - }); + let runtime = Arc::new(init_runtime(test_mode)?); + // If we're running tests, there's no specific entrypoint so + // start the runtime in the background immediately. + if test_mode { + let runtime = runtime.clone(); + thread::spawn(move || { + runtime.run_blocking(); + }); + } + + return Ok(Self { runtime }); } + let runtime = RUNTIME + .get_or_init(|| Ok(Arc::new(init_runtime(false)?))) + .clone()?; + Ok(Self { runtime }) } diff --git a/tsparser/Cargo.toml b/tsparser/Cargo.toml index bac9956129..4cd4a4ee27 100644 --- a/tsparser/Cargo.toml +++ b/tsparser/Cargo.toml @@ -34,7 +34,7 @@ serde = { version = "1.0.188", features = ["rc"] } serde_json = "1.0.107" url = "2.4.1" convert_case = "0.6.0" -itertools = "0.12.0" +itertools = "0.13.0" duct = "0.13.6" indexmap = { version = "2.1.0", features = ["serde"] } serde_yaml = "0.9.32" diff --git a/tsparser/examples/testparse.rs b/tsparser/examples/testparse.rs index 47fae9ab33..6522eed266 100644 --- a/tsparser/examples/testparse.rs +++ b/tsparser/examples/testparse.rs @@ -4,7 +4,6 @@ use std::path::PathBuf; use std::rc::Rc; use std::sync::{Arc, Mutex}; -use anyhow::Result; use swc_common::errors::{Emitter, EmitterWriter, Handler, HANDLER}; use swc_common::{Globals, SourceMap, 
SourceMapper, GLOBALS}; @@ -12,7 +11,7 @@ use encore_tsparser::builder::Builder; use encore_tsparser::parser::parser::ParseContext; use encore_tsparser::{app, builder}; -fn main() -> Result<()> { +fn main() { env_logger::init(); let js_runtime_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) @@ -34,8 +33,8 @@ fn main() -> Result<()> { let errs = Rc::new(Handler::with_emitter(true, false, Box::new(emitter))); - GLOBALS.set(&globals, || -> Result<()> { - HANDLER.set(&errs, || -> Result<()> { + GLOBALS.set(&globals, || { + HANDLER.set(&errs, || { let builder = Builder::new()?; let _parse: Option<(builder::App, app::AppDesc)> = None; @@ -68,12 +67,10 @@ fn main() -> Result<()> { }; match builder.parse(&pp) { - Ok(_) => { + Some(desc) => { println!("successfully parsed {}", app_root.display()); - Ok(()) } - Err(err) => { - log::error!("failed to parse: {:?}", err); + None => { // Get any errors from the emitter. let errs = errors.lock().unwrap(); let mut err_msg = String::new(); @@ -83,7 +80,7 @@ fn main() -> Result<()> { } err_msg.push_str(&format!("{:?}", err)); eprintln!("{}", err_msg); - anyhow::bail!("parse failure") + panic!("parse failure") } } }) diff --git a/tsparser/litparser-derive/src/lib.rs b/tsparser/litparser-derive/src/lib.rs index 4ca7684f93..3832c8cce6 100644 --- a/tsparser/litparser-derive/src/lib.rs +++ b/tsparser/litparser-derive/src/lib.rs @@ -23,7 +23,7 @@ pub fn derive_lit_parser(input: proc_macro::TokenStream) -> proc_macro::TokenStr // The generated impl. 
#[allow(non_snake_case)] impl #impl_generics litparser::LitParser for #name #ty_generics #where_clause { - fn parse_lit(#input_ident: &swc_ecma_ast::Expr) -> anyhow::Result { + fn parse_lit(#input_ident: &swc_ecma_ast::Expr) -> litparser::ParseResult { #impl_stream } } @@ -49,7 +49,7 @@ fn add_trait_bounds(mut generics: Generics) -> Generics { fn generate_impl(data: &Data, input_ident: &syn::Ident) -> proc_macro2::TokenStream { let init_stream = fields_init(data); let match_stream = match_expr(data, input_ident); - let return_stream = gen_return(data); + let return_stream = gen_return(data, input_ident); match *data { Data::Struct(ref data) => match data.fields { Fields::Named(_) => { @@ -100,7 +100,7 @@ fn match_expr(data: &Data, input_ident: &syn::Ident) -> proc_macro2::TokenStream let lit_ident = format_ident!("lit"); let prop_ident = format_ident!("prop"); let kv_ident = format_ident!("kv"); - let field_case_stream = gen_field_match_cases(data, &kv_ident); + let field_case_stream = gen_field_match_cases(data, &prop_ident, &kv_ident); let match_prop_stream = match_prop(&prop_ident, &kv_ident, field_case_stream); quote! { match #input_ident { @@ -109,7 +109,10 @@ fn match_expr(data: &Data, input_ident: &syn::Ident) -> proc_macro2::TokenStream #match_prop_stream } } - _ => anyhow::bail!("expected object literal"), + _ => return Err(litparser::ParseError { + span: ::span(#input_ident), + message: "expected object literal".to_string(), + }), } } } @@ -122,14 +125,22 @@ fn match_prop( ) -> proc_macro2::TokenStream { quote! 
{ match #prop_ident { - swc_ecma_ast::PropOrSpread::Spread(_) => anyhow::bail!("spread operator not supported"), + swc_ecma_ast::PropOrSpread::Spread(_) => { + return Err(litparser::ParseError { + span: ::span(#prop_ident), + message: "spread operator unsupported".to_string(), + }); + } swc_ecma_ast::PropOrSpread::Prop(prop) => match prop.as_ref() { swc_ecma_ast::Prop::Shorthand(_) | swc_ecma_ast::Prop::Assign(_) | swc_ecma_ast::Prop::Getter(_) | swc_ecma_ast::Prop::Setter(_) | swc_ecma_ast::Prop::Method(_) => { - anyhow::bail!("prop type {:?} not supported", prop) + return Err(litparser::ParseError { + span: ::span(prop), + message: format!("prop type {:?} not supported", prop), + }); } swc_ecma_ast::Prop::KeyValue(#kv_ident) => match &#kv_ident.key { @@ -142,7 +153,10 @@ fn match_prop( swc_ecma_ast::PropName::Num(_) | swc_ecma_ast::PropName::BigInt(_) | swc_ecma_ast::PropName::Computed(_) => { - anyhow::bail!("prop name kind {:?} not supported", kv.key) + return Err(litparser::ParseError { + span: ::span(#kv_ident), + message: format!("prop name kind {:?} not supported", kv.key), + }); } }, } @@ -151,17 +165,24 @@ fn match_prop( } // Generates an expression for the match cases for the fields. -fn gen_field_match_cases(data: &Data, kv_ident: &syn::Ident) -> proc_macro2::TokenStream { +fn gen_field_match_cases( + data: &Data, + prop_ident: &syn::Ident, + kv_ident: &syn::Ident, +) -> proc_macro2::TokenStream { match *data { Data::Struct(ref data) => match data.fields { Fields::Named(ref fields) => { let match_cases = fields.named.iter().map(|f| { - let name = &f.ident; - let match_literal = format!("{}", name.as_ref().unwrap()); + let name = f.ident.as_ref().unwrap(); + let match_literal = format!("{}", name); quote_spanned! 
{f.span() => #match_literal => { if #name.is_some() { - anyhow::bail!("field {} set twice", #match_literal); + return Err(litparser::ParseError { + span: ::span(#prop_ident), + message: format!("field {} set twice", #match_literal), + }); } let val = LitParser::parse_lit(&*#kv_ident.value)?; #name = Some(val); @@ -170,7 +191,12 @@ fn gen_field_match_cases(data: &Data, kv_ident: &syn::Ident) -> proc_macro2::Tok }); quote! { #(#match_cases)* - x @ _ => anyhow::bail!("unrecognized prop name {}", x), + x @ _ => { + return Err(litparser::ParseError { + span: ::span(#prop_ident), + message: "unexpected field".to_string(), + }); + } } } Fields::Unnamed(_) => { @@ -184,7 +210,7 @@ fn gen_field_match_cases(data: &Data, kv_ident: &syn::Ident) -> proc_macro2::Tok } } -fn gen_return(data: &Data) -> proc_macro2::TokenStream { +fn gen_return(data: &Data, input_ident: &syn::Ident) -> proc_macro2::TokenStream { match *data { Data::Struct(ref data) => match data.fields { Fields::Named(ref fields) => { @@ -197,7 +223,10 @@ fn gen_return(data: &Data) -> proc_macro2::TokenStream { } } else { quote_spanned! {f.span() => - #name: #name.ok_or_else(|| anyhow::anyhow!(concat!(stringify!(#name), " not set")))? + #name: #name.ok_or_else(|| litparser::ParseError { + span: ::span(#input_ident), + message: format!("field {} is required but is missing", stringify!(#name)), + })? 
} } }); diff --git a/tsparser/litparser-derive/tests/integration_tests.rs b/tsparser/litparser-derive/tests/integration_tests.rs index 4e10103e19..a7f289eb98 100644 --- a/tsparser/litparser-derive/tests/integration_tests.rs +++ b/tsparser/litparser-derive/tests/integration_tests.rs @@ -9,7 +9,7 @@ use swc_common::{FileName, SourceMap}; use swc_ecma_parser::lexer::Lexer; use swc_ecma_parser::{Parser, Syntax}; -use litparser::LitParser; +use litparser::{LitParser, ParseResult}; #[test] fn test_parse() { @@ -46,7 +46,7 @@ fn test_parse_refs() { foo: Option<&'a str>, } impl LitParser for Dummy<'_> { - fn parse_lit(_input: &swc_ecma_ast::Expr) -> anyhow::Result { + fn parse_lit(_input: &swc_ecma_ast::Expr) -> ParseResult { Ok(Self { foo: None }) } } diff --git a/tsparser/litparser/src/lib.rs b/tsparser/litparser/src/lib.rs index f518bdf6fc..1584817d49 100644 --- a/tsparser/litparser/src/lib.rs +++ b/tsparser/litparser/src/lib.rs @@ -1,90 +1,146 @@ -use anyhow::Result; use duration_string::DurationString; use num_bigint::{BigInt, ToBigInt}; use std::{ + error::Error, fmt::{Debug, Display}, ops::{Deref, DerefMut}, path::{Component, PathBuf}, }; -use swc_common::{pass::Either, util::take::Take, Span, Spanned}; +use swc_common::{errors::HANDLER, pass::Either, util::take::Take, Span, Spanned}; use swc_ecma_ast as ast; +#[derive(Debug, Clone, Hash)] +pub struct ParseError { + pub span: Span, + pub message: String, +} + +impl Error for ParseError {} + +impl Display for ParseError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(&self.message) + } +} + +impl ParseError { + pub fn report(self) { + HANDLER.with(|handler| { + handler.span_err(self.span, &self.message); + }); + } +} + +#[macro_export] +macro_rules! report_and_continue { + ($e:expr) => { + match $e { + Ok(v) => v, + Err(err) => { + err.report(); + continue; + } + } + }; +} + +#[macro_export] +macro_rules! 
report_and_return { + ($e:expr) => { + match $e { + Ok(v) => v, + Err(err) => { + err.report(); + return; + } + } + }; +} + +pub trait ToParseErr { + fn parse_err>(&self, message: S) -> ParseError; +} + +impl ToParseErr for T +where + T: Spanned, +{ + fn parse_err>(&self, message: S) -> ParseError { + ParseError { + span: self.span(), + message: message.into(), + } + } +} + +pub type ParseResult = Result; + pub trait LitParser: Sized { - fn parse_lit(input: &ast::Expr) -> Result; + fn parse_lit(input: &ast::Expr) -> ParseResult; } impl LitParser for Sp where T: LitParser, { - fn parse_lit(input: &ast::Expr) -> Result { + fn parse_lit(input: &ast::Expr) -> ParseResult { let res = T::parse_lit(input)?; Ok(Sp(input.span(), res)) } } impl LitParser for String { - fn parse_lit(input: &ast::Expr) -> Result { + fn parse_lit(input: &ast::Expr) -> ParseResult { match input { ast::Expr::Lit(ast::Lit::Str(str)) => Ok(str.value.to_string()), - _ => { - anyhow::bail!("expected string literal, got {:?}", input) - } + _ => Err(input.parse_err("expected string literal")), } } } impl LitParser for bool { - fn parse_lit(input: &ast::Expr) -> Result { + fn parse_lit(input: &ast::Expr) -> ParseResult { match input { ast::Expr::Lit(ast::Lit::Bool(b)) => Ok(b.value), - _ => anyhow::bail!("expected boolean literal, got {:?}", input), + _ => Err(input.parse_err("expected boolean literal")), } } } impl LitParser for i32 { - fn parse_lit(input: &ast::Expr) -> Result { + fn parse_lit(input: &ast::Expr) -> ParseResult { let big = parse_const_bigint(input)?; - let val: i32 = big - .try_into() - .map_err(|_| anyhow::anyhow!("expected number literal, got {:?}", input))?; - Ok(val) + big.try_into() + .map_err(|_| input.parse_err("expected number literal")) } } impl LitParser for u32 { - fn parse_lit(input: &ast::Expr) -> Result { + fn parse_lit(input: &ast::Expr) -> ParseResult { let big = parse_const_bigint(input)?; - let val: u32 = big - .try_into() - .map_err(|_| anyhow::anyhow!("expected 
unsigned number literal, got {:?}", input))?; - Ok(val) + big.try_into() + .map_err(|_| input.parse_err("expected unsigned number literal")) } } impl LitParser for i64 { - fn parse_lit(input: &ast::Expr) -> Result { + fn parse_lit(input: &ast::Expr) -> ParseResult { let big = parse_const_bigint(input)?; - let val: i64 = big - .try_into() - .map_err(|_| anyhow::anyhow!("expected number literal, got {:?}", input))?; - Ok(val) + big.try_into() + .map_err(|_| input.parse_err("expected number literal")) } } impl LitParser for u64 { - fn parse_lit(input: &ast::Expr) -> Result { + fn parse_lit(input: &ast::Expr) -> ParseResult { let big = parse_const_bigint(input)?; - let val: u64 = big - .try_into() - .map_err(|_| anyhow::anyhow!("expected unsigned number literal, got {:?}", input))?; - Ok(val) + big.try_into() + .map_err(|_| input.parse_err("expected unsigned number literal")) } } impl LitParser for ast::Expr { - fn parse_lit(input: &ast::Expr) -> Result { + fn parse_lit(input: &ast::Expr) -> ParseResult { Ok(input.clone()) } } @@ -93,7 +149,7 @@ impl LitParser for Option where T: LitParser, { - fn parse_lit(input: &ast::Expr) -> Result> { + fn parse_lit(input: &ast::Expr) -> ParseResult> { let t = T::parse_lit(input)?; Ok(Some(t)) } @@ -104,7 +160,7 @@ where L: LitParser, R: LitParser, { - fn parse_lit(input: &ast::Expr) -> Result> { + fn parse_lit(input: &ast::Expr) -> ParseResult> { let res = L::parse_lit(input) .map(Either::Left) .or_else(|_| R::parse_lit(input).map(Either::Right))?; @@ -114,42 +170,54 @@ where } impl LitParser for std::time::Duration { - fn parse_lit(input: &ast::Expr) -> Result { + fn parse_lit(input: &ast::Expr) -> ParseResult { match input { ast::Expr::Lit(ast::Lit::Str(str)) => { - let dur = - DurationString::try_from(str.value.to_string()).map_err(anyhow::Error::msg)?; + let dur = DurationString::try_from(str.value.to_string()) + .map_err(|e| str.parse_err(e))?; Ok(dur.into()) } - _ => anyhow::bail!("expected duration string, got {:?}", 
input), + _ => Err(input.parse_err("expected duration string literal")), } } } /// Represents a local, relative path (without ".." or a root). #[derive(Debug, Clone)] -pub struct LocalRelPath(pub PathBuf); +pub struct LocalRelPath { + pub span: Span, + pub buf: PathBuf, +} + +impl Spanned for LocalRelPath { + fn span(&self) -> Span { + self.span + } +} impl LocalRelPath { - pub fn try_from>(str: S) -> Result { + pub fn try_from>(sp: Span, str: S) -> ParseResult { let str = str.as_ref(); let path = PathBuf::from(str); for c in path.components() { match c { Component::CurDir => {} Component::Normal(_) => {} - _ => anyhow::bail!("expected a local relative path, got {:?}", str), + _ => return Err(sp.parse_err("expected a local relative path")), } } - Ok(LocalRelPath(clean_path::clean(path))) + Ok(LocalRelPath { + span: sp, + buf: clean_path::clean(path), + }) } } impl LitParser for LocalRelPath { - fn parse_lit(input: &ast::Expr) -> Result { + fn parse_lit(input: &ast::Expr) -> ParseResult { match input { - ast::Expr::Lit(ast::Lit::Str(str)) => LocalRelPath::try_from(&str.value), - _ => anyhow::bail!("expected a local relative path, got {:?}", input), + ast::Expr::Lit(ast::Lit::Str(str)) => LocalRelPath::try_from(str.span, &str.value), + _ => Err(input.parse_err("expected a local relative path")), } } } @@ -164,7 +232,7 @@ impl LitParser for Nullable where T: LitParser, { - fn parse_lit(input: &ast::Expr) -> Result { + fn parse_lit(input: &ast::Expr) -> ParseResult { match input { ast::Expr::Lit(ast::Lit::Null(_)) => Ok(Nullable::Null), _ => { @@ -187,16 +255,16 @@ where } } -fn parse_const_bigint(expr: &ast::Expr) -> Result { +fn parse_const_bigint(expr: &ast::Expr) -> ParseResult { match expr { ast::Expr::Lit(ast::Lit::Num(num)) => { let int = num.value as i64; if int as f64 != num.value { - anyhow::bail!("expected integer literal, got float"); + return Err(num.parse_err("expected integer literal")); } - let big = int.to_bigint().ok_or_else(|| { - 
anyhow::anyhow!("expected integer literal, got too large integer") - })?; + let Some(big) = int.to_bigint() else { + return Err(num.parse_err("integer too large")); + }; Ok(big) } ast::Expr::Unary(unary) => match unary.op { @@ -205,7 +273,7 @@ fn parse_const_bigint(expr: &ast::Expr) -> Result { Ok(-x) } ast::UnaryOp::Plus => parse_const_bigint(&unary.arg), - _ => anyhow::bail!("unsupported unary operator {:?}", unary.op), + _ => Err(unary.parse_err(format!("unsupported unary operator {:?}", unary.op))), }, ast::Expr::Bin(bin) => { let x = parse_const_bigint(&bin.left)?; @@ -223,18 +291,27 @@ fn parse_const_bigint(expr: &ast::Expr) -> Result { if remainder.is_zero() { Ok(quo) } else { - anyhow::bail!("expected integer division, got {:?}", expr) + Err(bin.parse_err("expected integer division")) } } - _ => anyhow::bail!("expected arithmetic operator, got {:?}", bin.op), + _ => Err(bin.parse_err(format!("expected arithmetic operator, got {:?}", bin.op))), } } - _ => anyhow::bail!("expected integer literal, got {:?}", expr), + _ => Err(expr.parse_err("expected integer literal")), } } pub struct Sp(Span, T); +impl Clone for Sp +where + T: Clone, +{ + fn clone(&self) -> Self { + Sp(self.0, self.1.clone()) + } +} + impl Sp { pub fn new(sp: Span, val: T) -> Self { Self(sp, val) @@ -244,6 +321,10 @@ impl Sp { Self::new(Span::dummy(), val) } + pub fn with(&self, val: U) -> Sp { + Sp::new(self.0, val) + } + pub fn split(self) -> (Span, T) { (self.0, self.1) } @@ -262,10 +343,43 @@ impl Sp { { Sp(self.0, f(self.1)) } + + pub fn get(&self) -> &T { + &self.1 + } + + pub fn as_deref(&self) -> &T::Target + where + T: Deref, + { + self.1.deref() + } +} + +impl Sp> { + pub fn transpose(self) -> Result, E> { + match self.1 { + Ok(inner) => Ok(Sp(self.0, inner)), + Err(err) => Err(err), + } + } +} + +impl AsRef for Sp { + fn as_ref(&self) -> &T { + &self.1 + } +} + +impl AsMut for Sp { + fn as_mut(&mut self) -> &mut T { + &mut self.1 + } } impl Deref for Sp { type Target = T; + fn 
deref(&self) -> &Self::Target { &self.1 } @@ -306,15 +420,6 @@ where } } -impl Clone for Sp -where - T: Clone, -{ - fn clone(&self) -> Self { - Self(self.0, self.1.clone()) - } -} - impl Copy for Sp where T: Copy {} impl Debug for Sp diff --git a/tsparser/src/app/mod.rs b/tsparser/src/app/mod.rs index de931bbd7a..b917c2d56e 100644 --- a/tsparser/src/app/mod.rs +++ b/tsparser/src/app/mod.rs @@ -1,6 +1,5 @@ use std::collections::HashMap; -use anyhow::Result; use matchit::InsertError; use swc_common::errors::HANDLER; @@ -11,6 +10,7 @@ use crate::parser::resources::apis::api::{Method, Methods}; use crate::parser::resources::Resource; use crate::parser::respath::Path; use crate::parser::Range; +use litparser::ParseResult as PResult; #[derive(Debug)] pub struct AppDesc { @@ -78,7 +78,7 @@ impl AppDesc { } } -pub fn validate_and_describe(pc: &ParseContext, parse: ParseResult) -> Result { +pub fn validate_and_describe(pc: &ParseContext, parse: ParseResult) -> PResult { let meta = compute_meta(pc, &parse)?; let desc = AppDesc { parse, meta }; diff --git a/tsparser/src/bin/tsparser-encore.rs b/tsparser/src/bin/tsparser-encore.rs index 6d27615e30..0d1909d462 100644 --- a/tsparser/src/bin/tsparser-encore.rs +++ b/tsparser/src/bin/tsparser-encore.rs @@ -111,14 +111,13 @@ fn main() -> Result<()> { }; match builder.parse(&pp) { - Ok(result) => { + Some(result) => { log::info!("parse successful"); write_result(Ok(result.meta.encode_to_vec().as_slice()))?; parse = Some((app, result)); } - Err(err) => { - log::error!("failed to parse: {:?}", err); - // Get any errors from the emitter. + None => { + // Get errors from the emitter. let errs = errors.lock().unwrap(); let mut err_msg = String::new(); for err in errs.iter() { @@ -126,14 +125,9 @@ fn main() -> Result<()> { err_msg.push('\n'); } - if err.is::() { - // Don't include stack trace or detailed error info - // if this is a parse error. - write_result(Err(anyhow::anyhow!(PlainError(err_msg))))? 
- } else { - err_msg.push_str(&format!("{:?}", err)); - write_result(Err(anyhow::anyhow!(err_msg)))? - } + // Don't include stack trace or detailed error info + // if this is a parse error. + write_result(Err(anyhow::anyhow!(PlainError(err_msg))))? } } } diff --git a/tsparser/src/builder/parse.rs b/tsparser/src/builder/parse.rs index e9eb63f263..95c00240dd 100644 --- a/tsparser/src/builder/parse.rs +++ b/tsparser/src/builder/parse.rs @@ -1,8 +1,6 @@ use std::fmt::Display; use std::path::Path; -use anyhow::Result; - use crate::app::{validate_and_describe, AppDesc}; use crate::parser::parser::{ParseContext, Parser}; use crate::parser::resourceparser::PassOneParser; @@ -27,7 +25,7 @@ impl Display for ParseError { } impl Builder<'_> { - pub fn parse(&self, params: &ParseParams) -> Result { + pub fn parse(&self, params: &ParseParams) -> Option { let pc = params.pc; let pass1 = PassOneParser::new( pc.file_set.clone(), @@ -36,12 +34,19 @@ impl Builder<'_> { ); let parser = Parser::new(pc, pass1); - let result = parser.parse()?; - let desc = validate_and_describe(pc, result)?; + let result = parser.parse(); + let desc = match validate_and_describe(pc, result) { + Ok(desc) => desc, + Err(err) => { + err.report(); + return None; + } + }; if pc.errs.has_errors() { - anyhow::bail!(ParseError); + None + } else { + Some(desc) } - Ok(desc) } } diff --git a/tsparser/src/builder/templates/entrypoints/combined/main.handlebars b/tsparser/src/builder/templates/entrypoints/combined/main.handlebars index c8bfd20b35..7c068e842a 100644 --- a/tsparser/src/builder/templates/entrypoints/combined/main.handlebars +++ b/tsparser/src/builder/templates/entrypoints/combined/main.handlebars @@ -1,4 +1,7 @@ import { registerGateways, registerHandlers, run, type Handler } from "encore.dev/internal/codegen/appinit"; +import { Worker, isMainThread } from "node:worker_threads"; +import { fileURLToPath } from "node:url"; +import { availableParallelism } from "node:os"; {{#each gateways}} import { 
{{bind_name}} as {{encoreNameToIdent encore_name}}GW } from {{toJSON import_path}}; @@ -33,4 +36,13 @@ const handlers: Handler[] = [ registerGateways(gateways); registerHandlers(handlers); -await run(); +if (isMainThread) { + const path = fileURLToPath(import.meta.url); + for (let i = 0; i < availableParallelism()-1; i++) { + new Worker(path); + } + + await run(); +} else { + await new Promise(() => { }); +} diff --git a/tsparser/src/builder/templates/entrypoints/gateways/main.handlebars b/tsparser/src/builder/templates/entrypoints/gateways/main.handlebars index d7ca3672a5..b8d239323a 100644 --- a/tsparser/src/builder/templates/entrypoints/gateways/main.handlebars +++ b/tsparser/src/builder/templates/entrypoints/gateways/main.handlebars @@ -1,4 +1,7 @@ import { registerGateways, run } from "encore.dev/internal/codegen/appinit"; +import { Worker, isMainThread } from "node:worker_threads"; +import { fileURLToPath } from "node:url"; +import { availableParallelism } from "node:os"; {{#each gateways}} import { {{bind_name}} as {{encoreNameToIdent encore_name}}Impl } from {{toJSON import_path}}; @@ -6,10 +9,19 @@ import { {{bind_name}} as {{encoreNameToIdent encore_name}}Impl } from {{toJSON const gateways = [ {{#each gateways}} - {{encore_name}}Impl, + {{encoreNameToIdent encore_name}}Impl, {{/each}} ]; registerGateways(gateways); -await run(); +if (isMainThread) { + const path = fileURLToPath(import.meta.url); + for (let i = 0; i < availableParallelism()-1; i++) { + new Worker(path); + } + + await run(); +} else { + await new Promise(() => { }); +} diff --git a/tsparser/src/builder/templates/entrypoints/services/main.handlebars b/tsparser/src/builder/templates/entrypoints/services/main.handlebars index ba8c9df1db..31c89d8ef7 100644 --- a/tsparser/src/builder/templates/entrypoints/services/main.handlebars +++ b/tsparser/src/builder/templates/entrypoints/services/main.handlebars @@ -1,4 +1,8 @@ import { registerHandlers, run, type Handler } from 
"encore.dev/internal/codegen/appinit"; +import { Worker, isMainThread } from "node:worker_threads"; +import { fileURLToPath } from "node:url"; +import { availableParallelism } from "node:os"; + {{#each endpoints}} import { {{name}} as {{name}}Impl{{@index}} } from {{toJSON import_path}}; {{/each}} @@ -20,4 +24,14 @@ const handlers: Handler[] = [ ]; registerHandlers(handlers); -await run(); + +if (isMainThread) { + const path = fileURLToPath(import.meta.url); + for (let i = 0; i < availableParallelism()-1; i++) { + new Worker(path); + } + + await run(); +} else { + await new Promise(() => { }); +} diff --git a/tsparser/src/legacymeta/mod.rs b/tsparser/src/legacymeta/mod.rs index 98a422b658..4e4dabe52e 100644 --- a/tsparser/src/legacymeta/mod.rs +++ b/tsparser/src/legacymeta/mod.rs @@ -2,7 +2,6 @@ use std::collections::{HashMap, HashSet}; use std::path::Path; use std::rc::Rc; -use anyhow::{Context, Result}; use swc_common::errors::HANDLER; use crate::encore::parser::meta::v1; @@ -13,16 +12,17 @@ use crate::parser::resources::apis::{authhandler, gateway}; use crate::parser::resources::infra::cron::CronJobSchedule; use crate::parser::resources::infra::{cron, objects, pubsub_subscription, pubsub_topic, sqldb}; use crate::parser::resources::Resource; -use crate::parser::types::ObjectId; +use crate::parser::types::{Object, ObjectId}; use crate::parser::usageparser::Usage; use crate::parser::{respath, FilePath, Range}; +use litparser::{ParseResult as PResult, ToParseErr}; mod api_schema; mod schema; const DEFAULT_API_GATEWAY_NAME: &str = "api-gateway"; -pub fn compute_meta(pc: &ParseContext, parse: &ParseResult) -> Result { +pub fn compute_meta(pc: &ParseContext, parse: &ParseResult) -> PResult { let app_root = pc.app_root.as_path(); let schema = SchemaBuilder::new(pc, app_root); @@ -46,14 +46,23 @@ struct MetaBuilder<'a> { } impl MetaBuilder<'_> { - pub fn build(mut self) -> Result { + pub fn build(mut self) -> PResult { // self.data.app_revision = 
parse_app_revision(&self.app_root)?; self.data.app_revision = std::env::var("ENCORE_APP_REVISION").unwrap_or_default(); let mut svc_index = HashMap::new(); let mut svc_to_pkg_index = HashMap::new(); for svc in &self.parse.services { - let rel_path = self.rel_path_string(svc.root.as_path())?; + let Some(rel_path) = self.rel_path_string(svc.root.as_path()) else { + HANDLER.with(|h| { + h.err(&format!( + "unable to compute relative path to service: {}", + svc.name + )) + }); + continue; + }; + svc_to_pkg_index.insert(svc.name.clone(), self.data.pkgs.len()); self.data.pkgs.push(v1::Package { rel_path: rel_path.clone(), @@ -115,7 +124,15 @@ impl MetaBuilder<'_> { let request_schema = self.schema.transform_request(ep)?; let response_schema = self .schema - .transform_response(ep.encoding.raw_resp_schema.clone())?; + .transform_response(ep.encoding.raw_resp_schema.clone().map(|s| s.take())) + .map_err(|err| { + let sp = ep + .encoding + .raw_resp_schema + .as_ref() + .map_or(ep.range.to_span(), |s| s.span()); + sp.parse_err(err.to_string()) + })?; let access_type: i32 = match (ep.expose, ep.require_auth) { (false, _) => v1::rpc::AccessType::Private as i32, @@ -126,12 +143,18 @@ impl MetaBuilder<'_> { let static_assets = ep .static_assets .as_ref() - .map(|sa| -> Result { - let dir_rel_path = self.rel_path_string(&sa.dir)?; + .map(|sa| -> PResult { + let dir_rel_path = self.rel_path_string(&sa.dir).ok_or( + sa.dir.parse_err("could not resolve static asset directory"), + )?; let not_found_rel_path = sa .not_found .as_ref() - .map(|p| self.rel_path_string(p)) + .map(|p| { + self.rel_path_string(p).ok_or( + p.parse_err("could not resolve static notFound path"), + ) + }) .transpose()?; Ok(v1::rpc::StaticAssets { dir_rel_path, @@ -175,10 +198,14 @@ impl MetaBuilder<'_> { static_assets, }; - let service_idx = svc_index - .get(&ep.service_name) - .ok_or(anyhow::anyhow!("missing service: {}", ep.service_name))? 
- .to_owned(); + let Some(service_idx) = + svc_index.get(&ep.service_name).map(|idx| idx.to_owned()) + else { + return Err(ep + .range + .to_span() + .parse_err(format!("missing service {}", ep.service_name))); + }; let service = &mut self.data.svcs[service_idx]; if let Some(obj) = &b.object { @@ -214,14 +241,19 @@ impl MetaBuilder<'_> { } Resource::Secret(secret) => { - let service = self - .service_for_range(&secret.range) - .ok_or(anyhow::anyhow!( - "secrets must be loaded from within services" - ))?; + let service = self.service_for_range(&secret.range).ok_or( + secret + .range + .parse_err("secrets must be loaded from within services"), + )?; + let pkg_idx = svc_to_pkg_index .get(&service.name) - .ok_or(anyhow::anyhow!("missing service: {}", &service.name))? + .ok_or( + secret + .range + .parse_err(format!("missing service: {}", &service.name)), + )? .to_owned(); let pkg = &mut self.data.pkgs[pkg_idx]; pkg.secrets.push(secret.name.clone()); @@ -241,13 +273,18 @@ impl MetaBuilder<'_> { } } + // Keep track of things we've seen so we can report errors pointing at + // the previous definition when we see a duplicate. + let mut first_gateway: Option<&gateway::Gateway> = None; + let mut first_auth_handler: Option<&Object> = None; + // Make a second pass for resources that depend on other resources. for r in &dependent { match r { Dependent::PubSubSubscription((b, sub)) => { let topic_idx = topic_idx .get(&sub.topic.id) - .ok_or(anyhow::anyhow!("missing topic"))? + .ok_or_else(|| sub.topic.parse_err("topic not found"))? .to_owned(); let result = self.pubsub_subscription(b, sub)?; let topic = &mut self.data.pubsub_topics[topic_idx]; @@ -257,7 +294,7 @@ impl MetaBuilder<'_> { Dependent::CronJob((_b, cj)) => { let (svc_idx, ep_idx) = endpoint_idx .get(&cj.endpoint.id) - .ok_or(anyhow::anyhow!("missing endpoint"))? + .ok_or(cj.endpoint.parse_err("endpoint not found"))? 
.to_owned(); let svc = &self.data.svcs[svc_idx]; let ep = &svc.rpcs[ep_idx]; @@ -288,21 +325,30 @@ impl MetaBuilder<'_> { let service_name = self .service_for_range(&ah.range) - .ok_or(anyhow::anyhow!( - "unable to determine service for auth handler" - ))? + .ok_or( + ah.range + .parse_err("unable to determine service for auth handler"), + )? .name .clone(); let loc = loc_from_range(self.app_root, &self.pc.file_set, ah.range)?; + let params = self + .schema + .typ(&ah.encoding.auth_param) + .map_err(|err| ah.encoding.auth_param.parse_err(err.to_string()))?; + let auth_data = self + .schema + .typ(&ah.encoding.auth_data) + .map_err(|err| ah.encoding.auth_data.parse_err(err.to_string()))?; Some(v1::AuthHandler { name: ah.name.clone(), doc: ah.doc.clone().unwrap_or_default(), pkg_path: loc.pkg_path.clone(), pkg_name: loc.pkg_name.clone(), loc: Some(loc), - params: Some(self.schema.typ(&ah.encoding.auth_param)?), - auth_data: Some(self.schema.typ(&ah.encoding.auth_data)?), + params: Some(params), + auth_data: Some(auth_data), service_name, }) } else { @@ -311,19 +357,51 @@ impl MetaBuilder<'_> { let service_name = self .service_for_range(&gw.range) - .ok_or(anyhow::anyhow!("unable to determine service for gateway"))? + .ok_or( + gw.range + .parse_err("unable to determine service for gateway"), + )? 
.name .clone(); - if self.data.auth_handler.is_some() { - anyhow::bail!("multiple auth handlers not yet supported"); - } else if !self.data.gateways.is_empty() { - anyhow::bail!("multiple gateways not yet supported"); + if let Some(first) = first_gateway { + HANDLER.with(|h| { + h.struct_span_err( + gw.range.to_span(), + "multiple gateways not yet supported", + ) + .span_help(first.range.to_span(), "previous gateway defined here") + .emit(); + }); + continue; + } else { + first_gateway = Some(gw); + } + + if let Some(ah) = &gw.auth_handler { + if let Some(first) = first_auth_handler { + HANDLER.with(|h| { + h.struct_span_err( + ah.range.to_span(), + "multiple auth handlers not yet supported", + ) + .span_help( + first.range.to_span(), + "previous auth handler defined here", + ) + .emit(); + }); + continue; + } else { + first_auth_handler = Some(ah); + } } + self.data.auth_handler.clone_from(&auth_handler); if gw.name != "api-gateway" { - anyhow::bail!("only the 'api-gateway' gateway is supported"); + gw.range.err("only the 'api-gateway' gateway is supported"); + continue; } let encore_name = DEFAULT_API_GATEWAY_NAME.to_string(); @@ -345,9 +423,11 @@ impl MetaBuilder<'_> { for u in &self.parse.usages { match u { Usage::PublishTopic(publish) => { - let svc = self - .service_for_range(&publish.range) - .ok_or(anyhow::anyhow!("unable to determine service for publish"))?; + let svc = + self.service_for_range(&publish.range) + .ok_or(publish.range.parse_err( + "unable to determine which service this 'publish' call is within", + ))?; // Add the publisher if it hasn't already been seen. let key = (svc.name.clone(), publish.topic.name.clone()); @@ -356,7 +436,7 @@ impl MetaBuilder<'_> { let idx = topic_by_name .get(&publish.topic.name) - .ok_or(anyhow::anyhow!("missing topic: {}", publish.topic.name))? + .ok_or(publish.range.parse_err("could not resolve topic"))? 
.to_owned(); let topic = &mut self.data.pubsub_topics[idx]; topic @@ -366,12 +446,9 @@ impl MetaBuilder<'_> { } Usage::AccessDatabase(access) => { let Some(svc) = self.service_for_range(&access.range) else { - HANDLER.with(|h| { - h.span_err( - access.range.to_span(), - "unable to determine which service is accessing this database", - ) - }); + access + .range + .parse_err("cannot determine which service is accessing this database"); continue; }; @@ -414,7 +491,7 @@ impl MetaBuilder<'_> { Usage::CallEndpoint(call) => { let src_service = self .service_for_range(&call.range) - .ok_or(anyhow::anyhow!("unable to determine service for call"))? + .ok_or(call.range.parse_err("unable to determine service for call"))? .name .clone(); let dst_service = call.endpoint.0.clone(); @@ -422,14 +499,17 @@ impl MetaBuilder<'_> { let dst_idx = svc_to_pkg_index .get(&dst_service) - .ok_or(anyhow::anyhow!("missing service: {}", &dst_service))? + .ok_or( + call.range + .parse_err("could not resolve destination service"), + )? .to_owned(); let dst_pkg_rel_path = self.data.pkgs[dst_idx].rel_path.clone(); let src_idx = svc_to_pkg_index .get(&src_service) - .ok_or(anyhow::anyhow!("missing service: {}", src_service))? + .ok_or(call.range.parse_err("could not resolve calling service"))? 
.to_owned(); let src_pkg = &mut self.data.pkgs[src_idx]; @@ -488,10 +568,14 @@ impl MetaBuilder<'_> { Ok(self.data) } - fn pubsub_topic(&mut self, topic: &pubsub_topic::Topic) -> Result { + fn pubsub_topic(&mut self, topic: &pubsub_topic::Topic) -> PResult { use pubsub_topic::DeliveryGuarantee; - let message_type = self.schema.typ(&topic.message_type)?; - let mut topic = v1::PubSubTopic { + let message_type = self.schema.typ(&topic.message_type).map_err(|e| { + topic + .message_type + .parse_err(format!("could not resolve message type: {}", e)) + })?; + Ok(v1::PubSubTopic { name: topic.name.clone(), doc: topic.doc.clone(), message_type: Some(message_type), @@ -502,36 +586,20 @@ impl MetaBuilder<'_> { ordering_key: topic.ordering_attribute.clone().unwrap_or_default(), publishers: vec![], // filled in below subscriptions: vec![], // filled in below - }; - - let mut seen_publishers = HashSet::new(); - let _add_publisher = |svc_name: &str| { - if !seen_publishers.contains(svc_name) { - topic.publishers.push(v1::pub_sub_topic::Publisher { - service_name: svc_name.to_string(), - }); - seen_publishers.insert(svc_name.to_string()); - } - }; - - // Sort the publishers for deterministic output. - topic - .publishers - .sort_by(|a, b| a.service_name.cmp(&b.service_name)); - - Ok(topic) + }) } fn pubsub_subscription( &self, bind: &Bind, sub: &pubsub_subscription::Subscription, - ) -> Result { + ) -> PResult { let service_name = self - .service_for_range(&bind.range.unwrap()) - .ok_or(anyhow::anyhow!( - "unable to determine service for subscription" - ))? + .service_for_range(&bind.range.unwrap_or(sub.range)) + .ok_or( + sub.range + .parse_err("unable to determine which service the subscription belongs to"), + )? .name .clone(); @@ -549,12 +617,15 @@ impl MetaBuilder<'_> { }) } - fn sql_database(&self, db: &sqldb::SQLDatabase) -> Result { + fn sql_database(&self, db: &sqldb::SQLDatabase) -> PResult { // Transform the migrations into the metadata format. 
let (migration_rel_path, migrations, allow_non_sequential_migrations) = match &db.migrations { Some(spec) => { - let rel_path = self.rel_path_string(&spec.dir)?; + let rel_path = self + .rel_path_string(&spec.dir) + .ok_or(spec.parse_err("unable to resolve migration directory"))?; + let migrations = spec .migrations .iter() @@ -589,18 +660,14 @@ impl MetaBuilder<'_> { /// Compute the relative path from the app root. /// It reports an error if the path is not under the app root. - fn rel_path<'b>(&self, path: &'b Path) -> Result<&'b Path> { - let suffix = path.strip_prefix(self.app_root)?; - Ok(suffix) + fn rel_path<'b>(&self, path: &'b Path) -> Option<&'b Path> { + path.strip_prefix(self.app_root).ok() } /// Compute the relative path from the app root as a String. - fn rel_path_string(&self, path: &Path) -> Result { + fn rel_path_string(&self, path: &Path) -> Option { let suffix = self.rel_path(path)?; - let s = suffix - .to_str() - .ok_or(anyhow::anyhow!("invalid path: {:?}", path))?; - Ok(s.to_string()) + suffix.to_str().map(|s| s.to_string()) } fn service_for_range(&self, range: &Range) -> Option<&Service> { @@ -624,7 +691,7 @@ impl respath::Path { segments: self .segments .iter() - .map(|seg| match seg { + .map(|seg| match seg.get() { Segment::Literal(lit) => v1::PathSegment { r#type: SegmentType::Literal as i32, value_type: ParamType::String as i32, @@ -677,24 +744,6 @@ fn new_meta() -> v1::Data { } } -fn _parse_app_revision(dir: &Path) -> anyhow::Result { - duct::cmd!( - "git", - "-c", - "log.showsignature=false", - "show", - "-s", - "--format=%H:%ct" - ) - .dir(dir) - .read() - .map_err(|e| anyhow::anyhow!("failed to run git: {}", e)) - .and_then(|s| { - let (hash, _) = s.trim().split_once(':').context("invalid git output")?; - Ok(hash.to_string()) - }) -} - #[cfg(test)] mod tests { use swc_common::errors::{Handler, HANDLER}; @@ -708,7 +757,7 @@ mod tests { use super::*; - fn parse(tmp_dir: &Path, src: &str) -> Result { + fn parse(tmp_dir: &Path, src: &str) 
-> anyhow::Result { let globals = Globals::new(); let cm: Rc = Default::default(); let errs = Rc::new(Handler::with_tty_emitter( @@ -718,8 +767,8 @@ mod tests { Some(cm.clone()), )); - GLOBALS.set(&globals, || { - HANDLER.set(&errs, || { + GLOBALS.set(&globals, || -> anyhow::Result<_> { + HANDLER.set(&errs, || -> anyhow::Result<_> { let ar = txtar::from_str(src); ar.materialize(tmp_dir)?; @@ -740,14 +789,15 @@ mod tests { Default::default(), ); let parser = Parser::new(&pc, pass1); - let parse = parser.parse()?; - compute_meta(&pc, &parse) + let parse = parser.parse(); + let md = compute_meta(&pc, &parse)?; + Ok(md) }) }) } #[test] - fn test_legacymeta() -> Result<()> { + fn test_legacymeta() -> anyhow::Result<()> { let src = r#" -- foo.ts -- import { Bar } from './bar.ts'; diff --git a/tsparser/src/legacymeta/schema.rs b/tsparser/src/legacymeta/schema.rs index 04665747ac..bd48eb398b 100644 --- a/tsparser/src/legacymeta/schema.rs +++ b/tsparser/src/legacymeta/schema.rs @@ -4,6 +4,7 @@ use std::path::Path; use anyhow::Result; use itertools::Itertools; +use litparser::{ParseResult, ToParseErr}; use swc_common::errors::HANDLER; use crate::encore::parser::schema::v1 as schema; @@ -57,14 +58,14 @@ impl<'a> SchemaBuilder<'a> { ctx.typ(typ) } - pub fn transform_handshake(&mut self, ep: &Endpoint) -> Result> { + pub fn transform_handshake(&mut self, ep: &Endpoint) -> ParseResult> { let mut ctx = BuilderCtx { builder: self, decl_id: None, }; ctx.transform_handshake(ep) } - pub fn transform_request(&mut self, ep: &Endpoint) -> Result> { + pub fn transform_request(&mut self, ep: &Endpoint) -> ParseResult> { let mut ctx = BuilderCtx { builder: self, decl_id: None, @@ -91,6 +92,7 @@ impl BuilderCtx<'_, '_> { typ: Some(styp::Typ::List(Box::new(schema::List { elem: Some(Box::new(elem)), }))), + validation: None, } } Type::Interface(tt) => self.interface(tt)?, @@ -109,19 +111,23 @@ impl BuilderCtx<'_, '_> { EnumValue::Number(n) => schema::literal::Value::Int(n), }), })), + 
validation: None, }) .collect(), })), + validation: None, }, Type::Union(types) => schema::Type { typ: Some(styp::Typ::Union(schema::Union { types: self.types(types)?, })), + validation: None, }, Type::Tuple(_) => anyhow::bail!("tuple types are not yet supported in schemas"), Type::Literal(tt) => schema::Type { typ: Some(styp::Typ::Literal(self.literal(tt))), + validation: None, }, Type::Class(_) => anyhow::bail!("class types are not yet supported in schemas"), Type::Named(tt) => { @@ -142,6 +148,7 @@ impl BuilderCtx<'_, '_> { } else { schema::Type { typ: Some(styp::Typ::Named(self.named(tt)?)), + validation: None, } } } @@ -157,6 +164,7 @@ impl BuilderCtx<'_, '_> { decl_id, param_idx: param.idx as u32, })), + validation: None, } } @@ -167,12 +175,28 @@ impl BuilderCtx<'_, '_> { ) } }, + + Type::Validation(expr) => { + anyhow::bail!( + "unresolved standalone validation expression not supported in api schema: {:#?}", + expr + ) + } + + Type::Validated((typ, expr)) => { + let mut typ = self.typ(typ)?; + // Simplify the validation expression, if possible. 
+ let expr = expr.clone().simplify(); + typ.validation = Some(expr.to_pb()); + typ + } }) } fn basic(&self, typ: &Basic) -> schema::Type { let b = |b: schema::Builtin| schema::Type { typ: Some(styp::Typ::Builtin(b as i32)), + validation: None, }; match typ { Basic::Any | Basic::Unknown => b(schema::Builtin::Any), @@ -187,6 +211,7 @@ impl BuilderCtx<'_, '_> { typ: Some(styp::Typ::Literal(schema::Literal { value: Some(schema::literal::Value::Null(true)), })), + validation: None, }, Basic::Void @@ -233,6 +258,7 @@ impl BuilderCtx<'_, '_> { key: Some(Box::new(self.typ(key)?)), value: Some(Box::new(self.typ(value)?)), }))), + validation: None, }); } @@ -345,6 +371,7 @@ impl BuilderCtx<'_, '_> { Ok(schema::Type { typ: Some(styp::Typ::Struct(schema::Struct { fields })), + validation: None, }) } @@ -432,19 +459,35 @@ impl BuilderCtx<'_, '_> { Ok(result) } - fn transform_handshake(&mut self, ep: &Endpoint) -> Result> { - self.transform_request_type(ep, &ep.encoding.raw_handshake_schema) + fn transform_handshake(&mut self, ep: &Endpoint) -> ParseResult> { + let schema = ep.encoding.raw_handshake_schema.as_ref().map(|s| s.get()); + self.transform_request_type(ep, schema).map_err(|err| { + let sp = ep + .encoding + .raw_handshake_schema + .as_ref() + .map_or(ep.range.to_span(), |s| s.span()); + sp.parse_err(err.to_string()) + }) } - fn transform_request(&mut self, ep: &Endpoint) -> Result> { - self.transform_request_type(ep, &ep.encoding.raw_req_schema) + fn transform_request(&mut self, ep: &Endpoint) -> ParseResult> { + let schema = ep.encoding.raw_req_schema.as_ref().map(|s| s.get()); + self.transform_request_type(ep, schema).map_err(|err| { + let sp = ep + .encoding + .raw_req_schema + .as_ref() + .map_or(ep.range.to_span(), |s| s.span()); + sp.parse_err(err.to_string()) + }) } fn transform_request_type( &mut self, ep: &Endpoint, - raw_schema: &Option, + raw_schema: Option<&Type>, ) -> Result> { - let Some(typ) = raw_schema.clone() else { + let Some(typ) = 
raw_schema.cloned() else { return Ok(None); }; @@ -475,6 +518,7 @@ impl BuilderCtx<'_, '_> { return Ok(Some(schema::Type { typ: Some(styp::Typ::Named(named)), + validation: None, })); } else { match drop_empty_or_void(typ) { @@ -513,25 +557,31 @@ fn drop_undefined_union(typ: &Type) -> (Cow<'_, Type>, bool) { (Cow::Borrowed(typ), false) } -pub(super) fn loc_from_range(app_root: &Path, fset: &FileSet, range: Range) -> Result { +pub(super) fn loc_from_range( + app_root: &Path, + fset: &FileSet, + range: Range, +) -> ParseResult { let loc = range.loc(fset)?; let (pkg_path, pkg_name, filename) = match loc.file { - FilePath::Custom(ref str) => anyhow::bail!("unsupported file path in schema: {}", str), + FilePath::Custom(ref str) => { + return Err(range.parse_err(format!("unsupported file path in schema: {}", str))); + } FilePath::Real(buf) => match buf.strip_prefix(app_root) { Ok(rel_path) => { let file_name = rel_path .file_name() .map(|s| s.to_string_lossy().to_string()) - .ok_or(anyhow::anyhow!("missing file name"))?; + .ok_or(range.parse_err("missing file name"))?; let pkg_name = rel_path .parent() .and_then(|p| p.file_name()) .map(|s| s.to_string_lossy().to_string()) - .ok_or(anyhow::anyhow!("missing package name"))?; + .ok_or(range.parse_err("missing package name"))?; let pkg_path = rel_path .parent() .map(|s| s.to_string_lossy().to_string()) - .ok_or(anyhow::anyhow!("missing package path"))?; + .ok_or(range.parse_err("missing package path"))?; (pkg_path, pkg_name, file_name) } Err(_) => { @@ -540,15 +590,14 @@ pub(super) fn loc_from_range(app_root: &Path, fset: &FileSet, range: Range) -> R let file_name = buf .file_name() .map(|s| s.to_string_lossy().to_string()) - .ok_or(anyhow::anyhow!("missing file name: {}", buf.display()))?; + .ok_or(range.parse_err(format!("missing file name: {}", buf.display())))?; let pkg_name = buf .parent() .and_then(|p| p.file_name()) .map(|s| s.to_string_lossy().to_string()) - .ok_or(anyhow::anyhow!( - "missing package name for {}", - 
buf.display() - ))?; + .ok_or( + range.parse_err(format!("missing package name for {}", buf.display())), + )?; let pkg_path = format!("unknown/{}", pkg_name); (pkg_path, pkg_name, file_name) } diff --git a/tsparser/src/parser/fileset.rs b/tsparser/src/parser/fileset.rs index 3c7e30bf7d..868f1cc519 100644 --- a/tsparser/src/parser/fileset.rs +++ b/tsparser/src/parser/fileset.rs @@ -2,11 +2,11 @@ use std::io; use std::path::{Path, PathBuf}; use crate::parser::doc_comments::doc_comments_before; -use anyhow::Result; +use litparser::{ParseResult, ToParseErr}; use serde::Serialize; use swc_common::errors::HANDLER; use swc_common::sync::Lrc; -use swc_common::SyntaxContext; +use swc_common::{Span, Spanned, SyntaxContext}; pub struct FileSet { source_map: Lrc, @@ -169,13 +169,16 @@ impl Range { } /// Report the file name this range is in. - pub fn loc(&self, fset: &FileSet) -> Result { - Ok(match fset.source_map.span_to_lines(self.to_span()) { + pub fn loc(&self, fset: &FileSet) -> ParseResult { + let sp = self.to_span(); + Ok(match fset.source_map.span_to_lines(sp) { Ok(lines) => { let file = match &lines.file.name { swc_common::FileName::Real(p) => FilePath::Real(p.to_owned()), swc_common::FileName::Custom(s) => FilePath::Custom(s.to_owned()), - _ => anyhow::bail!("expected real file name"), + _ => { + return Err(sp.parse_err("expected real file name")); + } }; match (lines.lines.first(), lines.lines.last()) { (Some(first), Some(last)) => Loc { @@ -187,10 +190,14 @@ impl Range { src_col_start: first.start_col.0, src_col_end: last.end_col.0, }, - (_, _) => anyhow::bail!("missing line information"), + (_, _) => { + return Err(sp.parse_err("missing line information")); + } } } - Err(_) => anyhow::bail!("missing file information"), + Err(_) => { + return Err(sp.parse_err("missing file information")); + } }) } @@ -212,6 +219,12 @@ impl Range { } } +impl Spanned for Range { + fn span(&self) -> Span { + self.to_span() + } +} + pub struct Loc { pub file: FilePath, diff --git 
a/tsparser/src/parser/module_loader.rs b/tsparser/src/parser/module_loader.rs index 2fe58e4f9c..44b46e5105 100644 --- a/tsparser/src/parser/module_loader.rs +++ b/tsparser/src/parser/module_loader.rs @@ -352,6 +352,12 @@ impl Module { } } +impl Spanned for Module { + fn span(&self) -> Span { + self.ast.span + } +} + /// imports_from_mod returns the import declarations in the given module. fn imports_from_mod(ast: &ast::Module) -> Vec { (ast.body) diff --git a/tsparser/src/parser/parser.rs b/tsparser/src/parser/parser.rs index b3e12a2c05..d0a054d033 100644 --- a/tsparser/src/parser/parser.rs +++ b/tsparser/src/parser/parser.rs @@ -6,7 +6,7 @@ use std::path::{Path, PathBuf}; use anyhow::Result; use swc_common::errors::{Handler, HANDLER}; use swc_common::sync::Lrc; -use swc_common::{SourceMap, Spanned}; +use swc_common::{SourceMap, Spanned, DUMMY_SP}; use swc_ecma_loader::resolve::Resolve; use swc_ecma_loader::resolvers::node::NodeModulesResolver; use swc_ecma_loader::TargetEnv; @@ -22,6 +22,7 @@ use crate::parser::types::TypeChecker; use crate::parser::usageparser::{Usage, UsageResolver}; use crate::parser::{FilePath, FileSet}; use crate::runtimeresolve::{EncoreRuntimeResolver, TsConfigPathResolver}; +use crate::span_err::ErrReporter; use super::resourceparser::bind::ResourceOrPath; use super::resourceparser::UnresolvedBind; @@ -127,7 +128,7 @@ impl<'a> Parser<'a> { } /// Run the parser. 
- pub fn parse(mut self) -> Result { + pub fn parse(mut self) -> ParseResult { fn ignored(entry: &walkdir::DirEntry) -> bool { match entry.file_name().to_str().unwrap_or_default() { "node_modules" | "encore.gen" | "__tests__" => true, @@ -169,7 +170,13 @@ impl<'a> Parser<'a> { let mut curr_service: Option<(PathBuf, String)> = None; for entry in walker { - let entry = entry?; + let entry = match entry { + Ok(e) => e, + Err(err) => { + HANDLER.with(|h| h.err(&format!("unable to walk filesystem: {}", err))); + continue; + } + }; if entry.file_type().is_dir() { // Is this directory outside the service directory? @@ -211,25 +218,28 @@ impl<'a> Parser<'a> { }; let module_span = module.ast.span(); let service_name = curr_service.as_ref().map(|(_, name)| name.as_str()); - let (resources, binds) = self.pass1.parse(module, service_name)?; + let (resources, binds) = self.pass1.parse(module, service_name); // Is this a service file? If so, make sure there was a service defined. if is_service(&entry) { let found = resources.iter().any(|r| matches!(r, Resource::Service(_))); if !found { - HANDLER.with(|h| { - h.span_err( - module_span.shrink_to_lo(), - "encore.service.ts must define a Service resource", - ); - }); + module_span + .shrink_to_lo() + .err("encore.service.ts must define a Service resource"); } } // Check if we should update the service being parsed. for res in &resources { if let Resource::Service(svc) = res { - let parent = path.parent().expect("have a parent directory"); + let Some(parent) = path.parent() else { + HANDLER.with(|h| { + h.err(&format!("path {:?} does not have a parent directory", path)) + }); + continue; + }; + curr_service = Some((parent.to_path_buf(), svc.name.clone())); break; } @@ -243,10 +253,10 @@ impl<'a> Parser<'a> { }; // Resolve the initial binds. - let mut binds = resolve_binds(&resources, binds)?; + let mut binds = resolve_binds(&resources, binds); // Discover the services we have. 
- let services = discover_services(&self.pc.file_set, &binds)?; + let services = discover_services(&self.pc.file_set, &binds); // Inject additional binds for the generated services. let (additional_resources, additional_binds) = @@ -261,7 +271,7 @@ impl<'a> Parser<'a> { for module in self.pc.loader.modules() { let exprs = resolver.scan_usage_exprs(&module); - let u = resolver.resolve_usage(&module, &exprs)?; + let u = resolver.resolve_usage(&module, &exprs); usages.extend(u); } @@ -274,7 +284,7 @@ impl<'a> Parser<'a> { services, }; - Ok(result) + result } fn inject_generated_service_clients( @@ -310,7 +320,7 @@ impl<'a> Parser<'a> { } } -fn resolve_binds(resources: &[Resource], binds: Vec) -> Result>> { +fn resolve_binds(resources: &[Resource], binds: Vec) -> Vec> { // Collect the resources we support by path. let resource_paths = resources .iter() @@ -330,9 +340,12 @@ fn resolve_binds(resources: &[Resource], binds: Vec) -> Result res, ResourceOrPath::Path(path) => { - let res = resource_paths - .get(&path) - .ok_or_else(|| anyhow::anyhow!("resource not found: {:?}", path))?; + let Some(res) = resource_paths.get(&path) else { + b.range + .map_or(DUMMY_SP, |r| r.to_span()) + .err(&format!("resource not found: {:?}", path)); + continue; + }; (*res).to_owned() } }; @@ -349,7 +362,7 @@ fn resolve_binds(resources: &[Resource], binds: Vec) -> Result PassOneParser<'a> { &mut self, module: Lrc, service_name: Option<&str>, - ) -> Result<(Vec, Vec)> { + ) -> (Vec, Vec) { let parsers = self.registry.interested_parsers(&module); let mut ctx = ResourceParseContext::new( @@ -89,7 +88,7 @@ impl<'a> PassOneParser<'a> { for parser in parsers { let num_resources = ctx.resources.len(); - (parser.run)(&mut ctx)?; + (parser.run)(&mut ctx); // Look at any new resources to see if we have a new service. // If so, update our ctx so that later parsers have up-to-date information. 
@@ -116,7 +115,7 @@ impl<'a> PassOneParser<'a> { }); } - Ok((ctx.resources, binds)) + (ctx.resources, binds) } } diff --git a/tsparser/src/parser/resourceparser/resource_parser.rs b/tsparser/src/parser/resourceparser/resource_parser.rs index 05dabcb9ca..3551e88568 100644 --- a/tsparser/src/parser/resourceparser/resource_parser.rs +++ b/tsparser/src/parser/resourceparser/resource_parser.rs @@ -14,7 +14,7 @@ pub struct ResourceParser { pub interesting_pkgs: &'static [PkgPath<'static>], - pub run: fn(&mut ResourceParseContext) -> anyhow::Result<()>, + pub run: fn(&mut ResourceParseContext), } impl PartialEq for ResourceParser { diff --git a/tsparser/src/parser/resources/apis/api.rs b/tsparser/src/parser/resources/apis/api.rs index 7c0062ff37..aff14d34ee 100644 --- a/tsparser/src/parser/resources/apis/api.rs +++ b/tsparser/src/parser/resources/apis/api.rs @@ -1,13 +1,14 @@ use std::path::PathBuf; use std::str::FromStr; -use anyhow::{anyhow, Result}; use swc_common::errors::HANDLER; use swc_common::sync::Lrc; -use swc_common::Spanned; +use swc_common::{Span, Spanned}; use swc_ecma_ast::{self as ast, FnExpr}; -use litparser::{LitParser, LocalRelPath, Nullable, Sp}; +use litparser::{ + report_and_continue, LitParser, LocalRelPath, Nullable, ParseResult, Sp, ToParseErr, +}; use litparser_derive::LitParser; use crate::parser::module_loader::Module; @@ -130,9 +131,19 @@ impl Method { } } +#[derive(Debug, Clone)] +pub struct InvalidMethodError; + +impl std::fmt::Display for InvalidMethodError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "invalid method") + } +} + impl FromStr for Method { - type Err = anyhow::Error; - fn from_str(s: &str) -> Result { + type Err = InvalidMethodError; + + fn from_str(s: &str) -> Result { Ok(match s { "CONNECT" => Self::Connect, "DELETE" => Self::Delete, @@ -143,7 +154,7 @@ impl FromStr for Method { "POST" => Self::Post, "PUT" => Self::Put, "TRACE" => Self::Trace, - _ => anyhow::bail!("invalid method: {}", 
s), + _ => return Err(InvalidMethodError), }) } } @@ -151,10 +162,10 @@ impl FromStr for Method { #[derive(Debug, Clone)] pub struct StaticAssets { /// Files to serve. - pub dir: PathBuf, + pub dir: Sp, /// File to serve when the path is not found. - pub not_found: Option, + pub not_found: Option>, } pub const ENDPOINT_PARSER: ResourceParser = ResourceParser { @@ -165,7 +176,7 @@ pub const ENDPOINT_PARSER: ResourceParser = ResourceParser { let module = pass.module.clone(); let service_name = match &pass.service_name { - Some(name) => name.to_string(), + Some(name) => Some(name.to_string()), None => { // TODO handle this in a better way. match &module.file_path { @@ -173,13 +184,8 @@ pub const ENDPOINT_PARSER: ResourceParser = ResourceParser { .parent() .and_then(|p| p.file_name()) .and_then(|s| s.to_str()) - .map(|s| s.to_string()) - .ok_or(anyhow::anyhow!( - "unable to determine service name for endpoint" - ))?, - FilePath::Custom(ref str) => { - anyhow::bail!("unsupported file path for service: {}", str) - } + .map(|s| s.to_string()), + FilePath::Custom(_) => None, } } }; @@ -187,21 +193,36 @@ pub const ENDPOINT_PARSER: ResourceParser = ResourceParser { let names = TrackedNames::new(&[("encore.dev/api", "api")]); for r in iter_references::(&module, &names) { - let r = r?; - let path_span = r - .config - .path - .as_ref() - .map_or(r.range.to_span(), |p| p.span()); + let r = report_and_continue!(r); + let Some(service_name) = service_name.as_ref() else { + module.err("unable to determine service name for file"); + continue; + }; - let path_str = r.config.path.unwrap_or_else(|| { - Sp::with_dummy(format!("/{}.{}", &service_name, r.endpoint_name)) - }); + let (config_span, cfg) = r.config.split(); + let path_span = cfg.path.as_ref().map_or(config_span, |p| p.span()); + let path_str = cfg + .path + .as_deref() + .cloned() + .unwrap_or_else(|| format!("/{}.{}", &service_name, r.endpoint_name)); - let path = match Path::parse(&path_str, Default::default()) { + let path 
= match Path::parse(path_span, &path_str, Default::default()) { Ok(path) => path, Err(err) => { - path_span.err(&err.to_string()); + if cfg.path.is_some() { + err.report(); + } else { + // We don't have an explicit path, so add a note to the error. + HANDLER.with(|h| { + h.struct_span_err(err.span, &err.error.to_string()) + .span_note( + config_span, + &format!("no path provided, so defaulting to {}", path_str), + ) + .emit(); + }); + } continue; } }; @@ -210,7 +231,7 @@ pub const ENDPOINT_PARSER: ResourceParser = ResourceParser { .type_checker .resolve_obj(pass.module.clone(), &ast::Expr::Ident(r.bind_name.clone())); - let methods = r.config.method.unwrap_or(Methods::Some(vec![Method::Post])); + let methods = cfg.method.unwrap_or(Methods::Some(vec![Method::Post])); let raw = matches!(r.kind, EndpointKind::Raw); @@ -229,29 +250,26 @@ pub const ENDPOINT_PARSER: ResourceParser = ResourceParser { Some(t) => Some(pass.type_checker.resolve_type(module.clone(), &t)), }; - match describe_endpoint( + report_and_continue!(describe_endpoint( + r.range.to_span(), pass.type_checker, methods, path, request, response, false, - ) { - Ok(encoding) => encoding, - Err(err) => { - HANDLER.with(|handler| handler.span_err(r.range, &err.to_string())); - continue; - } - } + )) } EndpointKind::Raw => { - match describe_endpoint(pass.type_checker, methods, path, None, None, true) { - Ok(encoding) => encoding, - Err(err) => { - HANDLER.with(|handler| handler.span_err(r.range, &err.to_string())); - continue; - } - } + report_and_continue!(describe_endpoint( + r.range.to_span(), + pass.type_checker, + methods, + path, + None, + None, + true, + )) } EndpointKind::TypedStream { handshake, @@ -273,27 +291,25 @@ pub const ENDPOINT_PARSER: ResourceParser = ResourceParser { let handshake = handshake.map(|t| pass.type_checker.resolve_type(module.clone(), &t)); - match describe_stream_endpoint( + report_and_continue!(describe_stream_endpoint( + r.range.to_span(), pass.type_checker, methods, path, 
request, response, handshake, - ) { - Ok(encoding) => encoding, - Err(err) => { - HANDLER.with(|handler| handler.span_err(r.range, &err.to_string())); - continue; - } - } + )) } EndpointKind::StaticAssets { dir, not_found } => { // Support HEAD and GET for static assets. let methods = Methods::Some(vec![Method::Head, Method::Get]); let FilePath::Real(module_file_path) = &module.file_path else { - anyhow::bail!("cannot use custom file path for static assets"); + module + .ast + .err("cannot use custom file path for static assets"); + continue; }; // Ensure the path has at most one dynamic segment, at the end. @@ -315,18 +331,17 @@ pub const ENDPOINT_PARSER: ResourceParser = ResourceParser { } } - let assets_dir = module_file_path.parent().unwrap().join(&dir.0); - if let Err(err) = std::fs::read_dir(&assets_dir) { + let assets_dir = dir.with(module_file_path.parent().unwrap().join(&dir.buf)); + if let Err(err) = std::fs::read_dir(assets_dir.as_path()) { dir.err(&format!("unable to read static assets directory: {}", err)); } // Ensure the not_found file exists. - let not_found_path = not_found - .as_ref() - .map(|p| module_file_path.parent().unwrap().join(&p.0)); + let not_found_path = + not_found.map(|p| p.with(module_file_path.parent().unwrap().join(&p.buf))); if let Some(not_found_path) = ¬_found_path { if !not_found_path.is_file() { - not_found.err("file does not exist"); + not_found_path.err("file does not exist"); } } @@ -335,12 +350,12 @@ pub const ENDPOINT_PARSER: ResourceParser = ResourceParser { not_found: not_found_path, }); - describe_static_assets(methods, path) + describe_static_assets(r.range.to_span(), methods, path) } }; // Compute the body limit. Null means no limit. No value means 2MiB. 
- let body_limit: Option = match r.config.bodyLimit { + let body_limit: Option = match cfg.bodyLimit { Some(Nullable::Present(val)) => Some(val), Some(Nullable::Null) => None, None => Some(2 * 1024 * 1024), @@ -351,8 +366,8 @@ pub const ENDPOINT_PARSER: ResourceParser = ResourceParser { name: r.endpoint_name, service_name: service_name.clone(), doc: r.doc_comment, - expose: r.config.expose.unwrap_or(false), - require_auth: r.config.auth.unwrap_or(false), + expose: cfg.expose.unwrap_or(false), + require_auth: cfg.auth.unwrap_or(false), raw, streaming_request, streaming_response, @@ -370,7 +385,6 @@ pub const ENDPOINT_PARSER: ResourceParser = ResourceParser { ident: Some(r.bind_name), }); } - Ok(()) }, }; @@ -389,16 +403,6 @@ pub struct ReferenceEndpointUsage { pub fn resolve_endpoint_usage(_data: &ResolveUsageData, _endpoint: Lrc) -> Option { // Endpoints are just normal functions in TS, so no usage to resolve. None - // Ok(match &data.expr.kind { - // UsageExprKind::Callee(_) => { - // // Considered just a normal function call. 
- // }, - // UsageExprKind::Other(_other) => Usage::ReferenceEndpoint(ReferenceEndpointUsage { - // range: data.expr.range, - // endpoint, - // }), - // _ => anyhow::bail!("invalid endpoint usage"), - // }) } #[derive(Debug)] @@ -407,10 +411,16 @@ struct APIEndpointLiteral { pub doc_comment: Option, pub endpoint_name: String, pub bind_name: ast::Ident, - pub config: EndpointConfig, + pub config: Sp, pub kind: EndpointKind, } +impl Spanned for APIEndpointLiteral { + fn span(&self) -> Span { + self.range.to_span() + } +} + #[derive(Debug)] enum ParameterType { Stream(ast::TsType), @@ -468,7 +478,7 @@ impl ReferenceParser for APIEndpointLiteral { fn parse_resource_reference( module: &Module, path: &swc_ecma_visit::AstNodePath, - ) -> Result> { + ) -> ParseResult> { for node in path.iter().rev() { if let swc_ecma_visit::AstParentNodeRef::CallExpr( expr, @@ -477,19 +487,20 @@ impl ReferenceParser for APIEndpointLiteral { { let doc_comment = module.preceding_comments(expr.span.lo.into()); let Some(bind_name) = extract_bind_name(path)? else { - expr.err("API endpoint must be bound to an exported variable"); - continue; + return Err( + expr.parse_err("API endpoint must be bound to an exported variable") + ); }; let Some(config) = expr.args.first() else { - expr.err("API endpoint must have a config object as its first argument"); - continue; + return Err(expr.parse_err( + "API endpoint must have a config object as its first argument", + )); }; - let cfg = EndpointConfig::parse_lit(config.expr.as_ref())?; + let cfg = >::parse_lit(config.expr.as_ref())?; let ast::Callee::Expr(callee) = &expr.callee else { - expr.callee.err("invalid api definition expression"); - continue; + return Err(expr.callee.parse_err("invalid api definition expression")); }; // Determine what kind of endpoint it is. 
@@ -497,10 +508,9 @@ impl ReferenceParser for APIEndpointLiteral { ast::Expr::Member(member) if member.prop.is_ident_with("raw") => { // Raw endpoint let Some(_) = &expr.args.get(1) else { - expr.args[0].span_hi().err( + return Err(expr.args[0].span_hi().parse_err( "API endpoint must have a handler function as its second argument", - ); - continue; + )); }; Self { @@ -516,15 +526,15 @@ impl ReferenceParser for APIEndpointLiteral { ast::Expr::Member(member) if member.prop.is_ident_with("streamInOut") => { // Bidirectional stream let Some(handler) = &expr.args.get(1) else { - expr.args[0].span_hi().err( + return Err(expr.args[0].span_hi().parse_err( "API endpoint must have a handler function as its second argument", - ); - continue; + )); }; let Some(type_params) = expr.type_args.as_deref() else { - expr.err("missing type parameters in call to streamInOut"); - continue; + return Err( + expr.parse_err("missing type parameters in call to streamInOut") + ); }; let (has_handshake, _return_type) = @@ -534,14 +544,14 @@ impl ReferenceParser for APIEndpointLiteral { let expected_count = if has_handshake { 3 } else { 2 }; if type_params_count != expected_count { - type_params.err(&format!("wrong number of type parameters, expected {expected_count}, found {type_params_count}")); - continue; + return Err(type_params.parse_err(format!("wrong number of type parameters, expected {expected_count}, found {type_params_count}"))); } let handshake = has_handshake .then(|| { - extract_type_param(Some(type_params), 0) - .ok_or_else(|| anyhow!("missing type for handshake")) + extract_type_param(Some(type_params), 0).ok_or_else(|| { + type_params.parse_err("missing type for stream handshake") + }) }) .transpose()?; @@ -549,16 +559,14 @@ impl ReferenceParser for APIEndpointLiteral { Some(type_params), if has_handshake { 1 } else { 0 }, ) else { - type_params.err("missing request type parameter"); - continue; + return Err(type_params.parse_err("missing request type parameter")); }; let 
Some(response) = extract_type_param( Some(type_params), if has_handshake { 2 } else { 1 }, ) else { - type_params.err("missing response type parameter"); - continue; + return Err(type_params.parse_err("missing response type parameter")); }; Self { @@ -577,15 +585,15 @@ impl ReferenceParser for APIEndpointLiteral { ast::Expr::Member(member) if member.prop.is_ident_with("streamIn") => { // Incoming stream let Some(handler) = &expr.args.get(1) else { - expr.args[0].span_hi().err( + return Err(expr.args[0].span_hi().parse_err( "API endpoint must have a handler function as its second argument", - ); - continue; + )); }; let Some(type_params) = expr.type_args.as_deref() else { - expr.err("missing type parameters in call to streamIn"); - continue; + return Err( + expr.parse_err("missing type parameters in call to streamIn") + ); }; let (has_handshake, return_type) = @@ -595,14 +603,14 @@ impl ReferenceParser for APIEndpointLiteral { let expected_count = if has_handshake { [2, 3] } else { [1, 2] }; if !expected_count.contains(&type_params_count) { - type_params.err(&format!("wrong number of type parameters, expected one of {expected_count:?}, found {type_params_count}")); - continue; + return Err(type_params.parse_err(format!("wrong number of type parameters, expected one of {expected_count:?}, found {type_params_count}"))); } let handshake = has_handshake .then(|| { - extract_type_param(Some(type_params), 0) - .ok_or_else(|| anyhow!("missing type for handshake")) + extract_type_param(Some(type_params), 0).ok_or_else(|| { + type_params.parse_err("missing type for handshake") + }) }) .transpose()?; @@ -610,8 +618,7 @@ impl ReferenceParser for APIEndpointLiteral { Some(type_params), if has_handshake { 1 } else { 0 }, ) else { - type_params.err("missing request type parameter"); - continue; + return Err(type_params.parse_err("missing request type parameter")); }; let response = extract_type_param( @@ -643,15 +650,15 @@ impl ReferenceParser for APIEndpointLiteral { 
ast::Expr::Member(member) if member.prop.is_ident_with("streamOut") => { // Outgoing stream let Some(handler) = &expr.args.get(1) else { - expr.args[0].span_hi().err( + return Err(expr.args[0].span_hi().parse_err( "API endpoint must have a handler function as its second argument", - ); - continue; + )); }; let Some(type_params) = expr.type_args.as_deref() else { - expr.err("missing type parameters in call to streamOut"); - continue; + return Err( + expr.parse_err("missing type parameters in call to streamOut") + ); }; let (has_handshake, _return_type) = @@ -661,14 +668,15 @@ impl ReferenceParser for APIEndpointLiteral { let expected_count = if has_handshake { 2 } else { 1 }; if type_params_count != expected_count { - type_params.err(&format!("wrong number of type parameters, expected {expected_count}, found {type_params_count}")); - continue; + return Err(type_params.parse_err(format!("wrong number of type parameters, expected {expected_count}, found {type_params_count}"))); } let handshake = if has_handshake { let t = extract_type_param(Some(type_params), 0); if t.is_none() { - type_params.err("missing type parameter for handshake"); + return Err( + type_params.parse_err("missing type parameter for handshake") + ); } t } else { @@ -679,8 +687,9 @@ impl ReferenceParser for APIEndpointLiteral { Some(type_params), if has_handshake { 1 } else { 0 }, ) else { - type_params.err("missing type parameter for response"); - continue; + return Err( + type_params.parse_err("missing type parameter for response") + ); }; Self { @@ -700,10 +709,9 @@ impl ReferenceParser for APIEndpointLiteral { ast::Expr::Member(member) if member.prop.is_ident_with("static") => { // Static assets let Some(dir) = cfg.dir.clone() else { - config + return Err(config .expr - .err("static assets must have the 'dir' field set"); - continue; + .parse_err("static assets must have the 'dir' field set")); }; let not_found = cfg.notFound.clone(); @@ -720,10 +728,9 @@ impl ReferenceParser for 
APIEndpointLiteral { _ => { // Regular endpoint let Some(handler) = &expr.args.get(1) else { - expr.args[0] + return Err(expr.args[0] .span_hi() - .err("API endpoint must have a handler function"); - continue; + .parse_err("API endpoint must have a handler function")); }; let (mut req, mut resp) = parse_endpoint_signature(&handler.expr)?; @@ -754,7 +761,7 @@ impl ReferenceParser for APIEndpointLiteral { } } -fn parse_stream_endpoint_signature(expr: &ast::Expr) -> Result<(bool, Option<&ast::TsType>)> { +fn parse_stream_endpoint_signature(expr: &ast::Expr) -> ParseResult<(bool, Option<&ast::TsType>)> { let (has_handshake_param, type_params, return_type) = match expr { ast::Expr::Fn(FnExpr { function, .. }) => ( function.params.len() == 2, @@ -770,7 +777,7 @@ fn parse_stream_endpoint_signature(expr: &ast::Expr) -> Result<(bool, Option<&as }; if let Some(type_params) = type_params { - type_params.err("stream endpoint handler cannot have type parameters"); + return Err(type_params.parse_err("stream endpoint handler cannot have type parameters")); } let return_type = return_type.map(|t| t.type_ann.as_ref()); @@ -780,7 +787,7 @@ fn parse_stream_endpoint_signature(expr: &ast::Expr) -> Result<(bool, Option<&as fn parse_endpoint_signature( expr: &ast::Expr, -) -> Result<(Option<&ast::TsType>, Option<&ast::TsType>)> { +) -> ParseResult<(Option<&ast::TsType>, Option<&ast::TsType>)> { let (req_param, type_params, return_type) = match expr { ast::Expr::Fn(func) => ( func.function.params.first().map(|p| &p.pat), @@ -796,7 +803,7 @@ fn parse_endpoint_signature( }; if let Some(type_params) = type_params { - type_params.err("endpoint handler cannot have type parameters"); + return Err(type_params.parse_err("endpoint handler cannot have type parameters")); } let req_type = match req_param { @@ -818,7 +825,7 @@ fn parse_endpoint_signature( } impl LitParser for Methods { - fn parse_lit(expr: &ast::Expr) -> Result { + fn parse_lit(expr: &ast::Expr) -> ParseResult { Ok(match expr { 
ast::Expr::Lit(ast::Lit::Str(s)) => { if s.value.as_ref() == "*" { @@ -827,8 +834,7 @@ impl LitParser for Methods { match Method::from_str(s.value.as_ref()) { Ok(m) => Self::Some(vec![m]), Err(err) => { - s.err(&format!("invalid method: {err}")); - Self::Some(vec![Method::Get]) + return Err(s.parse_err(format!("invalid method: {err}"))); } } } @@ -838,12 +844,16 @@ impl LitParser for Methods { for ast::ExprOrSpread { expr, .. } in arr.elems.iter().flatten() { if let ast::Expr::Lit(ast::Lit::Str(s)) = expr.as_ref() { if s.value.as_ref() == "*" { - if arr.elems.len() > 1 { - arr.err("invalid methods: cannot mix * and other methods"); - } - return Ok(Self::All); + return if arr.elems.len() > 1 { + Err(arr + .parse_err("invalid methods: cannot mix * and other methods")) + } else { + Ok(Self::All) + }; } - methods.push(Method::from_str(s.value.as_ref())?); + let m = Method::from_str(s.value.as_ref()) + .map_err(|err| s.parse_err(err.to_string()))?; + methods.push(m); } } methods.sort(); @@ -851,8 +861,7 @@ impl LitParser for Methods { Self::Some(methods) } _ => { - expr.err("invalid methods: must be string or array of strings"); - Self::Some(vec![Method::Get]) + return Err(expr.parse_err("invalid methods: must be string or array of strings")); } }) } diff --git a/tsparser/src/parser/resources/apis/authhandler.rs b/tsparser/src/parser/resources/apis/authhandler.rs index ee525ff51d..1fa334b9ca 100644 --- a/tsparser/src/parser/resources/apis/authhandler.rs +++ b/tsparser/src/parser/resources/apis/authhandler.rs @@ -1,5 +1,4 @@ -use anyhow::Result; -use swc_common::errors::HANDLER; +use litparser::{report_and_continue, ParseResult, ToParseErr}; use swc_common::sync::Lrc; use swc_ecma_ast as ast; use swc_ecma_ast::TsTypeParamInstantiation; @@ -14,6 +13,7 @@ use crate::parser::resources::parseutil::{ }; use crate::parser::resources::Resource; use crate::parser::{FilePath, Range}; +use crate::span_err::ErrReporter; use super::encoding::iface_fields; @@ -33,24 +33,30 @@ pub 
const AUTHHANDLER_PARSER: ResourceParser = ResourceParser { run: |pass| { let module = pass.module.clone(); - // TODO handle this in a better way. - let service_name = match &module.file_path { - FilePath::Real(ref buf) => buf - .parent() - .and_then(|p| p.file_name()) - .and_then(|s| s.to_str()), - FilePath::Custom(ref str) => { - anyhow::bail!("unsupported file path for service: {}", str) + let service_name = match &pass.service_name { + Some(name) => Some(name.to_string()), + None => { + // TODO handle this in a better way. + match &module.file_path { + FilePath::Real(ref buf) => buf + .parent() + .and_then(|p| p.file_name()) + .and_then(|s| s.to_str()) + .map(|s| s.to_string()), + FilePath::Custom(_) => None, + } } }; - let Some(service_name) = service_name else { - return Ok(()); - }; let names = TrackedNames::new(&[("encore.dev/auth", "authHandler")]); - for r in iter_references::(&module, &names) { - let r = r?; + 'RefLoop: for r in iter_references::(&module, &names) { + let r = report_and_continue!(r); + let Some(service_name) = service_name.as_ref() else { + module.err("unable to determine service name for file"); + continue; + }; + let request = pass.type_checker.resolve_type(module.clone(), &r.request); let response = pass.type_checker.resolve_type(module.clone(), &r.response); @@ -64,7 +70,10 @@ pub const AUTHHANDLER_PARSER: ResourceParser = ResourceParser { for (_, v) in fields { if !v.is_custom() { - HANDLER.with(|handler| handler.span_err(v.range(), "authHandler parameter type can only consist of Query and Header fields")); + v.range().to_span().err( + "authHandler parameter type can only consist of Query and Header fields", + ); + continue 'RefLoop; } } @@ -91,7 +100,6 @@ pub const AUTHHANDLER_PARSER: ResourceParser = ResourceParser { ident: Some(r.bind_name), }); } - Ok(()) }, }; @@ -109,7 +117,7 @@ impl ReferenceParser for AuthHandlerLiteral { fn parse_resource_reference( module: &Module, path: &swc_ecma_visit::AstNodePath, - ) -> Result> { + ) -> 
ParseResult> { for node in path.iter().rev() { if let swc_ecma_visit::AstParentNodeRef::CallExpr( expr, @@ -118,26 +126,31 @@ impl ReferenceParser for AuthHandlerLiteral { { let doc_comment = module.preceding_comments(expr.span.lo.into()); let Some(bind_name) = extract_bind_name(path)? else { - anyhow::bail!("Auth Handler must be bound to a variable") + return Err(expr.parse_err("auth handler must be bound to a variable")); }; let Some(handler) = &expr.args.first() else { - anyhow::bail!("Auth Handler must have a handler function") + return Err(expr.parse_err( + "auth handler must have a handler function as its first argument", + )); }; let (mut req, mut resp) = parse_auth_handler_signature(&handler.expr)?; if req.is_none() { - req = extract_type_param(expr.type_args.as_deref(), 0)?; + req = extract_type_param(expr.type_args.as_deref(), 0); } if resp.is_none() { - resp = extract_type_param(expr.type_args.as_deref(), 1)?; + resp = extract_type_param(expr.type_args.as_deref(), 1); } let Some(req) = req else { - anyhow::bail!("Auth Handler must have an explicitly defined parameter type"); + return Err(expr + .parse_err("auth handler must have an explicitly defined parameter type")); }; let Some(resp) = resp else { - anyhow::bail!("Auth Handler must have an explicitly defined result type"); + return Err( + expr.parse_err("auth handler must have an explicitly defined result type") + ); }; return Ok(Some(Self { @@ -156,7 +169,7 @@ impl ReferenceParser for AuthHandlerLiteral { fn parse_auth_handler_signature( expr: &ast::Expr, -) -> Result<(Option<&ast::TsType>, Option<&ast::TsType>)> { +) -> ParseResult<(Option<&ast::TsType>, Option<&ast::TsType>)> { let (req_param, type_params, return_type) = match expr { ast::Expr::Fn(func) => ( func.function.params.first().map(|p| &p.pat), @@ -171,8 +184,8 @@ fn parse_auth_handler_signature( _ => return Ok((None, None)), }; - if type_params.is_some() { - anyhow::bail!("auth handler cannot have type parameters"); + if let 
Some(type_params) = &type_params { + return Err(type_params.parse_err("auth handler cannot have type parameters")); } let req_type = match req_param { @@ -196,12 +209,12 @@ fn parse_auth_handler_signature( fn extract_type_param( params: Option<&TsTypeParamInstantiation>, idx: usize, -) -> Result> { +) -> Option<&ast::TsType> { let Some(params) = params else { - return Ok(None); + return None; }; let Some(param) = params.params.get(idx) else { - return Ok(None); + return None; }; - Ok(Some(param.as_ref())) + Some(param.as_ref()) } diff --git a/tsparser/src/parser/resources/apis/encoding.rs b/tsparser/src/parser/resources/apis/encoding.rs index 413e1308c1..a989ebd53e 100644 --- a/tsparser/src/parser/resources/apis/encoding.rs +++ b/tsparser/src/parser/resources/apis/encoding.rs @@ -1,7 +1,7 @@ use std::collections::HashMap; -use anyhow::{bail, Context}; -use litparser::Sp; +use litparser::{ParseResult, Sp, ToParseErr}; +use swc_common::Span; use thiserror::Error; use crate::parser::resources::apis::api::{Method, Methods}; @@ -12,11 +12,14 @@ use crate::parser::types::{ Type, TypeChecker, }; use crate::parser::Range; -use crate::span_err::{ErrReporter, ErrorWithSpanExt, SpErr}; +use crate::span_err::{ErrorWithSpanExt, SpErr}; /// Describes how an API endpoint can be encoded on the wire. #[derive(Debug, Clone)] pub struct EndpointEncoding { + /// The endpoint's definition span + pub span: Span, + /// The endpoint's API path. pub path: Path, @@ -33,9 +36,9 @@ pub struct EndpointEncoding { pub handshake: Option, /// The raw request and schemas, from the source code. 
- pub raw_handshake_schema: Option, - pub raw_req_schema: Option, - pub raw_resp_schema: Option, + pub raw_handshake_schema: Option>, + pub raw_req_schema: Option>, + pub raw_resp_schema: Option>, } impl EndpointEncoding { @@ -173,13 +176,14 @@ impl ResponseEncoding { } pub fn describe_stream_endpoint( + def_span: Span, tc: &TypeChecker, methods: Methods, path: Path, req: Option>, resp: Option>, handshake: Option>, -) -> anyhow::Result { +) -> ParseResult { let resp = if let Some(resp) = resp { let (span, resp) = resp.split(); drop_empty_or_void(unwrap_promise(tc.state(), &resp).clone()).map(|t| Sp::new(span, t)) @@ -190,6 +194,7 @@ pub fn describe_stream_endpoint( let default_method = default_method(&methods); let (handshake_enc, _req_schema) = describe_req( + def_span, tc, &Methods::Some(vec![Method::Get]), Some(&path), @@ -200,47 +205,46 @@ pub fn describe_stream_endpoint( let handshake_enc = match handshake_enc.as_slice() { [] => None, [ref enc] => Some(enc.clone()), - _ => bail!("unexpected handshake encoding"), + _ => return Err(def_span.parse_err("unexpected handshake encoding")), }; let (req_enc, _req_schema) = if handshake_enc.is_some() { - describe_req(tc, &methods, None, &req, false)? + describe_req(def_span, tc, &methods, None, &req, false)? } else { - describe_req(tc, &methods, Some(&path), &req, false)? + describe_req(def_span, tc, &methods, Some(&path), &req, false)? }; let (resp_enc, _resp_schema) = describe_resp(tc, &methods, &resp)?; let path = if let Some(ref enc) = handshake_enc { - rewrite_path_types(enc, path, false).context("parse path param types")? + rewrite_path_types(enc, path, false)? 
} else { path }; - let raw_handshake_schema = handshake.map(|sp| sp.take()); - let raw_req_schema = req.map(|sp| sp.take()); - let raw_resp_schema = resp.map(|sp| sp.take()); - Ok(EndpointEncoding { + span: def_span, path, methods, default_method, req: req_enc, resp: resp_enc, handshake: handshake_enc, - raw_handshake_schema, - raw_req_schema, - raw_resp_schema, + raw_handshake_schema: handshake, + raw_req_schema: req, + raw_resp_schema: resp, }) } + pub fn describe_endpoint( + def_span: Span, tc: &TypeChecker, methods: Methods, path: Path, req: Option>, resp: Option>, raw: bool, -) -> anyhow::Result { +) -> ParseResult { let resp = if let Some(resp) = resp { let (span, resp) = resp.split(); drop_empty_or_void(unwrap_promise(tc.state(), &resp).clone()).map(|t| Sp::new(span, t)) @@ -250,15 +254,13 @@ pub fn describe_endpoint( let default_method = default_method(&methods); - let (req_enc, _req_schema) = describe_req(tc, &methods, Some(&path), &req, raw)?; + let (req_enc, _req_schema) = describe_req(def_span, tc, &methods, Some(&path), &req, raw)?; let (resp_enc, _resp_schema) = describe_resp(tc, &methods, &resp)?; - let path = rewrite_path_types(&req_enc[0], path, raw).context("parse path param types")?; - - let raw_req_schema = req.map(|sp| sp.take()); - let raw_resp_schema = resp.map(|sp| sp.take()); + let path = rewrite_path_types(&req_enc[0], path, raw)?; Ok(EndpointEncoding { + span: def_span, path, methods, default_method, @@ -266,13 +268,14 @@ pub fn describe_endpoint( resp: resp_enc, handshake: None, raw_handshake_schema: None, - raw_req_schema, - raw_resp_schema, + raw_req_schema: req, + raw_resp_schema: resp, }) } -pub fn describe_static_assets(methods: Methods, path: Path) -> EndpointEncoding { +pub fn describe_static_assets(def_span: Span, methods: Methods, path: Path) -> EndpointEncoding { EndpointEncoding { + span: def_span, path, methods: methods.clone(), default_method: Method::Get, @@ -289,12 +292,13 @@ pub fn describe_static_assets(methods: Methods, 
path: Path) -> EndpointEncoding } fn describe_req( + def_span: Span, tc: &TypeChecker, methods: &Methods, path: Option<&Path>, req_schema: &Option>, raw: bool, -) -> anyhow::Result<(Vec, Option)> { +) -> ParseResult<(Vec, Option)> { let Some(req_schema) = req_schema else { // We don't have any request schema. This is valid if and only if // we have no path parameters or it's a raw endpoint. @@ -307,11 +311,14 @@ fn describe_req( None, )); } else { - anyhow::bail!("request schema must be defined when having path parameters"); + return Err( + def_span.parse_err("request schema must be defined when having path parameters") + ); } }; - let mut fields = iface_fields(tc, req_schema)?; + let mut fields = + iface_fields(tc, req_schema).map_err(|err| err.span.parse_err(err.error.to_string()))?; let path_params = if let Some(path) = path { extract_path_params(path, &mut fields)? } else { @@ -334,7 +341,7 @@ fn describe_req( for (loc, methods) in split_by_loc(methods) { let mut params = path_params.clone(); - params.extend(extract_loc_params(&fields, loc)); + params.extend(extract_loc_params(&fields, loc)?); encodings.push(RequestEncoding { methods: Methods::Some(methods), params, @@ -348,13 +355,14 @@ fn describe_resp( tc: &TypeChecker, _methods: &Methods, resp_schema: &Option>, -) -> anyhow::Result<(ResponseEncoding, Option)> { +) -> ParseResult<(ResponseEncoding, Option)> { let Some(resp_schema) = resp_schema else { return Ok((ResponseEncoding { params: vec![] }, None)); }; - let fields = iface_fields(tc, resp_schema)?; - let params = extract_loc_params(&fields, ParamLocation::Body); + let fields = + iface_fields(tc, resp_schema).map_err(|err| err.span.parse_err(err.error.to_string()))?; + let params = extract_loc_params(&fields, ParamLocation::Body)?; let fields = if fields.is_empty() { None @@ -476,12 +484,12 @@ pub(crate) fn iface_fields<'a>( } } -fn extract_path_params(path: &Path, fields: &mut FieldMap) -> anyhow::Result> { +fn extract_path_params(path: &Path, fields: 
&mut FieldMap) -> ParseResult> { let mut params = Vec::new(); for (index, seg) in path.dynamic_segments().enumerate() { let name = seg.lit_or_name(); let Some(f) = fields.remove(name) else { - anyhow::bail!("path parameter {:?} not found in request schema", name); + return Err(seg.parse_err("path parameter not found in request schema")); }; params.push(Param { name: name.to_string(), @@ -495,7 +503,7 @@ fn extract_path_params(path: &Path, fields: &mut FieldMap) -> anyhow::Result Vec { +fn extract_loc_params(fields: &FieldMap, default_loc: ParamLocation) -> ParseResult> { let mut params = Vec::new(); for f in fields.values() { let name = f.name.clone(); @@ -517,7 +525,12 @@ fn extract_loc_params(fields: &FieldMap, default_loc: ParamLocation) -> Vec panic!("path params are not supported as a default loc"), + ParamLocation::Path => { + return Err(f + .range + .to_span() + .parse_err("path params are not supported as a default loc")) + } }; params.push(Param { @@ -528,10 +541,10 @@ fn extract_loc_params(fields: &FieldMap, default_loc: ParamLocation) -> Vec anyhow::Result { +fn rewrite_path_types(req: &RequestEncoding, path: Path, raw: bool) -> ParseResult { use crate::parser::respath::{Segment, ValueType}; // Get the path params into a map, keyed by name. 
let path_params = req @@ -540,31 +553,33 @@ fn rewrite_path_types(req: &RequestEncoding, path: Path, raw: bool) -> anyhow::R .collect::>(); let typ_to_value_type = |param: &Param| match ¶m.typ { - Type::Basic(Basic::String) => ValueType::String, - Type::Basic(Basic::Boolean) => ValueType::Bool, - Type::Basic(Basic::Number | Basic::BigInt) => ValueType::Int, + Type::Basic(Basic::String) => Ok(ValueType::String), + Type::Basic(Basic::Boolean) => Ok(ValueType::Bool), + Type::Basic(Basic::Number | Basic::BigInt) => Ok(ValueType::Int), typ => { - param + return Err(param .range .to_span() - .err(&format!("unsupported path parameter type: {:?}", typ)); - ValueType::String + .parse_err(format!("unsupported path parameter type: {:?}", typ))); } }; let mut segments = Vec::with_capacity(path.segments.len()); for seg in path.segments.into_iter() { + let (seg_span, seg) = seg.split(); let seg = match seg { Segment::Param { name, .. } => { // Get the value type of the path parameter. let value_type = match path_params.get(&name) { - Some(param) => typ_to_value_type(param), + Some(param) => typ_to_value_type(param)?, None => { // Raw endpoints assume path params are strings. if raw { ValueType::String } else { - anyhow::bail!("path param {:?} not found in request schema", name); + return Err( + seg_span.parse_err("path parameter not found in request schema") + ); } } }; @@ -573,10 +588,13 @@ fn rewrite_path_types(req: &RequestEncoding, path: Path, raw: bool) -> anyhow::R } Segment::Literal(_) | Segment::Wildcard { .. } | Segment::Fallback { .. 
} => seg, }; - segments.push(seg); + segments.push(Sp::new(seg_span, seg)); } - Ok(Path { segments }) + Ok(Path { + span: path.span, + segments, + }) } fn rewrite_custom_type_field( diff --git a/tsparser/src/parser/resources/apis/gateway.rs b/tsparser/src/parser/resources/apis/gateway.rs index 705d8837ab..186f9ee9c7 100644 --- a/tsparser/src/parser/resources/apis/gateway.rs +++ b/tsparser/src/parser/resources/apis/gateway.rs @@ -2,7 +2,7 @@ use litparser_derive::LitParser; use swc_common::sync::Lrc; use swc_ecma_ast as ast; -use litparser::LitParser; +use litparser::{report_and_continue, LitParser}; use crate::parser::resourceparser::bind::{BindData, BindKind, ResourceOrPath}; use crate::parser::resourceparser::paths::PkgPath; @@ -11,6 +11,7 @@ use crate::parser::resources::parseutil::{iter_references, TrackedNames, Unnamed use crate::parser::resources::Resource; use crate::parser::types::Object; use crate::parser::Range; +use crate::span_err::ErrReporter; #[derive(Debug, Clone)] pub struct Gateway { @@ -20,7 +21,7 @@ pub struct Gateway { pub auth_handler: Option>, } -#[allow(non_snake_case)] +#[allow(non_snake_case, dead_code)] #[derive(Debug, LitParser)] struct DecodedGatewayConfig { authHandler: Option, @@ -36,7 +37,8 @@ pub const GATEWAY_PARSER: ResourceParser = ResourceParser { let module = pass.module.clone(); type Res = UnnamedClassResource; for r in iter_references::(&module, &names) { - let r = r?; + let r = report_and_continue!(r); + let object = match &r.bind_name { None => None, Some(id) => pass @@ -45,10 +47,10 @@ pub const GATEWAY_PARSER: ResourceParser = ResourceParser { }; let auth_handler = if let Some(expr) = r.config.authHandler { - let obj = pass - .type_checker - .resolve_obj(pass.module.clone(), &expr) - .ok_or(anyhow::anyhow!("can't resolve endpoint"))?; + let Some(obj) = pass.type_checker.resolve_obj(pass.module.clone(), &expr) else { + expr.err("cannot resolve auth handler"); + continue; + }; Some(obj) } else { None @@ -69,6 +71,5 @@ pub 
const GATEWAY_PARSER: ResourceParser = ResourceParser { ident: r.bind_name, }); } - Ok(()) }, }; diff --git a/tsparser/src/parser/resources/apis/service.rs b/tsparser/src/parser/resources/apis/service.rs index a95f2238ec..97f9024328 100644 --- a/tsparser/src/parser/resources/apis/service.rs +++ b/tsparser/src/parser/resources/apis/service.rs @@ -1,5 +1,6 @@ -use swc_common::errors::HANDLER; +use litparser::report_and_continue; use swc_common::sync::Lrc; +use swc_common::Spanned; use swc_ecma_ast as ast; use litparser_derive::LitParser; @@ -12,6 +13,7 @@ use crate::parser::resources::parseutil::NamedClassResourceOptionalConfig; use crate::parser::resources::parseutil::{iter_references, TrackedNames}; use crate::parser::resources::Resource; use crate::parser::FilePath; +use crate::span_err::ErrReporter; #[derive(Debug, Clone)] pub struct Service { @@ -33,15 +35,11 @@ pub static SERVICE_PARSER: ResourceParser = ResourceParser { { type Res = NamedClassResourceOptionalConfig; for (i, r) in iter_references::(&module, &names).enumerate() { - let r = r?; + let r = report_and_continue!(r); if i > 0 { - HANDLER.with(|h| { - h.span_err( - r.range, - "cannot have multiple service declarations in the same module", - ); - }); + r.span() + .err("cannot have multiple service declarations in the same module"); continue; } @@ -50,12 +48,8 @@ pub static SERVICE_PARSER: ResourceParser = ResourceParser { match &pass.module.file_path { FilePath::Real(buf) if buf.ends_with("encore.service.ts") => {} _ => { - HANDLER.with(|h| { - h.span_err( - r.range, - "service declarations are only allowed in encore.service.ts", - ); - }); + r.span() + .err("service declarations are only allowed in encore.service.ts"); continue; } } @@ -81,7 +75,5 @@ pub static SERVICE_PARSER: ResourceParser = ResourceParser { }); } } - - Ok(()) }, }; diff --git a/tsparser/src/parser/resources/apis/service_client.rs b/tsparser/src/parser/resources/apis/service_client.rs index 185264363a..d072234dbf 100644 --- 
a/tsparser/src/parser/resources/apis/service_client.rs +++ b/tsparser/src/parser/resources/apis/service_client.rs @@ -1,6 +1,6 @@ use crate::parser::resources::apis::api::CallEndpointUsage; use crate::parser::usageparser::{ResolveUsageData, Usage, UsageExprKind}; -use anyhow::Result; +use crate::span_err::ErrReporter; use swc_common::sync::Lrc; #[derive(Debug, Clone)] @@ -11,16 +11,19 @@ pub struct ServiceClient { pub fn resolve_service_client_usage( data: &ResolveUsageData, client: Lrc, -) -> Result> { +) -> Option { match &data.expr.kind { UsageExprKind::MethodCall(method) => { let method_name = method.method.as_ref(); - Ok(Some(Usage::CallEndpoint(CallEndpointUsage { + Some(Usage::CallEndpoint(CallEndpointUsage { range: data.expr.range, endpoint: (client.service_name.clone(), method_name.to_string()), - }))) + })) + } + _ => { + data.expr.err("invalid service client usage"); + None } - _ => anyhow::bail!("invalid service client usage"), } } diff --git a/tsparser/src/parser/resources/infra/cron.rs b/tsparser/src/parser/resources/infra/cron.rs index d78c0dab7c..cc8b9795eb 100644 --- a/tsparser/src/parser/resources/infra/cron.rs +++ b/tsparser/src/parser/resources/infra/cron.rs @@ -1,15 +1,16 @@ use std::rc::Rc; -use anyhow::Result; use litparser_derive::LitParser; use swc_common::sync::Lrc; +use swc_common::{Span, Spanned}; use swc_ecma_ast as ast; -use litparser::LitParser; +use litparser::{report_and_continue, LitParser, ParseResult, Sp, ToParseErr}; use crate::parser::resourceparser::bind::{BindData, BindKind, ResourceOrPath}; use crate::parser::resourceparser::paths::PkgPath; use crate::parser::resourceparser::resource_parser::ResourceParser; +use crate::parser::resourceparser::ResourceParseContext; use crate::parser::resources::parseutil::{iter_references, NamedClassResource, TrackedNames}; use crate::parser::resources::Resource; use crate::parser::types::Object; @@ -20,7 +21,7 @@ pub struct CronJob { pub title: Option, pub doc: Option, pub schedule: 
CronJobSchedule, - pub endpoint: Rc, + pub endpoint: Sp>, } #[derive(Debug, Clone)] @@ -36,8 +37,8 @@ pub struct CronExpr(pub String); struct DecodedCronJobConfig { endpoint: ast::Expr, title: Option, - every: Option, - schedule: Option, + every: Option>, + schedule: Option>, } pub const CRON_PARSER: ResourceParser = ResourceParser { @@ -50,75 +51,85 @@ pub const CRON_PARSER: ResourceParser = ResourceParser { let module = pass.module.clone(); type Res = NamedClassResource; for r in iter_references::(&module, &names) { - let r = r?; - let object = match &r.bind_name { - None => None, - Some(id) => pass - .type_checker - .resolve_obj(pass.module.clone(), &ast::Expr::Ident(id.clone())), - }; - - let endpoint = pass - .type_checker - .resolve_obj(pass.module.clone(), &r.config.endpoint) - .ok_or(anyhow::anyhow!("can't resolve endpoint"))?; - - let schedule = r.config.schedule()?; - let resource = Resource::CronJob(Lrc::new(CronJob { - name: r.resource_name.to_owned(), - doc: r.doc_comment, - title: r.config.title, - endpoint, - schedule, - })); - pass.add_resource(resource.clone()); - pass.add_bind(BindData { - range: r.range, - resource: ResourceOrPath::Resource(resource), - object, - kind: BindKind::Create, - ident: r.bind_name, - }); + let r = report_and_continue!(r); + report_and_continue!(parse_cron_job(pass, r)); } - Ok(()) }, }; +fn parse_cron_job( + pass: &mut ResourceParseContext, + r: NamedClassResource, +) -> ParseResult<()> { + let object = match &r.bind_name { + None => None, + Some(id) => pass + .type_checker + .resolve_obj(pass.module.clone(), &ast::Expr::Ident(id.clone())), + }; + + let endpoint = pass + .type_checker + .resolve_obj(pass.module.clone(), &r.config.endpoint) + .ok_or(r.config.endpoint.parse_err("cannot resolve endpoint"))?; + + let schedule = r.config.parse_schedule(r.range.to_span())?; + let resource = Resource::CronJob(Lrc::new(CronJob { + name: r.resource_name.to_owned(), + doc: r.doc_comment, + title: r.config.title, + endpoint: 
Sp::new(r.config.endpoint.span(), endpoint), + schedule, + })); + pass.add_resource(resource.clone()); + pass.add_bind(BindData { + range: r.range, + resource: ResourceOrPath::Resource(resource), + object, + kind: BindKind::Create, + ident: r.bind_name, + }); + Ok(()) +} + impl LitParser for CronExpr { - fn parse_lit(input: &ast::Expr) -> anyhow::Result { + fn parse_lit(input: &ast::Expr) -> ParseResult { match input { ast::Expr::Lit(ast::Lit::Str(str)) => { // Ensure the cron expression is valid let expr = str.value.as_ref(); - cron_parser::parse(expr, &chrono::Utc::now())?; + cron_parser::parse(expr, &chrono::Utc::now()) + .map_err(|err| input.parse_err(err.to_string()))?; Ok(CronExpr(expr.to_string())) } - _ => anyhow::bail!("expected cron expression, got {:?}", input), + _ => Err(input.parse_err("expected cron expression")), } } } impl DecodedCronJobConfig { - fn schedule(&self) -> Result { + fn parse_schedule(&self, def_span: Span) -> ParseResult { match (self.every, self.schedule.as_ref()) { - (None, Some(schedule)) => Ok(CronJobSchedule::Cron(schedule.clone())), + (None, Some(schedule)) => Ok(CronJobSchedule::Cron(schedule.clone().take())), (Some(every), None) => { // TODO introduce more robust validation and error reporting here. 
let secs = every.as_secs(); if secs % 60 != 0 { - anyhow::bail!("`every` must be a multiple of 60 seconds"); + return Err(every + .span() + .parse_err("`every` must be a multiple of 60 seconds")); } let mins = secs / 60; if mins > (24 * 60) { - anyhow::bail!("`every` must be at most 24 hours"); + return Err(every.span().parse_err("`every` must be at most 24 hours")); } Ok(CronJobSchedule::Every(mins as u32)) } (None, None) => { - anyhow::bail!("expected either `every` or `schedule` to be set"); + Err(def_span.parse_err("expected either `every` or `schedule` to be set")) } (Some(_), Some(_)) => { - anyhow::bail!("expected either `every` or `schedule` to be set, not both"); + Err(def_span.parse_err("expected either `every` or `schedule` to be set, not both")) } } } diff --git a/tsparser/src/parser/resources/infra/objects.rs b/tsparser/src/parser/resources/infra/objects.rs index bedf621dad..f5337cef39 100644 --- a/tsparser/src/parser/resources/infra/objects.rs +++ b/tsparser/src/parser/resources/infra/objects.rs @@ -1,7 +1,6 @@ use std::ops::Deref; -use anyhow::Result; -use litparser::LitParser; +use litparser::{report_and_continue, LitParser}; use litparser_derive::LitParser; use swc_common::sync::Lrc; use swc_ecma_ast as ast; @@ -44,7 +43,7 @@ pub const OBJECTS_PARSER: ResourceParser = ResourceParser { { type Res = NamedClassResourceOptionalConfig; for r in iter_references::(&module, &names) { - let r = r?; + let r = report_and_continue!(r); let cfg = r.config.unwrap_or_default(); let object = match &r.bind_name { @@ -60,6 +59,7 @@ pub const OBJECTS_PARSER: ResourceParser = ResourceParser { versioned: cfg.versioned.unwrap_or(false), public: cfg.public.unwrap_or(false), })); + pass.add_resource(resource.clone()); pass.add_bind(BindData { range: r.range, @@ -73,7 +73,7 @@ pub const OBJECTS_PARSER: ResourceParser = ResourceParser { { for r in iter_references::(&module, &names) { - let r = r?; + let r = report_and_continue!(r); let object = match &r.bind_name { None => 
None, Some(id) => pass @@ -92,27 +92,25 @@ pub const OBJECTS_PARSER: ResourceParser = ResourceParser { }); } } - - Ok(()) }, }; -pub fn resolve_bucket_usage(data: &ResolveUsageData, bucket: Lrc) -> Result> { - Ok(match &data.expr.kind { +pub fn resolve_bucket_usage(data: &ResolveUsageData, bucket: Lrc) -> Option { + match &data.expr.kind { UsageExprKind::MethodCall(call) => { if call.method.as_ref() == "ref" { let Some(type_args) = call.call.type_args.as_deref() else { call.call .span .err("expected a type argument in call to Bucket.ref"); - return Ok(None); + return None; }; let Some(type_arg) = type_args.params.first() else { call.call .span .err("expected a type argument in call to Bucket.ref"); - return Ok(None); + return None; }; return parse_bucket_ref(data, bucket, call, type_arg); @@ -141,7 +139,7 @@ pub fn resolve_bucket_usage(data: &ResolveUsageData, bucket: Lrc) -> Res _ => { call.method.err("unsupported bucket operation"); - return Ok(None); + return None; } }; @@ -159,7 +157,7 @@ pub fn resolve_bucket_usage(data: &ResolveUsageData, bucket: Lrc) -> Res .err("invalid use of bucket resource"); None } - }) + } } fn parse_bucket_ref( @@ -167,7 +165,7 @@ fn parse_bucket_ref( bucket: Lrc, _call: &MethodCall, type_arg: &ast::TsType, -) -> Result> { +) -> Option { fn process_type( data: &ResolveUsageData, sp: &swc_common::Span, @@ -253,14 +251,14 @@ fn parse_bucket_ref( .err("cannot use publicUrl on a non-public bucket"); } - Ok(Some(Usage::Bucket(BucketUsage { + Some(Usage::Bucket(BucketUsage { range: data.expr.range, bucket, ops, - }))) + })) } else { typ.err("no bucket permissions found in type argument"); - Ok(None) + None } } diff --git a/tsparser/src/parser/resources/infra/pubsub_subscription.rs b/tsparser/src/parser/resources/infra/pubsub_subscription.rs index 7edc1f0534..65022e122a 100644 --- a/tsparser/src/parser/resources/infra/pubsub_subscription.rs +++ b/tsparser/src/parser/resources/infra/pubsub_subscription.rs @@ -2,9 +2,10 @@ use std::rc::Rc; use 
litparser_derive::LitParser; use swc_common::sync::Lrc; +use swc_common::Spanned; use swc_ecma_ast as ast; -use litparser::LitParser; +use litparser::{report_and_continue, LitParser, Sp}; use crate::parser::resourceparser::bind::{BindData, BindKind, ResourceOrPath}; use crate::parser::resourceparser::paths::PkgPath; @@ -13,11 +14,12 @@ use crate::parser::resources::parseutil::{iter_references, NamedClassResource, T use crate::parser::resources::Resource; use crate::parser::types::Object; use crate::parser::Range; +use crate::span_err::ErrReporter; #[derive(Debug, Clone)] pub struct Subscription { pub range: Range, - pub topic: Rc, + pub topic: Sp>, pub name: String, pub doc: Option, pub config: SubscriptionConfig, @@ -63,10 +65,11 @@ pub const SUBSCRIPTION_PARSER: ResourceParser = ResourceParser { type Res = NamedClassResource; for r in iter_references::(&module, &names) { - let r = r?; + let r = report_and_continue!(r); let topic_expr = r.constructor_args[0].clone(); - if topic_expr.spread.is_some() { - anyhow::bail!("can't use ... for PubSub topic reference"); + if let Some(spread) = topic_expr.spread.as_ref() { + spread.err("cannot use ... 
for PubSub topic reference"); + continue; } let object = match &r.bind_name { None => None, @@ -75,14 +78,17 @@ pub const SUBSCRIPTION_PARSER: ResourceParser = ResourceParser { .resolve_obj(pass.module.clone(), &ast::Expr::Ident(id.clone())), }; - let topic = pass + let Some(topic) = pass .type_checker .resolve_obj(pass.module.clone(), &topic_expr.expr) - .ok_or(anyhow::anyhow!("can't resolve topic"))?; + else { + topic_expr.expr.err("cannot resolve topic reference"); + continue; + }; let resource = Resource::PubSubSubscription(Lrc::new(Subscription { range: r.range, - topic, + topic: Sp::new(topic_expr.expr.span(), topic), name: r.resource_name.to_owned(), doc: r.doc_comment, config: SubscriptionConfig { @@ -124,6 +130,5 @@ pub const SUBSCRIPTION_PARSER: ResourceParser = ResourceParser { ident: r.bind_name, }); } - Ok(()) }, }; diff --git a/tsparser/src/parser/resources/infra/pubsub_topic.rs b/tsparser/src/parser/resources/infra/pubsub_topic.rs index edf32af424..f70122eb29 100644 --- a/tsparser/src/parser/resources/infra/pubsub_topic.rs +++ b/tsparser/src/parser/resources/infra/pubsub_topic.rs @@ -1,10 +1,8 @@ -use anyhow::Result; use litparser_derive::LitParser; -use swc_common::errors::HANDLER; use swc_common::sync::Lrc; use swc_ecma_ast as ast; -use litparser::{LitParser, Sp}; +use litparser::{report_and_continue, LitParser, ParseResult, Sp, ToParseErr}; use crate::parser::module_loader::Module; use crate::parser::resourceparser::bind::{BindData, BindKind, ResourceOrPath}; @@ -17,6 +15,7 @@ use crate::parser::resources::Resource; use crate::parser::types::Type; use crate::parser::usageparser::{ResolveUsageData, Usage, UsageExprKind}; use crate::parser::Range; +use crate::span_err::ErrReporter; #[derive(Debug, Clone)] pub struct Topic { @@ -34,14 +33,14 @@ pub enum DeliveryGuarantee { } #[derive(Debug, LitParser)] -#[allow(non_snake_case)] +#[allow(non_snake_case, dead_code)] struct DecodedTopicConfig { - deliveryGuarantee: Option, + deliveryGuarantee: Option>, 
orderingAttribute: Option, } impl DecodedTopicConfig { - fn delivery_guarantee(&self) -> Result { + fn delivery_guarantee(&self) -> ParseResult { let Some(delivery_guarantee) = &self.deliveryGuarantee else { return Ok(DeliveryGuarantee::AtLeastOnce); }; @@ -49,7 +48,7 @@ impl DecodedTopicConfig { match delivery_guarantee.as_str() { "at-least-once" => Ok(DeliveryGuarantee::AtLeastOnce), "exactly-once" => Ok(DeliveryGuarantee::ExactlyOnce), - _ => anyhow::bail!("invalid delivery guarantee"), + _ => Err(delivery_guarantee.parse_err("invalid delivery guarantee")), } } } @@ -63,7 +62,7 @@ pub const TOPIC_PARSER: ResourceParser = ResourceParser { let module = pass.module.clone(); for r in iter_references::(&module, &names) { - let r = r?; + let r = report_and_continue!(r); let object = match &r.bind_name { None => None, Some(id) => pass @@ -75,7 +74,7 @@ pub const TOPIC_PARSER: ResourceParser = ResourceParser { .type_checker .resolve_type(pass.module.clone(), &r.message_type); - let delivery_guarantee = r.config.delivery_guarantee()?; + let delivery_guarantee = report_and_continue!(r.config.delivery_guarantee()); let resource = Resource::PubSubTopic(Lrc::new(Topic { name: r.resource_name.to_owned(), doc: r.doc_comment, @@ -92,7 +91,6 @@ pub const TOPIC_PARSER: ResourceParser = ResourceParser { ident: r.bind_name, }); } - Ok(()) }, }; @@ -110,7 +108,7 @@ impl ReferenceParser for PubSubTopicDefinition { fn parse_resource_reference( module: &Module, path: &swc_ecma_visit::AstNodePath, - ) -> Result> { + ) -> ParseResult> { let Some(res) = NamedClassResource::::parse_resource_reference(module, path)? 
else { @@ -118,8 +116,7 @@ impl ReferenceParser for PubSubTopicDefinition { }; let Some(message_type) = extract_type_param(res.expr.type_args.as_deref(), 0) else { - HANDLER.with(|h| h.span_err(res.expr.span, "missing message type parameter")); - return Ok(None); + return Err(res.expr.parse_err("missing message type parameter")); }; Ok(Some(Self { @@ -139,8 +136,8 @@ pub struct PublishUsage { pub topic: Lrc, } -pub fn resolve_topic_usage(data: &ResolveUsageData, topic: Lrc) -> Result> { - Ok(match &data.expr.kind { +pub fn resolve_topic_usage(data: &ResolveUsageData, topic: Lrc) -> Option { + match &data.expr.kind { UsageExprKind::MethodCall(method) => { if method.method.as_ref() == "publish" { Some(Usage::PublishTopic(PublishUsage { @@ -155,6 +152,9 @@ pub fn resolve_topic_usage(data: &ResolveUsageData, topic: Lrc) -> Result // TODO validate: used as a subscription arg most likely None } - _ => anyhow::bail!("invalid topic usage"), - }) + _ => { + data.expr.err("invalid topic usage"); + None + } + } } diff --git a/tsparser/src/parser/resources/infra/secret.rs b/tsparser/src/parser/resources/infra/secret.rs index 964a59ac7c..4697055965 100644 --- a/tsparser/src/parser/resources/infra/secret.rs +++ b/tsparser/src/parser/resources/infra/secret.rs @@ -7,8 +7,8 @@ use crate::parser::resources::parseutil::{ }; use crate::parser::resources::Resource; use crate::parser::Range; -use anyhow::Result; -use litparser::LitParser; +use crate::span_err::ErrReporter; +use litparser::{report_and_continue, LitParser, ParseResult}; use swc_common::errors::HANDLER; use swc_common::sync::Lrc; use swc_common::Span; @@ -30,7 +30,7 @@ pub const SECRET_PARSER: ResourceParser = ResourceParser { let names = TrackedNames::new(&[("encore.dev/config", "secret")]); for r in iter_references::(&module, &names) { - let r = r?; + let r = report_and_continue!(r); let resource = Resource::Secret(Lrc::new(Secret { range: r.range, name: r.secret_name, @@ -50,7 +50,6 @@ pub const SECRET_PARSER: 
ResourceParser = ResourceParser { ident: Some(r.bind_name), }); } - Ok(()) }, }; @@ -78,7 +77,7 @@ impl ReferenceParser for SecretLiteral { fn parse_resource_reference( module: &Module, path: &swc_ecma_visit::AstNodePath, - ) -> Result> { + ) -> ParseResult> { for node in path.iter().rev() { if let swc_ecma_visit::AstParentNodeRef::CallExpr( expr, @@ -97,16 +96,12 @@ impl ReferenceParser for SecretLiteral { let doc_comment = module.preceding_comments(expr.span.lo.into()); let Some(bind_name) = extract_bind_name(path)? else { - HANDLER.with(|handler| { - handler.span_err(expr.span, "secrets must be bound to a variable") - }); + expr.span.err("secrets must be bound to a variable"); continue; }; let Some(secret_name) = &expr.args.first() else { - HANDLER.with(|handler| { - handler.span_err(expr.span, "secret() takes a single argument, the name of the secret as a string literal") - }); + expr.span.err("secret() takes a single argument, the name of the secret as a string literal"); continue; }; let secret_name = String::parse_lit(secret_name.expr.as_ref())?; diff --git a/tsparser/src/parser/resources/infra/sqldb.rs b/tsparser/src/parser/resources/infra/sqldb.rs index 038be27fb5..9ad9cc8f3b 100644 --- a/tsparser/src/parser/resources/infra/sqldb.rs +++ b/tsparser/src/parser/resources/infra/sqldb.rs @@ -1,18 +1,16 @@ use std::path::{Path, PathBuf}; use std::str::FromStr; -use anyhow::Context; -use anyhow::{anyhow, Result}; use itertools::Either; use litparser_derive::LitParser; use once_cell::sync::Lazy; use regex::Regex; -use swc_common::errors::HANDLER; use swc_common::sync::Lrc; +use swc_common::{Span, Spanned}; use swc_ecma_ast as ast; -use litparser::LitParser; -use litparser::LocalRelPath; +use litparser::{report_and_continue, LitParser, Sp, ToParseErr}; +use litparser::{LocalRelPath, ParseResult}; use crate::parser::resourceparser::bind::ResourceOrPath; use crate::parser::resourceparser::bind::{BindData, BindKind}; @@ -24,13 +22,14 @@ use 
crate::parser::resources::Resource; use crate::parser::resources::ResourcePath; use crate::parser::usageparser::{ResolveUsageData, Usage, UsageExprKind}; use crate::parser::{FilePath, Range}; -use crate::span_err::ErrorWithSpanExt; +use crate::span_err::{ErrReporter, ErrorWithSpanExt}; #[derive(Debug, Clone)] pub struct SQLDatabase { + pub span: Span, pub name: String, pub doc: Option, - pub migrations: Option, + pub migrations: Option>, } #[derive(Clone, Debug)] @@ -39,15 +38,21 @@ pub enum MigrationFileSource { Drizzle, } +#[derive(Debug, thiserror::Error)] +pub enum MigrationFileSourceParseError { + #[error("unexpected value for migration file source: {0}")] + UnexpectedValue(String), +} + impl FromStr for MigrationFileSource { - type Err = anyhow::Error; + type Err = MigrationFileSourceParseError; fn from_str(input: &str) -> Result { match input { "prisma" => Ok(MigrationFileSource::Prisma), "drizzle" => Ok(MigrationFileSource::Drizzle), - _ => Err(anyhow!( - "unexpected value for migration file source: {input}" + _ => Err(MigrationFileSourceParseError::UnexpectedValue( + input.to_string(), )), } } @@ -89,25 +94,34 @@ pub const SQLDB_PARSER: ResourceParser = ResourceParser { { type Res = NamedClassResourceOptionalConfig; for r in iter_references::(&module, &names) { - let r = r?; + let r = report_and_continue!(r); let cfg = r.config.unwrap_or_default(); let migrations = match (cfg.migrations, &pass.module.file_path) { (None, _) => None, (_, FilePath::Custom(_)) => { - anyhow::bail!("cannot use custom file path for db migrations") + pass.module + .ast + .span() + .shrink_to_lo() + .err("cannot use custom file path for db migrations"); + continue; } (Some(Either::Left(rel)), FilePath::Real(path)) => { - let dir = path.parent().unwrap().join(rel.0); - let migrations = parse_migrations(&dir, None)?; - Some(DBMigrations { - dir, - migrations, - non_seq_migrations: false, - }) + let dir = path.parent().unwrap().join(rel.buf); + let migrations = + 
report_and_continue!(parse_migrations(rel.span, &dir, None)); + Some(Sp::new( + rel.span, + DBMigrations { + dir, + migrations, + non_seq_migrations: false, + }, + )) } (Some(Either::Right(cfg)), FilePath::Real(path)) => { - let dir = path.parent().unwrap().join(cfg.path.0); + let dir = path.parent().unwrap().join(cfg.path.buf); let source = if let Some(ref string) = cfg.source { match MigrationFileSource::from_str(string) { Ok(source) => Some(source), @@ -120,14 +134,21 @@ pub const SQLDB_PARSER: ResourceParser = ResourceParser { None }; - let migrations = parse_migrations(&dir, source.as_ref())?; + let migrations = report_and_continue!(parse_migrations( + cfg.path.span, + &dir, + source.as_ref() + )); let non_seq_migrations = matches!(source, Some(MigrationFileSource::Prisma)); - Some(DBMigrations { - dir, - migrations, - non_seq_migrations, - }) + Some(Sp::new( + cfg.path.span, + DBMigrations { + dir, + migrations, + non_seq_migrations, + }, + )) } }; @@ -139,6 +160,7 @@ pub const SQLDB_PARSER: ResourceParser = ResourceParser { }; let resource = Resource::SQLDatabase(Lrc::new(SQLDatabase { + span: r.range.to_span(), name: r.resource_name, doc: r.doc_comment, migrations, @@ -156,7 +178,7 @@ pub const SQLDB_PARSER: ResourceParser = ResourceParser { { for r in iter_references::(&module, &names) { - let r = r?; + let r = report_and_continue!(r); let object = match &r.bind_name { None => None, Some(id) => pass @@ -175,43 +197,41 @@ pub const SQLDB_PARSER: ResourceParser = ResourceParser { }); } } - - Ok(()) }, }; fn visit_dirs( + span: Span, dir: &Path, depth: i8, max_depth: i8, - cb: &mut dyn FnMut(&std::fs::DirEntry) -> Result<()>, -) -> Result<()> { - if dir.is_dir() { - for entry in std::fs::read_dir(dir)? 
{ - let entry = entry?; - let path = entry.path(); - if path.is_dir() && depth < max_depth { - visit_dirs(&path, depth + 1, max_depth, cb)?; - } else { - cb(&entry)?; - } + cb: &mut dyn FnMut(&std::fs::DirEntry) -> ParseResult<()>, +) -> ParseResult<()> { + let entries = std::fs::read_dir(dir).map_err(|err| span.parse_err(err.to_string()))?; + for entry in entries { + let entry = entry.map_err(|err| span.parse_err(err.to_string()))?; + let path = entry.path(); + if path.is_dir() && depth < max_depth { + visit_dirs(span, &path, depth + 1, max_depth, cb)?; + } else { + cb(&entry)?; } } Ok(()) } -fn parse_default(dir: &Path) -> Result> { +fn parse_default(span: Span, dir: &Path) -> ParseResult> { let mut migrations = vec![]; static FILENAME_RE: Lazy = Lazy::new(|| Regex::new(r"^(\d+)_([^.]+)\.(up|down).sql$").unwrap()); - visit_dirs(dir, 0, 0, &mut |entry| -> Result<()> { + visit_dirs(span, dir, 0, 0, &mut |entry| -> ParseResult<()> { let path = entry.path(); let name = entry.file_name(); - let name = name.to_str().ok_or(anyhow!( + let name = name.to_str().ok_or(span.parse_err(format!( "invalid migration filename: {}", name.to_string_lossy() - ))?; + )))?; // If the file is not an SQL file ignore it, to allow for other files to be present // in the migration directory. For SQL files we want to ensure they're properly named @@ -225,12 +245,14 @@ fn parse_default(dir: &Path) -> Result> { // Ensure the file name matches the regex. 
let captures = FILENAME_RE .captures(name) - .ok_or(anyhow!("invalid migration filename: {}", name))?; + .ok_or(span.parse_err(format!("invalid migration filename: {}", name)))?; if captures[3].eq("up") { migrations.push(DBMigration { file_name: name.to_string(), description: captures[2].to_string(), - number: captures[1].parse()?, + number: captures[1] + .parse::() + .map_err(|err| span.parse_err(err.to_string()))?, }); } Ok(()) @@ -240,18 +262,18 @@ fn parse_default(dir: &Path) -> Result> { Ok(migrations) } -fn parse_drizzle(dir: &Path) -> Result> { +fn parse_drizzle(span: Span, dir: &Path) -> ParseResult> { let mut migrations = vec![]; static FILENAME_RE: Lazy = Lazy::new(|| Regex::new(r"^(\d+)_([^.]+)\.sql$").unwrap()); - visit_dirs(dir, 0, 0, &mut |entry| -> Result<()> { + visit_dirs(span, dir, 0, 0, &mut |entry| -> ParseResult<()> { let path = entry.path(); let name = entry.file_name(); - let name = name.to_str().ok_or(anyhow!( + let name = name.to_str().ok_or(span.parse_err(format!( "invalid migration filename: {}", name.to_string_lossy() - ))?; + )))?; // If the file is not an SQL file ignore it, to allow for other files to be present // in the migration directory. For SQL files we want to ensure they're properly named @@ -265,11 +287,13 @@ fn parse_drizzle(dir: &Path) -> Result> { // Ensure the file name matches the regex. 
let captures = FILENAME_RE .captures(name) - .ok_or(anyhow!("invalid migration filename: {}", name))?; + .ok_or(span.parse_err(format!("invalid migration filename: {}", name)))?; migrations.push(DBMigration { file_name: name.to_string(), description: captures[2].to_string(), - number: captures[1].parse()?, + number: captures[1] + .parse::() + .map_err(|err| span.parse_err(err.to_string()))?, }); Ok(()) @@ -277,66 +301,76 @@ fn parse_drizzle(dir: &Path) -> Result> { Ok(migrations) } -fn parse_prisma(dir: &Path) -> Result> { +fn parse_prisma(span: Span, dir: &Path) -> ParseResult> { let mut migrations = vec![]; static FILENAME_RE: Lazy = Lazy::new(|| Regex::new(r"^(\d+)_(.*)$").unwrap()); - visit_dirs(dir, 0, 1, &mut |entry| -> Result<()> { + visit_dirs(span, dir, 0, 1, &mut |entry| -> ParseResult<()> { let path = entry.path(); let name = entry.file_name(); - let name = name.to_str().ok_or(anyhow!( + let name = name.to_str().ok_or(span.parse_err(format!( "invalid migration filename: {}", name.to_string_lossy() - ))?; + )))?; if name != "migration.sql" { return Ok(()); } let dir_name = path .parent() - .context(anyhow!("migration directory has no parent"))? + .ok_or(span.parse_err("migration directory has no parent"))? .file_name() - .context(anyhow!("migration directory has no file name"))? + .ok_or(span.parse_err("migration directory has no name"))? .to_str() - .context(anyhow!("migration directory has invalid file name"))?; + .ok_or(span.parse_err("migration directory has invalid name"))?; // Ensure the file name matches the regex. let captures = FILENAME_RE .captures(dir_name) - .ok_or(anyhow!("invalid migration directory name: {}", dir_name))?; + .ok_or(span.parse_err(format!("invalid migration directory name: {}", dir_name)))?; migrations.push(DBMigration { file_name: path .strip_prefix(dir) - .context(anyhow!( - "migration directory is not a subdirectory of {}", - dir.display() - ))? 
+ .map_err(|_| { + span.parse_err(format!( + "migration directory is not a subdirectory of {}", + dir.display() + )) + })? .to_string_lossy() .to_string(), description: captures[2].to_string(), - number: captures[1].parse()?, + number: captures[1] + .parse::() + .map_err(|err| span.parse_err(err.to_string()))?, }); Ok(()) })?; Ok(migrations) } -fn parse_migrations(dir: &Path, source: Option<&MigrationFileSource>) -> Result> { - let mut migrations = match source { - Some(MigrationFileSource::Drizzle) => parse_drizzle(dir), - Some(MigrationFileSource::Prisma) => parse_prisma(dir), - _ => parse_default(dir), +fn parse_migrations( + span: Span, + dir: &Path, + source: Option<&MigrationFileSource>, +) -> ParseResult> { + if !dir.exists() { + return Err(span.parse_err("migrations directory does not exist")); + } else if !dir.is_dir() { + return Err(span.parse_err("migrations path is not a directory")); } - .context("failed to parse migrations")?; + + let mut migrations = match source { + Some(MigrationFileSource::Drizzle) => parse_drizzle(span, dir), + Some(MigrationFileSource::Prisma) => parse_prisma(span, dir), + _ => parse_default(span, dir), + }?; migrations.sort_by_key(|m| m.number); Ok(migrations) } -pub fn resolve_database_usage( - data: &ResolveUsageData, - db: Lrc, -) -> Result> { - Ok(match &data.expr.kind { +pub fn resolve_database_usage(data: &ResolveUsageData, db: Lrc) -> Option { + match &data.expr.kind { UsageExprKind::MethodCall(_) | UsageExprKind::FieldAccess(_) | UsageExprKind::CallArg(_) @@ -346,15 +380,10 @@ pub fn resolve_database_usage( })), _ => { - HANDLER.with(|h| { - h.span_err( - data.expr.range.to_span(), - "invalid use of database resource", - ) - }); + data.expr.err("invalid use of database resource"); None } - }) + } } #[derive(Debug)] diff --git a/tsparser/src/parser/resources/parseutil.rs b/tsparser/src/parser/resources/parseutil.rs index ceb9e25ec2..c56e5ee9cd 100644 --- a/tsparser/src/parser/resources/parseutil.rs +++ 
b/tsparser/src/parser/resources/parseutil.rs @@ -1,9 +1,7 @@ use std::collections::{HashMap, HashSet}; -use anyhow::Result; -use litparser::LitParser; -use swc_common::errors::HANDLER; -use swc_common::Spanned; +use litparser::{LitParser, ParseResult, ToParseErr}; +use swc_common::{Span, Spanned}; use swc_ecma_ast::{self as ast, TsTypeParamInstantiation}; use swc_ecma_visit::VisitWithPath; @@ -17,7 +15,7 @@ where fn parse_resource_reference( module: &Module, path: &swc_ecma_visit::AstNodePath, - ) -> Result>; + ) -> ParseResult>; } pub struct NamedClassResource { @@ -30,35 +28,41 @@ pub struct NamedClassResource Spanned + for NamedClassResource +{ + fn span(&self) -> Span { + self.range.to_span() + } +} + impl ReferenceParser for NamedClassResource { fn parse_resource_reference( module: &Module, path: &swc_ecma_visit::AstNodePath, - ) -> Result> { - let res= NamedClassResourceOptionalConfig::::parse_resource_reference(module, path)?; - match res { - None => Ok(None), - Some(res) => { - let Some(config) = res.config else { - HANDLER.with(|handler| { - handler.span_err(res.range.to_span(), "missing required config object"); - }); - return Ok(None); - }; + ) -> ParseResult> { + let res = match NamedClassResourceOptionalConfig::::parse_resource_reference(module, path)? 
{ + None => return Ok(None), + Some(res) => res, + }; + let Some(config) = res.config else { + return Err(res + .range + .to_span() + .parse_err("missing required config object")); + }; - Ok(Some(Self { - range: res.range, - constructor_args: res.constructor_args, - doc_comment: res.doc_comment, - resource_name: res.resource_name, - bind_name: res.bind_name, - config, - expr: res.expr, - })) - } - } + Ok(Some(Self { + range: res.range, + constructor_args: res.constructor_args, + doc_comment: res.doc_comment, + resource_name: res.resource_name, + bind_name: res.bind_name, + config, + expr: res.expr, + })) } } @@ -76,13 +80,21 @@ pub struct NamedClassResourceOptionalConfig< pub expr: ast::NewExpr, } +impl Spanned + for NamedClassResourceOptionalConfig +{ + fn span(&self) -> Span { + self.range.to_span() + } +} + impl ReferenceParser for NamedClassResourceOptionalConfig { fn parse_resource_reference( module: &Module, path: &swc_ecma_visit::AstNodePath, - ) -> Result> { + ) -> ParseResult> { for node in path.iter().rev() { if let swc_ecma_visit::AstParentNodeRef::NewExpr( expr, @@ -90,14 +102,11 @@ impl Referenc ) = node { let Some(args) = &expr.args else { - HANDLER.with(|h| h.span_err(expr.span, "missing constructor arguments")); - continue; + return Err(expr.span.parse_err("missing constructor arguments")); }; let bind_name = extract_bind_name(path)?; - let Some(resource_name) = extract_resource_name(expr.span, args, NAME_IDX) else { - continue; - }; + let resource_name = extract_resource_name(expr.span, args, NAME_IDX)?; let doc_comment = module.preceding_comments(expr.span.lo.into()); let config = args @@ -129,13 +138,19 @@ pub struct UnnamedClassResource { pub config: Config, } +impl Spanned for UnnamedClassResource { + fn span(&self) -> Span { + self.range.to_span() + } +} + impl ReferenceParser for UnnamedClassResource { fn parse_resource_reference( module: &Module, path: &swc_ecma_visit::AstNodePath, - ) -> Result> { + ) -> ParseResult> { for node in 
path.iter().rev() { if let swc_ecma_visit::AstParentNodeRef::NewExpr( expr, @@ -143,7 +158,7 @@ impl ReferenceParser ) = node { let Some(args) = &expr.args else { - anyhow::bail!("missing constructor arguments") + return Err(expr.span.parse_err("missing constructor arguments")); }; let bind_name = extract_bind_name(path)?; @@ -177,7 +192,7 @@ impl ReferenceParser for NamedStaticMethod { fn parse_resource_reference( module: &Module, path: &swc_ecma_visit::AstNodePath, - ) -> Result> { + ) -> ParseResult> { for (idx, node) in path.iter().rev().enumerate() { if let swc_ecma_visit::AstParentNodeRef::MemberExpr( expr, @@ -209,10 +224,7 @@ impl ReferenceParser for NamedStaticMethod { }; let bind_name = extract_bind_name(path)?; - let Some(resource_name) = extract_resource_name(call.span, &call.args, NAME_IDX) - else { - continue; - }; + let resource_name = extract_resource_name(call.span, &call.args, NAME_IDX)?; let doc_comment = module.preceding_comments(call.span.lo.into()); return Ok(Some(Self { @@ -229,42 +241,42 @@ impl ReferenceParser for NamedStaticMethod { } /// Extracts the name of a resource. -/// Returns None if the parse failed. 
pub fn extract_resource_name( span: swc_common::Span, args: &[ast::ExprOrSpread], idx: usize, -) -> Option<&str> { +) -> ParseResult<&str> { let Some(val) = args.get(idx) else { - HANDLER.with(|h| h.span_err(span, &format!("missing resource name as argument[{}]", idx))); - return None; + return Err(span.parse_err(format!("missing resource name as argument[{}]", idx))); }; if val.spread.is_none() { if let ast::Expr::Lit(ast::Lit::Str(str)) = val.expr.as_ref() { - return Some(str.value.as_ref()); + return Ok(str.value.as_ref()); } } - HANDLER.with(|h| h.span_err(val.span(), "expected string literal")); - None + Err(span.parse_err("expected string literal")) } -pub fn extract_bind_name(path: &swc_ecma_visit::AstNodePath) -> Result> { +pub fn extract_bind_name(path: &swc_ecma_visit::AstNodePath) -> ParseResult> { for node in path.iter().rev() { if let swc_ecma_visit::AstParentNodeRef::VarDecl( var, swc_ecma_visit::fields::VarDeclField::Decls(idx), ) = node { - let decl = var - .decls - .get(*idx) - .ok_or(anyhow::anyhow!("missing declaration at index {}", idx))?; + let Some(decl) = var.decls.get(*idx) else { + return Err(var + .span + .parse_err(format!("missing declaration at index {}", idx))); + }; match &decl.name { ast::Pat::Ident(bind_name) => { return Ok(Some(bind_name.id.clone())); } - _ => anyhow::bail!("expected identifier as bind name"), + _ => { + return Err(decl.name.parse_err("expected identifier as bind name")); + } } } } @@ -343,7 +355,7 @@ fn collect_import_idents<'a>( pub fn iter_references( module: &Module, names: &TrackedNames, -) -> impl Iterator> { +) -> impl Iterator> { let (local_ids, _module_ids) = collect_import_idents(module, names); let mut visitor = >::new(module, local_ids); module @@ -355,7 +367,7 @@ pub fn iter_references( struct IterReferenceVisitor<'a, R> { module: &'a Module, local_ids: HashSet, - results: Vec>, + results: Vec>, } impl<'a, R> IterReferenceVisitor<'a, R> { diff --git a/tsparser/src/parser/respath.rs 
b/tsparser/src/parser/respath.rs index e8346ef274..99cd9a278d 100644 --- a/tsparser/src/parser/respath.rs +++ b/tsparser/src/parser/respath.rs @@ -1,15 +1,21 @@ -use std::fmt::Write; +use std::{fmt::Write, ops::Deref}; + +use litparser::Sp; +use swc_common::{BytePos, Span}; + +use crate::span_err::{ErrorWithSpanExt, SpErr}; #[derive(Debug, Clone)] pub struct Path { - pub segments: Vec, + pub span: Span, + pub segments: Vec>, } impl Path { - pub fn dynamic_segments(&self) -> impl Iterator { + pub fn dynamic_segments(&self) -> impl Iterator> { self.segments .iter() - .filter(|s| !matches!(s, Segment::Literal(_))) + .filter(|s| !matches!(s.get(), Segment::Literal(_))) } pub fn has_dynamic_segments(&self) -> bool { @@ -89,47 +95,70 @@ impl Default for ParseOptions { } } +#[derive(Debug, thiserror::Error, Clone, PartialEq, Eq)] +pub enum PathParseError { + #[error("empty path")] + EmptyPath, + #[error("path must start with '/'")] + MustStartWithSlash, + #[error("path must not start with '/'")] + MustNotStartWithSlash, + #[error("path cannot contain empty path segment")] + EmptySegment, + #[error("path parameters must have a name")] + UnnamedParam, + #[error("wildcard segment must be at the end of the path")] + WildcardNotAtEnd, + #[error("fallback segment must be at the end of the path")] + FallbackNotAtEnd, + #[error("path cannot contain query parameters (the '?' 
character)")] + ContainsQuery, + #[error("path cannot contain url fragment (the '#' character)")] + ContainsFragment, + #[error("path cannot contain url scheme")] + ContainsScheme, + #[error("path cannot contain url authority")] + ContainsAuthority, + #[error("path cannot contain hostname")] + ContainsHostname, + #[error("path is invalid: {0}")] + Invalid(String), +} + impl Path { - pub fn parse(path: &str, opts: ParseOptions) -> anyhow::Result { + pub fn parse( + span: Span, + path: &str, + opts: ParseOptions, + ) -> Result> { if path.is_empty() { - anyhow::bail!("empty path"); + return Err(PathParseError::EmptyPath.with_span(span)); } else if !path.starts_with('/') && opts.prefix_slash { - anyhow::bail!("path must start with '/'"); + return Err(PathParseError::MustStartWithSlash.with_span(span)); } else if path.starts_with('/') && !opts.prefix_slash { - anyhow::bail!("path must not start with '/'"); + return Err(PathParseError::MustNotStartWithSlash.with_span(span)); } // Ensure this is a valid url path. - parse_url_path(path)?; + parse_url_path(path).map_err(|err| err.with_span(span))?; let mut segments = vec![]; - let mut path = path; - while !path.is_empty() { + let path_end = path.len(); + let mut idx = 0; + while idx < path_end { if opts.prefix_slash || !segments.is_empty() { - path = &path[1..]; // drop leading slash + idx += 1; // drop leading slash } - // Find the next path segment. - let val = match path.find('/') { - Some(0) => { - // Empty segment. - anyhow::bail!("invalid path: cannot contain empty path segment"); - } - Some(idx) => { - // Non-empty segment. - let val = &path[..idx]; - path = &path[idx..]; - val - } - None => { - // Last segment. - let val = path; - path = ""; - val - } + let seg_start = idx; + let seg_end = { + let remainder = &path[idx..]; + idx + remainder.find('/').unwrap_or(remainder.len()) }; + // Find the next path segment. 
+ let val = &path[seg_start..seg_end]; let seg: Segment = match val.chars().next() { Some(':') => Segment::Param { name: val[1..].to_string(), @@ -144,72 +173,81 @@ impl Path { _ => Segment::Literal(val.to_string()), }; - segments.push(seg); + let span = span + .with_lo(span.lo + BytePos(seg_start as u32)) + .with_hi(span.hi + BytePos(seg_end as u32)); + + segments.push(Sp::new(span, seg)); + idx = seg_end; } // Validate the segments. for (idx, seg) in segments.iter().enumerate() { - match seg { + match seg.deref() { Segment::Literal(lit) if lit.is_empty() && segments.len() > 1 => { - anyhow::bail!("invalid path: literal cannot be empty"); + return Err(PathParseError::EmptySegment.with_span(seg.span())); } Segment::Param { name, .. } if name.is_empty() => { - anyhow::bail!("path parameters must have a name"); + return Err(PathParseError::UnnamedParam.with_span(seg.span())); } Segment::Wildcard { name } if name.is_empty() => { - anyhow::bail!("path parameters must have a name"); + return Err(PathParseError::UnnamedParam.with_span(seg.span())); } Segment::Wildcard { .. } if idx != segments.len() - 1 => { - anyhow::bail!("path wildcards must be the last segment in the path"); + return Err(PathParseError::WildcardNotAtEnd.with_span(seg.span())); } Segment::Fallback { .. } if idx != segments.len() - 1 => { - anyhow::bail!("path fallbacks must be the last segment in the path"); + return Err(PathParseError::FallbackNotAtEnd.with_span(seg.span())); } _ => {} } } - Ok(Path { segments }) + Ok(Path { span, segments }) } } -fn parse_url_path(path: &str) -> anyhow::Result<()> { +fn parse_url_path(path: &str) -> Result<(), PathParseError> { // The url crate only supports parsing absolute urls, so use a dummy base // and ensure it is the same after parsing. 
- let base = url::Url::parse("base://url.here")?; + let base = url::Url::parse("base://url.here").expect("internal error: invalid base url"); + let url = url::Url::options() .base_url(Some(&base)) .parse(path) - .map_err(|err| anyhow::anyhow!("invalid path: {}", err))?; + .map_err(|err| PathParseError::Invalid(err.to_string()))?; if url.scheme() != base.scheme() { - anyhow::bail!("invalid path: cannot contain scheme") + return Err(PathParseError::ContainsScheme); } else if url.authority() != base.authority() { - anyhow::bail!("invalid path: cannot contain authority") + return Err(PathParseError::ContainsAuthority); } + match url.host_str() { None => { // We should always have a host since the base url has one. - anyhow::bail!("invalid path: cannot contain host") + return Err(PathParseError::ContainsHostname); } Some(host) => { if host != base.host_str().unwrap() { - anyhow::bail!("invalid path: cannot contain host") + return Err(PathParseError::ContainsHostname); } } } if url.query().is_some() { - anyhow::bail!("path must not contain query parameters (the '?' character)") + Err(PathParseError::ContainsQuery) } else if url.fragment().is_some() { - anyhow::bail!("path must not contain url fragments (the '#' character)") + Err(PathParseError::ContainsFragment) + } else { + Ok(()) } - - Ok(()) } #[cfg(test)] mod tests { + use swc_common::DUMMY_SP; + use super::*; #[test] @@ -236,26 +274,14 @@ mod tests { }, ]), ), - ( - "/:foo/*", - Err("path parameters must have a name".to_string()), - ), - ( - "/:foo/*/bar", - Err("path parameters must have a name".to_string()), - ), - ( - "/:foo/*bar/baz", - Err("path wildcards must be the last segment in the path".to_string()), - ), - ( - "/foo?bar=baz", - Err("path must not contain query parameters (the '?' 
character)".to_string()), - ), - ( - "/foo#bar", - Err("path must not contain url fragments (the '#' character)".to_string()), - ), + ("", Err(PathParseError::EmptyPath)), + ("/foo//bar", Err(PathParseError::EmptySegment)), + ("/foo/", Err(PathParseError::EmptySegment)), + ("/:foo/*", Err(PathParseError::UnnamedParam)), + ("/:foo/*/bar", Err(PathParseError::UnnamedParam)), + ("/:foo/*bar/baz", Err(PathParseError::WildcardNotAtEnd)), + ("/foo?bar=baz", Err(PathParseError::ContainsQuery)), + ("/foo#bar", Err(PathParseError::ContainsFragment)), ( "/foo/!fallback", Ok(vec![ @@ -268,13 +294,14 @@ mod tests { ]; for (path, want) in tests { - let got = Path::parse(path, Default::default()); + let got = Path::parse(DUMMY_SP, path, Default::default()); match (got, want) { (Ok(got), Ok(want)) => { - assert_eq!(got.segments, want, "path {:?}", path); + let segments: Vec<_> = got.segments.into_iter().map(|s| s.take()).collect(); + assert_eq!(segments, want, "path {:?}", path); } (Err(got), Err(want)) => { - assert_eq!(got.to_string(), want, "path {:?}", path); + assert_eq!(got.error, want, "path {:?}", path); } (Ok(got), Err(want)) => { panic!("got {:?}, want err {:?}, path {:?}", got, want, path); diff --git a/tsparser/src/parser/service_discovery.rs b/tsparser/src/parser/service_discovery.rs index ec1e11465b..59110294fb 100644 --- a/tsparser/src/parser/service_discovery.rs +++ b/tsparser/src/parser/service_discovery.rs @@ -1,7 +1,6 @@ use std::collections::{HashMap, HashSet}; use std::path::PathBuf; -use anyhow::Result; use swc_common::errors::HANDLER; use swc_common::sync::Lrc; @@ -13,7 +12,7 @@ use crate::parser::{FilePath, FileSet}; pub fn discover_services<'a>( file_set: &'a FileSet, binds: &'a Vec>, -) -> Result> { +) -> Vec { let sd = ServiceDiscoverer { file_set, binds, @@ -40,7 +39,7 @@ struct ServiceDiscoverer<'a> { } impl ServiceDiscoverer<'_> { - fn discover(mut self) -> Result> { + fn discover(mut self) -> Vec { for b in self.binds { match &b.resource { 
Resource::Service(svc) => { @@ -84,7 +83,7 @@ impl ServiceDiscoverer<'_> { // Sort the services by name for deterministic output. svcs.sort_by(|a, b| a.name.cmp(&b.name)); - Ok(svcs) + svcs } fn possible_service_root(&mut self, bind: &Bind, strong: bool, service_name: Option) { @@ -180,7 +179,7 @@ mod tests { use super::*; - fn parse(tmp_dir: &Path, src: &str) -> Result> { + fn parse(tmp_dir: &Path, src: &str) -> anyhow::Result> { let globals = Globals::new(); let cm: Rc = Default::default(); let errs = Rc::new(Handler::with_tty_emitter( @@ -212,8 +211,8 @@ mod tests { Default::default(), ); let parser = Parser::new(&pc, pass1); - let result = parser.parse()?; - discover_services(&pc.file_set, &result.binds) + let result = parser.parse(); + Ok(discover_services(&pc.file_set, &result.binds)) }) }) } diff --git a/tsparser/src/parser/types/mod.rs b/tsparser/src/parser/types/mod.rs index 52de7e8c8b..d67805195c 100644 --- a/tsparser/src/parser/types/mod.rs +++ b/tsparser/src/parser/types/mod.rs @@ -9,6 +9,7 @@ mod utils; mod resolved; #[cfg(test)] mod tests; +pub mod validation; pub use object::{Object, ObjectId, ObjectKind, ResolveState}; pub use typ::{ diff --git a/tsparser/src/parser/types/resolved.rs b/tsparser/src/parser/types/resolved.rs index 599cd7c304..e2dc00e7d2 100644 --- a/tsparser/src/parser/types/resolved.rs +++ b/tsparser/src/parser/types/resolved.rs @@ -55,6 +55,14 @@ impl Resolved<'_, B> { _ => self, } } + /// + /// Converts `Same` to `Changed`. + pub fn into_new(self) -> Resolved<'static, B> { + match self { + Same(borrowed) | Changed(borrowed) => New(borrowed.to_owned()), + New(typ) => New(typ), + } + } /// Extracts the owned data. 
/// diff --git a/tsparser/src/parser/types/snapshots/encore_tsparser__parser__types__tests__resolve_types@generics.ts.snap b/tsparser/src/parser/types/snapshots/encore_tsparser__parser__types__tests__resolve_types@generics.ts.snap index b61b97c90a..1b322a73db 100644 --- a/tsparser/src/parser/types/snapshots/encore_tsparser__parser__types__tests__resolve_types@generics.ts.snap +++ b/tsparser/src/parser/types/snapshots/encore_tsparser__parser__types__tests__resolve_types@generics.ts.snap @@ -192,6 +192,59 @@ input_file: tsparser/src/parser/types/testdata/generics.ts }, ), }, + InterfaceField { + name: String( + "five", + ), + optional: false, + typ: Named( + Named { + obj: Object { + name: Some( + "GenericIface", + ), + }, + type_arguments: [ + Named( + Named { + obj: Object { + name: Some( + "Generic1", + ), + }, + type_arguments: [ + Basic( + Boolean, + ), + ], + }, + ), + ], + }, + ), + }, + ], + index: None, + call: None, + }, + ), + "GenericIface": Interface( + Interface { + fields: [ + InterfaceField { + name: String( + "foo", + ), + optional: false, + typ: Generic( + TypeParam( + TypeParam { + idx: 0, + constraint: None, + }, + ), + ), + }, ], index: None, call: None, diff --git a/tsparser/src/parser/types/snapshots/encore_tsparser__parser__types__tests__resolve_types@infer.txt.snap b/tsparser/src/parser/types/snapshots/encore_tsparser__parser__types__tests__resolve_types@infer.txt.snap new file mode 100644 index 0000000000..d80036cebc --- /dev/null +++ b/tsparser/src/parser/types/snapshots/encore_tsparser__parser__types__tests__resolve_types@infer.txt.snap @@ -0,0 +1,57 @@ +--- +source: tsparser/src/parser/types/tests.rs +expression: result +input_file: tsparser/src/parser/types/testdata/infer.txt +--- +{ + "Iface": Generic( + Conditional( + Conditional { + check_type: Generic( + TypeParam( + TypeParam { + idx: 0, + constraint: None, + }, + ), + ), + extends_type: Interface( + Interface { + fields: [ + InterfaceField { + name: String( + "X", + ), + optional: 
false, + typ: Generic( + Inferred( + 0, + ), + ), + }, + ], + index: None, + call: None, + }, + ), + true_type: Generic( + Inferred( + 0, + ), + ), + false_type: Basic( + Never, + ), + }, + ), + ), + "Infer1": Basic( + String, + ), + "Infer2": Basic( + Never, + ), + "Infer3": Basic( + String, + ), +} diff --git a/tsparser/src/parser/types/snapshots/encore_tsparser__parser__types__tests__resolve_types@validation.ts.snap.new b/tsparser/src/parser/types/snapshots/encore_tsparser__parser__types__tests__resolve_types@validation.ts.snap.new new file mode 100644 index 0000000000..16770106ca --- /dev/null +++ b/tsparser/src/parser/types/snapshots/encore_tsparser__parser__types__tests__resolve_types@validation.ts.snap.new @@ -0,0 +1,83 @@ +--- +source: tsparser/src/parser/types/tests.rs +assertion_line: 103 +expression: result +input_file: tsparser/src/parser/types/testdata/validation.ts +--- +{ + "Validate1": Validated( + ( + Basic( + Number, + ), + Rule( + MinVal( + 3, + ), + ), + ), + ), + "Validate2": Validated( + ( + Basic( + Number, + ), + And( + [ + Rule( + MinVal( + 3, + ), + ), + Rule( + MinVal( + 5, + ), + ), + ], + ), + ), + ), + "Validate3": Validated( + ( + Basic( + Number, + ), + And( + [ + Rule( + MinVal( + 3, + ), + ), + Rule( + MinVal( + 5, + ), + ), + ], + ), + ), + ), + "Validate4": Validated( + ( + Basic( + String, + ), + And( + [ + Rule( + MinLen( + 3, + ), + ), + Rule( + MaxLen( + 10, + ), + ), + ], + ), + ), + ), +} diff --git a/tsparser/src/parser/types/testdata/generics.ts b/tsparser/src/parser/types/testdata/generics.ts index f1bcfd540f..cc7dd09481 100644 --- a/tsparser/src/parser/types/testdata/generics.ts +++ b/tsparser/src/parser/types/testdata/generics.ts @@ -1,15 +1,20 @@ export type Generic1 = { - cond: T extends string ? "literal" : number; + cond: T extends string ? "literal" : number; } export type Generic2 = { - value: T; - cond: T extends string ? "literal" : number; + value: T; + cond: T extends string ? 
"literal" : number; } export type Concrete1 = { - one: Generic1; - two: Generic1<"test">; - three: Generic2; - four: Generic2>; + one: Generic1; + two: Generic1<"test">; + three: Generic2; + four: Generic2>; + five: GenericIface>; +} + +export interface GenericIface { + foo: T; } diff --git a/tsparser/src/parser/types/testdata/infer.txt b/tsparser/src/parser/types/testdata/infer.txt new file mode 100644 index 0000000000..d274981a7d --- /dev/null +++ b/tsparser/src/parser/types/testdata/infer.txt @@ -0,0 +1,5 @@ +export type Iface = T extends { X: infer A } ? A : never; + +export type Infer1 = Iface<{ X: string }>; // string +export type Infer2 = Iface<{ Y: string }>; // never +export type Infer3 = { X: string } extends { X: infer A } ? A : never; // string diff --git a/tsparser/src/parser/types/testdata/validation.ts b/tsparser/src/parser/types/testdata/validation.ts new file mode 100644 index 0000000000..98edcc10dc --- /dev/null +++ b/tsparser/src/parser/types/testdata/validation.ts @@ -0,0 +1,6 @@ +import { Min, Max, MinLen, MaxLen } from "encore.dev/validate"; + +export type Validate1 = number & Min<3>; +export type Validate2 = number & Min<3> & Max<5>; +export type Validate3 = number & Min<3> & Min<5>; +export type Validate4 = MinLen<3> & string & MaxLen<10>; diff --git a/tsparser/src/parser/types/typ.rs b/tsparser/src/parser/types/typ.rs index 0e9e2bd072..6ff8019db2 100644 --- a/tsparser/src/parser/types/typ.rs +++ b/tsparser/src/parser/types/typ.rs @@ -1,7 +1,8 @@ use crate::parser::types::type_resolve::Ctx; -use crate::parser::types::{object, Object, ResolveState}; +use crate::parser::types::{object, validation, Object, ResolveState}; use crate::parser::Range; use indexmap::IndexMap; +use itertools::Itertools; use serde::Serialize; use std::borrow::Cow; use std::collections::HashMap; @@ -42,6 +43,12 @@ pub enum Type { This, Generic(Generic), + + /// A standalone validation expression. 
+ Validation(validation::Expr), + + /// A type with validation applied to it. + Validated((Box, validation::Expr)), } impl Type { @@ -71,6 +78,7 @@ impl Type { /// Returns a union type that merges `self` and `other`, if possible. /// If the types cannot be merged, it returns None. + #[tracing::instrument(ret, level = "trace")] pub(super) fn union_merge(&self, other: &Type) -> Option { match (self, other) { // 'any' and any type unify to 'any'. @@ -85,6 +93,15 @@ impl Type { Some(Type::Basic(*basic)) } + // Unify validation. + (Type::Validation(a), Type::Validation(b)) => { + Some(Type::Validation(a.clone().or(b.clone()))) + } + (Type::Validated((typ, a)), Type::Validation(b)) + | (Type::Validation(a), Type::Validated((typ, b))) => { + Some(Type::Validated((typ.to_owned(), a.clone().or(b.clone())))) + } + // TODO more rules? // Identical types unify. @@ -387,6 +404,10 @@ pub enum Generic { // An intersection type. Intersection(Intersection), + + /// A reference to an inferred type parameter, + /// referencing its index in infer_type_params. 
+ Inferred(usize), } #[derive(Debug, Clone, Hash, Serialize)] @@ -472,6 +493,7 @@ impl Type { tt.iter_unions() .chain(std::iter::once(&Type::Basic(Basic::Undefined))), ), + Type::Validated((inner, _)) => inner.iter_unions(), _ => Box::new(std::iter::once(self)), } } @@ -483,6 +505,7 @@ impl Type { tt.into_iter_unions() .chain(std::iter::once(Type::Basic(Basic::Undefined))), ), + Type::Validated((inner, _)) => inner.into_iter_unions(), _ => Box::new(std::iter::once(self)), } } @@ -517,6 +540,10 @@ impl Type { | (Literal::BigInt(_), Basic::BigInt) )), + (Type::Validated((inner, _)), _) | (_, Type::Validated((inner, _))) => { + inner.assignable(state, other) + } + (this, Type::Optional(other)) => { if matches!(this, Type::Basic(Basic::Undefined)) { Some(true) @@ -659,6 +686,239 @@ impl Type { } } +pub enum Extends<'a> { + Yes(Vec<(usize, Cow<'a, Type>)>), + No, + Unknown, +} + +impl<'a> Extends<'a> { + pub fn into_static(self) -> Extends<'static> { + match self { + Extends::Yes(v) => Extends::Yes( + v.into_iter() + .map(|(idx, t)| (idx, Cow::Owned(t.into_owned()))) + .collect(), + ), + Extends::No => Extends::No, + Extends::Unknown => Extends::Unknown, + } + } +} + +impl Type { + /// Reports whether `self` is assignable to `other`. + /// If the result is indeterminate due to an unresolved type, it reports None. + pub fn extends<'a>(&'a self, state: &'_ ResolveState, other: &'_ Type) -> Extends<'a> { + use Extends::*; + + fn empty_yes_or_no(val: bool) -> Extends<'static> { + if val { + Yes(vec![]) + } else { + No + } + } + + match (self, other) { + (this, Type::Generic(Generic::Inferred(idx))) => Yes(vec![(*idx, Cow::Borrowed(this))]), + + (_, Type::Basic(Basic::Any)) => Yes(vec![]), + (_, Type::Basic(Basic::Never)) => No, + (Type::Generic(_), _) | (_, Type::Generic(_)) => Unknown, + + // Unwrap named types. 
+ (Type::Named(a), b) => { + let a = a.underlying(state); + a.extends(state, b).into_static() + } + (a, Type::Named(b)) => { + let b = b.underlying(state); + a.extends(state, &b) + } + + (Type::Basic(a), Type::Basic(b)) => empty_yes_or_no(a == b), + (Type::Literal(a), Type::Basic(b)) => empty_yes_or_no(matches!( + (a, b), + (_, Basic::Any) + | (Literal::String(_), Basic::String) + | (Literal::Boolean(_), Basic::Boolean) + | (Literal::Number(_), Basic::Number) + | (Literal::BigInt(_), Basic::BigInt) + )), + + (Type::Validated((inner, _)), _) | (_, Type::Validated((inner, _))) => { + inner.extends(state, other).into_static() + } + + (this, Type::Optional(other)) => { + if matches!(this, Type::Basic(Basic::Undefined)) { + Yes(vec![]) + } else { + this.extends(state, other) + } + } + + (Type::Tuple(this), other) => match other { + Type::Tuple(other) => { + if this.len() != other.len() { + return No; + } + + let mut found_unknown = false; + let mut inferred = vec![]; + for (this, other) in this.iter().zip(other) { + match this.extends(state, other) { + Yes(inf) => { + inferred.extend(inf); + } + No => return No, + Unknown => found_unknown = true, + } + } + if found_unknown { + Unknown + } else { + Yes(inferred) + } + } + + Type::Array(other) => { + // Ensure every element in `this` is a subtype of `other`. + let mut inferred = vec![]; + for this in this { + match this.extends(state, other) { + Yes(infer) => inferred.extend(infer), + No => return No, + Unknown => return Unknown, + } + } + + // Since `this` is a tuple but `other` is a single array type, + // it's possible we'll have multiple inferred types for the same index. + // Group them by index and turn them into a union type if necessary. 
+ inferred.sort_by_key(|(idx, _)| *idx); + + let inferred = inferred + .into_iter() + .chunk_by(|(idx, _)| *idx) + .into_iter() + .map(|(idx, types)| { + let types = types.map(|(_, t)| t.into_owned()).collect(); + let typ = simplify_union(types); + (idx, Cow::Owned(typ)) + }) + .collect(); + + Yes(inferred) + } + _ => No, + }, + + (Type::Enum(a), other) => { + let this_fields: HashMap<&str, &EnumValue> = + HashMap::from_iter(a.members.iter().map(|m| (m.name.as_str(), &m.value))); + match other { + Type::Enum(other) => { + // Does every field in `other` exist in `this_fields`? + for mem in &other.members { + if let Some(this_field) = this_fields.get(mem.name.as_str()) { + if **this_field == mem.value { + continue; + } + } + return No; + } + Yes(vec![]) + } + + Type::Interface(other) => { + // Does every field in `other` exist in `iface`? + let mut found_none = false; + for field in &other.fields { + if let FieldName::String(name) = &field.name { + if let Some(this_field) = this_fields.get(name.as_str()) { + let this_typ = (*this_field).clone().to_type(); + match this_typ.assignable(state, &field.typ) { + Some(true) => continue, + Some(false) => return No, + None => found_none = true, + } + } + } + } + if found_none { + Unknown + } else { + Yes(vec![]) + } + } + _ => No, + } + } + + (Type::Interface(iface), other) => { + #[allow(clippy::mutable_key_type)] + let this_fields: HashMap<&FieldName, &InterfaceField> = + HashMap::from_iter(iface.fields.iter().map(|f| (&f.name, f))); + match other { + Type::Interface(other) => { + // Does every field in `other` exist in `iface`? 
+ let mut found_unknown = false; + let mut inferred = vec![]; + for field in &other.fields { + if let Some(this_field) = this_fields.get(&field.name) { + match this_field.typ.extends(state, &field.typ) { + Yes(inf) => inferred.extend(inf), + No => return No, + Unknown => found_unknown = true, + } + } else { + return No; + } + } + if found_unknown { + Unknown + } else { + Yes(inferred) + } + } + _ => No, + } + } + + (this, Type::Union(other)) => { + // Is every element in `this` assignable to `other`? + let mut found_yes = false; + let mut found_unknown = false; + let mut inferred = Vec::new(); + for t in this.iter_unions() { + for o in other { + match t.extends(state, o) { + Yes(inf) => { + found_yes = true; + inferred.extend(inf); + } + No => {} + Unknown => found_unknown = true, + } + } + } + + if found_yes { + Yes(inferred) + } else if found_unknown { + Unknown + } else { + No + } + } + + (a, b) => empty_yes_or_no(a.identical(b)), + } + } +} + pub fn simplify_union(types: Vec) -> Type { let mut results: Vec = Vec::with_capacity(types.len()); @@ -819,6 +1079,37 @@ pub fn intersect<'a: 'b, 'b>( (Type::This, Type::This) => Cow::Owned(Type::This), + // Combine validation expressions into a validated type. + (Type::Validated(_), Type::Validation(_)) => { + let (Type::Validated((typ, a)), Type::Validation(b)) = (a.into_owned(), b.into_owned()) + else { + unreachable!() + }; + Cow::Owned(Type::Validated((typ, a.and(b)))) + } + (Type::Validation(_), Type::Validated(_)) => { + let (Type::Validated((typ, a)), Type::Validation(b)) = (b.into_owned(), a.into_owned()) + else { + unreachable!() + }; + Cow::Owned(Type::Validated((typ, a.and(b)))) + } + + // Merge validation expressions together. 
+ (Type::Validation(_), Type::Validation(_)) => Cow::Owned(Type::Validation({ + let (Type::Validation(a), Type::Validation(b)) = (a.into_owned(), b.into_owned()) + else { + unreachable!() + }; + a.and(b) + })), + (_, Type::Validation(expr)) => { + Cow::Owned(Type::Validated((Box::new(a.into_owned()), expr.clone()))) + } + (Type::Validation(expr), _) => { + Cow::Owned(Type::Validated((Box::new(b.into_owned()), expr.clone()))) + } + (Type::Generic(_), _) | (_, Type::Generic(_)) => { Cow::Owned(Type::Generic(Generic::Intersection(Intersection { x: Box::new(a.into_owned()), diff --git a/tsparser/src/parser/types/type_resolve.rs b/tsparser/src/parser/types/type_resolve.rs index 93bd58abdd..01c3cec513 100644 --- a/tsparser/src/parser/types/type_resolve.rs +++ b/tsparser/src/parser/types/type_resolve.rs @@ -1,5 +1,6 @@ use std::borrow::Cow; use std::borrow::Cow::{Borrowed, Owned}; +use std::cell::RefCell; use std::fmt::Debug; use std::ops::Deref; use std::rc::Rc; @@ -7,12 +8,12 @@ use std::rc::Rc; use litparser::Sp; use swc_common::errors::HANDLER; use swc_common::sync::Lrc; -use swc_common::{Span, Spanned}; +use swc_common::{BytePos, Span, Spanned}; use swc_ecma_ast as ast; use crate::parser::module_loader::ModuleId; use crate::parser::types::object::{CheckState, ObjectKind, ResolveState, TypeNameDecl}; -use crate::parser::types::Object; +use crate::parser::types::{validation, Object}; use crate::parser::{module_loader, Range}; use crate::span_err::ErrReporter; @@ -100,6 +101,13 @@ pub struct Ctx<'a> { /// The mapped key type to substitute when concretising, if any. mapped_key_type: Option<&'a Type>, + + /// Encountered "infer Type" type parameters in the current scope. + /// Rc> so we can mutate it in nested contexts. + infer_type_params: Option>>>, + + /// Type arguments to fill in for inferred type parameters. 
+ infer_type_args: &'a [Cow<'a, Type>], } impl<'a> Ctx<'a> { @@ -111,6 +119,8 @@ impl<'a> Ctx<'a> { type_args: &[], mapped_key_id: None, mapped_key_type: None, + infer_type_params: None, + infer_type_args: &[], } } @@ -138,6 +148,20 @@ impl<'a> Ctx<'a> { ..self } } + + fn with_infer_type_params(self, infer_type_params: Rc>>) -> Self { + Self { + infer_type_params: Some(infer_type_params), + ..self + } + } + + fn with_infer_type_args(self, infer_type_args: &'a [Cow<'a, Type>]) -> Self { + Self { + infer_type_args, + ..self + } + } } impl Ctx<'_> { @@ -160,12 +184,13 @@ impl Ctx<'_> { ast::TsType::TsTypeOperator(tt) => self.type_op(tt), ast::TsType::TsMappedType(tt) => self.mapped(tt), ast::TsType::TsIndexedAccessType(tt) => self.indexed_access(tt), + ast::TsType::TsInferType(tt) => self.infer(tt), ast::TsType::TsFnOrConstructorType(_) | ast::TsType::TsRestType(_) // same? | ast::TsType::TsTypePredicate(_) // https://www.typescriptlang.org/docs/handbook/2/narrowing.html#using-type-predicates, https://www.typescriptlang.org/docs/handbook/2/classes.html#this-based-type-guards | ast::TsType::TsImportType(_) // ?? - | ast::TsType::TsInferType(_) => { + => { HANDLER.with(|handler| handler.span_err(typ.span(), &format!("unsupported: {:#?}", typ))); Type::Basic(Basic::Never) }, // typeof @@ -293,6 +318,11 @@ impl Ctx<'_> { } }, + (Type::Validated((inner, expr)), idx) => { + let typ = self.type_index(span, inner, idx); + Type::Validated((Box::new(typ), expr.clone())) + } + (obj, idx) => { HANDLER.with(|handler| { handler.span_err( @@ -308,6 +338,22 @@ impl Ctx<'_> { } } + fn infer(&self, tt: &ast::TsInferType) -> Type { + // Do we have an infer context? 
+ if let Some(params) = self.infer_type_params.as_ref() { + let id = tt.type_param.name.to_id(); + let mut params = params.borrow_mut(); + let idx = params.len(); + params.push(id); + Type::Generic(Generic::Inferred(idx)) + } else { + tt.span.err("infer type outside of infer context"); + Type::Basic(Basic::Never) + } + + // TODO figure out what type to return here + } + /// Given a type, produces a union type of the underlying keys, /// e.g. `keyof {foo: string; bar: number}` yields `"foo" | "bar"`. fn keyof(&self, typ: &Type) -> Type { @@ -384,6 +430,11 @@ impl Ctx<'_> { Type::Generic(generic) => { Type::Generic(Generic::Keyof(Box::new(Type::Generic(generic.clone())))) } + Type::Validated((inner, _)) => self.keyof(inner), + Type::Validation(_) => { + HANDLER.with(|handler| handler.err("keyof ValidationExpr unsupported")); + Type::Basic(Basic::Never) + } } } @@ -529,12 +580,13 @@ impl Ctx<'_> { fn type_ref(&self, typ: &ast::TsTypeRef) -> Type { let obj = match &typ.type_name { ast::TsEntityName::Ident(ident) => { + let ident_id = ident.to_id(); // Is this a reference to a type parameter? let type_param = self .type_params .iter() .enumerate() - .find(|tp| tp.1.name.to_id() == ident.to_id()) + .find(|tp| tp.1.name.to_id() == ident_id) .map(|tp| (tp.0, *tp.1)); if let Some((idx, type_param)) = type_param { return if let Some(type_arg) = self.type_args.get(idx) { @@ -557,6 +609,23 @@ impl Ctx<'_> { } } + // Otherwise, is this a reference to an inferred type parameter? 
+ if let Some(infer_type_params) = &self.infer_type_params { + let inferred_type_param = infer_type_params + .borrow() + .iter() + .enumerate() + .find(|tp| *tp.1 == ident_id) + .map(|tp| tp.0); + if let Some(idx) = inferred_type_param { + return if let Some(type_arg) = self.infer_type_args.get(idx) { + type_arg.clone().into_owned() + } else { + Type::Generic(Generic::Inferred(idx)) + }; + } + } + let Some(obj) = self.ident_obj(ident) else { HANDLER.with(|handler| handler.span_err(ident.span, "unknown identifier")); return Type::Basic(Basic::Never); @@ -589,6 +658,15 @@ impl Ctx<'_> { ObjectKind::TypeName(_) => { let named = Named::new(obj, type_arguments); + if self + .state + .is_module_path(named.obj.module_id, "encore.dev/validate") + { + if let Some(expr) = self.parse_validation(typ.span, &named) { + return Type::Validation(expr); + } + } + // Don't reference named types in the universe, // otherwise we try to find them on disk. // if self.state.is_universe(named.obj.module_id) { @@ -699,15 +777,18 @@ impl Ctx<'_> { } fn union(&self, union_type: &ast::TsUnionType) -> Type { - // TODO handle unifying e.g. "string | 'foo'" into "string" let types = self.types(union_type.types.iter().map(|t| t.as_ref())); - Type::Union(types) + simplify_union(types) } // https://www.typescriptlang.org/docs/handbook/2/conditional-types.html fn conditional(&self, tt: &ast::TsConditionalType) -> Type { let check = self.typ(&tt.check_type); - let extends = self.typ(&tt.extends_type); + let infer_params: Rc>> = Default::default(); + let extends = self + .clone() + .with_infer_type_params(infer_params.clone()) + .typ(&tt.extends_type); // Do we have a union type in `check`, and the AST is a naked type parameter? // If so, we need to treat it as a distributive conditional type. 
@@ -742,13 +823,33 @@ impl Ctx<'_> { } } - match check.assignable(self.state, &extends) { - Some(true) => self.typ(&tt.true_type), - Some(false) => self.typ(&tt.false_type), - None => Type::Generic(Generic::Conditional(Conditional { + match check.extends(self.state, &extends) { + Extends::Yes(mut inferred) => { + // Convert the inferred types to a vector with the gaps + // filled in with the `unknown` type. + inferred.sort_by_key(|(i, _)| *i); + let mut inf = Vec::new(); + for (idx, typ) in inferred { + while inf.len() < idx { + inf.push(Cow::Owned(Type::Basic(Basic::Unknown))); + } + inf.push(typ); + } + + self.clone() + .with_infer_type_params(infer_params) + .with_infer_type_args(&inf[..]) + .typ(&tt.true_type) + } + + Extends::No => self.typ(&tt.false_type), + Extends::Unknown => Type::Generic(Generic::Conditional(Conditional { check_type: Box::new(check), extends_type: Box::new(extends), - true_type: self.btyp(&tt.true_type), + true_type: self + .clone() + .with_infer_type_params(infer_params) + .btyp(&tt.true_type), false_type: self.btyp(&tt.false_type), })), } @@ -891,8 +992,14 @@ impl Ctx<'_> { } ast::Expr::New(expr) => { // The type of a class instance is the same as the class itself. 
- // TODO type args - self.expr(&expr.callee) + if let Some(type_args) = &expr.type_args { + let type_args: Vec<_> = self.types(type_args.params.iter().map(|t| t.as_ref())); + self.clone() + .with_type_args(&type_args[..]) + .expr(&expr.callee) + } else { + self.expr(&expr.callee) + } } ast::Expr::Seq(expr) => match expr.exprs.last() { Some(expr) => self.expr(expr), @@ -997,8 +1104,13 @@ impl Ctx<'_> { ast::Expr::TsSatisfies(expr) => self.expr(&expr.expr), ast::Expr::TsInstantiation(expr) => { - // TODO handle type args - self.expr(&expr.expr) + if !expr.type_args.params.is_empty() { + let type_args: Vec<_> = + self.types(expr.type_args.params.iter().map(|t| t.as_ref())); + self.clone().with_type_args(&type_args[..]).expr(&expr.expr) + } else { + self.expr(&expr.expr) + } } // The "foo!" operator @@ -1163,7 +1275,8 @@ impl Ctx<'_> { | Type::Optional(_) | Type::This | Type::Generic(_) - | Type::Class(_) => { + | Type::Class(_) + | Type::Validation(_) => { HANDLER.with(|handler| handler.span_err(prop.span(), "unsupported member on type")); Type::Basic(Basic::Never) } @@ -1218,27 +1331,28 @@ impl Ctx<'_> { let underlying = self.underlying(obj_type); self.resolve_member_prop(&underlying, prop) } + Type::Validated((inner, _)) => self.resolve_member_prop(inner, prop), } } /// Resolves a prop name to the underlying string literal. 
fn prop_name_to_string<'b>(&self, prop: &'b ast::PropName) -> Cow<'b, str> { match prop { - ast::PropName::Ident(id) => Cow::Borrowed(id.sym.as_ref()), - ast::PropName::Str(str) => Cow::Borrowed(str.value.as_ref()), - ast::PropName::Num(num) => Cow::Owned(num.value.to_string()), - ast::PropName::BigInt(bigint) => Cow::Owned(bigint.value.to_string()), + ast::PropName::Ident(id) => Borrowed(id.sym.as_ref()), + ast::PropName::Str(str) => Borrowed(str.value.as_ref()), + ast::PropName::Num(num) => Owned(num.value.to_string()), + ast::PropName::BigInt(bigint) => Owned(bigint.value.to_string()), ast::PropName::Computed(expr) => { if let Type::Literal(lit) = self.expr(&expr.expr) { match lit { - Literal::String(str) => return Cow::Owned(str), - Literal::Number(num) => return Cow::Owned(num.to_string()), + Literal::String(str) => return Owned(str), + Literal::Number(num) => return Owned(num.to_string()), _ => {} } } HANDLER.with(|handler| handler.span_err(expr.span, "unsupported computed prop")); - Cow::Borrowed("") + Borrowed("") } } } @@ -1529,6 +1643,16 @@ impl Ctx<'_> { } } + Generic::Inferred(idx) => { + // If we have a concrete inferred type, return that. + if let Some(arg) = self.infer_type_args.get(*idx) { + Changed(arg) + } else { + // We don't have a concrete type, so return the original type. + Same(typ) + } + } + Generic::Keyof(source) => { let concrete_source = self.concrete(source); let keys = self.keyof(&concrete_source); @@ -1588,18 +1712,40 @@ impl Ctx<'_> { } // Otherwise just check the single element. - (_, check) => match check.assignable(self.state, &extends) { - Some(true) => self.concrete(&cond.true_type).same_to_changed(), - Some(false) => self.concrete(&cond.false_type).same_to_changed(), + (_, check) => match check.extends(self.state, &extends).into_static() { + Extends::Yes(mut inferred) => { + // Convert the inferred types to a vector with the gaps + // filled in with the `unknown` type. 
+ inferred.sort_by_key(|(i, _)| *i); + let mut inf = Vec::new(); + for (idx, typ) in inferred { + while inf.len() < idx { + inf.push(Cow::Owned(Type::Basic(Basic::Unknown))); + } + inf.push(typ); + } + + self.clone() + .with_infer_type_args(&inf[..]) + .concrete(&cond.true_type) + .into_new() + } + Extends::No => self.concrete(&cond.false_type).same_to_changed(), // We don't yet have enough type information to resolve the conditional. // Still, return a new type with the concretized types we have. - None => New(Type::Generic(Generic::Conditional(Conditional { - check_type: Box::new(check), - extends_type: Box::new(extends.into_owned()), - true_type: Box::new(self.concrete(&cond.true_type).into_owned()), - false_type: Box::new(self.concrete(&cond.false_type).into_owned()), - }))), + Extends::Unknown => { + New(Type::Generic(Generic::Conditional(Conditional { + check_type: Box::new(check), + extends_type: Box::new(extends.into_owned()), + true_type: Box::new( + self.concrete(&cond.true_type).into_owned(), + ), + false_type: Box::new( + self.concrete(&cond.false_type).into_owned(), + ), + }))) + } }, } } @@ -1706,6 +1852,14 @@ impl Ctx<'_> { None => Same(typ), }, }, + + Type::Validated((inner, expr)) => match self.concrete(inner) { + New(inner) => New(Type::Validated((Box::new(inner), expr.clone()))), + Changed(inner) => New(Type::Validated((Box::new(inner.clone()), expr.clone()))), + Same(_) => Same(typ), + }, + + Type::Validation(_) => Same(typ), } } @@ -1763,4 +1917,113 @@ impl Ctx<'_> { // All types are the same, so we can just return the original list. 
Same(v) } + + #[allow(dead_code)] + fn doc_comment(&self, pos: BytePos) -> Option { + self.state + .lookup_module(self.module) + .and_then(|m| m.base.preceding_comments(pos.into())) + } + + fn parse_validation(&self, sp: Span, named: &Named) -> Option { + let name = named.obj.name.as_deref()?; + + #[allow(dead_code)] + fn i64_lit(typ: &Type) -> Option { + if let Type::Literal(Literal::Number(n)) = typ { + let i = *n as i64; + if i as f64 == *n { + return Some(i); + } + } + None + } + + fn u64_lit(typ: &Type) -> Option { + if let Type::Literal(Literal::Number(n)) = typ { + let u = *n as u64; + if u as f64 == *n { + return Some(u); + } + } + None + } + + fn f64_lit(typ: &Type) -> Option { + if let Type::Literal(Literal::Number(n)) = typ { + return Some(*n); + } + None + } + + fn str_lit(typ: &Type) -> Option { + if let Type::Literal(Literal::String(s)) = typ { + return Some(s.clone()); + } + None + } + + use validation::{Expr, Is, Rule, N}; + match name { + "Min" => { + if let Some(num) = named.type_arguments.first().and_then(f64_lit) { + Some(Expr::Rule(Rule::MinVal(N(num)))) + } else { + sp.err("Min requires a number literal as its first type argument"); + None + } + } + "Max" => { + if let Some(num) = named.type_arguments.first().and_then(f64_lit) { + Some(Expr::Rule(Rule::MaxVal(validation::N(num)))) + } else { + sp.err("Max requires a number literal as its first type argument"); + None + } + } + "MinLen" => { + if let Some(num) = named.type_arguments.first().and_then(u64_lit) { + Some(Expr::Rule(Rule::MinLen(num))) + } else { + sp.err("MinLen requires a number literal as its first type argument"); + None + } + } + "MaxLen" => { + if let Some(num) = named.type_arguments.first().and_then(u64_lit) { + Some(Expr::Rule(Rule::MaxLen(num))) + } else { + sp.err("MaxLen requires a number literal as its first type argument"); + None + } + } + "StartsWith" => { + if let Some(str) = named.type_arguments.first().and_then(str_lit) { + Some(Expr::Rule(Rule::StartsWith(str))) + 
} else { + sp.err("StartsWith requires a string literal as its first type argument"); + None + } + } + "EndsWith" => { + if let Some(str) = named.type_arguments.first().and_then(str_lit) { + Some(Expr::Rule(Rule::EndsWith(str))) + } else { + sp.err("EndsWith requires a string literal as its first type argument"); + None + } + } + "MatchesRegexp" => { + if let Some(str) = named.type_arguments.first().and_then(str_lit) { + Some(Expr::Rule(Rule::MatchesRegexp(str))) + } else { + sp.err("MatchesRegexp requires a string literal as its first type argument"); + None + } + } + "IsEmail" => Some(Expr::Rule(Rule::Is(Is::Email))), + "IsURL" => Some(Expr::Rule(Rule::Is(Is::Url))), + _ => None, + } + } } diff --git a/tsparser/src/parser/types/validation.rs b/tsparser/src/parser/types/validation.rs new file mode 100644 index 0000000000..5c1cc002f1 --- /dev/null +++ b/tsparser/src/parser/types/validation.rs @@ -0,0 +1,272 @@ +use crate::encore::parser::schema::v1 as schema; +use core::hash::{Hash, Hasher}; +use serde::Serialize; +use std::ops::Deref; + +#[derive(Debug, Clone, Hash, Serialize, PartialEq, Eq)] +pub enum Expr { + Rule(Rule), + And(Vec), + Or(Vec), +} + +#[derive(Debug, Clone, Hash, Serialize, PartialEq, Eq)] +pub enum Rule { + MinLen(u64), + MaxLen(u64), + MinVal(N), + MaxVal(N), + StartsWith(String), + EndsWith(String), + MatchesRegexp(String), + Is(Is), +} + +#[derive(Debug, Clone, Hash, Serialize, PartialEq, Eq)] +pub enum Is { + Email, + Url, +} + +impl Rule { + pub fn merge_and(&self, other: &Self) -> Option { + use Rule::*; + Some(match (self, other) { + (MinLen(a), MinLen(b)) => MinLen((*a).min(*b)), + (MaxLen(a), MaxLen(b)) => MaxLen((*a).max(*b)), + (MinVal(a), MinVal(b)) => MinVal(N((*a).min(**b))), + (MaxVal(a), MaxVal(b)) => MaxVal(N((*a).max(**b))), + _ => return None, + }) + } + + pub fn merge_or(&self, other: &Self) -> Option { + use Rule::*; + Some(match (self, other) { + (MinLen(a), MinLen(b)) => MinLen((*a).max(*b)), + (MaxLen(a), MaxLen(b)) => 
MaxLen((*a).min(*b)), + (MinVal(a), MinVal(b)) => MinVal(N((*a).max(**b))), + (MaxVal(a), MaxVal(b)) => MaxVal(N((*a).min(**b))), + _ => return None, + }) + } + + pub fn to_pb(&self) -> schema::validation_rule::Rule { + use schema::validation_rule::Rule as VR; + match self { + Rule::MinLen(n) => VR::MinLen(*n), + Rule::MaxLen(n) => VR::MaxLen(*n), + Rule::MinVal(n) => VR::MinVal(**n), + Rule::MaxVal(n) => VR::MaxVal(**n), + Rule::StartsWith(str) => VR::StartsWith(str.clone()), + Rule::EndsWith(str) => VR::EndsWith(str.clone()), + Rule::MatchesRegexp(str) => VR::MatchesRegexp(str.clone()), + Rule::Is(is) => VR::Is(match is { + Is::Email => schema::validation_rule::Is::Email, + Is::Url => schema::validation_rule::Is::Url, + } as i32), + } + } +} + +impl Expr { + pub fn and(self, other: Self) -> Self { + match (self, other) { + (Expr::And(mut a), Expr::And(mut b)) => { + // Can we merge any of the rules into a? + a.append(&mut b); + Expr::And(a) + } + (Expr::And(mut a), b) => { + a.push(b); + Expr::And(a) + } + (a, Expr::And(mut b)) => { + b.insert(0, a); + Expr::And(b) + } + (a, b) => Expr::And(vec![a, b]), + } + } + + pub fn or(self, other: Self) -> Self { + match (self, other) { + (Expr::Or(mut a), Expr::Or(mut b)) => { + a.append(&mut b); + Expr::Or(a) + } + (Expr::Or(mut a), b) => { + a.push(b); + Expr::Or(a) + } + (a, Expr::Or(mut b)) => { + b.insert(0, a); + Expr::Or(b) + } + (a, b) => Expr::Or(vec![a, b]), + } + } + + pub fn rule(rule: Rule) -> Self { + Expr::Rule(rule) + } + + pub fn simplify(self) -> Self { + match self { + Self::And(mut exprs) => { + let mut i = 0; + let mut size = exprs.len(); + while i < size { + if !matches!(&exprs[i], Expr::Rule(_)) { + i += 1; + continue; + }; + + let j = i + 1; + let (a, b) = exprs.split_at_mut(j); + let Expr::Rule(i_rule) = &mut a[i] else { + panic!("logic error"); + }; + + let mut b_size = b.len(); + let mut b_idx = 0; + 'outer: while b_idx < b_size { + if let Expr::Rule(other) = &b[b_idx] { + if let Some(merged) = 
i_rule.merge_and(other) { + *i_rule = merged; + + // Swap this element to the end of b + // and update the sizes. + b.swap(b_idx, b_size - 1); + size -= 1; + b_size -= 1; + + // Don't increment the index since we now have + // a new element at the current index. + continue 'outer; + } + } + b_idx += 1; + } + + i += 1; + } + + exprs.truncate(size); + Self::And(exprs) + } + + Self::Or(mut exprs) => { + let mut i = 0; + let mut size = exprs.len(); + while i < size { + if !matches!(&exprs[i], Expr::Rule(_)) { + i += 1; + continue; + }; + + let j = i + 1; + let (a, b) = exprs.split_at_mut(j); + let Expr::Rule(i_rule) = &mut a[i] else { + panic!("logic error"); + }; + + let mut b_size = b.len(); + let mut b_idx = 0; + 'outer: while b_idx < b_size { + if let Expr::Rule(other) = &b[b_idx] { + if let Some(merged) = i_rule.merge_or(other) { + *i_rule = merged; + + // Swap this element to the end of b + // and update the sizes. + b.swap(b_idx, b_size - 1); + size -= 1; + b_size -= 1; + + // Don't increment the index since we now have + // a new element at the current index. 
+ continue 'outer; + } + } + b_idx += 1; + } + + i += 1; + } + + exprs.truncate(size); + Self::Or(exprs) + } + + _ => self, + } + } + + pub fn to_pb(&self) -> schema::ValidationExpr { + use schema::validation_expr::Expr as VE; + + schema::ValidationExpr { + expr: Some(match self { + Expr::Rule(r) => VE::Rule(schema::ValidationRule { + rule: Some(r.to_pb()), + }), + Expr::And(exprs) => VE::And(schema::validation_expr::And { + exprs: exprs.iter().map(Self::to_pb).collect(), + }), + Expr::Or(exprs) => VE::Or(schema::validation_expr::Or { + exprs: exprs.iter().map(Self::to_pb).collect(), + }), + }), + } + } +} + +#[derive(Debug, Clone, Copy, Serialize)] +pub struct N(pub f64); + +impl Deref for N { + type Target = f64; + + fn deref(&self) -> &f64 { + &self.0 + } +} + +impl PartialEq for N { + fn eq(&self, other: &Self) -> bool { + self.0 == other.0 + } +} + +impl Eq for N {} + +impl Hash for N { + fn hash(&self, h: &mut H) { + if self.0 == 0.0f64 { + // There are 2 zero representations, +0 and -0, which + // compare equal but have different bits. We use the +0 hash + // for both so that hash(+0) == hash(-0). 
+ 0.0f64.to_bits().hash(h); + } else { + self.0.to_bits().hash(h); + } + } +} + +#[cfg(test)] +mod tests { + #[test] + fn test_simplify() { + use super::*; + + let expr = Expr::Or(vec![ + Expr::Rule(Rule::MinLen(10)), + Expr::Rule(Rule::MinLen(20)), + Expr::Rule(Rule::MinLen(30)), + ]); + + let simplified = expr.simplify(); + assert_eq!(simplified, Expr::Rule(Rule::MinLen(10))); + } +} diff --git a/tsparser/src/parser/usageparser/mod.rs b/tsparser/src/parser/usageparser/mod.rs index 2218685cbe..b1ad648aae 100644 --- a/tsparser/src/parser/usageparser/mod.rs +++ b/tsparser/src/parser/usageparser/mod.rs @@ -1,6 +1,5 @@ use std::collections::HashMap; -use anyhow::Result; use swc_common::errors::HANDLER; use swc_common::sync::Lrc; use swc_common::Spanned; @@ -22,6 +21,12 @@ pub struct UsageExpr { pub kind: UsageExprKind, } +impl Spanned for UsageExpr { + fn span(&self) -> swc_common::Span { + self.range.to_span() + } +} + #[derive(Debug)] pub enum UsageExprKind { /// A field on a resource being accessed. @@ -233,7 +238,7 @@ pub struct ResolveUsageData<'a> { } impl UsageResolver<'_> { - pub fn resolve_usage(&self, module: &Lrc, exprs: &[UsageExpr]) -> Result> { + pub fn resolve_usage(&self, module: &Lrc, exprs: &[UsageExpr]) -> Vec { let mut usages = Vec::new(); for expr in exprs { let data = ResolveUsageData { @@ -250,24 +255,24 @@ impl UsageResolver<'_> { } Resource::ServiceClient(client) => { if let Some(u) = - apis::service_client::resolve_service_client_usage(&data, client.clone())? + apis::service_client::resolve_service_client_usage(&data, client.clone()) { usages.push(u) } } Resource::PubSubTopic(topic) => { - if let Some(u) = infra::pubsub_topic::resolve_topic_usage(&data, topic.clone())? + if let Some(u) = infra::pubsub_topic::resolve_topic_usage(&data, topic.clone()) { usages.push(u) } } Resource::SQLDatabase(db) => { - if let Some(u) = infra::sqldb::resolve_database_usage(&data, db.clone())? 
{ + if let Some(u) = infra::sqldb::resolve_database_usage(&data, db.clone()) { usages.push(u) } } Resource::Bucket(bkt) => { - if let Some(u) = infra::objects::resolve_bucket_usage(&data, bkt.clone())? { + if let Some(u) = infra::objects::resolve_bucket_usage(&data, bkt.clone()) { usages.push(u) } } @@ -275,7 +280,7 @@ impl UsageResolver<'_> { } } - Ok(usages) + usages } } @@ -472,7 +477,7 @@ mod tests { use assert_matches::assert_matches; use swc_common::errors::Handler; - use swc_common::{Globals, SourceMap, GLOBALS}; + use swc_common::{Globals, SourceMap, DUMMY_SP, GLOBALS}; use crate::parser::parser::ParseContext; use crate::parser::resourceparser::bind::BindKind; @@ -529,6 +534,7 @@ export const Bar = 5; require_auth: false, body_limit: None, encoding: EndpointEncoding { + span: DUMMY_SP, default_method: Method::Post, methods: Methods::Some(vec![Method::Post]), handshake: None, @@ -537,7 +543,7 @@ export const Bar = 5; params: vec![], }], resp: ResponseEncoding { params: vec![] }, - path: Path::parse("/svc.Bar", Default::default()).unwrap(), + path: Path::parse(DUMMY_SP, "/svc.Bar", Default::default()).unwrap(), raw_handshake_schema: None, raw_req_schema: None, raw_resp_schema: None, @@ -623,6 +629,7 @@ export const Bar = 5; require_auth: false, body_limit: None, encoding: EndpointEncoding { + span: DUMMY_SP, default_method: Method::Post, methods: Methods::Some(vec![Method::Post]), handshake: None, @@ -633,7 +640,7 @@ export const Bar = 5; resp: ResponseEncoding { params: vec![], }, - path: Path::parse("/svc.Bar", Default::default()).unwrap(), + path: Path::parse(DUMMY_SP, "/svc.Bar", Default::default()).unwrap(), raw_handshake_schema: None, raw_req_schema: None, raw_resp_schema: None, diff --git a/tsparser/src/span_err.rs b/tsparser/src/span_err.rs index ab7b8d5dfc..ea44178e47 100644 --- a/tsparser/src/span_err.rs +++ b/tsparser/src/span_err.rs @@ -15,8 +15,8 @@ where #[derive(Debug)] pub struct SpErr { - span: Span, - error: E, + pub span: Span, + pub error: 
E, } impl SpErr diff --git a/tsparser/tests/parse_tests.rs b/tsparser/tests/parse_tests.rs index fbe4eb5ee5..f8a780ae5f 100644 --- a/tsparser/tests/parse_tests.rs +++ b/tsparser/tests/parse_tests.rs @@ -67,7 +67,7 @@ fn parse_txtar(app_root: &Path) -> Result { parse_tests: false, }; - builder.parse(&pp) + builder.parse(&pp).ok_or(anyhow::anyhow!("parse failed")) }) }) }