ci: new publish method (#1451)

Authored by Dax on 2025-07-31 01:00:29 -04:00, committed by GitHub
parent b09ebf4645
commit 33cef075d2
190 changed files with 16142 additions and 13342 deletions

@@ -0,0 +1,53 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
package apierror
import (
"fmt"
"net/http"
"net/http/httputil"
"github.com/sst/opencode-sdk-go/internal/apijson"
)
// Error represents an error that originates from the API, i.e. when a request is
// made and the API returns a response with an HTTP status code. Other errors are
// not wrapped by this SDK.
type Error struct {
JSON errorJSON `json:"-"`
StatusCode int
Request *http.Request
Response *http.Response
}
// errorJSON contains the JSON metadata for the struct [Error]
type errorJSON struct {
raw string
ExtraFields map[string]apijson.Field
}
func (r *Error) UnmarshalJSON(data []byte) (err error) {
return apijson.UnmarshalRoot(data, r)
}
func (r errorJSON) RawJSON() string {
return r.raw
}
func (r *Error) Error() string {
// Report the request method and URL, the response status, and the raw JSON error body.
return fmt.Sprintf("%s \"%s\": %d %s %s", r.Request.Method, r.Request.URL, r.Response.StatusCode, http.StatusText(r.Response.StatusCode), r.JSON.RawJSON())
}
func (r *Error) DumpRequest(body bool) []byte {
if r.Request.GetBody != nil {
r.Request.Body, _ = r.Request.GetBody()
}
out, _ := httputil.DumpRequestOut(r.Request, body)
return out
}
func (r *Error) DumpResponse(body bool) []byte {
out, _ := httputil.DumpResponse(r.Response, body)
return out
}
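For orientation, a minimal sketch (not part of this commit) of how a caller might use this error type, assuming an errors import alongside the ones above:

func describeError(err error) string {
	var apierr *Error
	if !errors.As(err, &apierr) {
		return err.Error()
	}
	// DumpRequest(true) re-populates the request body via GetBody before dumping it.
	return apierr.Error() + "\n" + string(apierr.DumpRequest(true))
}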

@@ -0,0 +1,383 @@
package apiform
import (
"fmt"
"io"
"mime/multipart"
"net/textproto"
"path"
"reflect"
"sort"
"strconv"
"strings"
"sync"
"time"
"github.com/sst/opencode-sdk-go/internal/param"
)
var encoders sync.Map // map[encoderEntry]encoderFunc
func Marshal(value interface{}, writer *multipart.Writer) error {
e := &encoder{dateFormat: time.RFC3339}
return e.marshal(value, writer)
}
func MarshalRoot(value interface{}, writer *multipart.Writer) error {
e := &encoder{root: true, dateFormat: time.RFC3339}
return e.marshal(value, writer)
}
type encoder struct {
dateFormat string
root bool
}
type encoderFunc func(key string, value reflect.Value, writer *multipart.Writer) error
type encoderField struct {
tag parsedStructTag
fn encoderFunc
idx []int
}
type encoderEntry struct {
reflect.Type
dateFormat string
root bool
}
func (e *encoder) marshal(value interface{}, writer *multipart.Writer) error {
val := reflect.ValueOf(value)
if !val.IsValid() {
return nil
}
typ := val.Type()
enc := e.typeEncoder(typ)
return enc("", val, writer)
}
func (e *encoder) typeEncoder(t reflect.Type) encoderFunc {
entry := encoderEntry{
Type: t,
dateFormat: e.dateFormat,
root: e.root,
}
if fi, ok := encoders.Load(entry); ok {
return fi.(encoderFunc)
}
// To deal with recursive types, populate the map with an
// indirect func before we build it. This type waits on the
// real func (f) to be ready and then calls it. This indirect
// func is only used for recursive types.
var (
wg sync.WaitGroup
f encoderFunc
)
wg.Add(1)
fi, loaded := encoders.LoadOrStore(entry, encoderFunc(func(key string, v reflect.Value, writer *multipart.Writer) error {
wg.Wait()
return f(key, v, writer)
}))
if loaded {
return fi.(encoderFunc)
}
// Compute the real encoder and replace the indirect func with it.
f = e.newTypeEncoder(t)
wg.Done()
encoders.Store(entry, f)
return f
}
func (e *encoder) newTypeEncoder(t reflect.Type) encoderFunc {
if t.ConvertibleTo(reflect.TypeOf(time.Time{})) {
return e.newTimeTypeEncoder()
}
if t.ConvertibleTo(reflect.TypeOf((*io.Reader)(nil)).Elem()) {
return e.newReaderTypeEncoder()
}
e.root = false
switch t.Kind() {
case reflect.Pointer:
inner := t.Elem()
innerEncoder := e.typeEncoder(inner)
return func(key string, v reflect.Value, writer *multipart.Writer) error {
if !v.IsValid() || v.IsNil() {
return nil
}
return innerEncoder(key, v.Elem(), writer)
}
case reflect.Struct:
return e.newStructTypeEncoder(t)
case reflect.Slice, reflect.Array:
return e.newArrayTypeEncoder(t)
case reflect.Map:
return e.newMapEncoder(t)
case reflect.Interface:
return e.newInterfaceEncoder()
default:
return e.newPrimitiveTypeEncoder(t)
}
}
func (e *encoder) newPrimitiveTypeEncoder(t reflect.Type) encoderFunc {
switch t.Kind() {
// Note that we could use `gjson` to encode these types, but it would complicate
// the code, and the current approach shouldn't cause any issues.
case reflect.String:
return func(key string, v reflect.Value, writer *multipart.Writer) error {
return writer.WriteField(key, v.String())
}
case reflect.Bool:
return func(key string, v reflect.Value, writer *multipart.Writer) error {
if v.Bool() {
return writer.WriteField(key, "true")
}
return writer.WriteField(key, "false")
}
case reflect.Int, reflect.Int16, reflect.Int32, reflect.Int64:
return func(key string, v reflect.Value, writer *multipart.Writer) error {
return writer.WriteField(key, strconv.FormatInt(v.Int(), 10))
}
case reflect.Uint, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return func(key string, v reflect.Value, writer *multipart.Writer) error {
return writer.WriteField(key, strconv.FormatUint(v.Uint(), 10))
}
case reflect.Float32:
return func(key string, v reflect.Value, writer *multipart.Writer) error {
return writer.WriteField(key, strconv.FormatFloat(v.Float(), 'f', -1, 32))
}
case reflect.Float64:
return func(key string, v reflect.Value, writer *multipart.Writer) error {
return writer.WriteField(key, strconv.FormatFloat(v.Float(), 'f', -1, 64))
}
default:
return func(key string, v reflect.Value, writer *multipart.Writer) error {
return fmt.Errorf("unknown type received at primitive encoder: %s", t.String())
}
}
}
func (e *encoder) newArrayTypeEncoder(t reflect.Type) encoderFunc {
itemEncoder := e.typeEncoder(t.Elem())
return func(key string, v reflect.Value, writer *multipart.Writer) error {
if key != "" {
key = key + "."
}
for i := 0; i < v.Len(); i++ {
err := itemEncoder(key+strconv.Itoa(i), v.Index(i), writer)
if err != nil {
return err
}
}
return nil
}
}
func (e *encoder) newStructTypeEncoder(t reflect.Type) encoderFunc {
if t.Implements(reflect.TypeOf((*param.FieldLike)(nil)).Elem()) {
return e.newFieldTypeEncoder(t)
}
encoderFields := []encoderField{}
extraEncoder := (*encoderField)(nil)
// This helper allows us to recursively collect field encoders into a flat
// array. The parameter `index` keeps track of the access patterns necessary
// to get to some field.
var collectEncoderFields func(r reflect.Type, index []int)
collectEncoderFields = func(r reflect.Type, index []int) {
for i := 0; i < r.NumField(); i++ {
idx := append(index, i)
field := t.FieldByIndex(idx)
if !field.IsExported() {
continue
}
// If this is an embedded struct, traverse one level deeper to extract
// its fields and get their encoders as well.
if field.Anonymous {
collectEncoderFields(field.Type, idx)
continue
}
// If neither a form tag nor a json tag is present, we skip the field, which is
// intentionally different behavior from the stdlib.
ptag, ok := parseFormStructTag(field)
if !ok {
continue
}
// We only want to support unexported fields if they're tagged with
// `extras` because those fields shouldn't be part of the public API. We
// also only keep the top-level extras.
if ptag.extras && len(index) == 0 {
extraEncoder = &encoderField{ptag, e.typeEncoder(field.Type.Elem()), idx}
continue
}
if ptag.name == "-" {
continue
}
dateFormat, ok := parseFormatStructTag(field)
oldFormat := e.dateFormat
if ok {
switch dateFormat {
case "date-time":
e.dateFormat = time.RFC3339
case "date":
e.dateFormat = "2006-01-02"
}
}
encoderFields = append(encoderFields, encoderField{ptag, e.typeEncoder(field.Type), idx})
e.dateFormat = oldFormat
}
}
collectEncoderFields(t, []int{})
// Ensure deterministic output by sorting by lexicographic order
sort.Slice(encoderFields, func(i, j int) bool {
return encoderFields[i].tag.name < encoderFields[j].tag.name
})
return func(key string, value reflect.Value, writer *multipart.Writer) error {
if key != "" {
key = key + "."
}
for _, ef := range encoderFields {
field := value.FieldByIndex(ef.idx)
err := ef.fn(key+ef.tag.name, field, writer)
if err != nil {
return err
}
}
if extraEncoder != nil {
err := e.encodeMapEntries(key, value.FieldByIndex(extraEncoder.idx), writer)
if err != nil {
return err
}
}
return nil
}
}
func (e *encoder) newFieldTypeEncoder(t reflect.Type) encoderFunc {
f, _ := t.FieldByName("Value")
enc := e.typeEncoder(f.Type)
return func(key string, value reflect.Value, writer *multipart.Writer) error {
present := value.FieldByName("Present")
if !present.Bool() {
return nil
}
null := value.FieldByName("Null")
if null.Bool() {
return nil
}
raw := value.FieldByName("Raw")
if !raw.IsNil() {
return e.typeEncoder(raw.Type())(key, raw, writer)
}
return enc(key, value.FieldByName("Value"), writer)
}
}
func (e *encoder) newTimeTypeEncoder() encoderFunc {
format := e.dateFormat
return func(key string, value reflect.Value, writer *multipart.Writer) error {
return writer.WriteField(key, value.Convert(reflect.TypeOf(time.Time{})).Interface().(time.Time).Format(format))
}
}
func (e encoder) newInterfaceEncoder() encoderFunc {
return func(key string, value reflect.Value, writer *multipart.Writer) error {
value = value.Elem()
if !value.IsValid() {
return nil
}
return e.typeEncoder(value.Type())(key, value, writer)
}
}
var quoteEscaper = strings.NewReplacer("\\", "\\\\", `"`, "\\\"")
func escapeQuotes(s string) string {
return quoteEscaper.Replace(s)
}
func (e *encoder) newReaderTypeEncoder() encoderFunc {
return func(key string, value reflect.Value, writer *multipart.Writer) error {
reader := value.Convert(reflect.TypeOf((*io.Reader)(nil)).Elem()).Interface().(io.Reader)
filename := "anonymous_file"
contentType := "application/octet-stream"
if named, ok := reader.(interface{ Filename() string }); ok {
filename = named.Filename()
} else if named, ok := reader.(interface{ Name() string }); ok {
filename = path.Base(named.Name())
}
if typed, ok := reader.(interface{ ContentType() string }); ok {
contentType = typed.ContentType()
}
// Below is taken almost 1-for-1 from [multipart.CreateFormFile]
h := make(textproto.MIMEHeader)
h.Set("Content-Disposition", fmt.Sprintf(`form-data; name="%s"; filename="%s"`, escapeQuotes(key), escapeQuotes(filename)))
h.Set("Content-Type", contentType)
filewriter, err := writer.CreatePart(h)
if err != nil {
return err
}
_, err = io.Copy(filewriter, reader)
return err
}
}
// Write every entry of the given map to the multipart writer, prefixing each
// entry's key with the given key (separated by a dot) when the key is non-empty.
func (e *encoder) encodeMapEntries(key string, v reflect.Value, writer *multipart.Writer) error {
type mapPair struct {
key string
value reflect.Value
}
if key != "" {
key = key + "."
}
pairs := []mapPair{}
iter := v.MapRange()
for iter.Next() {
if iter.Key().Type().Kind() == reflect.String {
pairs = append(pairs, mapPair{key: iter.Key().String(), value: iter.Value()})
} else {
return fmt.Errorf("cannot encode a map with a non string key")
}
}
// Ensure deterministic output
sort.Slice(pairs, func(i, j int) bool {
return pairs[i].key < pairs[j].key
})
elementEncoder := e.typeEncoder(v.Type().Elem())
for _, p := range pairs {
err := elementEncoder(key+p.key, p.value, writer)
if err != nil {
return err
}
}
return nil
}
func (e *encoder) newMapEncoder(t reflect.Type) encoderFunc {
return func(key string, value reflect.Value, writer *multipart.Writer) error {
return e.encodeMapEntries(key, value, writer)
}
}
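As a quick illustration (not part of this commit), a sketch of the key flattening performed above, assuming a bytes import in this package: nested values are written as dotted multipart field names, in sorted order.

func exampleFlatten() (string, error) {
	type Tags struct {
		Name string `form:"name"`
		IDs  []int  `form:"ids"`
	}
	buf := &bytes.Buffer{}
	w := multipart.NewWriter(buf)
	// Writes the fields "ids.0", "ids.1", and "name".
	if err := Marshal(Tags{Name: "demo", IDs: []int{1, 2}}, w); err != nil {
		return "", err
	}
	if err := w.Close(); err != nil {
		return "", err
	}
	return buf.String(), nil
}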

@@ -0,0 +1,5 @@
package apiform
type Marshaler interface {
MarshalMultipart() ([]byte, string, error)
}

@@ -0,0 +1,440 @@
package apiform
import (
"bytes"
"mime/multipart"
"strings"
"testing"
"time"
)
func P[T any](v T) *T { return &v }
type Primitives struct {
A bool `form:"a"`
B int `form:"b"`
C uint `form:"c"`
D float64 `form:"d"`
E float32 `form:"e"`
F []int `form:"f"`
}
type PrimitivePointers struct {
A *bool `form:"a"`
B *int `form:"b"`
C *uint `form:"c"`
D *float64 `form:"d"`
E *float32 `form:"e"`
F *[]int `form:"f"`
}
type Slices struct {
Slice []Primitives `form:"slices"`
}
type DateTime struct {
Date time.Time `form:"date" format:"date"`
DateTime time.Time `form:"date-time" format:"date-time"`
}
type AdditionalProperties struct {
A bool `form:"a"`
Extras map[string]interface{} `form:"-,extras"`
}
type TypedAdditionalProperties struct {
A bool `form:"a"`
Extras map[string]int `form:"-,extras"`
}
type EmbeddedStructs struct {
AdditionalProperties
A *int `form:"number2"`
Extras map[string]interface{} `form:"-,extras"`
}
type Recursive struct {
Name string `form:"name"`
Child *Recursive `form:"child"`
}
type UnknownStruct struct {
Unknown interface{} `form:"unknown"`
}
type UnionStruct struct {
Union Union `form:"union" format:"date"`
}
type Union interface {
union()
}
type UnionInteger int64
func (UnionInteger) union() {}
type UnionStructA struct {
Type string `form:"type"`
A string `form:"a"`
B string `form:"b"`
}
func (UnionStructA) union() {}
type UnionStructB struct {
Type string `form:"type"`
A string `form:"a"`
}
func (UnionStructB) union() {}
type UnionTime time.Time
func (UnionTime) union() {}
type ReaderStruct struct {
}
var tests = map[string]struct {
buf string
val interface{}
}{
"map_string": {
`--xxx
Content-Disposition: form-data; name="foo"
bar
--xxx--
`,
map[string]string{"foo": "bar"},
},
"map_interface": {
`--xxx
Content-Disposition: form-data; name="a"
1
--xxx
Content-Disposition: form-data; name="b"
str
--xxx
Content-Disposition: form-data; name="c"
false
--xxx--
`,
map[string]interface{}{"a": float64(1), "b": "str", "c": false},
},
"primitive_struct": {
`--xxx
Content-Disposition: form-data; name="a"
false
--xxx
Content-Disposition: form-data; name="b"
237628372683
--xxx
Content-Disposition: form-data; name="c"
654
--xxx
Content-Disposition: form-data; name="d"
9999.43
--xxx
Content-Disposition: form-data; name="e"
43.76
--xxx
Content-Disposition: form-data; name="f.0"
1
--xxx
Content-Disposition: form-data; name="f.1"
2
--xxx
Content-Disposition: form-data; name="f.2"
3
--xxx
Content-Disposition: form-data; name="f.3"
4
--xxx--
`,
Primitives{A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}},
},
"slices": {
`--xxx
Content-Disposition: form-data; name="slices.0.a"
false
--xxx
Content-Disposition: form-data; name="slices.0.b"
237628372683
--xxx
Content-Disposition: form-data; name="slices.0.c"
654
--xxx
Content-Disposition: form-data; name="slices.0.d"
9999.43
--xxx
Content-Disposition: form-data; name="slices.0.e"
43.76
--xxx
Content-Disposition: form-data; name="slices.0.f.0"
1
--xxx
Content-Disposition: form-data; name="slices.0.f.1"
2
--xxx
Content-Disposition: form-data; name="slices.0.f.2"
3
--xxx
Content-Disposition: form-data; name="slices.0.f.3"
4
--xxx--
`,
Slices{
Slice: []Primitives{{A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}}},
},
},
"primitive_pointer_struct": {
`--xxx
Content-Disposition: form-data; name="a"
false
--xxx
Content-Disposition: form-data; name="b"
237628372683
--xxx
Content-Disposition: form-data; name="c"
654
--xxx
Content-Disposition: form-data; name="d"
9999.43
--xxx
Content-Disposition: form-data; name="e"
43.76
--xxx
Content-Disposition: form-data; name="f.0"
1
--xxx
Content-Disposition: form-data; name="f.1"
2
--xxx
Content-Disposition: form-data; name="f.2"
3
--xxx
Content-Disposition: form-data; name="f.3"
4
--xxx
Content-Disposition: form-data; name="f.4"
5
--xxx--
`,
PrimitivePointers{
A: P(false),
B: P(237628372683),
C: P(uint(654)),
D: P(9999.43),
E: P(float32(43.76)),
F: &[]int{1, 2, 3, 4, 5},
},
},
"datetime_struct": {
`--xxx
Content-Disposition: form-data; name="date"
2006-01-02
--xxx
Content-Disposition: form-data; name="date-time"
2006-01-02T15:04:05Z
--xxx--
`,
DateTime{
Date: time.Date(2006, time.January, 2, 0, 0, 0, 0, time.UTC),
DateTime: time.Date(2006, time.January, 2, 15, 4, 5, 0, time.UTC),
},
},
"additional_properties": {
`--xxx
Content-Disposition: form-data; name="a"
true
--xxx
Content-Disposition: form-data; name="bar"
value
--xxx
Content-Disposition: form-data; name="foo"
true
--xxx--
`,
AdditionalProperties{
A: true,
Extras: map[string]interface{}{
"bar": "value",
"foo": true,
},
},
},
"recursive_struct": {
`--xxx
Content-Disposition: form-data; name="child.name"
Alex
--xxx
Content-Disposition: form-data; name="name"
Robert
--xxx--
`,
Recursive{Name: "Robert", Child: &Recursive{Name: "Alex"}},
},
"unknown_struct_number": {
`--xxx
Content-Disposition: form-data; name="unknown"
12
--xxx--
`,
UnknownStruct{
Unknown: 12.,
},
},
"unknown_struct_map": {
`--xxx
Content-Disposition: form-data; name="unknown.foo"
bar
--xxx--
`,
UnknownStruct{
Unknown: map[string]interface{}{
"foo": "bar",
},
},
},
"union_integer": {
`--xxx
Content-Disposition: form-data; name="union"
12
--xxx--
`,
UnionStruct{
Union: UnionInteger(12),
},
},
"union_struct_discriminated_a": {
`--xxx
Content-Disposition: form-data; name="union.a"
foo
--xxx
Content-Disposition: form-data; name="union.b"
bar
--xxx
Content-Disposition: form-data; name="union.type"
typeA
--xxx--
`,
UnionStruct{
Union: UnionStructA{
Type: "typeA",
A: "foo",
B: "bar",
},
},
},
"union_struct_discriminated_b": {
`--xxx
Content-Disposition: form-data; name="union.a"
foo
--xxx
Content-Disposition: form-data; name="union.type"
typeB
--xxx--
`,
UnionStruct{
Union: UnionStructB{
Type: "typeB",
A: "foo",
},
},
},
"union_struct_time": {
`--xxx
Content-Disposition: form-data; name="union"
2010-05-23
--xxx--
`,
UnionStruct{
Union: UnionTime(time.Date(2010, 05, 23, 0, 0, 0, 0, time.UTC)),
},
},
}
func TestEncode(t *testing.T) {
for name, test := range tests {
t.Run(name, func(t *testing.T) {
buf := bytes.NewBuffer(nil)
writer := multipart.NewWriter(buf)
writer.SetBoundary("xxx")
err := Marshal(test.val, writer)
if err != nil {
t.Errorf("serialization of %v failed with error %v", test.val, err)
}
err = writer.Close()
if err != nil {
t.Errorf("serialization of %v failed with error %v", test.val, err)
}
raw := buf.Bytes()
if string(raw) != strings.ReplaceAll(test.buf, "\n", "\r\n") {
t.Errorf("expected %+#v to serialize to '%s' but got '%s'", test.val, test.buf, string(raw))
}
})
}
}

@@ -0,0 +1,48 @@
package apiform
import (
"reflect"
"strings"
)
const jsonStructTag = "json"
const formStructTag = "form"
const formatStructTag = "format"
type parsedStructTag struct {
name string
required bool
extras bool
metadata bool
}
func parseFormStructTag(field reflect.StructField) (tag parsedStructTag, ok bool) {
raw, ok := field.Tag.Lookup(formStructTag)
if !ok {
raw, ok = field.Tag.Lookup(jsonStructTag)
}
if !ok {
return
}
parts := strings.Split(raw, ",")
if len(parts) == 0 {
return tag, false
}
tag.name = parts[0]
for _, part := range parts[1:] {
switch part {
case "required":
tag.required = true
case "extras":
tag.extras = true
case "metadata":
tag.metadata = true
}
}
return
}
func parseFormatStructTag(field reflect.StructField) (format string, ok bool) {
format, ok = field.Tag.Lookup(formatStructTag)
return
}

@@ -0,0 +1,670 @@
package apijson
import (
"encoding/json"
"errors"
"fmt"
"reflect"
"strconv"
"sync"
"time"
"unsafe"
"github.com/tidwall/gjson"
)
// decoders is a synchronized map with roughly the following type:
// map[decoderEntry]decoderFunc
var decoders sync.Map
// Unmarshal is similar to [encoding/json.Unmarshal] and parses the JSON-encoded
// data and stores it in the given pointer.
func Unmarshal(raw []byte, to any) error {
d := &decoderBuilder{dateFormat: time.RFC3339}
return d.unmarshal(raw, to)
}
// UnmarshalRoot is like Unmarshal, but doesn't try to call UnmarshalJSON on the
// root element. Useful if a struct's UnmarshalJSON is overridden to use the
// behavior of this decoder rather than the standard library's.
func UnmarshalRoot(raw []byte, to any) error {
d := &decoderBuilder{dateFormat: time.RFC3339, root: true}
return d.unmarshal(raw, to)
}
// decoderBuilder contains the 'compile-time' state of the decoder.
type decoderBuilder struct {
// Whether or not this is the first element and called by [UnmarshalRoot], see
// the documentation there to see why this is necessary.
root bool
// The dateFormat (a format string for [time.Format]) which is chosen by the
// last struct tag that was seen.
dateFormat string
}
// decoderState contains the 'run-time' state of the decoder.
type decoderState struct {
strict bool
exactness exactness
}
// Exactness refers to how close to the type the result was if deserialization
// was successful. This is useful when deserializing unions, where you want to try
// each variant, first with strict, then with looser validation, without actually
// having to do a lot of redundant work by unmarshalling twice (or maybe even more
// times).
type exactness int8
const (
// Some values had to be fudged a bit, for example by converting a string to an
// int, or an enum with extra values.
loose exactness = iota
// There are some extra fields, but otherwise it matches the union.
extras
// Exactly right.
exact
)
type decoderFunc func(node gjson.Result, value reflect.Value, state *decoderState) error
type decoderField struct {
tag parsedStructTag
fn decoderFunc
idx []int
goname string
}
type decoderEntry struct {
reflect.Type
dateFormat string
root bool
}
func (d *decoderBuilder) unmarshal(raw []byte, to any) error {
value := reflect.ValueOf(to).Elem()
result := gjson.ParseBytes(raw)
if !value.IsValid() {
return fmt.Errorf("apijson: cannot marshal into invalid value")
}
return d.typeDecoder(value.Type())(result, value, &decoderState{strict: false, exactness: exact})
}
func (d *decoderBuilder) typeDecoder(t reflect.Type) decoderFunc {
entry := decoderEntry{
Type: t,
dateFormat: d.dateFormat,
root: d.root,
}
if fi, ok := decoders.Load(entry); ok {
return fi.(decoderFunc)
}
// To deal with recursive types, populate the map with an
// indirect func before we build it. This type waits on the
// real func (f) to be ready and then calls it. This indirect
// func is only used for recursive types.
var (
wg sync.WaitGroup
f decoderFunc
)
wg.Add(1)
fi, loaded := decoders.LoadOrStore(entry, decoderFunc(func(node gjson.Result, v reflect.Value, state *decoderState) error {
wg.Wait()
return f(node, v, state)
}))
if loaded {
return fi.(decoderFunc)
}
// Compute the real decoder and replace the indirect func with it.
f = d.newTypeDecoder(t)
wg.Done()
decoders.Store(entry, f)
return f
}
func indirectUnmarshalerDecoder(n gjson.Result, v reflect.Value, state *decoderState) error {
return v.Addr().Interface().(json.Unmarshaler).UnmarshalJSON([]byte(n.Raw))
}
func unmarshalerDecoder(n gjson.Result, v reflect.Value, state *decoderState) error {
if v.Kind() == reflect.Pointer && v.CanSet() {
v.Set(reflect.New(v.Type().Elem()))
}
return v.Interface().(json.Unmarshaler).UnmarshalJSON([]byte(n.Raw))
}
func (d *decoderBuilder) newTypeDecoder(t reflect.Type) decoderFunc {
if t.ConvertibleTo(reflect.TypeOf(time.Time{})) {
return d.newTimeTypeDecoder(t)
}
if !d.root && t.Implements(reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()) {
return unmarshalerDecoder
}
if !d.root && reflect.PointerTo(t).Implements(reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()) {
if _, ok := unionVariants[t]; !ok {
return indirectUnmarshalerDecoder
}
}
d.root = false
if _, ok := unionRegistry[t]; ok {
return d.newUnionDecoder(t)
}
switch t.Kind() {
case reflect.Pointer:
inner := t.Elem()
innerDecoder := d.typeDecoder(inner)
return func(n gjson.Result, v reflect.Value, state *decoderState) error {
if !v.IsValid() {
return fmt.Errorf("apijson: unexpected invalid reflection value %+#v", v)
}
newValue := reflect.New(inner).Elem()
err := innerDecoder(n, newValue, state)
if err != nil {
return err
}
v.Set(newValue.Addr())
return nil
}
case reflect.Struct:
return d.newStructTypeDecoder(t)
case reflect.Array:
fallthrough
case reflect.Slice:
return d.newArrayTypeDecoder(t)
case reflect.Map:
return d.newMapDecoder(t)
case reflect.Interface:
return func(node gjson.Result, value reflect.Value, state *decoderState) error {
if !value.IsValid() {
return fmt.Errorf("apijson: unexpected invalid value %+#v", value)
}
if node.Value() != nil && value.CanSet() {
value.Set(reflect.ValueOf(node.Value()))
}
return nil
}
default:
return d.newPrimitiveTypeDecoder(t)
}
}
// newUnionDecoder returns a decoderFunc that deserializes into a union using an
// algorithm roughly similar to Pydantic's [smart algorithm].
//
// Conceptually this is equivalent to choosing the best schema based on how 'exact'
// the deserialization is for each of the schemas.
//
// If there is a tie in the level of exactness, then the tie is broken
// left-to-right.
//
// [smart algorithm]: https://docs.pydantic.dev/latest/concepts/unions/#smart-mode
func (d *decoderBuilder) newUnionDecoder(t reflect.Type) decoderFunc {
unionEntry, ok := unionRegistry[t]
if !ok {
panic("apijson: couldn't find union of type " + t.String() + " in union registry")
}
decoders := []decoderFunc{}
for _, variant := range unionEntry.variants {
decoder := d.typeDecoder(variant.Type)
decoders = append(decoders, decoder)
}
return func(n gjson.Result, v reflect.Value, state *decoderState) error {
// If there is a discriminator match, circumvent the exactness logic entirely
for idx, variant := range unionEntry.variants {
decoder := decoders[idx]
if variant.TypeFilter != n.Type {
continue
}
if len(unionEntry.discriminatorKey) != 0 {
discriminatorValue := n.Get(unionEntry.discriminatorKey).Value()
if discriminatorValue == variant.DiscriminatorValue {
inner := reflect.New(variant.Type).Elem()
err := decoder(n, inner, state)
v.Set(inner)
return err
}
}
}
// Set bestExactness to worse than loose
bestExactness := loose - 1
for idx, variant := range unionEntry.variants {
decoder := decoders[idx]
if variant.TypeFilter != n.Type {
continue
}
sub := decoderState{strict: state.strict, exactness: exact}
inner := reflect.New(variant.Type).Elem()
err := decoder(n, inner, &sub)
if err != nil {
continue
}
if sub.exactness == exact {
v.Set(inner)
return nil
}
if sub.exactness > bestExactness {
v.Set(inner)
bestExactness = sub.exactness
}
}
if bestExactness < loose {
return errors.New("apijson: was not able to coerce type as union")
}
if guardStrict(state, bestExactness != exact) {
return errors.New("apijson: was not able to coerce type as union strictly")
}
return nil
}
}
func (d *decoderBuilder) newMapDecoder(t reflect.Type) decoderFunc {
keyType := t.Key()
itemType := t.Elem()
itemDecoder := d.typeDecoder(itemType)
return func(node gjson.Result, value reflect.Value, state *decoderState) (err error) {
mapValue := reflect.MakeMapWithSize(t, len(node.Map()))
node.ForEach(func(key, value gjson.Result) bool {
// It's fine for us to just use `ValueOf` here because the key types will
// always be primitive types, so we don't need to decode them using the standard pattern.
keyValue := reflect.ValueOf(key.Value())
if !keyValue.IsValid() {
if err == nil {
err = fmt.Errorf("apijson: received invalid key type %v", keyValue.String())
}
return false
}
if keyValue.Type() != keyType {
if err == nil {
err = fmt.Errorf("apijson: expected key type %v but got %v", keyType, keyValue.Type())
}
return false
}
itemValue := reflect.New(itemType).Elem()
itemerr := itemDecoder(value, itemValue, state)
if itemerr != nil {
if err == nil {
err = itemerr
}
return false
}
mapValue.SetMapIndex(keyValue, itemValue)
return true
})
if err != nil {
return err
}
value.Set(mapValue)
return nil
}
}
func (d *decoderBuilder) newArrayTypeDecoder(t reflect.Type) decoderFunc {
itemDecoder := d.typeDecoder(t.Elem())
return func(node gjson.Result, value reflect.Value, state *decoderState) (err error) {
if !node.IsArray() {
return fmt.Errorf("apijson: could not deserialize to an array")
}
arrayNode := node.Array()
arrayValue := reflect.MakeSlice(reflect.SliceOf(t.Elem()), len(arrayNode), len(arrayNode))
for i, itemNode := range arrayNode {
err = itemDecoder(itemNode, arrayValue.Index(i), state)
if err != nil {
return err
}
}
value.Set(arrayValue)
return nil
}
}
func (d *decoderBuilder) newStructTypeDecoder(t reflect.Type) decoderFunc {
// map of json field name to struct field decoders
decoderFields := map[string]decoderField{}
anonymousDecoders := []decoderField{}
extraDecoder := (*decoderField)(nil)
inlineDecoder := (*decoderField)(nil)
for i := 0; i < t.NumField(); i++ {
idx := []int{i}
field := t.FieldByIndex(idx)
if !field.IsExported() {
continue
}
// If this is an embedded struct, traverse one level deeper to extract
// its fields and get their decoders as well.
if field.Anonymous {
anonymousDecoders = append(anonymousDecoders, decoderField{
fn: d.typeDecoder(field.Type),
idx: idx[:],
})
continue
}
// If a json tag is not present, then we skip the field, which is intentionally
// different behavior from the stdlib.
ptag, ok := parseJSONStructTag(field)
if !ok {
continue
}
// We only want to support unexported fields if they're tagged with
// `extras` because those fields shouldn't be part of the public API.
if ptag.extras {
extraDecoder = &decoderField{ptag, d.typeDecoder(field.Type.Elem()), idx, field.Name}
continue
}
if ptag.inline {
inlineDecoder = &decoderField{ptag, d.typeDecoder(field.Type), idx, field.Name}
continue
}
if ptag.metadata {
continue
}
oldFormat := d.dateFormat
dateFormat, ok := parseFormatStructTag(field)
if ok {
switch dateFormat {
case "date-time":
d.dateFormat = time.RFC3339
case "date":
d.dateFormat = "2006-01-02"
}
}
decoderFields[ptag.name] = decoderField{ptag, d.typeDecoder(field.Type), idx, field.Name}
d.dateFormat = oldFormat
}
return func(node gjson.Result, value reflect.Value, state *decoderState) (err error) {
if field := value.FieldByName("JSON"); field.IsValid() {
if raw := field.FieldByName("raw"); raw.IsValid() {
setUnexportedField(raw, node.Raw)
}
}
for _, decoder := range anonymousDecoders {
// ignore errors
decoder.fn(node, value.FieldByIndex(decoder.idx), state)
}
if inlineDecoder != nil {
var meta Field
dest := value.FieldByIndex(inlineDecoder.idx)
isValid := false
if dest.IsValid() && node.Type != gjson.Null {
err = inlineDecoder.fn(node, dest, state)
if err == nil {
isValid = true
}
}
if node.Type == gjson.Null {
meta = Field{
raw: node.Raw,
status: null,
}
} else if !isValid {
meta = Field{
raw: node.Raw,
status: invalid,
}
} else if isValid {
meta = Field{
raw: node.Raw,
status: valid,
}
}
if metadata := getSubField(value, inlineDecoder.idx, inlineDecoder.goname); metadata.IsValid() {
metadata.Set(reflect.ValueOf(meta))
}
return err
}
typedExtraType := reflect.Type(nil)
typedExtraFields := reflect.Value{}
if extraDecoder != nil {
typedExtraType = value.FieldByIndex(extraDecoder.idx).Type()
typedExtraFields = reflect.MakeMap(typedExtraType)
}
untypedExtraFields := map[string]Field{}
for fieldName, itemNode := range node.Map() {
df, explicit := decoderFields[fieldName]
var (
dest reflect.Value
fn decoderFunc
meta Field
)
if explicit {
fn = df.fn
dest = value.FieldByIndex(df.idx)
}
if !explicit && extraDecoder != nil {
dest = reflect.New(typedExtraType.Elem()).Elem()
fn = extraDecoder.fn
}
isValid := false
if dest.IsValid() && itemNode.Type != gjson.Null {
err = fn(itemNode, dest, state)
if err == nil {
isValid = true
}
}
if itemNode.Type == gjson.Null {
meta = Field{
raw: itemNode.Raw,
status: null,
}
} else if !isValid {
meta = Field{
raw: itemNode.Raw,
status: invalid,
}
} else if isValid {
meta = Field{
raw: itemNode.Raw,
status: valid,
}
}
if explicit {
if metadata := getSubField(value, df.idx, df.goname); metadata.IsValid() {
metadata.Set(reflect.ValueOf(meta))
}
}
if !explicit {
untypedExtraFields[fieldName] = meta
}
if !explicit && extraDecoder != nil {
typedExtraFields.SetMapIndex(reflect.ValueOf(fieldName), dest)
}
}
if extraDecoder != nil && typedExtraFields.Len() > 0 {
value.FieldByIndex(extraDecoder.idx).Set(typedExtraFields)
}
// Set exactness to 'extras' if there are untyped, extra fields.
if len(untypedExtraFields) > 0 && state.exactness > extras {
state.exactness = extras
}
if metadata := getSubField(value, []int{-1}, "ExtraFields"); metadata.IsValid() && len(untypedExtraFields) > 0 {
metadata.Set(reflect.ValueOf(untypedExtraFields))
}
return nil
}
}
func (d *decoderBuilder) newPrimitiveTypeDecoder(t reflect.Type) decoderFunc {
switch t.Kind() {
case reflect.String:
return func(n gjson.Result, v reflect.Value, state *decoderState) error {
v.SetString(n.String())
if guardStrict(state, n.Type != gjson.String) {
return fmt.Errorf("apijson: failed to parse string strictly")
}
// Everything that is not an object can be loosely stringified.
if n.Type == gjson.JSON {
return fmt.Errorf("apijson: failed to parse string")
}
if guardUnknown(state, v) {
return fmt.Errorf("apijson: failed string enum validation")
}
return nil
}
case reflect.Bool:
return func(n gjson.Result, v reflect.Value, state *decoderState) error {
v.SetBool(n.Bool())
if guardStrict(state, n.Type != gjson.True && n.Type != gjson.False) {
return fmt.Errorf("apijson: failed to parse bool strictly")
}
// Numbers and strings that are either 'true' or 'false' can be loosely
// deserialized as bool.
if n.Type == gjson.String && (n.Raw != "true" && n.Raw != "false") || n.Type == gjson.JSON {
return fmt.Errorf("apijson: failed to parse bool")
}
if guardUnknown(state, v) {
return fmt.Errorf("apijson: failed bool enum validation")
}
return nil
}
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return func(n gjson.Result, v reflect.Value, state *decoderState) error {
v.SetInt(n.Int())
if guardStrict(state, n.Type != gjson.Number || n.Num != float64(int(n.Num))) {
return fmt.Errorf("apijson: failed to parse int strictly")
}
// Numbers, booleans, and strings that look like numbers can be
// loosely deserialized as numbers.
if n.Type == gjson.JSON || (n.Type == gjson.String && !canParseAsNumber(n.Str)) {
return fmt.Errorf("apijson: failed to parse int")
}
if guardUnknown(state, v) {
return fmt.Errorf("apijson: failed int enum validation")
}
return nil
}
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return func(n gjson.Result, v reflect.Value, state *decoderState) error {
v.SetUint(n.Uint())
if guardStrict(state, n.Type != gjson.Number || n.Num != float64(int(n.Num)) || n.Num < 0) {
return fmt.Errorf("apijson: failed to parse uint strictly")
}
// Numbers, booleans, and strings that look like numbers can be
// loosely deserialized as uint.
if n.Type == gjson.JSON || (n.Type == gjson.String && !canParseAsNumber(n.Str)) {
return fmt.Errorf("apijson: failed to parse uint")
}
if guardUnknown(state, v) {
return fmt.Errorf("apijson: failed uint enum validation")
}
return nil
}
case reflect.Float32, reflect.Float64:
return func(n gjson.Result, v reflect.Value, state *decoderState) error {
v.SetFloat(n.Float())
if guardStrict(state, n.Type != gjson.Number) {
return fmt.Errorf("apijson: failed to parse float strictly")
}
// Numbers, booleans, and strings that look like numbers can be
// loosely deserialized as floats.
if n.Type == gjson.JSON || (n.Type == gjson.String && !canParseAsNumber(n.Str)) {
return fmt.Errorf("apijson: failed to parse float")
}
if guardUnknown(state, v) {
return fmt.Errorf("apijson: failed float enum validation")
}
return nil
}
default:
return func(node gjson.Result, v reflect.Value, state *decoderState) error {
return fmt.Errorf("unknown type received at primitive decoder: %s", t.String())
}
}
}
func (d *decoderBuilder) newTimeTypeDecoder(t reflect.Type) decoderFunc {
format := d.dateFormat
return func(n gjson.Result, v reflect.Value, state *decoderState) error {
parsed, err := time.Parse(format, n.Str)
if err == nil {
v.Set(reflect.ValueOf(parsed).Convert(t))
return nil
}
if guardStrict(state, true) {
return err
}
layouts := []string{
"2006-01-02",
"2006-01-02T15:04:05Z07:00",
"2006-01-02T15:04:05Z0700",
"2006-01-02T15:04:05",
"2006-01-02 15:04:05Z07:00",
"2006-01-02 15:04:05Z0700",
"2006-01-02 15:04:05",
}
for _, layout := range layouts {
parsed, err := time.Parse(layout, n.Str)
if err == nil {
v.Set(reflect.ValueOf(parsed).Convert(t))
return nil
}
}
return fmt.Errorf("unable to leniently parse date-time string: %s", n.Str)
}
}
func setUnexportedField(field reflect.Value, value interface{}) {
reflect.NewAt(field.Type(), unsafe.Pointer(field.UnsafeAddr())).Elem().Set(reflect.ValueOf(value))
}
func guardStrict(state *decoderState, cond bool) bool {
if !cond {
return false
}
if state.strict {
return true
}
state.exactness = loose
return false
}
func canParseAsNumber(str string) bool {
_, err := strconv.ParseFloat(str, 64)
return err == nil
}
func guardUnknown(state *decoderState, v reflect.Value) bool {
if have, ok := v.Interface().(interface{ IsKnown() bool }); guardStrict(state, ok && !have.IsKnown()) {
return true
}
return false
}
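For illustration (not part of this commit), a sketch of the "smart" union selection described above, reusing the Union fixtures and RegisterUnion call from the decoder tests further down in this diff:

func exampleUnionDecode() (UnionStruct, error) {
	var out UnionStruct
	// The "type":"typeA" discriminator short-circuits the exactness ranking and
	// selects UnionStructA directly; without a discriminator match, each variant
	// is tried and the most exact result wins.
	err := Unmarshal([]byte(`{"union":{"a":"foo","b":"bar","type":"typeA"}}`), &out)
	return out, err
}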

@@ -0,0 +1,398 @@
package apijson
import (
"bytes"
"encoding/json"
"fmt"
"reflect"
"sort"
"strconv"
"strings"
"sync"
"time"
"github.com/tidwall/sjson"
"github.com/sst/opencode-sdk-go/internal/param"
)
var encoders sync.Map // map[encoderEntry]encoderFunc
func Marshal(value interface{}) ([]byte, error) {
e := &encoder{dateFormat: time.RFC3339}
return e.marshal(value)
}
func MarshalRoot(value interface{}) ([]byte, error) {
e := &encoder{root: true, dateFormat: time.RFC3339}
return e.marshal(value)
}
type encoder struct {
dateFormat string
root bool
}
type encoderFunc func(value reflect.Value) ([]byte, error)
type encoderField struct {
tag parsedStructTag
fn encoderFunc
idx []int
}
type encoderEntry struct {
reflect.Type
dateFormat string
root bool
}
func (e *encoder) marshal(value interface{}) ([]byte, error) {
val := reflect.ValueOf(value)
if !val.IsValid() {
return nil, nil
}
typ := val.Type()
enc := e.typeEncoder(typ)
return enc(val)
}
func (e *encoder) typeEncoder(t reflect.Type) encoderFunc {
entry := encoderEntry{
Type: t,
dateFormat: e.dateFormat,
root: e.root,
}
if fi, ok := encoders.Load(entry); ok {
return fi.(encoderFunc)
}
// To deal with recursive types, populate the map with an
// indirect func before we build it. This type waits on the
// real func (f) to be ready and then calls it. This indirect
// func is only used for recursive types.
var (
wg sync.WaitGroup
f encoderFunc
)
wg.Add(1)
fi, loaded := encoders.LoadOrStore(entry, encoderFunc(func(v reflect.Value) ([]byte, error) {
wg.Wait()
return f(v)
}))
if loaded {
return fi.(encoderFunc)
}
// Compute the real encoder and replace the indirect func with it.
f = e.newTypeEncoder(t)
wg.Done()
encoders.Store(entry, f)
return f
}
func marshalerEncoder(v reflect.Value) ([]byte, error) {
return v.Interface().(json.Marshaler).MarshalJSON()
}
func indirectMarshalerEncoder(v reflect.Value) ([]byte, error) {
return v.Addr().Interface().(json.Marshaler).MarshalJSON()
}
func (e *encoder) newTypeEncoder(t reflect.Type) encoderFunc {
if t.ConvertibleTo(reflect.TypeOf(time.Time{})) {
return e.newTimeTypeEncoder()
}
if !e.root && t.Implements(reflect.TypeOf((*json.Marshaler)(nil)).Elem()) {
return marshalerEncoder
}
if !e.root && reflect.PointerTo(t).Implements(reflect.TypeOf((*json.Marshaler)(nil)).Elem()) {
return indirectMarshalerEncoder
}
e.root = false
switch t.Kind() {
case reflect.Pointer:
inner := t.Elem()
innerEncoder := e.typeEncoder(inner)
return func(v reflect.Value) ([]byte, error) {
if !v.IsValid() || v.IsNil() {
return nil, nil
}
return innerEncoder(v.Elem())
}
case reflect.Struct:
return e.newStructTypeEncoder(t)
case reflect.Array:
fallthrough
case reflect.Slice:
return e.newArrayTypeEncoder(t)
case reflect.Map:
return e.newMapEncoder(t)
case reflect.Interface:
return e.newInterfaceEncoder()
default:
return e.newPrimitiveTypeEncoder(t)
}
}
func (e *encoder) newPrimitiveTypeEncoder(t reflect.Type) encoderFunc {
switch t.Kind() {
// Note that we could use `gjson` to encode these types, but it would complicate
// the code, and the current approach shouldn't cause any issues.
case reflect.String:
return func(v reflect.Value) ([]byte, error) {
return json.Marshal(v.Interface())
}
case reflect.Bool:
return func(v reflect.Value) ([]byte, error) {
if v.Bool() {
return []byte("true"), nil
}
return []byte("false"), nil
}
case reflect.Int, reflect.Int16, reflect.Int32, reflect.Int64:
return func(v reflect.Value) ([]byte, error) {
return []byte(strconv.FormatInt(v.Int(), 10)), nil
}
case reflect.Uint, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return func(v reflect.Value) ([]byte, error) {
return []byte(strconv.FormatUint(v.Uint(), 10)), nil
}
case reflect.Float32:
return func(v reflect.Value) ([]byte, error) {
return []byte(strconv.FormatFloat(v.Float(), 'f', -1, 32)), nil
}
case reflect.Float64:
return func(v reflect.Value) ([]byte, error) {
return []byte(strconv.FormatFloat(v.Float(), 'f', -1, 64)), nil
}
default:
return func(v reflect.Value) ([]byte, error) {
return nil, fmt.Errorf("unknown type received at primitive encoder: %s", t.String())
}
}
}
func (e *encoder) newArrayTypeEncoder(t reflect.Type) encoderFunc {
itemEncoder := e.typeEncoder(t.Elem())
return func(value reflect.Value) ([]byte, error) {
json := []byte("[]")
for i := 0; i < value.Len(); i++ {
var value, err = itemEncoder(value.Index(i))
if err != nil {
return nil, err
}
if value == nil {
// Assume that empty items should be inserted as `null` so that the output array
// will be the same length as the input array
value = []byte("null")
}
json, err = sjson.SetRawBytes(json, "-1", value)
if err != nil {
return nil, err
}
}
return json, nil
}
}
func (e *encoder) newStructTypeEncoder(t reflect.Type) encoderFunc {
if t.Implements(reflect.TypeOf((*param.FieldLike)(nil)).Elem()) {
return e.newFieldTypeEncoder(t)
}
encoderFields := []encoderField{}
extraEncoder := (*encoderField)(nil)
// This helper allows us to recursively collect field encoders into a flat
// array. The parameter `index` keeps track of the access patterns necessary
// to get to some field.
var collectEncoderFields func(r reflect.Type, index []int)
collectEncoderFields = func(r reflect.Type, index []int) {
for i := 0; i < r.NumField(); i++ {
idx := append(index, i)
field := t.FieldByIndex(idx)
if !field.IsExported() {
continue
}
// If this is an embedded struct, traverse one level deeper to extract
// its fields and get their encoders as well.
if field.Anonymous {
collectEncoderFields(field.Type, idx)
continue
}
// If a json tag is not present, then we skip the field, which is intentionally
// different behavior from the stdlib.
ptag, ok := parseJSONStructTag(field)
if !ok {
continue
}
// We only want to support unexported fields if they're tagged with
// `extras` because those fields shouldn't be part of the public API. We
// also only keep the top-level extras.
if ptag.extras && len(index) == 0 {
extraEncoder = &encoderField{ptag, e.typeEncoder(field.Type.Elem()), idx}
continue
}
if ptag.name == "-" {
continue
}
dateFormat, ok := parseFormatStructTag(field)
oldFormat := e.dateFormat
if ok {
switch dateFormat {
case "date-time":
e.dateFormat = time.RFC3339
case "date":
e.dateFormat = "2006-01-02"
}
}
encoderFields = append(encoderFields, encoderField{ptag, e.typeEncoder(field.Type), idx})
e.dateFormat = oldFormat
}
}
collectEncoderFields(t, []int{})
// Ensure deterministic output by sorting by lexicographic order
sort.Slice(encoderFields, func(i, j int) bool {
return encoderFields[i].tag.name < encoderFields[j].tag.name
})
return func(value reflect.Value) (json []byte, err error) {
json = []byte("{}")
for _, ef := range encoderFields {
field := value.FieldByIndex(ef.idx)
encoded, err := ef.fn(field)
if err != nil {
return nil, err
}
if encoded == nil {
continue
}
json, err = sjson.SetRawBytes(json, ef.tag.name, encoded)
if err != nil {
return nil, err
}
}
if extraEncoder != nil {
json, err = e.encodeMapEntries(json, value.FieldByIndex(extraEncoder.idx))
if err != nil {
return nil, err
}
}
return
}
}
func (e *encoder) newFieldTypeEncoder(t reflect.Type) encoderFunc {
f, _ := t.FieldByName("Value")
enc := e.typeEncoder(f.Type)
return func(value reflect.Value) (json []byte, err error) {
present := value.FieldByName("Present")
if !present.Bool() {
return nil, nil
}
null := value.FieldByName("Null")
if null.Bool() {
return []byte("null"), nil
}
raw := value.FieldByName("Raw")
if !raw.IsNil() {
return e.typeEncoder(raw.Type())(raw)
}
return enc(value.FieldByName("Value"))
}
}
func (e *encoder) newTimeTypeEncoder() encoderFunc {
format := e.dateFormat
return func(value reflect.Value) (json []byte, err error) {
return []byte(`"` + value.Convert(reflect.TypeOf(time.Time{})).Interface().(time.Time).Format(format) + `"`), nil
}
}
func (e encoder) newInterfaceEncoder() encoderFunc {
return func(value reflect.Value) ([]byte, error) {
value = value.Elem()
if !value.IsValid() {
return nil, nil
}
return e.typeEncoder(value.Type())(value)
}
}
// Given a []byte of JSON (either an empty object or an object that already
// contains entries), encode all of the entries in the map into the JSON byte array.
func (e *encoder) encodeMapEntries(json []byte, v reflect.Value) ([]byte, error) {
type mapPair struct {
key []byte
value reflect.Value
}
pairs := []mapPair{}
keyEncoder := e.typeEncoder(v.Type().Key())
iter := v.MapRange()
for iter.Next() {
var encodedKeyString string
if iter.Key().Type().Kind() == reflect.String {
encodedKeyString = iter.Key().String()
} else {
var err error
encodedKeyBytes, err := keyEncoder(iter.Key())
if err != nil {
return nil, err
}
encodedKeyString = string(encodedKeyBytes)
}
encodedKey := []byte(sjsonReplacer.Replace(encodedKeyString))
pairs = append(pairs, mapPair{key: encodedKey, value: iter.Value()})
}
// Ensure deterministic output
sort.Slice(pairs, func(i, j int) bool {
return bytes.Compare(pairs[i].key, pairs[j].key) < 0
})
elementEncoder := e.typeEncoder(v.Type().Elem())
for _, p := range pairs {
encodedValue, err := elementEncoder(p.value)
if err != nil {
return nil, err
}
if len(encodedValue) == 0 {
continue
}
json, err = sjson.SetRawBytes(json, string(p.key), encodedValue)
if err != nil {
return nil, err
}
}
return json, nil
}
func (e *encoder) newMapEncoder(t reflect.Type) encoderFunc {
return func(value reflect.Value) ([]byte, error) {
json := []byte("{}")
var err error
json, err = e.encodeMapEntries(json, value)
if err != nil {
return nil, err
}
return json, nil
}
}
// If we want to set a literal key value into JSON using sjson, we need to make sure it doesn't have
// special characters that sjson interprets as a path.
var sjsonReplacer *strings.Replacer = strings.NewReplacer(".", "\\.", ":", "\\:", "*", "\\*")
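A small sketch (not part of this commit) of why that replacer matters: map keys containing sjson path characters are escaped so they are set as literal keys rather than interpreted as nested paths.

func exampleDottedKeys() (string, error) {
	// Produces {"a.b":"x","plain":"y"} rather than {"a":{"b":"x"},"plain":"y"}.
	b, err := Marshal(map[string]string{"a.b": "x", "plain": "y"})
	return string(b), err
}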

@@ -0,0 +1,41 @@
package apijson
import "reflect"
type status uint8
const (
missing status = iota
null
invalid
valid
)
type Field struct {
raw string
status status
}
// IsNull returns true if the field is explicitly `null` _or_ if it is not present
// at all (i.e. missing). To check whether the field's key is present in the JSON
// with an explicit null value, you must check `f.IsNull() && !f.IsMissing()`.
func (j Field) IsNull() bool { return j.status <= null }
func (j Field) IsMissing() bool { return j.status == missing }
func (j Field) IsInvalid() bool { return j.status == invalid }
func (j Field) Raw() string { return j.raw }
func getSubField(root reflect.Value, index []int, name string) reflect.Value {
strct := root.FieldByIndex(index[:len(index)-1])
if !strct.IsValid() {
panic("couldn't find encapsulating struct for field " + name)
}
meta := strct.FieldByName("JSON")
if !meta.IsValid() {
return reflect.Value{}
}
field := meta.FieldByName(name)
if !field.IsValid() {
return reflect.Value{}
}
return field
}
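For illustration (not part of this commit), a sketch that could sit alongside the decoder tests below, showing how these statuses read after decoding into JSONFieldStruct:

func exampleFieldStatuses() {
	var res JSONFieldStruct
	_ = Unmarshal([]byte(`{"a":"12","b":"12","c":null}`), &res)
	// "b" is loosely coerced from the string "12" to int64, so res.JSON.B is valid.
	// "a" cannot be coerced to bool, so res.JSON.A.IsInvalid() reports true.
	// "c" was sent as an explicit null: res.JSON.C.IsNull() && !res.JSON.C.IsMissing().
	// "d" never appeared at all, so res.JSON.D.IsMissing() (and IsNull) report true.
}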

@@ -0,0 +1,66 @@
package apijson
import (
"testing"
"time"
"github.com/sst/opencode-sdk-go/internal/param"
)
type Struct struct {
A string `json:"a"`
B int64 `json:"b"`
}
type FieldStruct struct {
A param.Field[string] `json:"a"`
B param.Field[int64] `json:"b"`
C param.Field[Struct] `json:"c"`
D param.Field[time.Time] `json:"d" format:"date"`
E param.Field[time.Time] `json:"e" format:"date-time"`
F param.Field[int64] `json:"f"`
}
func TestFieldMarshal(t *testing.T) {
tests := map[string]struct {
value interface{}
expected string
}{
"null_string": {param.Field[string]{Present: true, Null: true}, "null"},
"null_int": {param.Field[int]{Present: true, Null: true}, "null"},
"null_int64": {param.Field[int64]{Present: true, Null: true}, "null"},
"null_struct": {param.Field[Struct]{Present: true, Null: true}, "null"},
"string": {param.Field[string]{Present: true, Value: "string"}, `"string"`},
"int": {param.Field[int]{Present: true, Value: 123}, "123"},
"int64": {param.Field[int64]{Present: true, Value: int64(123456789123456789)}, "123456789123456789"},
"struct": {param.Field[Struct]{Present: true, Value: Struct{A: "yo", B: 123}}, `{"a":"yo","b":123}`},
"string_raw": {param.Field[int]{Present: true, Raw: "string"}, `"string"`},
"int_raw": {param.Field[int]{Present: true, Raw: 123}, "123"},
"int64_raw": {param.Field[int]{Present: true, Raw: int64(123456789123456789)}, "123456789123456789"},
"struct_raw": {param.Field[int]{Present: true, Raw: Struct{A: "yo", B: 123}}, `{"a":"yo","b":123}`},
"param_struct": {
FieldStruct{
A: param.Field[string]{Present: true, Value: "hello"},
B: param.Field[int64]{Present: true, Value: int64(12)},
D: param.Field[time.Time]{Present: true, Value: time.Date(2023, time.March, 18, 14, 47, 38, 0, time.UTC)},
E: param.Field[time.Time]{Present: true, Value: time.Date(2023, time.March, 18, 14, 47, 38, 0, time.UTC)},
},
`{"a":"hello","b":12,"d":"2023-03-18","e":"2023-03-18T14:47:38Z"}`,
},
}
for name, test := range tests {
t.Run(name, func(t *testing.T) {
b, err := Marshal(test.value)
if err != nil {
t.Fatalf("didn't expect error %v", err)
}
if string(b) != test.expected {
t.Fatalf("expected %s, received %s", test.expected, string(b))
}
})
}
}

@@ -0,0 +1,617 @@
package apijson
import (
"reflect"
"strings"
"testing"
"time"
"github.com/tidwall/gjson"
)
func P[T any](v T) *T { return &v }
type Primitives struct {
A bool `json:"a"`
B int `json:"b"`
C uint `json:"c"`
D float64 `json:"d"`
E float32 `json:"e"`
F []int `json:"f"`
}
type PrimitivePointers struct {
A *bool `json:"a"`
B *int `json:"b"`
C *uint `json:"c"`
D *float64 `json:"d"`
E *float32 `json:"e"`
F *[]int `json:"f"`
}
type Slices struct {
Slice []Primitives `json:"slices"`
}
type DateTime struct {
Date time.Time `json:"date" format:"date"`
DateTime time.Time `json:"date-time" format:"date-time"`
}
type AdditionalProperties struct {
A bool `json:"a"`
ExtraFields map[string]interface{} `json:"-,extras"`
}
type TypedAdditionalProperties struct {
A bool `json:"a"`
ExtraFields map[string]int `json:"-,extras"`
}
type EmbeddedStruct struct {
A bool `json:"a"`
B string `json:"b"`
JSON EmbeddedStructJSON
}
type EmbeddedStructJSON struct {
A Field
B Field
ExtraFields map[string]Field
raw string
}
type EmbeddedStructs struct {
EmbeddedStruct
A *int `json:"a"`
ExtraFields map[string]interface{} `json:"-,extras"`
JSON EmbeddedStructsJSON
}
type EmbeddedStructsJSON struct {
A Field
ExtraFields map[string]Field
raw string
}
type Recursive struct {
Name string `json:"name"`
Child *Recursive `json:"child"`
}
type JSONFieldStruct struct {
A bool `json:"a"`
B int64 `json:"b"`
C string `json:"c"`
D string `json:"d"`
ExtraFields map[string]int64 `json:"-,extras"`
JSON JSONFieldStructJSON `json:"-,metadata"`
}
type JSONFieldStructJSON struct {
A Field
B Field
C Field
D Field
ExtraFields map[string]Field
raw string
}
type UnknownStruct struct {
Unknown interface{} `json:"unknown"`
}
type UnionStruct struct {
Union Union `json:"union" format:"date"`
}
type Union interface {
union()
}
type Inline struct {
InlineField Primitives `json:"-,inline"`
JSON InlineJSON `json:"-,metadata"`
}
type InlineArray struct {
InlineField []string `json:"-,inline"`
JSON InlineJSON `json:"-,metadata"`
}
type InlineJSON struct {
InlineField Field
raw string
}
type UnionInteger int64
func (UnionInteger) union() {}
type UnionStructA struct {
Type string `json:"type"`
A string `json:"a"`
B string `json:"b"`
}
func (UnionStructA) union() {}
type UnionStructB struct {
Type string `json:"type"`
A string `json:"a"`
}
func (UnionStructB) union() {}
type UnionTime time.Time
func (UnionTime) union() {}
func init() {
RegisterUnion(reflect.TypeOf((*Union)(nil)).Elem(), "type",
UnionVariant{
TypeFilter: gjson.String,
Type: reflect.TypeOf(UnionTime{}),
},
UnionVariant{
TypeFilter: gjson.Number,
Type: reflect.TypeOf(UnionInteger(0)),
},
UnionVariant{
TypeFilter: gjson.JSON,
DiscriminatorValue: "typeA",
Type: reflect.TypeOf(UnionStructA{}),
},
UnionVariant{
TypeFilter: gjson.JSON,
DiscriminatorValue: "typeB",
Type: reflect.TypeOf(UnionStructB{}),
},
)
}
type ComplexUnionStruct struct {
Union ComplexUnion `json:"union"`
}
type ComplexUnion interface {
complexUnion()
}
type ComplexUnionA struct {
Boo string `json:"boo"`
Foo bool `json:"foo"`
}
func (ComplexUnionA) complexUnion() {}
type ComplexUnionB struct {
Boo bool `json:"boo"`
Foo string `json:"foo"`
}
func (ComplexUnionB) complexUnion() {}
type ComplexUnionC struct {
Boo int64 `json:"boo"`
}
func (ComplexUnionC) complexUnion() {}
type ComplexUnionTypeA struct {
Baz int64 `json:"baz"`
Type TypeA `json:"type"`
}
func (ComplexUnionTypeA) complexUnion() {}
type TypeA string
func (t TypeA) IsKnown() bool {
return t == "a"
}
type ComplexUnionTypeB struct {
Baz int64 `json:"baz"`
Type TypeB `json:"type"`
}
type TypeB string
func (t TypeB) IsKnown() bool {
return t == "b"
}
type UnmarshalStruct struct {
Foo string `json:"foo"`
prop bool `json:"-"`
}
func (r *UnmarshalStruct) UnmarshalJSON(json []byte) error {
r.prop = true
return UnmarshalRoot(json, r)
}
func (ComplexUnionTypeB) complexUnion() {}
func init() {
RegisterUnion(reflect.TypeOf((*ComplexUnion)(nil)).Elem(), "",
UnionVariant{
TypeFilter: gjson.JSON,
Type: reflect.TypeOf(ComplexUnionA{}),
},
UnionVariant{
TypeFilter: gjson.JSON,
Type: reflect.TypeOf(ComplexUnionB{}),
},
UnionVariant{
TypeFilter: gjson.JSON,
Type: reflect.TypeOf(ComplexUnionC{}),
},
UnionVariant{
TypeFilter: gjson.JSON,
Type: reflect.TypeOf(ComplexUnionTypeA{}),
},
UnionVariant{
TypeFilter: gjson.JSON,
Type: reflect.TypeOf(ComplexUnionTypeB{}),
},
)
}
type MarshallingUnionStruct struct {
Union MarshallingUnion
}
func (r *MarshallingUnionStruct) UnmarshalJSON(data []byte) (err error) {
*r = MarshallingUnionStruct{}
err = UnmarshalRoot(data, &r.Union)
return
}
func (r MarshallingUnionStruct) MarshalJSON() (data []byte, err error) {
return MarshalRoot(r.Union)
}
type MarshallingUnion interface {
marshallingUnion()
}
type MarshallingUnionA struct {
Boo string `json:"boo"`
}
func (MarshallingUnionA) marshallingUnion() {}
func (r *MarshallingUnionA) UnmarshalJSON(data []byte) (err error) {
return UnmarshalRoot(data, r)
}
type MarshallingUnionB struct {
Foo string `json:"foo"`
}
func (MarshallingUnionB) marshallingUnion() {}
func (r *MarshallingUnionB) UnmarshalJSON(data []byte) (err error) {
return UnmarshalRoot(data, r)
}
func init() {
RegisterUnion(
reflect.TypeOf((*MarshallingUnion)(nil)).Elem(),
"",
UnionVariant{
TypeFilter: gjson.JSON,
Type: reflect.TypeOf(MarshallingUnionA{}),
},
UnionVariant{
TypeFilter: gjson.JSON,
Type: reflect.TypeOf(MarshallingUnionB{}),
},
)
}
var tests = map[string]struct {
buf string
val interface{}
}{
"true": {"true", true},
"false": {"false", false},
"int": {"1", 1},
"int_bigger": {"12324", 12324},
"int_string_coerce": {`"65"`, 65},
"int_boolean_coerce": {"true", 1},
"int64": {"1", int64(1)},
"int64_huge": {"123456789123456789", int64(123456789123456789)},
"uint": {"1", uint(1)},
"uint_bigger": {"12324", uint(12324)},
"uint_coerce": {`"65"`, uint(65)},
"float_1.54": {"1.54", float32(1.54)},
"float_1.89": {"1.89", float64(1.89)},
"string": {`"str"`, "str"},
"string_int_coerce": {`12`, "12"},
"array_string": {`["foo","bar"]`, []string{"foo", "bar"}},
"array_int": {`[1,2]`, []int{1, 2}},
"array_int_coerce": {`["1",2]`, []int{1, 2}},
"ptr_true": {"true", P(true)},
"ptr_false": {"false", P(false)},
"ptr_int": {"1", P(1)},
"ptr_int_bigger": {"12324", P(12324)},
"ptr_int_string_coerce": {`"65"`, P(65)},
"ptr_int_boolean_coerce": {"true", P(1)},
"ptr_int64": {"1", P(int64(1))},
"ptr_int64_huge": {"123456789123456789", P(int64(123456789123456789))},
"ptr_uint": {"1", P(uint(1))},
"ptr_uint_bigger": {"12324", P(uint(12324))},
"ptr_uint_coerce": {`"65"`, P(uint(65))},
"ptr_float_1.54": {"1.54", P(float32(1.54))},
"ptr_float_1.89": {"1.89", P(float64(1.89))},
"date_time": {`"2007-03-01T13:00:00Z"`, time.Date(2007, time.March, 1, 13, 0, 0, 0, time.UTC)},
"date_time_nano_coerce": {`"2007-03-01T13:03:05.123456789Z"`, time.Date(2007, time.March, 1, 13, 3, 5, 123456789, time.UTC)},
"date_time_missing_t_coerce": {`"2007-03-01 13:03:05Z"`, time.Date(2007, time.March, 1, 13, 3, 5, 0, time.UTC)},
"date_time_missing_timezone_coerce": {`"2007-03-01T13:03:05"`, time.Date(2007, time.March, 1, 13, 3, 5, 0, time.UTC)},
// note: using -1200 to minimize probability of conflicting with the local timezone of the test runner
// see https://en.wikipedia.org/wiki/UTC%E2%88%9212:00
"date_time_missing_timezone_colon_coerce": {`"2007-03-01T13:03:05-1200"`, time.Date(2007, time.March, 1, 13, 3, 5, 0, time.FixedZone("", -12*60*60))},
"date_time_nano_missing_t_coerce": {`"2007-03-01 13:03:05.123456789Z"`, time.Date(2007, time.March, 1, 13, 3, 5, 123456789, time.UTC)},
"map_string": {`{"foo":"bar"}`, map[string]string{"foo": "bar"}},
"map_string_with_sjson_path_chars": {`{":a.b.c*:d*-1e.f":"bar"}`, map[string]string{":a.b.c*:d*-1e.f": "bar"}},
"map_interface": {`{"a":1,"b":"str","c":false}`, map[string]interface{}{"a": float64(1), "b": "str", "c": false}},
"primitive_struct": {
`{"a":false,"b":237628372683,"c":654,"d":9999.43,"e":43.76,"f":[1,2,3,4]}`,
Primitives{A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}},
},
"slices": {
`{"slices":[{"a":false,"b":237628372683,"c":654,"d":9999.43,"e":43.76,"f":[1,2,3,4]}]}`,
Slices{
Slice: []Primitives{{A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}}},
},
},
"primitive_pointer_struct": {
`{"a":false,"b":237628372683,"c":654,"d":9999.43,"e":43.76,"f":[1,2,3,4,5]}`,
PrimitivePointers{
A: P(false),
B: P(237628372683),
C: P(uint(654)),
D: P(9999.43),
E: P(float32(43.76)),
F: &[]int{1, 2, 3, 4, 5},
},
},
"datetime_struct": {
`{"date":"2006-01-02","date-time":"2006-01-02T15:04:05Z"}`,
DateTime{
Date: time.Date(2006, time.January, 2, 0, 0, 0, 0, time.UTC),
DateTime: time.Date(2006, time.January, 2, 15, 4, 5, 0, time.UTC),
},
},
"additional_properties": {
`{"a":true,"bar":"value","foo":true}`,
AdditionalProperties{
A: true,
ExtraFields: map[string]interface{}{
"bar": "value",
"foo": true,
},
},
},
"embedded_struct": {
`{"a":1,"b":"bar"}`,
EmbeddedStructs{
EmbeddedStruct: EmbeddedStruct{
A: true,
B: "bar",
JSON: EmbeddedStructJSON{
A: Field{raw: `1`, status: valid},
B: Field{raw: `"bar"`, status: valid},
raw: `{"a":1,"b":"bar"}`,
},
},
A: P(1),
ExtraFields: map[string]interface{}{"b": "bar"},
JSON: EmbeddedStructsJSON{
A: Field{raw: `1`, status: valid},
ExtraFields: map[string]Field{
"b": {raw: `"bar"`, status: valid},
},
raw: `{"a":1,"b":"bar"}`,
},
},
},
"recursive_struct": {
`{"child":{"name":"Alex"},"name":"Robert"}`,
Recursive{Name: "Robert", Child: &Recursive{Name: "Alex"}},
},
"metadata_coerce": {
`{"a":"12","b":"12","c":null,"extra_typed":12,"extra_untyped":{"foo":"bar"}}`,
JSONFieldStruct{
A: false,
B: 12,
C: "",
JSON: JSONFieldStructJSON{
raw: `{"a":"12","b":"12","c":null,"extra_typed":12,"extra_untyped":{"foo":"bar"}}`,
A: Field{raw: `"12"`, status: invalid},
B: Field{raw: `"12"`, status: valid},
C: Field{raw: "null", status: null},
D: Field{raw: "", status: missing},
ExtraFields: map[string]Field{
"extra_typed": {
raw: "12",
status: valid,
},
"extra_untyped": {
raw: `{"foo":"bar"}`,
status: invalid,
},
},
},
ExtraFields: map[string]int64{
"extra_typed": 12,
"extra_untyped": 0,
},
},
},
"unknown_struct_number": {
`{"unknown":12}`,
UnknownStruct{
Unknown: 12.,
},
},
"unknown_struct_map": {
`{"unknown":{"foo":"bar"}}`,
UnknownStruct{
Unknown: map[string]interface{}{
"foo": "bar",
},
},
},
"union_integer": {
`{"union":12}`,
UnionStruct{
Union: UnionInteger(12),
},
},
"union_struct_discriminated_a": {
`{"union":{"a":"foo","b":"bar","type":"typeA"}}`,
UnionStruct{
Union: UnionStructA{
Type: "typeA",
A: "foo",
B: "bar",
},
},
},
"union_struct_discriminated_b": {
`{"union":{"a":"foo","type":"typeB"}}`,
UnionStruct{
Union: UnionStructB{
Type: "typeB",
A: "foo",
},
},
},
"union_struct_time": {
`{"union":"2010-05-23"}`,
UnionStruct{
Union: UnionTime(time.Date(2010, 05, 23, 0, 0, 0, 0, time.UTC)),
},
},
"complex_union_a": {
`{"union":{"boo":"12","foo":true}}`,
ComplexUnionStruct{Union: ComplexUnionA{Boo: "12", Foo: true}},
},
"complex_union_b": {
`{"union":{"boo":true,"foo":"12"}}`,
ComplexUnionStruct{Union: ComplexUnionB{Boo: true, Foo: "12"}},
},
"complex_union_c": {
`{"union":{"boo":12}}`,
ComplexUnionStruct{Union: ComplexUnionC{Boo: 12}},
},
"complex_union_type_a": {
`{"union":{"baz":12,"type":"a"}}`,
ComplexUnionStruct{Union: ComplexUnionTypeA{Baz: 12, Type: TypeA("a")}},
},
"complex_union_type_b": {
`{"union":{"baz":12,"type":"b"}}`,
ComplexUnionStruct{Union: ComplexUnionTypeB{Baz: 12, Type: TypeB("b")}},
},
"marshalling_union_a": {
`{"boo":"hello"}`,
MarshallingUnionStruct{Union: MarshallingUnionA{Boo: "hello"}},
},
"marshalling_union_b": {
`{"foo":"hi"}`,
MarshallingUnionStruct{Union: MarshallingUnionB{Foo: "hi"}},
},
"unmarshal": {
`{"foo":"hello"}`,
&UnmarshalStruct{Foo: "hello", prop: true},
},
"array_of_unmarshal": {
`[{"foo":"hello"}]`,
[]UnmarshalStruct{{Foo: "hello", prop: true}},
},
"inline_coerce": {
`{"a":false,"b":237628372683,"c":654,"d":9999.43,"e":43.76,"f":[1,2,3,4]}`,
Inline{
InlineField: Primitives{A: false, B: 237628372683, C: 0x28e, D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}},
JSON: InlineJSON{
InlineField: Field{raw: "{\"a\":false,\"b\":237628372683,\"c\":654,\"d\":9999.43,\"e\":43.76,\"f\":[1,2,3,4]}", status: 3},
raw: "{\"a\":false,\"b\":237628372683,\"c\":654,\"d\":9999.43,\"e\":43.76,\"f\":[1,2,3,4]}",
},
},
},
"inline_array_coerce": {
`["Hello","foo","bar"]`,
InlineArray{
InlineField: []string{"Hello", "foo", "bar"},
JSON: InlineJSON{
InlineField: Field{raw: `["Hello","foo","bar"]`, status: 3},
raw: `["Hello","foo","bar"]`,
},
},
},
}
func TestDecode(t *testing.T) {
for name, test := range tests {
t.Run(name, func(t *testing.T) {
result := reflect.New(reflect.TypeOf(test.val))
if err := Unmarshal([]byte(test.buf), result.Interface()); err != nil {
t.Fatalf("deserialization of %v failed with error %v", result, err)
}
if !reflect.DeepEqual(result.Elem().Interface(), test.val) {
t.Fatalf("expected '%s' to deserialize to \n%#v\nbut got\n%#v", test.buf, test.val, result.Elem().Interface())
}
})
}
}
func TestEncode(t *testing.T) {
for name, test := range tests {
if strings.HasSuffix(name, "_coerce") {
continue
}
t.Run(name, func(t *testing.T) {
raw, err := Marshal(test.val)
if err != nil {
t.Fatalf("serialization of %v failed with error %v", test.val, err)
}
if string(raw) != test.buf {
t.Fatalf("expected %+#v to serialize to %s but got %s", test.val, test.buf, string(raw))
}
})
}
}

View File

@@ -0,0 +1,120 @@
package apijson
import (
"fmt"
"reflect"
)
// Port copies over values from one struct to another struct.
func Port(from any, to any) error {
toVal := reflect.ValueOf(to)
fromVal := reflect.ValueOf(from)
if toVal.Kind() != reflect.Ptr || toVal.IsNil() {
return fmt.Errorf("destination must be a non-nil pointer")
}
for toVal.Kind() == reflect.Ptr {
toVal = toVal.Elem()
}
toType := toVal.Type()
for fromVal.Kind() == reflect.Ptr {
fromVal = fromVal.Elem()
}
fromType := fromVal.Type()
if toType.Kind() != reflect.Struct {
return fmt.Errorf("destination must be a non-nil pointer to a struct (%v %v)", toType, toType.Kind())
}
values := map[string]reflect.Value{}
fields := map[string]reflect.Value{}
fromJSON := fromVal.FieldByName("JSON")
toJSON := toVal.FieldByName("JSON")
// Iterate through the fields of v and load all the "normal" fields in the struct into the map
// from name to reflect.Value, as well as their raw .JSON.Foo counterparts indicated by j.
var getFields func(t reflect.Type, v reflect.Value)
getFields = func(t reflect.Type, v reflect.Value) {
j := v.FieldByName("JSON")
// Recurse into anonymous fields first, since the fields on the object should win over the fields in the
// embedded object.
for i := 0; i < t.NumField(); i++ {
field := t.Field(i)
if field.Anonymous {
getFields(field.Type, v.Field(i))
continue
}
}
for i := 0; i < t.NumField(); i++ {
field := t.Field(i)
ptag, ok := parseJSONStructTag(field)
if !ok || ptag.name == "-" {
continue
}
values[ptag.name] = v.Field(i)
if j.IsValid() {
fields[ptag.name] = j.FieldByName(field.Name)
}
}
}
getFields(fromType, fromVal)
// Use the values from the previous step to populate the 'to' struct.
for i := 0; i < toType.NumField(); i++ {
field := toType.Field(i)
ptag, ok := parseJSONStructTag(field)
if !ok {
continue
}
if ptag.name == "-" {
continue
}
if value, ok := values[ptag.name]; ok {
delete(values, ptag.name)
if field.Type.Kind() == reflect.Interface {
toVal.Field(i).Set(value)
} else {
switch value.Kind() {
case reflect.String:
toVal.Field(i).SetString(value.String())
case reflect.Bool:
toVal.Field(i).SetBool(value.Bool())
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
toVal.Field(i).SetInt(value.Int())
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
toVal.Field(i).SetUint(value.Uint())
case reflect.Float32, reflect.Float64:
toVal.Field(i).SetFloat(value.Float())
default:
toVal.Field(i).Set(value)
}
}
}
if fromJSONField, ok := fields[ptag.name]; ok {
if toJSONField := toJSON.FieldByName(field.Name); toJSONField.IsValid() {
toJSONField.Set(fromJSONField)
}
}
}
// Finally, copy over the .JSON.raw and .JSON.ExtraFields
if toJSON.IsValid() {
if raw := toJSON.FieldByName("raw"); raw.IsValid() {
setUnexportedField(raw, fromJSON.Interface().(interface{ RawJSON() string }).RawJSON())
}
if toExtraFields := toJSON.FieldByName("ExtraFields"); toExtraFields.IsValid() {
if fromExtraFields := fromJSON.FieldByName("ExtraFields"); fromExtraFields.IsValid() {
setUnexportedField(toExtraFields, fromExtraFields.Interface())
}
}
}
return nil
}
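// Illustrative sketch, not part of the generated file: a minimal use of Port on a
// hypothetical pair of structs. It copies json-tagged fields that share a name from
// the source into the destination; fields with no counterpart keep their zero value.
type exampleSource struct {
	Name string `json:"name"`
}
type exampleDest struct {
	Name  string `json:"name"`
	Count int    `json:"count"` // no matching source field, stays zero
}
func examplePort() (exampleDest, error) {
	var dst exampleDest
	err := Port(exampleSource{Name: "visa"}, &dst)
	return dst, err
}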

View File

@@ -0,0 +1,257 @@
package apijson
import (
"reflect"
"testing"
)
type Metadata struct {
CreatedAt string `json:"created_at"`
}
// Card is the "combined" type of CardVisa and CardMastercard
type Card struct {
Processor CardProcessor `json:"processor"`
Data any `json:"data"`
IsFoo bool `json:"is_foo"`
IsBar bool `json:"is_bar"`
Metadata Metadata `json:"metadata"`
Value interface{} `json:"value"`
JSON cardJSON
}
type cardJSON struct {
Processor Field
Data Field
IsFoo Field
IsBar Field
Metadata Field
Value Field
ExtraFields map[string]Field
raw string
}
func (r cardJSON) RawJSON() string { return r.raw }
type CardProcessor string
// CardVisa
type CardVisa struct {
Processor CardVisaProcessor `json:"processor"`
Data CardVisaData `json:"data"`
IsFoo bool `json:"is_foo"`
Metadata Metadata `json:"metadata"`
Value string `json:"value"`
JSON cardVisaJSON
}
type cardVisaJSON struct {
Processor Field
Data Field
IsFoo Field
Metadata Field
Value Field
ExtraFields map[string]Field
raw string
}
func (r cardVisaJSON) RawJSON() string { return r.raw }
type CardVisaProcessor string
type CardVisaData struct {
Foo string `json:"foo"`
}
// CardMastercard
type CardMastercard struct {
Processor CardMastercardProcessor `json:"processor"`
Data CardMastercardData `json:"data"`
IsBar bool `json:"is_bar"`
Metadata Metadata `json:"metadata"`
Value bool `json:"value"`
JSON cardMastercardJSON
}
type cardMastercardJSON struct {
Processor Field
Data Field
IsBar Field
Metadata Field
Value Field
ExtraFields map[string]Field
raw string
}
func (r cardMastercardJSON) RawJSON() string { return r.raw }
type CardMastercardProcessor string
type CardMastercardData struct {
Bar int64 `json:"bar"`
}
type CommonFields struct {
Metadata Metadata `json:"metadata"`
Value string `json:"value"`
JSON commonFieldsJSON
}
type commonFieldsJSON struct {
Metadata Field
Value Field
ExtraFields map[string]Field
raw string
}
type CardEmbedded struct {
CommonFields
Processor CardVisaProcessor `json:"processor"`
Data CardVisaData `json:"data"`
IsFoo bool `json:"is_foo"`
JSON cardEmbeddedJSON
}
type cardEmbeddedJSON struct {
Processor Field
Data Field
IsFoo Field
ExtraFields map[string]Field
raw string
}
func (r cardEmbeddedJSON) RawJSON() string { return r.raw }
var portTests = map[string]struct {
from any
to any
}{
"visa to card": {
CardVisa{
Processor: "visa",
IsFoo: true,
Data: CardVisaData{
Foo: "foo",
},
Metadata: Metadata{
CreatedAt: "Mar 29 2024",
},
Value: "value",
JSON: cardVisaJSON{
raw: `{"processor":"visa","is_foo":true,"data":{"foo":"foo"}}`,
Processor: Field{raw: `"visa"`, status: valid},
IsFoo: Field{raw: `true`, status: valid},
Data: Field{raw: `{"foo":"foo"}`, status: valid},
Value: Field{raw: `"value"`, status: valid},
ExtraFields: map[string]Field{"extra": {raw: `"yo"`, status: valid}},
},
},
Card{
Processor: "visa",
IsFoo: true,
IsBar: false,
Data: CardVisaData{
Foo: "foo",
},
Metadata: Metadata{
CreatedAt: "Mar 29 2024",
},
Value: "value",
JSON: cardJSON{
raw: `{"processor":"visa","is_foo":true,"data":{"foo":"foo"}}`,
Processor: Field{raw: `"visa"`, status: valid},
IsFoo: Field{raw: `true`, status: valid},
Data: Field{raw: `{"foo":"foo"}`, status: valid},
Value: Field{raw: `"value"`, status: valid},
ExtraFields: map[string]Field{"extra": {raw: `"yo"`, status: valid}},
},
},
},
"mastercard to card": {
CardMastercard{
Processor: "mastercard",
IsBar: true,
Data: CardMastercardData{
Bar: 13,
},
Value: false,
},
Card{
Processor: "mastercard",
IsFoo: false,
IsBar: true,
Data: CardMastercardData{
Bar: 13,
},
Value: false,
},
},
"embedded to card": {
CardEmbedded{
CommonFields: CommonFields{
Metadata: Metadata{
CreatedAt: "Mar 29 2024",
},
Value: "embedded_value",
JSON: commonFieldsJSON{
Metadata: Field{raw: `{"created_at":"Mar 29 2024"}`, status: valid},
Value: Field{raw: `"embedded_value"`, status: valid},
raw: `should not matter`,
},
},
Processor: "visa",
IsFoo: true,
Data: CardVisaData{
Foo: "embedded_foo",
},
JSON: cardEmbeddedJSON{
raw: `{"processor":"visa","is_foo":true,"data":{"foo":"embedded_foo"},"metadata":{"created_at":"Mar 29 2024"},"value":"embedded_value"}`,
Processor: Field{raw: `"visa"`, status: valid},
IsFoo: Field{raw: `true`, status: valid},
Data: Field{raw: `{"foo":"embedded_foo"}`, status: valid},
},
},
Card{
Processor: "visa",
IsFoo: true,
IsBar: false,
Data: CardVisaData{
Foo: "embedded_foo",
},
Metadata: Metadata{
CreatedAt: "Mar 29 2024",
},
Value: "embedded_value",
JSON: cardJSON{
raw: `{"processor":"visa","is_foo":true,"data":{"foo":"embedded_foo"},"metadata":{"created_at":"Mar 29 2024"},"value":"embedded_value"}`,
Processor: Field{raw: `"visa"`, status: 0x3},
IsFoo: Field{raw: "true", status: 0x3},
Data: Field{raw: `{"foo":"embedded_foo"}`, status: 0x3},
Metadata: Field{raw: `{"created_at":"Mar 29 2024"}`, status: 0x3},
Value: Field{raw: `"embedded_value"`, status: 0x3},
},
},
},
}
func TestPort(t *testing.T) {
for name, test := range portTests {
t.Run(name, func(t *testing.T) {
toVal := reflect.New(reflect.TypeOf(test.to))
err := Port(test.from, toVal.Interface())
if err != nil {
t.Fatalf("port of %v failed with error %v", test.from, err)
}
if !reflect.DeepEqual(toVal.Elem().Interface(), test.to) {
t.Fatalf("expected:\n%+#v\n\nto port to:\n%+#v\n\nbut got:\n%+#v", test.from, test.to, toVal.Elem().Interface())
}
})
}
}

View File

@@ -0,0 +1,41 @@
package apijson
import (
"reflect"
"github.com/tidwall/gjson"
)
type UnionVariant struct {
TypeFilter gjson.Type
DiscriminatorValue interface{}
Type reflect.Type
}
var unionRegistry = map[reflect.Type]unionEntry{}
var unionVariants = map[reflect.Type]interface{}{}
type unionEntry struct {
discriminatorKey string
variants []UnionVariant
}
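// RegisterUnion registers the possible variants of a union interface type so the
// decoder can select the matching variant at unmarshal time, optionally keyed by a
// discriminator field name.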
func RegisterUnion(typ reflect.Type, discriminator string, variants ...UnionVariant) {
unionRegistry[typ] = unionEntry{
discriminatorKey: discriminator,
variants: variants,
}
for _, variant := range variants {
unionVariants[variant.Type] = typ
}
}
// UnionUnmarshaler wraps a union interface type to force it through [apijson.UnmarshalJSON],
// since an UnmarshalJSON method cannot be defined on the interface itself.
type UnionUnmarshaler[T any] struct {
Value T
}
func (c *UnionUnmarshaler[T]) UnmarshalJSON(buf []byte) error {
return UnmarshalRoot(buf, &c.Value)
}
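// Illustrative sketch, not part of the generated file: decoding into a hypothetical
// union interface by way of the wrapper above. At runtime the union's variants would
// also need to be registered with RegisterUnion for the decode to pick a concrete type.
type exampleUnion interface{ isExampleUnion() }

func exampleUnionDecode(data []byte) (exampleUnion, error) {
	var wrapped UnionUnmarshaler[exampleUnion]
	err := wrapped.UnmarshalJSON(data)
	return wrapped.Value, err
}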

View File

@@ -0,0 +1,47 @@
package apijson
import (
"reflect"
"strings"
)
const jsonStructTag = "json"
const formatStructTag = "format"
type parsedStructTag struct {
name string
required bool
extras bool
metadata bool
inline bool
}
func parseJSONStructTag(field reflect.StructField) (tag parsedStructTag, ok bool) {
raw, ok := field.Tag.Lookup(jsonStructTag)
if !ok {
return
}
parts := strings.Split(raw, ",")
if len(parts) == 0 {
return tag, false
}
tag.name = parts[0]
for _, part := range parts[1:] {
switch part {
case "required":
tag.required = true
case "extras":
tag.extras = true
case "metadata":
tag.metadata = true
case "inline":
tag.inline = true
}
}
return
}
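// Illustrative note, not part of the generated file: for a field tagged
// `json:"foo,required,extras"`, parseJSONStructTag above returns
// parsedStructTag{name: "foo", required: true, extras: true}; a field whose tag
// name is "-" is skipped by callers such as Port.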
func parseFormatStructTag(field reflect.StructField) (format string, ok bool) {
format, ok = field.Tag.Lookup(formatStructTag)
return
}

View File

@@ -0,0 +1,341 @@
package apiquery
import (
"encoding/json"
"fmt"
"reflect"
"strconv"
"strings"
"sync"
"time"
"github.com/sst/opencode-sdk-go/internal/param"
)
var encoders sync.Map // map[reflect.Type]encoderFunc
type encoder struct {
dateFormat string
root bool
settings QuerySettings
}
type encoderFunc func(key string, value reflect.Value) []Pair
type encoderField struct {
tag parsedStructTag
fn encoderFunc
idx []int
}
type encoderEntry struct {
reflect.Type
dateFormat string
root bool
settings QuerySettings
}
type Pair struct {
key string
value string
}
func (e *encoder) typeEncoder(t reflect.Type) encoderFunc {
entry := encoderEntry{
Type: t,
dateFormat: e.dateFormat,
root: e.root,
settings: e.settings,
}
if fi, ok := encoders.Load(entry); ok {
return fi.(encoderFunc)
}
// To deal with recursive types, populate the map with an
// indirect func before we build it. This type waits on the
// real func (f) to be ready and then calls it. This indirect
// func is only used for recursive types.
var (
wg sync.WaitGroup
f encoderFunc
)
wg.Add(1)
fi, loaded := encoders.LoadOrStore(entry, encoderFunc(func(key string, v reflect.Value) []Pair {
wg.Wait()
return f(key, v)
}))
if loaded {
return fi.(encoderFunc)
}
// Compute the real encoder and replace the indirect func with it.
f = e.newTypeEncoder(t)
wg.Done()
encoders.Store(entry, f)
return f
}
func marshalerEncoder(key string, value reflect.Value) []Pair {
s, _ := value.Interface().(json.Marshaler).MarshalJSON()
return []Pair{{key, string(s)}}
}
func (e *encoder) newTypeEncoder(t reflect.Type) encoderFunc {
if t.ConvertibleTo(reflect.TypeOf(time.Time{})) {
return e.newTimeTypeEncoder(t)
}
if !e.root && t.Implements(reflect.TypeOf((*json.Marshaler)(nil)).Elem()) {
return marshalerEncoder
}
e.root = false
switch t.Kind() {
case reflect.Pointer:
encoder := e.typeEncoder(t.Elem())
return func(key string, value reflect.Value) (pairs []Pair) {
if !value.IsValid() || value.IsNil() {
return
}
pairs = encoder(key, value.Elem())
return
}
case reflect.Struct:
return e.newStructTypeEncoder(t)
case reflect.Array:
fallthrough
case reflect.Slice:
return e.newArrayTypeEncoder(t)
case reflect.Map:
return e.newMapEncoder(t)
case reflect.Interface:
return e.newInterfaceEncoder()
default:
return e.newPrimitiveTypeEncoder(t)
}
}
func (e *encoder) newStructTypeEncoder(t reflect.Type) encoderFunc {
if t.Implements(reflect.TypeOf((*param.FieldLike)(nil)).Elem()) {
return e.newFieldTypeEncoder(t)
}
encoderFields := []encoderField{}
// This helper allows us to recursively collect field encoders into a flat
// array. The parameter `index` keeps track of the access patterns necessary
// to get to some field.
var collectEncoderFields func(r reflect.Type, index []int)
collectEncoderFields = func(r reflect.Type, index []int) {
for i := 0; i < r.NumField(); i++ {
idx := append(index, i)
field := t.FieldByIndex(idx)
if !field.IsExported() {
continue
}
// If this is an embedded struct, traverse one level deeper to extract
// the field and get their encoders as well.
if field.Anonymous {
collectEncoderFields(field.Type, idx)
continue
}
// If query tag is not present, then we skip, which is intentionally
// different behavior from the stdlib.
ptag, ok := parseQueryStructTag(field)
if !ok {
continue
}
if ptag.name == "-" && !ptag.inline {
continue
}
dateFormat, ok := parseFormatStructTag(field)
oldFormat := e.dateFormat
if ok {
switch dateFormat {
case "date-time":
e.dateFormat = time.RFC3339
case "date":
e.dateFormat = "2006-01-02"
}
}
encoderFields = append(encoderFields, encoderField{ptag, e.typeEncoder(field.Type), idx})
e.dateFormat = oldFormat
}
}
collectEncoderFields(t, []int{})
return func(key string, value reflect.Value) (pairs []Pair) {
for _, ef := range encoderFields {
var subkey string = e.renderKeyPath(key, ef.tag.name)
if ef.tag.inline {
subkey = key
}
field := value.FieldByIndex(ef.idx)
pairs = append(pairs, ef.fn(subkey, field)...)
}
return
}
}
func (e *encoder) newMapEncoder(t reflect.Type) encoderFunc {
keyEncoder := e.typeEncoder(t.Key())
elementEncoder := e.typeEncoder(t.Elem())
return func(key string, value reflect.Value) (pairs []Pair) {
iter := value.MapRange()
for iter.Next() {
encodedKey := keyEncoder("", iter.Key())
if len(encodedKey) != 1 {
panic("Unexpected number of parts for encoded map key. Are you using a non-primitive for this map?")
}
subkey := encodedKey[0].value
keyPath := e.renderKeyPath(key, subkey)
pairs = append(pairs, elementEncoder(keyPath, iter.Value())...)
}
return
}
}
func (e *encoder) renderKeyPath(key string, subkey string) string {
if len(key) == 0 {
return subkey
}
if e.settings.NestedFormat == NestedQueryFormatDots {
return fmt.Sprintf("%s.%s", key, subkey)
}
return fmt.Sprintf("%s[%s]", key, subkey)
}
func (e *encoder) newArrayTypeEncoder(t reflect.Type) encoderFunc {
switch e.settings.ArrayFormat {
case ArrayQueryFormatComma:
innerEncoder := e.typeEncoder(t.Elem())
return func(key string, v reflect.Value) []Pair {
elements := []string{}
for i := 0; i < v.Len(); i++ {
for _, pair := range innerEncoder("", v.Index(i)) {
elements = append(elements, pair.value)
}
}
if len(elements) == 0 {
return []Pair{}
}
return []Pair{{key, strings.Join(elements, ",")}}
}
case ArrayQueryFormatRepeat:
innerEncoder := e.typeEncoder(t.Elem())
return func(key string, value reflect.Value) (pairs []Pair) {
for i := 0; i < value.Len(); i++ {
pairs = append(pairs, innerEncoder(key, value.Index(i))...)
}
return pairs
}
case ArrayQueryFormatIndices:
panic("The array indices format is not supported yet")
case ArrayQueryFormatBrackets:
innerEncoder := e.typeEncoder(t.Elem())
return func(key string, value reflect.Value) []Pair {
pairs := []Pair{}
for i := 0; i < value.Len(); i++ {
pairs = append(pairs, innerEncoder(key+"[]", value.Index(i))...)
}
return pairs
}
default:
panic(fmt.Sprintf("Unknown ArrayFormat value: %d", e.settings.ArrayFormat))
}
}
func (e *encoder) newPrimitiveTypeEncoder(t reflect.Type) encoderFunc {
switch t.Kind() {
case reflect.Pointer:
inner := t.Elem()
innerEncoder := e.newPrimitiveTypeEncoder(inner)
return func(key string, v reflect.Value) []Pair {
if !v.IsValid() || v.IsNil() {
return nil
}
return innerEncoder(key, v.Elem())
}
case reflect.String:
return func(key string, v reflect.Value) []Pair {
return []Pair{{key, v.String()}}
}
case reflect.Bool:
return func(key string, v reflect.Value) []Pair {
if v.Bool() {
return []Pair{{key, "true"}}
}
return []Pair{{key, "false"}}
}
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return func(key string, v reflect.Value) []Pair {
return []Pair{{key, strconv.FormatInt(v.Int(), 10)}}
}
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return func(key string, v reflect.Value) []Pair {
return []Pair{{key, strconv.FormatUint(v.Uint(), 10)}}
}
case reflect.Float32, reflect.Float64:
return func(key string, v reflect.Value) []Pair {
return []Pair{{key, strconv.FormatFloat(v.Float(), 'f', -1, 64)}}
}
case reflect.Complex64, reflect.Complex128:
bitSize := 64
if t.Kind() == reflect.Complex128 {
bitSize = 128
}
return func(key string, v reflect.Value) []Pair {
return []Pair{{key, strconv.FormatComplex(v.Complex(), 'f', -1, bitSize)}}
}
default:
return func(key string, v reflect.Value) []Pair {
return nil
}
}
}
func (e *encoder) newFieldTypeEncoder(t reflect.Type) encoderFunc {
f, _ := t.FieldByName("Value")
enc := e.typeEncoder(f.Type)
return func(key string, value reflect.Value) []Pair {
present := value.FieldByName("Present")
if !present.Bool() {
return nil
}
null := value.FieldByName("Null")
if null.Bool() {
// TODO: Error?
return nil
}
raw := value.FieldByName("Raw")
if !raw.IsNil() {
return e.typeEncoder(raw.Type())(key, raw)
}
return enc(key, value.FieldByName("Value"))
}
}
func (e *encoder) newTimeTypeEncoder(t reflect.Type) encoderFunc {
format := e.dateFormat
return func(key string, value reflect.Value) []Pair {
return []Pair{{
key,
value.Convert(reflect.TypeOf(time.Time{})).Interface().(time.Time).Format(format),
}}
}
}
func (e encoder) newInterfaceEncoder() encoderFunc {
return func(key string, value reflect.Value) []Pair {
value = value.Elem()
if !value.IsValid() {
return nil
}
return e.typeEncoder(value.Type())(key, value)
}
}

View File

@@ -0,0 +1,50 @@
package apiquery
import (
"net/url"
"reflect"
"time"
)
func MarshalWithSettings(value interface{}, settings QuerySettings) url.Values {
e := encoder{time.RFC3339, true, settings}
kv := url.Values{}
val := reflect.ValueOf(value)
if !val.IsValid() {
return nil
}
typ := val.Type()
for _, pair := range e.typeEncoder(typ)("", val) {
kv.Add(pair.key, pair.value)
}
return kv
}
func Marshal(value interface{}) url.Values {
return MarshalWithSettings(value, QuerySettings{})
}
type Queryer interface {
URLQuery() url.Values
}
type QuerySettings struct {
NestedFormat NestedQueryFormat
ArrayFormat ArrayQueryFormat
}
type NestedQueryFormat int
const (
NestedQueryFormatBrackets NestedQueryFormat = iota
NestedQueryFormatDots
)
type ArrayQueryFormat int
const (
ArrayQueryFormatComma ArrayQueryFormat = iota
ArrayQueryFormatRepeat
ArrayQueryFormatIndices
ArrayQueryFormatBrackets
)
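// Illustrative sketch, not part of the generated file: encoding a hypothetical nested
// struct with dot-delimited keys via the settings above.
type exampleInner struct {
	Name string `query:"name"`
}
type exampleOuter struct {
	Child exampleInner `query:"child"`
}
func exampleQueryMarshal() string {
	values := MarshalWithSettings(exampleOuter{Child: exampleInner{Name: "Alex"}}, QuerySettings{NestedFormat: NestedQueryFormatDots})
	return values.Encode() // "child.name=Alex"
}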

View File

@@ -0,0 +1,335 @@
package apiquery
import (
"net/url"
"testing"
"time"
)
func P[T any](v T) *T { return &v }
type Primitives struct {
A bool `query:"a"`
B int `query:"b"`
C uint `query:"c"`
D float64 `query:"d"`
E float32 `query:"e"`
F []int `query:"f"`
}
type PrimitivePointers struct {
A *bool `query:"a"`
B *int `query:"b"`
C *uint `query:"c"`
D *float64 `query:"d"`
E *float32 `query:"e"`
F *[]int `query:"f"`
}
type Slices struct {
Slice []Primitives `query:"slices"`
Mixed []interface{} `query:"mixed"`
}
type DateTime struct {
Date time.Time `query:"date" format:"date"`
DateTime time.Time `query:"date-time" format:"date-time"`
}
type AdditionalProperties struct {
A bool `query:"a"`
Extras map[string]interface{} `query:"-,inline"`
}
type Recursive struct {
Name string `query:"name"`
Child *Recursive `query:"child"`
}
type UnknownStruct struct {
Unknown interface{} `query:"unknown"`
}
type UnionStruct struct {
Union Union `query:"union" format:"date"`
}
type Union interface {
union()
}
type UnionInteger int64
func (UnionInteger) union() {}
type UnionString string
func (UnionString) union() {}
type UnionStructA struct {
Type string `query:"type"`
A string `query:"a"`
B string `query:"b"`
}
func (UnionStructA) union() {}
type UnionStructB struct {
Type string `query:"type"`
A string `query:"a"`
}
func (UnionStructB) union() {}
type UnionTime time.Time
func (UnionTime) union() {}
type DeeplyNested struct {
A DeeplyNested1 `query:"a"`
}
type DeeplyNested1 struct {
B DeeplyNested2 `query:"b"`
}
type DeeplyNested2 struct {
C DeeplyNested3 `query:"c"`
}
type DeeplyNested3 struct {
D *string `query:"d"`
}
var tests = map[string]struct {
enc string
val interface{}
settings QuerySettings
}{
"primitives": {
"a=false&b=237628372683&c=654&d=9999.43&e=43.7599983215332&f=1,2,3,4",
Primitives{A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}},
QuerySettings{},
},
"slices_brackets": {
`mixed[]=1&mixed[]=2.3&mixed[]=hello&slices[][a]=false&slices[][a]=false&slices[][b]=237628372683&slices[][b]=237628372683&slices[][c]=654&slices[][c]=654&slices[][d]=9999.43&slices[][d]=9999.43&slices[][e]=43.7599983215332&slices[][e]=43.7599983215332&slices[][f][]=1&slices[][f][]=2&slices[][f][]=3&slices[][f][]=4&slices[][f][]=1&slices[][f][]=2&slices[][f][]=3&slices[][f][]=4`,
Slices{
Slice: []Primitives{
{A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}},
{A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}},
},
Mixed: []interface{}{1, 2.3, "hello"},
},
QuerySettings{ArrayFormat: ArrayQueryFormatBrackets},
},
"slices_comma": {
`mixed=1,2.3,hello`,
Slices{
Mixed: []interface{}{1, 2.3, "hello"},
},
QuerySettings{ArrayFormat: ArrayQueryFormatComma},
},
"slices_repeat": {
`mixed=1&mixed=2.3&mixed=hello&slices[a]=false&slices[a]=false&slices[b]=237628372683&slices[b]=237628372683&slices[c]=654&slices[c]=654&slices[d]=9999.43&slices[d]=9999.43&slices[e]=43.7599983215332&slices[e]=43.7599983215332&slices[f]=1&slices[f]=2&slices[f]=3&slices[f]=4&slices[f]=1&slices[f]=2&slices[f]=3&slices[f]=4`,
Slices{
Slice: []Primitives{
{A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}},
{A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}},
},
Mixed: []interface{}{1, 2.3, "hello"},
},
QuerySettings{ArrayFormat: ArrayQueryFormatRepeat},
},
"primitive_pointer_struct": {
"a=false&b=237628372683&c=654&d=9999.43&e=43.7599983215332&f=1,2,3,4,5",
PrimitivePointers{
A: P(false),
B: P(237628372683),
C: P(uint(654)),
D: P(9999.43),
E: P(float32(43.76)),
F: &[]int{1, 2, 3, 4, 5},
},
QuerySettings{},
},
"datetime_struct": {
`date=2006-01-02&date-time=2006-01-02T15:04:05Z`,
DateTime{
Date: time.Date(2006, time.January, 2, 0, 0, 0, 0, time.UTC),
DateTime: time.Date(2006, time.January, 2, 15, 4, 5, 0, time.UTC),
},
QuerySettings{},
},
"additional_properties": {
`a=true&bar=value&foo=true`,
AdditionalProperties{
A: true,
Extras: map[string]interface{}{
"bar": "value",
"foo": true,
},
},
QuerySettings{},
},
"recursive_struct_brackets": {
`child[name]=Alex&name=Robert`,
Recursive{Name: "Robert", Child: &Recursive{Name: "Alex"}},
QuerySettings{NestedFormat: NestedQueryFormatBrackets},
},
"recursive_struct_dots": {
`child.name=Alex&name=Robert`,
Recursive{Name: "Robert", Child: &Recursive{Name: "Alex"}},
QuerySettings{NestedFormat: NestedQueryFormatDots},
},
"unknown_struct_number": {
`unknown=12`,
UnknownStruct{
Unknown: 12.,
},
QuerySettings{},
},
"unknown_struct_map_brackets": {
`unknown[foo]=bar`,
UnknownStruct{
Unknown: map[string]interface{}{
"foo": "bar",
},
},
QuerySettings{NestedFormat: NestedQueryFormatBrackets},
},
"unknown_struct_map_dots": {
`unknown.foo=bar`,
UnknownStruct{
Unknown: map[string]interface{}{
"foo": "bar",
},
},
QuerySettings{NestedFormat: NestedQueryFormatDots},
},
"union_string": {
`union=hello`,
UnionStruct{
Union: UnionString("hello"),
},
QuerySettings{},
},
"union_integer": {
`union=12`,
UnionStruct{
Union: UnionInteger(12),
},
QuerySettings{},
},
"union_struct_discriminated_a": {
`union[a]=foo&union[b]=bar&union[type]=typeA`,
UnionStruct{
Union: UnionStructA{
Type: "typeA",
A: "foo",
B: "bar",
},
},
QuerySettings{},
},
"union_struct_discriminated_b": {
`union[a]=foo&union[type]=typeB`,
UnionStruct{
Union: UnionStructB{
Type: "typeB",
A: "foo",
},
},
QuerySettings{},
},
"union_struct_time": {
`union=2010-05-23`,
UnionStruct{
Union: UnionTime(time.Date(2010, 05, 23, 0, 0, 0, 0, time.UTC)),
},
QuerySettings{},
},
"deeply_nested_brackets": {
`a[b][c][d]=hello`,
DeeplyNested{
A: DeeplyNested1{
B: DeeplyNested2{
C: DeeplyNested3{
D: P("hello"),
},
},
},
},
QuerySettings{NestedFormat: NestedQueryFormatBrackets},
},
"deeply_nested_dots": {
`a.b.c.d=hello`,
DeeplyNested{
A: DeeplyNested1{
B: DeeplyNested2{
C: DeeplyNested3{
D: P("hello"),
},
},
},
},
QuerySettings{NestedFormat: NestedQueryFormatDots},
},
"deeply_nested_brackets_empty": {
``,
DeeplyNested{
A: DeeplyNested1{
B: DeeplyNested2{
C: DeeplyNested3{
D: nil,
},
},
},
},
QuerySettings{NestedFormat: NestedQueryFormatBrackets},
},
"deeply_nested_dots_empty": {
``,
DeeplyNested{
A: DeeplyNested1{
B: DeeplyNested2{
C: DeeplyNested3{
D: nil,
},
},
},
},
QuerySettings{NestedFormat: NestedQueryFormatDots},
},
}
func TestEncode(t *testing.T) {
for name, test := range tests {
t.Run(name, func(t *testing.T) {
values := MarshalWithSettings(test.val, test.settings)
str, _ := url.QueryUnescape(values.Encode())
if str != test.enc {
t.Fatalf("expected %+#v to serialize to %s but got %s", test.val, test.enc, str)
}
})
}
}

View File

@@ -0,0 +1,41 @@
package apiquery
import (
"reflect"
"strings"
)
const queryStructTag = "query"
const formatStructTag = "format"
type parsedStructTag struct {
name string
omitempty bool
inline bool
}
func parseQueryStructTag(field reflect.StructField) (tag parsedStructTag, ok bool) {
raw, ok := field.Tag.Lookup(queryStructTag)
if !ok {
return
}
parts := strings.Split(raw, ",")
if len(parts) == 0 {
return tag, false
}
tag.name = parts[0]
for _, part := range parts[1:] {
switch part {
case "omitempty":
tag.omitempty = true
case "inline":
tag.inline = true
}
}
return
}
func parseFormatStructTag(field reflect.StructField) (format string, ok bool) {
format, ok = field.Tag.Lookup(formatStructTag)
return
}

View File

@@ -0,0 +1,29 @@
package param
import (
"fmt"
)
type FieldLike interface{ field() }
// Field is a wrapper used for all values sent to the API,
// to distinguish zero values from null or omitted fields.
//
// It also allows sending arbitrary deserializable values.
//
// To instantiate a Field, use the helpers exported from
// the package root: `F()`, `Null()`, `Raw()`, etc.
type Field[T any] struct {
FieldLike
Value T
Null bool
Present bool
Raw any
}
func (f Field[T]) String() string {
if s, ok := any(f.Value).(fmt.Stringer); ok {
return s.String()
}
return fmt.Sprintf("%v", f.Value)
}
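// Illustrative sketch, not part of the generated file: some of the states a Field can
// represent, constructed directly. The package-root helpers mentioned above (F, Null,
// Raw) are expected to produce values shaped like these.
func exampleFields() (set, null, raw Field[int]) {
	set = Field[int]{Value: 42, Present: true}          // an explicitly provided value
	null = Field[int]{Null: true, Present: true}        // an explicit JSON null
	raw = Field[int]{Raw: "not-an-int", Present: true}  // an arbitrary raw value sent in place of Value
	return
}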

View File

@@ -0,0 +1,629 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
package requestconfig
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"math"
"math/rand"
"mime"
"net/http"
"net/url"
"runtime"
"strconv"
"strings"
"time"
"github.com/sst/opencode-sdk-go/internal"
"github.com/sst/opencode-sdk-go/internal/apierror"
"github.com/sst/opencode-sdk-go/internal/apiform"
"github.com/sst/opencode-sdk-go/internal/apiquery"
"github.com/sst/opencode-sdk-go/internal/param"
)
func getDefaultHeaders() map[string]string {
return map[string]string{
"User-Agent": fmt.Sprintf("Opencode/Go %s", internal.PackageVersion),
}
}
func getNormalizedOS() string {
switch runtime.GOOS {
case "ios":
return "iOS"
case "android":
return "Android"
case "darwin":
return "MacOS"
case "window":
return "Windows"
case "freebsd":
return "FreeBSD"
case "openbsd":
return "OpenBSD"
case "linux":
return "Linux"
default:
return fmt.Sprintf("Other:%s", runtime.GOOS)
}
}
func getNormalizedArchitecture() string {
switch runtime.GOARCH {
case "386":
return "x32"
case "amd64":
return "x64"
case "arm":
return "arm"
case "arm64":
return "arm64"
default:
return fmt.Sprintf("other:%s", runtime.GOARCH)
}
}
func getPlatformProperties() map[string]string {
return map[string]string{
"X-Stainless-Lang": "go",
"X-Stainless-Package-Version": internal.PackageVersion,
"X-Stainless-OS": getNormalizedOS(),
"X-Stainless-Arch": getNormalizedArchitecture(),
"X-Stainless-Runtime": "go",
"X-Stainless-Runtime-Version": runtime.Version(),
}
}
type RequestOption interface {
Apply(*RequestConfig) error
}
type RequestOptionFunc func(*RequestConfig) error
type PreRequestOptionFunc func(*RequestConfig) error
func (s RequestOptionFunc) Apply(r *RequestConfig) error { return s(r) }
func (s PreRequestOptionFunc) Apply(r *RequestConfig) error { return s(r) }
func NewRequestConfig(ctx context.Context, method string, u string, body interface{}, dst interface{}, opts ...RequestOption) (*RequestConfig, error) {
var reader io.Reader
contentType := "application/json"
hasSerializationFunc := false
if body, ok := body.(json.Marshaler); ok {
content, err := body.MarshalJSON()
if err != nil {
return nil, err
}
reader = bytes.NewBuffer(content)
hasSerializationFunc = true
}
if body, ok := body.(apiform.Marshaler); ok {
var (
content []byte
err error
)
content, contentType, err = body.MarshalMultipart()
if err != nil {
return nil, err
}
reader = bytes.NewBuffer(content)
hasSerializationFunc = true
}
if body, ok := body.(apiquery.Queryer); ok {
hasSerializationFunc = true
params := body.URLQuery().Encode()
if params != "" {
u = u + "?" + params
}
}
if body, ok := body.([]byte); ok {
reader = bytes.NewBuffer(body)
hasSerializationFunc = true
}
if body, ok := body.(io.Reader); ok {
reader = body
hasSerializationFunc = true
}
// Fallback to json serialization if none of the serialization functions that we expect
// to see is present.
if body != nil && !hasSerializationFunc {
content, err := json.Marshal(body)
if err != nil {
return nil, err
}
reader = bytes.NewBuffer(content)
}
req, err := http.NewRequestWithContext(ctx, method, u, nil)
if err != nil {
return nil, err
}
if reader != nil {
req.Header.Set("Content-Type", contentType)
}
req.Header.Set("Accept", "application/json")
req.Header.Set("X-Stainless-Retry-Count", "0")
req.Header.Set("X-Stainless-Timeout", "0")
for k, v := range getDefaultHeaders() {
req.Header.Add(k, v)
}
for k, v := range getPlatformProperties() {
req.Header.Add(k, v)
}
cfg := RequestConfig{
MaxRetries: 2,
Context: ctx,
Request: req,
HTTPClient: http.DefaultClient,
Body: reader,
}
cfg.ResponseBodyInto = dst
err = cfg.Apply(opts...)
if err != nil {
return nil, err
}
// This must run after `cfg.Apply(...)` above in case the request timeout gets modified. We also only
// apply our own logic for it if it's still "0" from above. If it's not, then it was deleted or modified
// by the user and we should respect that.
if req.Header.Get("X-Stainless-Timeout") == "0" {
if cfg.RequestTimeout == time.Duration(0) {
req.Header.Del("X-Stainless-Timeout")
} else {
req.Header.Set("X-Stainless-Timeout", strconv.Itoa(int(cfg.RequestTimeout.Seconds())))
}
}
return &cfg, nil
}
func UseDefaultParam[T any](dst *param.Field[T], src *T) {
if !dst.Present && src != nil {
dst.Value = *src
dst.Present = true
}
}
// This interface is primarily used to describe an [*http.Client], but also
// supports custom HTTP implementations.
type HTTPDoer interface {
Do(req *http.Request) (*http.Response, error)
}
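// Illustrative sketch, not part of the generated file: a custom HTTPDoer that injects
// a header before delegating to http.DefaultClient.
type headerDoer struct{ key, value string }

func (d headerDoer) Do(req *http.Request) (*http.Response, error) {
	req.Header.Set(d.key, d.value)
	return http.DefaultClient.Do(req)
}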
// RequestConfig represents all the state related to one request.
//
// Editing the variables inside RequestConfig directly is unstable api. Prefer
// composing the RequestOption instead if possible.
type RequestConfig struct {
MaxRetries int
RequestTimeout time.Duration
Context context.Context
Request *http.Request
BaseURL *url.URL
// DefaultBaseURL will be used if BaseURL is not explicitly overridden using
// WithBaseURL.
DefaultBaseURL *url.URL
CustomHTTPDoer HTTPDoer
HTTPClient *http.Client
Middlewares []middleware
// If ResponseBodyInto is not nil, then we will attempt to deserialize into
// ResponseBodyInto. If it is a *[]byte, the raw response body is returned
// as-is.
ResponseBodyInto interface{}
// ResponseInto copies the *http.Response of the corresponding request into the
// given address.
ResponseInto **http.Response
Body io.Reader
}
// middleware is the same type as the Middleware type found in the [option] package,
// redeclared here to avoid a circular dependency.
type middleware = func(*http.Request, middlewareNext) (*http.Response, error)
// middlewareNext is the same type as the MiddlewareNext type found in the [option] package,
// redeclared here to avoid a circular dependency.
type middlewareNext = func(*http.Request) (*http.Response, error)
func applyMiddleware(middleware middleware, next middlewareNext) middlewareNext {
return func(req *http.Request) (res *http.Response, err error) {
return middleware(req, next)
}
}
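// Illustrative sketch, not part of the generated file: a middleware matching the alias
// above that logs the method and URL before handing the request to the next handler.
func loggingMiddleware(req *http.Request, next middlewareNext) (*http.Response, error) {
	fmt.Printf("%s %s\n", req.Method, req.URL)
	return next(req)
}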
func shouldRetry(req *http.Request, res *http.Response) bool {
// If there is no way to recover the Body, then we shouldn't retry.
if req.Body != nil && req.GetBody == nil {
return false
}
// If there is no response, that indicates that there is a connection error
// so we retry the request.
if res == nil {
return true
}
// If the header explicitly wants a retry behavior, respect that over the
// http status code.
if res.Header.Get("x-should-retry") == "true" {
return true
}
if res.Header.Get("x-should-retry") == "false" {
return false
}
return res.StatusCode == http.StatusRequestTimeout ||
res.StatusCode == http.StatusConflict ||
res.StatusCode == http.StatusTooManyRequests ||
res.StatusCode >= http.StatusInternalServerError
}
func parseRetryAfterHeader(resp *http.Response) (time.Duration, bool) {
if resp == nil {
return 0, false
}
type retryData struct {
header string
units time.Duration
// custom is optional and is used when the regular parsing fails;
// the returned duration is used verbatim (units is not applied).
custom func(string) (time.Duration, bool)
}
nop := func(string) (time.Duration, bool) { return 0, false }
// the headers are listed in order of preference
retries := []retryData{
{
header: "Retry-After-Ms",
units: time.Millisecond,
custom: nop,
},
{
header: "Retry-After",
units: time.Second,
// retry-after values are expressed in either number of
// seconds or an HTTP-date indicating when to try again
custom: func(ra string) (time.Duration, bool) {
t, err := time.Parse(time.RFC1123, ra)
if err != nil {
return 0, false
}
return time.Until(t), true
},
},
}
for _, retry := range retries {
v := resp.Header.Get(retry.header)
if v == "" {
continue
}
if retryAfter, err := strconv.ParseFloat(v, 64); err == nil {
return time.Duration(retryAfter * float64(retry.units)), true
}
if d, ok := retry.custom(v); ok {
return d, true
}
}
return 0, false
}
// isBeforeContextDeadline reports whether the non-zero Time t is
// before ctx's deadline. If ctx does not have a deadline, it
// always reports true (the deadline is considered infinite).
func isBeforeContextDeadline(t time.Time, ctx context.Context) bool {
d, ok := ctx.Deadline()
if !ok {
return true
}
return t.Before(d)
}
// bodyWithTimeout is an io.ReadCloser which can observe a context's cancel func
// to handle timeouts etc. It wraps an existing io.ReadCloser.
type bodyWithTimeout struct {
stop func() // stops the time.Timer waiting to cancel the request
rc io.ReadCloser
}
func (b *bodyWithTimeout) Read(p []byte) (n int, err error) {
// Reads pass straight through to the wrapped body; Close (below) is what
// releases the timeout's cancel func.
return b.rc.Read(p)
}
func (b *bodyWithTimeout) Close() error {
err := b.rc.Close()
b.stop()
return err
}
func retryDelay(res *http.Response, retryCount int) time.Duration {
// If the API asks us to wait a certain amount of time (and it's a reasonable amount),
// just do what it says.
if retryAfterDelay, ok := parseRetryAfterHeader(res); ok && 0 <= retryAfterDelay && retryAfterDelay < time.Minute {
return retryAfterDelay
}
maxDelay := 8 * time.Second
delay := time.Duration(0.5 * float64(time.Second) * math.Pow(2, float64(retryCount)))
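// For example: retryCount 0 -> 0.5s, 1 -> 1s, 2 -> 2s, 3 -> 4s, 4 -> 8s; larger counts
// are capped at maxDelay below, and up to a quarter of the delay is then subtracted as jitter.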
if delay > maxDelay {
delay = maxDelay
}
jitter := rand.Int63n(int64(delay / 4))
delay -= time.Duration(jitter)
return delay
}
func (cfg *RequestConfig) Execute() (err error) {
if cfg.BaseURL == nil {
if cfg.DefaultBaseURL != nil {
cfg.BaseURL = cfg.DefaultBaseURL
} else {
return fmt.Errorf("requestconfig: base url is not set")
}
}
cfg.Request.URL, err = cfg.BaseURL.Parse(strings.TrimLeft(cfg.Request.URL.String(), "/"))
if err != nil {
return err
}
if cfg.Body != nil && cfg.Request.Body == nil {
switch body := cfg.Body.(type) {
case *bytes.Buffer:
b := body.Bytes()
cfg.Request.ContentLength = int64(body.Len())
cfg.Request.GetBody = func() (io.ReadCloser, error) { return io.NopCloser(bytes.NewReader(b)), nil }
cfg.Request.Body, _ = cfg.Request.GetBody()
case *bytes.Reader:
cfg.Request.ContentLength = int64(body.Len())
cfg.Request.GetBody = func() (io.ReadCloser, error) {
_, err := body.Seek(0, 0)
return io.NopCloser(body), err
}
cfg.Request.Body, _ = cfg.Request.GetBody()
default:
if rc, ok := body.(io.ReadCloser); ok {
cfg.Request.Body = rc
} else {
cfg.Request.Body = io.NopCloser(body)
}
}
}
handler := cfg.HTTPClient.Do
if cfg.CustomHTTPDoer != nil {
handler = cfg.CustomHTTPDoer.Do
}
for i := len(cfg.Middlewares) - 1; i >= 0; i -= 1 {
handler = applyMiddleware(cfg.Middlewares[i], handler)
}
// Don't send the current retry count in the headers if the caller modified the header defaults.
shouldSendRetryCount := cfg.Request.Header.Get("X-Stainless-Retry-Count") == "0"
var res *http.Response
var cancel context.CancelFunc
for retryCount := 0; retryCount <= cfg.MaxRetries; retryCount += 1 {
ctx := cfg.Request.Context()
if cfg.RequestTimeout != time.Duration(0) && isBeforeContextDeadline(time.Now().Add(cfg.RequestTimeout), ctx) {
ctx, cancel = context.WithTimeout(ctx, cfg.RequestTimeout)
defer func() {
// The cancel function is nil if it was handed off to be handled in a different scope.
if cancel != nil {
cancel()
}
}()
}
req := cfg.Request.Clone(ctx)
if shouldSendRetryCount {
req.Header.Set("X-Stainless-Retry-Count", strconv.Itoa(retryCount))
}
res, err = handler(req)
if ctx != nil && ctx.Err() != nil {
return ctx.Err()
}
if !shouldRetry(cfg.Request, res) || retryCount >= cfg.MaxRetries {
break
}
// Prepare next request and wait for the retry delay
if cfg.Request.GetBody != nil {
cfg.Request.Body, err = cfg.Request.GetBody()
if err != nil {
return err
}
}
// Can't actually refresh the body, so we don't attempt to retry here
if cfg.Request.GetBody == nil && cfg.Request.Body != nil {
break
}
time.Sleep(retryDelay(res, retryCount))
}
// Save the *http.Response if requested, even if there was an error making the request. This is
// useful in cases where you might want to debug by inspecting the response. Note that if err != nil,
// the response should generally be empty, but there are edge cases.
if cfg.ResponseInto != nil {
*cfg.ResponseInto = res
}
if responseBodyInto, ok := cfg.ResponseBodyInto.(**http.Response); ok {
*responseBodyInto = res
}
// If there was a connection error in the final request or any other transport error,
// return that early without trying to coerce into an APIError.
if err != nil {
return err
}
if res.StatusCode >= 400 {
contents, err := io.ReadAll(res.Body)
res.Body.Close()
if err != nil {
return err
}
// If there is an APIError, re-populate the response body so that debugging
// utilities can conveniently dump the response without issue.
res.Body = io.NopCloser(bytes.NewBuffer(contents))
// Load the contents into the error format if it is provided.
aerr := apierror.Error{Request: cfg.Request, Response: res, StatusCode: res.StatusCode}
err = aerr.UnmarshalJSON(contents)
if err != nil {
return err
}
return &aerr
}
_, intoCustomResponseBody := cfg.ResponseBodyInto.(**http.Response)
if cfg.ResponseBodyInto == nil || intoCustomResponseBody {
// We aren't reading the response body in this scope, but whoever is will need the
// cancel func from the context to observe request timeouts.
// Put the cancel function in the response body so it can be handled elsewhere.
if cancel != nil {
res.Body = &bodyWithTimeout{rc: res.Body, stop: cancel}
cancel = nil
}
return nil
}
contents, err := io.ReadAll(res.Body)
res.Body.Close()
if err != nil {
return fmt.Errorf("error reading response body: %w", err)
}
// If the response is not JSON, return it as plain text.
contentType := res.Header.Get("content-type")
mediaType, _, _ := mime.ParseMediaType(contentType)
isJSON := strings.Contains(mediaType, "application/json") || strings.HasSuffix(mediaType, "+json")
if !isJSON {
switch dst := cfg.ResponseBodyInto.(type) {
case *string:
*dst = string(contents)
case **string:
tmp := string(contents)
*dst = &tmp
case *[]byte:
*dst = contents
default:
return fmt.Errorf("expected destination type of 'string' or '[]byte' for responses with content-type '%s' that is not 'application/json'", contentType)
}
return nil
}
switch dst := cfg.ResponseBodyInto.(type) {
// If the response happens to be a byte array, deserialize the body as-is.
case *[]byte:
*dst = contents
default:
err = json.NewDecoder(bytes.NewReader(contents)).Decode(cfg.ResponseBodyInto)
if err != nil {
return fmt.Errorf("error parsing response json: %w", err)
}
}
return nil
}
func ExecuteNewRequest(ctx context.Context, method string, u string, body interface{}, dst interface{}, opts ...RequestOption) error {
cfg, err := NewRequestConfig(ctx, method, u, body, dst, opts...)
if err != nil {
return err
}
return cfg.Execute()
}
func (cfg *RequestConfig) Clone(ctx context.Context) *RequestConfig {
if cfg == nil {
return nil
}
req := cfg.Request.Clone(ctx)
var err error
if req.Body != nil {
req.Body, err = req.GetBody()
}
if err != nil {
return nil
}
new := &RequestConfig{
MaxRetries: cfg.MaxRetries,
RequestTimeout: cfg.RequestTimeout,
Context: ctx,
Request: req,
BaseURL: cfg.BaseURL,
HTTPClient: cfg.HTTPClient,
Middlewares: cfg.Middlewares,
}
return new
}
func (cfg *RequestConfig) Apply(opts ...RequestOption) error {
for _, opt := range opts {
err := opt.Apply(cfg)
if err != nil {
return err
}
}
return nil
}
// PreRequestOptions collects all the options which need to be known before a call
// to [ExecuteNewRequest], such as path parameters or global defaults, and returns
// a [RequestConfig] with those options applied.
//
// Only request option functions of type [PreRequestOptionFunc] are applied.
func PreRequestOptions(opts ...RequestOption) (RequestConfig, error) {
cfg := RequestConfig{}
for _, opt := range opts {
if opt, ok := opt.(PreRequestOptionFunc); ok {
err := opt.Apply(&cfg)
if err != nil {
return cfg, err
}
}
}
return cfg, nil
}
// WithDefaultBaseURL returns a RequestOption that sets the client's default Base URL.
// This is always overridden by setting a base URL with WithBaseURL.
// WithBaseURL should be used instead of WithDefaultBaseURL except in internal code.
func WithDefaultBaseURL(baseURL string) RequestOption {
u, err := url.Parse(baseURL)
return RequestOptionFunc(func(r *RequestConfig) error {
if err != nil {
return err
}
r.DefaultBaseURL = u
return nil
})
}

View File

@@ -0,0 +1,27 @@
package testutil
import (
"net/http"
"os"
"strconv"
"testing"
)
func CheckTestServer(t *testing.T, url string) bool {
if _, err := http.Get(url); err != nil {
const SKIP_MOCK_TESTS = "SKIP_MOCK_TESTS"
if str, ok := os.LookupEnv(SKIP_MOCK_TESTS); ok {
skip, err := strconv.ParseBool(str)
if err != nil {
t.Fatalf("strconv.ParseBool(os.LookupEnv(%s)) failed: %s", SKIP_MOCK_TESTS, err)
}
if skip {
t.Skip("The test will not run without a mock Prism server running against your OpenAPI spec")
return false
}
t.Errorf("The test will not run without a mock Prism server running against your OpenAPI spec. You can set the environment variable %s to true to skip running any tests that require the mock server", SKIP_MOCK_TESTS)
return false
}
}
return true
}
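// Illustrative note, not part of the generated file: a test that needs the mock server
// typically guards itself like this (the URL shown is an assumption):
//
//	if !testutil.CheckTestServer(t, "http://localhost:4010") {
//		return
//	}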

View File

@@ -0,0 +1,5 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
package internal
const PackageVersion = "0.1.0-alpha.8" // x-release-please-version