From 4178159ba2463a40282e89ac9faab9e8fa2d6863 Mon Sep 17 00:00:00 2001 From: Miguel Targa Date: Tue, 23 Jul 2024 22:08:57 -0400 Subject: [PATCH 01/43] Add New Facet Search Method - https://www.meilisearch.com/docs/reference/api/facet_search --- index_facet_search.go | 60 ++++++++++++++++++++++++++++ index_facet_search_test.go | 80 ++++++++++++++++++++++++++++++++++++++ types.go | 15 +++++++ 3 files changed, 155 insertions(+) create mode 100644 index_facet_search.go create mode 100644 index_facet_search_test.go diff --git a/index_facet_search.go b/index_facet_search.go new file mode 100644 index 00000000..2a14c6b4 --- /dev/null +++ b/index_facet_search.go @@ -0,0 +1,60 @@ +package meilisearch + +import ( + "encoding/json" + "errors" + "net/http" +) + +var ErrNoFacetSearchRequest = errors.New("no search facet request provided") + +func (i Index) FacetSearch(request *FacetSearchRequest) (*json.RawMessage, error) { + if request == nil { + return nil, ErrNoFacetSearchRequest + } + + searchPostRequestParams := FacetSearchPostRequestParams(request) + + resp := &json.RawMessage{} + + req := internalRequest{ + endpoint: "/indexes/" + i.UID + "/facet-search", + method: http.MethodPost, + contentType: contentTypeJSON, + withRequest: searchPostRequestParams, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "FacetSearch", + } + + if err := i.client.executeRequest(req); err != nil { + return nil, err + } + + return resp, nil +} + +func FacetSearchPostRequestParams(request *FacetSearchRequest) map[string]interface{} { + params := make(map[string]interface{}, 22) + + if request.Q != "" { + params["q"] = request.Q + } + if request.FacetName != "" { + params["facetName"] = request.FacetName + } + if request.FacetQuery != "" { + params["facetQuery"] = request.FacetQuery + } + if request.Filter != "" { + params["filter"] = request.Filter + } + if request.MatchingStrategy != "" { + params["matchingStrategy"] = request.MatchingStrategy + } + if 
len(request.AttributesToSearchOn) != 0 { + params["attributesToSearchOn"] = request.AttributesToSearchOn + } + + return params +} diff --git a/index_facet_search_test.go b/index_facet_search_test.go new file mode 100644 index 00000000..6858a616 --- /dev/null +++ b/index_facet_search_test.go @@ -0,0 +1,80 @@ +package meilisearch + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestIndex_FacetSearch(t *testing.T) { + type args struct { + UID string + PrimaryKey string + client *Client + request *FacetSearchRequest + filterableAttributes []string + } + + tests := []struct { + name string + args args + want *FacetSearchResponse + wantErr bool + }{ + { + name: "TestIndexBasicFacetSearch", + args: args{ + UID: "indexUID", + client: defaultClient, + request: &FacetSearchRequest{ + FacetName: "tag", + FacetQuery: "Novel", + }, + filterableAttributes: []string{"tag"}, + }, + want: &FacetSearchResponse{ + FacetHits: []interface{}{ + map[string]interface{}{ + "value": "Novel", "count": float64(5), + }, + }, + FacetQuery: "Novel", + }, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + SetUpIndexForFaceting() + c := tt.args.client + i := c.Index(tt.args.UID) + t.Cleanup(cleanup(c)) + + updateFilter, err := i.UpdateFilterableAttributes(&tt.args.filterableAttributes) + require.NoError(t, err) + testWaitForTask(t, i, updateFilter) + + gotRaw, err := i.FacetSearch(tt.args.request) + + if tt.wantErr { + require.Error(t, err) + require.Nil(t, tt.want) + return + } + + require.NoError(t, err) + // Unmarshall the raw response from FacetSearch into a FacetSearchResponse + var got FacetSearchResponse + err = json.Unmarshal(*gotRaw, &got) + require.NoError(t, err, "error unmarshalling raw got FacetSearchResponse") + + require.Equal(t, len(tt.want.FacetHits), len(got.FacetHits)) + for len := range got.FacetHits { + require.Equal(t, tt.want.FacetHits[len].(map[string]interface{})["value"], 
got.FacetHits[len].(map[string]interface{})["value"]) + require.Equal(t, tt.want.FacetHits[len].(map[string]interface{})["count"], got.FacetHits[len].(map[string]interface{})["count"]) + } + require.Equal(t, tt.want.FacetQuery, got.FacetQuery) + }) + } +} diff --git a/types.go b/types.go index 126c06ca..cab5328a 100644 --- a/types.go +++ b/types.go @@ -394,6 +394,21 @@ type MultiSearchResponse struct { Results []SearchResponse `json:"results"` } +type FacetSearchRequest struct { + FacetName string `json:"facetName,omitempty"` + FacetQuery string `json:"facetQuery,omitempty"` + Q string `json:"q,omitempty"` + Filter string `json:"filter,omitempty"` + MatchingStrategy string `json:"matchingStrategy,omitempty"` + AttributesToSearchOn []string `json:"attributesToSearchOn,omitempty"` +} + +type FacetSearchResponse struct { + FacetHits []interface{} `json:"facetHits"` + FacetQuery string `json:"facetQuery"` + processingTimeMs int64 `json:"processingTimeMs"` +} + // DocumentQuery is the request body get one documents method type DocumentQuery struct { Fields []string `json:"fields,omitempty"` From 2310d3c674849a55c70d84cff4700e2d8e742bf6 Mon Sep 17 00:00:00 2001 From: Miguel Targa Date: Tue, 23 Jul 2024 22:17:36 -0400 Subject: [PATCH 02/43] Fix ProcessingTimeMs case --- types.go | 2 +- types_easyjson.go | 693 ++++++++++++++++++++++++++++++++-------------- 2 files changed, 488 insertions(+), 207 deletions(-) diff --git a/types.go b/types.go index cab5328a..fe7b3a4e 100644 --- a/types.go +++ b/types.go @@ -406,7 +406,7 @@ type FacetSearchRequest struct { type FacetSearchResponse struct { FacetHits []interface{} `json:"facetHits"` FacetQuery string `json:"facetQuery"` - processingTimeMs int64 `json:"processingTimeMs"` + ProcessingTimeMs int64 `json:"processingTimeMs"` } // DocumentQuery is the request body get one documents method diff --git a/types_easyjson.go b/types_easyjson.go index b8292390..d7408675 100644 --- a/types_easyjson.go +++ b/types_easyjson.go @@ -4312,7 
+4312,288 @@ func (v *Faceting) UnmarshalJSON(data []byte) error { func (v *Faceting) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(in *jlexer.Lexer, out *Embedder) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(in *jlexer.Lexer, out *FacetSearchResponse) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "facetHits": + if in.IsNull() { + in.Skip() + out.FacetHits = nil + } else { + in.Delim('[') + if out.FacetHits == nil { + if !in.IsDelim(']') { + out.FacetHits = make([]interface{}, 0, 4) + } else { + out.FacetHits = []interface{}{} + } + } else { + out.FacetHits = (out.FacetHits)[:0] + } + for !in.IsDelim(']') { + var v105 interface{} + if m, ok := v105.(easyjson.Unmarshaler); ok { + m.UnmarshalEasyJSON(in) + } else if m, ok := v105.(json.Unmarshaler); ok { + _ = m.UnmarshalJSON(in.Raw()) + } else { + v105 = in.Interface() + } + out.FacetHits = append(out.FacetHits, v105) + in.WantComma() + } + in.Delim(']') + } + case "facetQuery": + out.FacetQuery = string(in.String()) + case "processingTimeMs": + out.ProcessingTimeMs = int64(in.Int64()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(out *jwriter.Writer, in FacetSearchResponse) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"facetHits\":" + out.RawString(prefix[1:]) + if in.FacetHits == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v106, v107 := range in.FacetHits { + if v106 > 0 { + out.RawByte(',') + } + if m, 
ok := v107.(easyjson.Marshaler); ok { + m.MarshalEasyJSON(out) + } else if m, ok := v107.(json.Marshaler); ok { + out.Raw(m.MarshalJSON()) + } else { + out.Raw(json.Marshal(v107)) + } + } + out.RawByte(']') + } + } + { + const prefix string = ",\"facetQuery\":" + out.RawString(prefix) + out.String(string(in.FacetQuery)) + } + { + const prefix string = ",\"processingTimeMs\":" + out.RawString(prefix) + out.Int64(int64(in.ProcessingTimeMs)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v FacetSearchResponse) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v FacetSearchResponse) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *FacetSearchResponse) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *FacetSearchResponse) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(in *jlexer.Lexer, out *FacetSearchRequest) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "facetName": + out.FacetName = string(in.String()) + case "facetQuery": + out.FacetQuery = string(in.String()) + case "q": + out.Q = string(in.String()) + case "filter": + out.Filter = string(in.String()) + case "matchingStrategy": + 
out.MatchingStrategy = string(in.String()) + case "attributesToSearchOn": + if in.IsNull() { + in.Skip() + out.AttributesToSearchOn = nil + } else { + in.Delim('[') + if out.AttributesToSearchOn == nil { + if !in.IsDelim(']') { + out.AttributesToSearchOn = make([]string, 0, 4) + } else { + out.AttributesToSearchOn = []string{} + } + } else { + out.AttributesToSearchOn = (out.AttributesToSearchOn)[:0] + } + for !in.IsDelim(']') { + var v108 string + v108 = string(in.String()) + out.AttributesToSearchOn = append(out.AttributesToSearchOn, v108) + in.WantComma() + } + in.Delim(']') + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(out *jwriter.Writer, in FacetSearchRequest) { + out.RawByte('{') + first := true + _ = first + if in.FacetName != "" { + const prefix string = ",\"facetName\":" + first = false + out.RawString(prefix[1:]) + out.String(string(in.FacetName)) + } + if in.FacetQuery != "" { + const prefix string = ",\"facetQuery\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.FacetQuery)) + } + if in.Q != "" { + const prefix string = ",\"q\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.Q)) + } + if in.Filter != "" { + const prefix string = ",\"filter\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.Filter)) + } + if in.MatchingStrategy != "" { + const prefix string = ",\"matchingStrategy\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.MatchingStrategy)) + } + if len(in.AttributesToSearchOn) != 0 { + const prefix string = ",\"attributesToSearchOn\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) 
+ } + { + out.RawByte('[') + for v109, v110 := range in.AttributesToSearchOn { + if v109 > 0 { + out.RawByte(',') + } + out.String(string(v110)) + } + out.RawByte(']') + } + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v FacetSearchRequest) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v FacetSearchRequest) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *FacetSearchRequest) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *FacetSearchRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(in *jlexer.Lexer, out *Embedder) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4351,7 +4632,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(out *jwriter.Writer, in Embedder) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(out *jwriter.Writer, in Embedder) { out.RawByte('{') first := true _ = first @@ -4386,27 +4667,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Embedder) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(&w, v) return w.Buffer.BuildBytes(), 
w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Embedder) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Embedder) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Embedder) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(in *jlexer.Lexer, out *DocumentsResult) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(in *jlexer.Lexer, out *DocumentsResult) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4441,29 +4722,29 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(in *jlexer.Lexer, out.Results = (out.Results)[:0] } for !in.IsDelim(']') { - var v105 map[string]interface{} + var v111 map[string]interface{} if in.IsNull() { in.Skip() } else { in.Delim('{') - v105 = make(map[string]interface{}) + v111 = make(map[string]interface{}) for !in.IsDelim('}') { key := string(in.String()) in.WantColon() - var v106 interface{} - if m, ok := v106.(easyjson.Unmarshaler); ok { + var v112 interface{} + if m, ok := v112.(easyjson.Unmarshaler); ok { m.UnmarshalEasyJSON(in) - } else if m, ok := v106.(json.Unmarshaler); ok { + } else if m, ok := v112.(json.Unmarshaler); ok { _ = m.UnmarshalJSON(in.Raw()) } else { - v106 = in.Interface() + v112 = in.Interface() } - (v105)[key] = v106 + (v111)[key] = v112 in.WantComma() } in.Delim('}') } - out.Results = append(out.Results, v105) + out.Results = append(out.Results, 
v111) in.WantComma() } in.Delim(']') @@ -4484,7 +4765,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(out *jwriter.Writer, in DocumentsResult) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(out *jwriter.Writer, in DocumentsResult) { out.RawByte('{') first := true _ = first @@ -4495,29 +4776,29 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v107, v108 := range in.Results { - if v107 > 0 { + for v113, v114 := range in.Results { + if v113 > 0 { out.RawByte(',') } - if v108 == nil && (out.Flags&jwriter.NilMapAsEmpty) == 0 { + if v114 == nil && (out.Flags&jwriter.NilMapAsEmpty) == 0 { out.RawString(`null`) } else { out.RawByte('{') - v109First := true - for v109Name, v109Value := range v108 { - if v109First { - v109First = false + v115First := true + for v115Name, v115Value := range v114 { + if v115First { + v115First = false } else { out.RawByte(',') } - out.String(string(v109Name)) + out.String(string(v115Name)) out.RawByte(':') - if m, ok := v109Value.(easyjson.Marshaler); ok { + if m, ok := v115Value.(easyjson.Marshaler); ok { m.MarshalEasyJSON(out) - } else if m, ok := v109Value.(json.Marshaler); ok { + } else if m, ok := v115Value.(json.Marshaler); ok { out.Raw(m.MarshalJSON()) } else { - out.Raw(json.Marshal(v109Value)) + out.Raw(json.Marshal(v115Value)) } } out.RawByte('}') @@ -4547,27 +4828,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v DocumentsResult) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v 
DocumentsResult) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *DocumentsResult) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *DocumentsResult) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(in *jlexer.Lexer, out *DocumentsQuery) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, out *DocumentsQuery) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4606,9 +4887,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(in *jlexer.Lexer, out.Fields = (out.Fields)[:0] } for !in.IsDelim(']') { - var v110 string - v110 = string(in.String()) - out.Fields = append(out.Fields, v110) + var v116 string + v116 = string(in.String()) + out.Fields = append(out.Fields, v116) in.WantComma() } in.Delim(']') @@ -4631,7 +4912,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(out *jwriter.Writer, in DocumentsQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writer, in DocumentsQuery) { out.RawByte('{') first := true _ = first @@ -4661,11 +4942,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(out *jwriter.Writ } { out.RawByte('[') - for v111, v112 := range in.Fields { - if v111 > 0 { + for v117, v118 := range in.Fields { + if v117 > 0 { 
out.RawByte(',') } - out.String(string(v112)) + out.String(string(v118)) } out.RawByte(']') } @@ -4692,27 +4973,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v DocumentsQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v DocumentsQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *DocumentsQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *DocumentsQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(in *jlexer.Lexer, out *DocumentQuery) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(in *jlexer.Lexer, out *DocumentQuery) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4747,9 +5028,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(in *jlexer.Lexer, out.Fields = (out.Fields)[:0] } for !in.IsDelim(']') { - var v113 string - v113 = string(in.String()) - out.Fields = append(out.Fields, v113) + var v119 string + v119 = string(in.String()) + out.Fields = append(out.Fields, v119) in.WantComma() } in.Delim(']') @@ -4764,7 +5045,7 @@ func 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(out *jwriter.Writer, in DocumentQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(out *jwriter.Writer, in DocumentQuery) { out.RawByte('{') first := true _ = first @@ -4774,11 +5055,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(out *jwriter.Writ out.RawString(prefix[1:]) { out.RawByte('[') - for v114, v115 := range in.Fields { - if v114 > 0 { + for v120, v121 := range in.Fields { + if v120 > 0 { out.RawByte(',') } - out.String(string(v115)) + out.String(string(v121)) } out.RawByte(']') } @@ -4789,27 +5070,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v DocumentQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v DocumentQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *DocumentQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *DocumentQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, out *Details) 
{ +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out *Details) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4854,9 +5135,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, out.RankingRules = (out.RankingRules)[:0] } for !in.IsDelim(']') { - var v116 string - v116 = string(in.String()) - out.RankingRules = append(out.RankingRules, v116) + var v122 string + v122 = string(in.String()) + out.RankingRules = append(out.RankingRules, v122) in.WantComma() } in.Delim(']') @@ -4887,9 +5168,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, out.SearchableAttributes = (out.SearchableAttributes)[:0] } for !in.IsDelim(']') { - var v117 string - v117 = string(in.String()) - out.SearchableAttributes = append(out.SearchableAttributes, v117) + var v123 string + v123 = string(in.String()) + out.SearchableAttributes = append(out.SearchableAttributes, v123) in.WantComma() } in.Delim(']') @@ -4910,9 +5191,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, out.DisplayedAttributes = (out.DisplayedAttributes)[:0] } for !in.IsDelim(']') { - var v118 string - v118 = string(in.String()) - out.DisplayedAttributes = append(out.DisplayedAttributes, v118) + var v124 string + v124 = string(in.String()) + out.DisplayedAttributes = append(out.DisplayedAttributes, v124) in.WantComma() } in.Delim(']') @@ -4933,9 +5214,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, out.StopWords = (out.StopWords)[:0] } for !in.IsDelim(']') { - var v119 string - v119 = string(in.String()) - out.StopWords = append(out.StopWords, v119) + var v125 string + v125 = string(in.String()) + out.StopWords = append(out.StopWords, v125) in.WantComma() } in.Delim(']') @@ -4953,30 +5234,30 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, for !in.IsDelim('}') { key := string(in.String()) in.WantColon() - 
var v120 []string + var v126 []string if in.IsNull() { in.Skip() - v120 = nil + v126 = nil } else { in.Delim('[') - if v120 == nil { + if v126 == nil { if !in.IsDelim(']') { - v120 = make([]string, 0, 4) + v126 = make([]string, 0, 4) } else { - v120 = []string{} + v126 = []string{} } } else { - v120 = (v120)[:0] + v126 = (v126)[:0] } for !in.IsDelim(']') { - var v121 string - v121 = string(in.String()) - v120 = append(v120, v121) + var v127 string + v127 = string(in.String()) + v126 = append(v126, v127) in.WantComma() } in.Delim(']') } - (out.Synonyms)[key] = v120 + (out.Synonyms)[key] = v126 in.WantComma() } in.Delim('}') @@ -4997,9 +5278,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, out.FilterableAttributes = (out.FilterableAttributes)[:0] } for !in.IsDelim(']') { - var v122 string - v122 = string(in.String()) - out.FilterableAttributes = append(out.FilterableAttributes, v122) + var v128 string + v128 = string(in.String()) + out.FilterableAttributes = append(out.FilterableAttributes, v128) in.WantComma() } in.Delim(']') @@ -5020,9 +5301,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, out.SortableAttributes = (out.SortableAttributes)[:0] } for !in.IsDelim(']') { - var v123 string - v123 = string(in.String()) - out.SortableAttributes = append(out.SortableAttributes, v123) + var v129 string + v129 = string(in.String()) + out.SortableAttributes = append(out.SortableAttributes, v129) in.WantComma() } in.Delim(']') @@ -5081,9 +5362,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, out.Swaps = (out.Swaps)[:0] } for !in.IsDelim(']') { - var v124 SwapIndexesParams - (v124).UnmarshalEasyJSON(in) - out.Swaps = append(out.Swaps, v124) + var v130 SwapIndexesParams + (v130).UnmarshalEasyJSON(in) + out.Swaps = append(out.Swaps, v130) in.WantComma() } in.Delim(']') @@ -5100,7 +5381,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, 
in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writer, in Details) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writer, in Details) { out.RawByte('{') first := true _ = first @@ -5160,11 +5441,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writ } { out.RawByte('[') - for v125, v126 := range in.RankingRules { - if v125 > 0 { + for v131, v132 := range in.RankingRules { + if v131 > 0 { out.RawByte(',') } - out.String(string(v126)) + out.String(string(v132)) } out.RawByte(']') } @@ -5189,11 +5470,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writ } { out.RawByte('[') - for v127, v128 := range in.SearchableAttributes { - if v127 > 0 { + for v133, v134 := range in.SearchableAttributes { + if v133 > 0 { out.RawByte(',') } - out.String(string(v128)) + out.String(string(v134)) } out.RawByte(']') } @@ -5208,11 +5489,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writ } { out.RawByte('[') - for v129, v130 := range in.DisplayedAttributes { - if v129 > 0 { + for v135, v136 := range in.DisplayedAttributes { + if v135 > 0 { out.RawByte(',') } - out.String(string(v130)) + out.String(string(v136)) } out.RawByte(']') } @@ -5227,11 +5508,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writ } { out.RawByte('[') - for v131, v132 := range in.StopWords { - if v131 > 0 { + for v137, v138 := range in.StopWords { + if v137 > 0 { out.RawByte(',') } - out.String(string(v132)) + out.String(string(v138)) } out.RawByte(']') } @@ -5246,24 +5527,24 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writ } { out.RawByte('{') - v133First := true - for v133Name, v133Value := range in.Synonyms { - if v133First { - v133First = false + v139First := true + for v139Name, v139Value := range in.Synonyms { + if v139First { + v139First = false } else { 
out.RawByte(',') } - out.String(string(v133Name)) + out.String(string(v139Name)) out.RawByte(':') - if v133Value == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + if v139Value == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') - for v134, v135 := range v133Value { - if v134 > 0 { + for v140, v141 := range v139Value { + if v140 > 0 { out.RawByte(',') } - out.String(string(v135)) + out.String(string(v141)) } out.RawByte(']') } @@ -5281,11 +5562,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writ } { out.RawByte('[') - for v136, v137 := range in.FilterableAttributes { - if v136 > 0 { + for v142, v143 := range in.FilterableAttributes { + if v142 > 0 { out.RawByte(',') } - out.String(string(v137)) + out.String(string(v143)) } out.RawByte(']') } @@ -5300,11 +5581,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writ } { out.RawByte('[') - for v138, v139 := range in.SortableAttributes { - if v138 > 0 { + for v144, v145 := range in.SortableAttributes { + if v144 > 0 { out.RawByte(',') } - out.String(string(v139)) + out.String(string(v145)) } out.RawByte(']') } @@ -5389,11 +5670,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writ } { out.RawByte('[') - for v140, v141 := range in.Swaps { - if v140 > 0 { + for v146, v147 := range in.Swaps { + if v146 > 0 { out.RawByte(',') } - (v141).MarshalEasyJSON(out) + (v147).MarshalEasyJSON(out) } out.RawByte(']') } @@ -5414,27 +5695,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Details) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Details) 
MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Details) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Details) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(in *jlexer.Lexer, out *DeleteTasksQuery) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, out *DeleteTasksQuery) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -5469,9 +5750,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(in *jlexer.Lexer, out.UIDS = (out.UIDS)[:0] } for !in.IsDelim(']') { - var v142 int64 - v142 = int64(in.Int64()) - out.UIDS = append(out.UIDS, v142) + var v148 int64 + v148 = int64(in.Int64()) + out.UIDS = append(out.UIDS, v148) in.WantComma() } in.Delim(']') @@ -5492,9 +5773,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(in *jlexer.Lexer, out.IndexUIDS = (out.IndexUIDS)[:0] } for !in.IsDelim(']') { - var v143 string - v143 = string(in.String()) - out.IndexUIDS = append(out.IndexUIDS, v143) + var v149 string + v149 = string(in.String()) + out.IndexUIDS = append(out.IndexUIDS, v149) in.WantComma() } in.Delim(']') @@ -5515,9 +5796,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(in *jlexer.Lexer, out.Statuses = (out.Statuses)[:0] } for !in.IsDelim(']') { - var v144 TaskStatus - v144 = TaskStatus(in.String()) - out.Statuses = append(out.Statuses, v144) + var v150 TaskStatus + v150 
= TaskStatus(in.String()) + out.Statuses = append(out.Statuses, v150) in.WantComma() } in.Delim(']') @@ -5538,9 +5819,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(in *jlexer.Lexer, out.Types = (out.Types)[:0] } for !in.IsDelim(']') { - var v145 TaskType - v145 = TaskType(in.String()) - out.Types = append(out.Types, v145) + var v151 TaskType + v151 = TaskType(in.String()) + out.Types = append(out.Types, v151) in.WantComma() } in.Delim(']') @@ -5561,9 +5842,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(in *jlexer.Lexer, out.CanceledBy = (out.CanceledBy)[:0] } for !in.IsDelim(']') { - var v146 int64 - v146 = int64(in.Int64()) - out.CanceledBy = append(out.CanceledBy, v146) + var v152 int64 + v152 = int64(in.Int64()) + out.CanceledBy = append(out.CanceledBy, v152) in.WantComma() } in.Delim(']') @@ -5602,7 +5883,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(out *jwriter.Writer, in DeleteTasksQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writer, in DeleteTasksQuery) { out.RawByte('{') first := true _ = first @@ -5613,11 +5894,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v147, v148 := range in.UIDS { - if v147 > 0 { + for v153, v154 := range in.UIDS { + if v153 > 0 { out.RawByte(',') } - out.Int64(int64(v148)) + out.Int64(int64(v154)) } out.RawByte(']') } @@ -5629,11 +5910,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v149, v150 := range in.IndexUIDS { - if v149 > 0 { + for v155, v156 := range in.IndexUIDS { + if v155 > 0 { out.RawByte(',') } - out.String(string(v150)) + out.String(string(v156)) } out.RawByte(']') } @@ -5645,11 +5926,11 @@ func 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v151, v152 := range in.Statuses { - if v151 > 0 { + for v157, v158 := range in.Statuses { + if v157 > 0 { out.RawByte(',') } - out.String(string(v152)) + out.String(string(v158)) } out.RawByte(']') } @@ -5661,11 +5942,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v153, v154 := range in.Types { - if v153 > 0 { + for v159, v160 := range in.Types { + if v159 > 0 { out.RawByte(',') } - out.String(string(v154)) + out.String(string(v160)) } out.RawByte(']') } @@ -5677,11 +5958,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v155, v156 := range in.CanceledBy { - if v155 > 0 { + for v161, v162 := range in.CanceledBy { + if v161 > 0 { out.RawByte(',') } - out.Int64(int64(v156)) + out.Int64(int64(v162)) } out.RawByte(']') } @@ -5722,27 +6003,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v DeleteTasksQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v DeleteTasksQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *DeleteTasksQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(&r, v) return r.Error() 
} // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *DeleteTasksQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out *CsvDocumentsQuery) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(in *jlexer.Lexer, out *CsvDocumentsQuery) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -5775,7 +6056,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writer, in CsvDocumentsQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(out *jwriter.Writer, in CsvDocumentsQuery) { out.RawByte('{') first := true _ = first @@ -5801,27 +6082,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v CsvDocumentsQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v CsvDocumentsQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *CsvDocumentsQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *CsvDocumentsQuery) UnmarshalEasyJSON(l 
*jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, out *CreateIndexRequest) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(in *jlexer.Lexer, out *CreateIndexRequest) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -5854,7 +6135,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writer, in CreateIndexRequest) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(out *jwriter.Writer, in CreateIndexRequest) { out.RawByte('{') first := true _ = first @@ -5880,27 +6161,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v CreateIndexRequest) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v CreateIndexRequest) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *CreateIndexRequest) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *CreateIndexRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(l, v) + 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(in *jlexer.Lexer, out *Client) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(in *jlexer.Lexer, out *Client) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -5929,7 +6210,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(out *jwriter.Writer, in Client) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(out *jwriter.Writer, in Client) { out.RawByte('{') first := true _ = first @@ -5939,27 +6220,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Client) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Client) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Client) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Client) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(in *jlexer.Lexer, out *CancelTasksQuery) { +func 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(in *jlexer.Lexer, out *CancelTasksQuery) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -5994,9 +6275,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(in *jlexer.Lexer, out.UIDS = (out.UIDS)[:0] } for !in.IsDelim(']') { - var v157 int64 - v157 = int64(in.Int64()) - out.UIDS = append(out.UIDS, v157) + var v163 int64 + v163 = int64(in.Int64()) + out.UIDS = append(out.UIDS, v163) in.WantComma() } in.Delim(']') @@ -6017,9 +6298,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(in *jlexer.Lexer, out.IndexUIDS = (out.IndexUIDS)[:0] } for !in.IsDelim(']') { - var v158 string - v158 = string(in.String()) - out.IndexUIDS = append(out.IndexUIDS, v158) + var v164 string + v164 = string(in.String()) + out.IndexUIDS = append(out.IndexUIDS, v164) in.WantComma() } in.Delim(']') @@ -6040,9 +6321,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(in *jlexer.Lexer, out.Statuses = (out.Statuses)[:0] } for !in.IsDelim(']') { - var v159 TaskStatus - v159 = TaskStatus(in.String()) - out.Statuses = append(out.Statuses, v159) + var v165 TaskStatus + v165 = TaskStatus(in.String()) + out.Statuses = append(out.Statuses, v165) in.WantComma() } in.Delim(']') @@ -6063,9 +6344,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(in *jlexer.Lexer, out.Types = (out.Types)[:0] } for !in.IsDelim(']') { - var v160 TaskType - v160 = TaskType(in.String()) - out.Types = append(out.Types, v160) + var v166 TaskType + v166 = TaskType(in.String()) + out.Types = append(out.Types, v166) in.WantComma() } in.Delim(']') @@ -6096,7 +6377,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(out *jwriter.Writer, in CancelTasksQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(out *jwriter.Writer, in CancelTasksQuery) { out.RawByte('{') 
first := true _ = first @@ -6107,11 +6388,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v161, v162 := range in.UIDS { - if v161 > 0 { + for v167, v168 := range in.UIDS { + if v167 > 0 { out.RawByte(',') } - out.Int64(int64(v162)) + out.Int64(int64(v168)) } out.RawByte(']') } @@ -6123,11 +6404,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v163, v164 := range in.IndexUIDS { - if v163 > 0 { + for v169, v170 := range in.IndexUIDS { + if v169 > 0 { out.RawByte(',') } - out.String(string(v164)) + out.String(string(v170)) } out.RawByte(']') } @@ -6139,11 +6420,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v165, v166 := range in.Statuses { - if v165 > 0 { + for v171, v172 := range in.Statuses { + if v171 > 0 { out.RawByte(',') } - out.String(string(v166)) + out.String(string(v172)) } out.RawByte(']') } @@ -6155,11 +6436,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v167, v168 := range in.Types { - if v167 > 0 { + for v173, v174 := range in.Types { + if v173 > 0 { out.RawByte(',') } - out.String(string(v168)) + out.String(string(v174)) } out.RawByte(']') } @@ -6190,23 +6471,23 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v CancelTasksQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v CancelTasksQuery) MarshalEasyJSON(w *jwriter.Writer) { - 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *CancelTasksQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *CancelTasksQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(l, v) } From 7a6746aef5e860557c06e2ef5eda04ca24b99631 Mon Sep 17 00:00:00 2001 From: Miguel Targa Date: Tue, 23 Jul 2024 22:31:21 -0400 Subject: [PATCH 03/43] Add code-samples --- .code-samples.meilisearch.yaml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/.code-samples.meilisearch.yaml b/.code-samples.meilisearch.yaml index 4ce3549e..6c9af3e0 100644 --- a/.code-samples.meilisearch.yaml +++ b/.code-samples.meilisearch.yaml @@ -98,6 +98,17 @@ multi_search_1: |- }, }, }) +facet_search_1: |- + client.Index("movies").FacetSearch(&meilisearch.FacetSearchRequest{ + FacetQuery: "fiction", + FacetName: "genres", + Filter: "rating > 3", + }) +facet_search_3: |- + client.Index("movies").FacetSearch(&meilisearch.FacetSearchRequest{ + FacetQuery: "c", + FacetName: "genres", + }) delete_tasks_1: |- client.DeleteTaks(&meilisearch.DeleteTasksQuery{ UIDS: []int64{1, 2}, From ccf656ff7a8f2bb20cff3d51d0f5521852865f34 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine?= Date: Thu, 25 Jul 2024 14:39:24 +0200 Subject: [PATCH 04/43] Update .code-samples.meilisearch.yaml --- .code-samples.meilisearch.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.code-samples.meilisearch.yaml b/.code-samples.meilisearch.yaml index 6c9af3e0..d01f1f24 100644 --- 
a/.code-samples.meilisearch.yaml +++ b/.code-samples.meilisearch.yaml @@ -99,7 +99,7 @@ multi_search_1: |- }, }) facet_search_1: |- - client.Index("movies").FacetSearch(&meilisearch.FacetSearchRequest{ + client.Index("books").FacetSearch(&meilisearch.FacetSearchRequest{ FacetQuery: "fiction", FacetName: "genres", Filter: "rating > 3", From 4211e24c43535336b1f009be23cbbe170fb72daf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine?= Date: Thu, 25 Jul 2024 14:39:28 +0200 Subject: [PATCH 05/43] Update .code-samples.meilisearch.yaml --- .code-samples.meilisearch.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.code-samples.meilisearch.yaml b/.code-samples.meilisearch.yaml index d01f1f24..9cf7ce6c 100644 --- a/.code-samples.meilisearch.yaml +++ b/.code-samples.meilisearch.yaml @@ -105,7 +105,7 @@ facet_search_1: |- Filter: "rating > 3", }) facet_search_3: |- - client.Index("movies").FacetSearch(&meilisearch.FacetSearchRequest{ + client.Index("books").FacetSearch(&meilisearch.FacetSearchRequest{ FacetQuery: "c", FacetName: "genres", }) From 8039c2f42b6495e6f4845dbe8749f59e460e46d8 Mon Sep 17 00:00:00 2001 From: Miguel Targa Date: Thu, 25 Jul 2024 21:24:05 -0400 Subject: [PATCH 06/43] Increase code coverage --- index_facet_search_test.go | 141 +++++++++++++++++++++++++++++++++++-- 1 file changed, 137 insertions(+), 4 deletions(-) diff --git a/index_facet_search_test.go b/index_facet_search_test.go index 6858a616..87dbf639 100644 --- a/index_facet_search_test.go +++ b/index_facet_search_test.go @@ -43,7 +43,138 @@ func TestIndex_FacetSearch(t *testing.T) { }, wantErr: false, }, + { + name: "TestIndexFacetSearchWithFilter", + args: args{ + UID: "indexUID", + client: defaultClient, + request: &FacetSearchRequest{ + FacetName: "tag", + FacetQuery: "Novel", + Filter: "tag = 'Novel'", + }, + filterableAttributes: []string{"tag"}, + }, + want: &FacetSearchResponse{ + FacetHits: []interface{}{ + map[string]interface{}{ + "value": "Novel", "count": 
float64(5), + }, + }, + FacetQuery: "Novel", + }, + wantErr: false, + }, + { + name: "TestIndexFacetSearchWithMatchingStrategy", + args: args{ + UID: "indexUID", + client: defaultClient, + request: &FacetSearchRequest{ + FacetName: "tag", + FacetQuery: "Novel", + MatchingStrategy: "frequency", + }, + filterableAttributes: []string{"tag"}, + }, + want: &FacetSearchResponse{ + FacetHits: []interface{}{ + map[string]interface{}{ + "value": "Novel", "count": float64(5), + }, + }, + FacetQuery: "Novel", + }, + wantErr: false, + }, + { + name: "TestIndexFacetSearchWithAttributesToSearchOn", + args: args{ + UID: "indexUID", + client: defaultClient, + request: &FacetSearchRequest{ + FacetName: "tag", + FacetQuery: "Novel", + AttributesToSearchOn: []string{"tag"}, + }, + filterableAttributes: []string{"tag"}, + }, + want: &FacetSearchResponse{ + FacetHits: []interface{}{ + map[string]interface{}{ + "value": "Novel", "count": float64(5), + }, + }, + FacetQuery: "Novel", + }, + wantErr: false, + }, + { + name: "TestIndexFacetSearchWithNoFacetSearchRequest", + args: args{ + UID: "indexUID", + client: defaultClient, + request: nil, + }, + want: nil, + wantErr: true, + }, + { + name: "TestIndexFacetSearchWithNoFacetName", + args: args{ + UID: "indexUID", + client: defaultClient, + request: &FacetSearchRequest{ + FacetQuery: "Novel", + }, + }, + want: nil, + wantErr: true, + }, + { + name: "TestIndexFacetSearchWithNoFacetQuery", + args: args{ + UID: "indexUID", + client: defaultClient, + request: &FacetSearchRequest{ + FacetName: "tag", + }, + }, + want: nil, + wantErr: true, + }, + { + name: "TestIndexFacetSearchWithNoFilterableAttributes", + args: args{ + UID: "indexUID", + client: defaultClient, + request: &FacetSearchRequest{ + FacetName: "tag", + FacetQuery: "Novel", + }, + }, + want: nil, + wantErr: true, + }, + { + name: "TestIndexFacetSearchWithQ", + args: args{ + UID: "indexUID", + client: defaultClient, + request: &FacetSearchRequest{ + Q: "query", + FacetName: "tag", + 
}, + filterableAttributes: []string{"tag"}, + }, + want: &FacetSearchResponse{ + FacetHits: []interface{}{}, + FacetQuery: "", + }, + wantErr: false, + }, } + for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { SetUpIndexForFaceting() @@ -51,15 +182,17 @@ func TestIndex_FacetSearch(t *testing.T) { i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) - updateFilter, err := i.UpdateFilterableAttributes(&tt.args.filterableAttributes) - require.NoError(t, err) - testWaitForTask(t, i, updateFilter) + if len(tt.args.filterableAttributes) > 0 { + updateFilter, err := i.UpdateFilterableAttributes(&tt.args.filterableAttributes) + require.NoError(t, err) + testWaitForTask(t, i, updateFilter) + } gotRaw, err := i.FacetSearch(tt.args.request) if tt.wantErr { require.Error(t, err) - require.Nil(t, tt.want) + require.Nil(t, gotRaw) return } From c4e85d67aceb545a9d2610fde6c6eea1e8b04ffc Mon Sep 17 00:00:00 2001 From: Miguel Targa Date: Fri, 26 Jul 2024 09:15:24 -0400 Subject: [PATCH 07/43] fix linting --- .code-samples.meilisearch.yaml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.code-samples.meilisearch.yaml b/.code-samples.meilisearch.yaml index 9cf7ce6c..59ec19b7 100644 --- a/.code-samples.meilisearch.yaml +++ b/.code-samples.meilisearch.yaml @@ -102,7 +102,7 @@ facet_search_1: |- client.Index("books").FacetSearch(&meilisearch.FacetSearchRequest{ FacetQuery: "fiction", FacetName: "genres", - Filter: "rating > 3", + Filter: "rating > 3", }) facet_search_3: |- client.Index("books").FacetSearch(&meilisearch.FacetSearchRequest{ @@ -656,7 +656,11 @@ getting_started_configure_settings: |- getting_started_faceting: |- client.Index("movies").UpdateFaceting(&meilisearch.Faceting{ MaxValuesPerFacet: 2, + SortFacetValuesBy: { + "*": "count" + } }) + getting_started_pagination: |- client.Index("movies").UpdatePagination(&meilisearch.Pagination{ MaxTotalHits: 500, From 2ee9dbf213504d221ae20ade7af79d76f9b54cbf Mon Sep 17 00:00:00 2001 From: Javad Date: Sat, 
27 Jul 2024 15:07:44 +0330 Subject: [PATCH 08/43] Add rankingScoreThreshold search parameter --- .code-samples.meilisearch.yaml | 8 ++++++++ index_search.go | 4 ++++ index_search_test.go | 29 +++++++++++++++++++++++++++++ types.go | 1 + types_easyjson.go | 7 +++++++ 5 files changed, 49 insertions(+) diff --git a/.code-samples.meilisearch.yaml b/.code-samples.meilisearch.yaml index 4ce3549e..85fb4553 100644 --- a/.code-samples.meilisearch.yaml +++ b/.code-samples.meilisearch.yaml @@ -868,3 +868,11 @@ date_guide_sort_1: |- "release_timestamp:desc", }, }) +search_parameter_reference_ranking_score_threshold_1: |- + curl \ + -X POST 'http://localhost:7700/indexes/INDEX_NAME/search' \ + -H 'Content-Type: application/json' \ + --data-binary '{ + "q": "badman", + "rankingScoreThreshold": 0.2 + }' diff --git a/index_search.go b/index_search.go index c12ca990..f6b41f1d 100644 --- a/index_search.go +++ b/index_search.go @@ -157,5 +157,9 @@ func searchPostRequestParams(query string, request *SearchRequest) map[string]in params["retrieveVectors"] = request.RetrieveVectors } + if request.RankingScoreThreshold != 0 { + params["rankingScoreThreshold"] = request.RankingScoreThreshold + } + return params } diff --git a/index_search_test.go b/index_search_test.go index af9e9299..32478a9d 100644 --- a/index_search_test.go +++ b/index_search_test.go @@ -2,6 +2,7 @@ package meilisearch import ( "encoding/json" + "github.com/stretchr/testify/assert" "testing" "github.com/stretchr/testify/require" @@ -1662,3 +1663,31 @@ func TestIndex_SearchWithVectorStore(t *testing.T) { require.NotNil(t, hit["_vectors"]) } } + +func TestIndex_SearchRankingScoreThreshold(t *testing.T) { + type args struct { + UID string + PrimaryKey string + client *Client + query string + request SearchRequest + } + testArg := args{ + UID: "indexUID", + client: defaultClient, + query: "Pri", + request: SearchRequest{ + RankingScoreThreshold: 0.2, + }, + } + + SetUpBasicIndex(testArg.UID) + + c := testArg.client + 
t.Cleanup(cleanup(c)) + + got, err := testArg.client.Index(testArg.UID).Search(testArg.query, &testArg.request) + require.NoError(t, err) + + assert.Len(t, got.Hits, 3) +} diff --git a/types.go b/types.go index 126c06ca..859f069b 100644 --- a/types.go +++ b/types.go @@ -362,6 +362,7 @@ type SearchRequest struct { Query string Hybrid *SearchRequestHybrid RetrieveVectors bool + RankingScoreThreshold float64 } type SearchRequestHybrid struct { diff --git a/types_easyjson.go b/types_easyjson.go index b8292390..b91482a6 100644 --- a/types_easyjson.go +++ b/types_easyjson.go @@ -2672,6 +2672,8 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, } case "RetrieveVectors": out.RetrieveVectors = bool(in.Bool()) + case "RankingScoreThreshold": + out.RankingScoreThreshold = float64(in.Float64()) default: in.SkipRecursive() } @@ -2898,6 +2900,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ out.RawString(prefix) out.Bool(bool(in.RetrieveVectors)) } + { + const prefix string = ",\"RankingScoreThreshold\":" + out.RawString(prefix) + out.Float64(float64(in.RankingScoreThreshold)) + } out.RawByte('}') } From 388f064a510e49b63e84b56a3ae8a3e73ebb9503 Mon Sep 17 00:00:00 2001 From: Javad Date: Sat, 27 Jul 2024 17:03:34 +0330 Subject: [PATCH 09/43] Fix add test ranking score threshold to index search --- index_search_test.go | 59 ++++++++++++++++++++++---------------------- 1 file changed, 30 insertions(+), 29 deletions(-) diff --git a/index_search_test.go b/index_search_test.go index 32478a9d..24d8978c 100644 --- a/index_search_test.go +++ b/index_search_test.go @@ -2,7 +2,6 @@ package meilisearch import ( "encoding/json" - "github.com/stretchr/testify/assert" "testing" "github.com/stretchr/testify/require" @@ -480,6 +479,36 @@ func TestIndex_Search(t *testing.T) { }, wantErr: false, }, + { + name: "TestIndexSearchWithRankingScoreThreshold", + args: args{ + UID: "indexUID", + client: defaultClient, + query: "pri", + 
request: &SearchRequest{ + Limit: 10, + AttributesToRetrieve: []string{"book_id", "title"}, + RankingScoreThreshold: 0.2, + }, + }, + want: &SearchResponse{ + Hits: []interface{}{ + map[string]interface{}{ + "book_id": float64(123), "title": "Pride and Prejudice", + }, + map[string]interface{}{ + "book_id": float64(456), "title": "Le Petit Prince", + }, + map[string]interface{}{ + "book_id": float64(4), "title": "Harry Potter and the Half-Blood Prince", + }, + }, + EstimatedTotalHits: 3, + Offset: 0, + Limit: 10, + }, + wantErr: false, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ -1663,31 +1692,3 @@ func TestIndex_SearchWithVectorStore(t *testing.T) { require.NotNil(t, hit["_vectors"]) } } - -func TestIndex_SearchRankingScoreThreshold(t *testing.T) { - type args struct { - UID string - PrimaryKey string - client *Client - query string - request SearchRequest - } - testArg := args{ - UID: "indexUID", - client: defaultClient, - query: "Pri", - request: SearchRequest{ - RankingScoreThreshold: 0.2, - }, - } - - SetUpBasicIndex(testArg.UID) - - c := testArg.client - t.Cleanup(cleanup(c)) - - got, err := testArg.client.Index(testArg.UID).Search(testArg.query, &testArg.request) - require.NoError(t, err) - - assert.Len(t, got.Hits, 3) -} From f227f7b0b3f526d58eca5f9425c807b7a14b4bab Mon Sep 17 00:00:00 2001 From: Javad Date: Mon, 29 Jul 2024 22:15:22 +0330 Subject: [PATCH 10/43] fix: translated curl example to golang --- .code-samples.meilisearch.yaml | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/.code-samples.meilisearch.yaml b/.code-samples.meilisearch.yaml index e8a897dc..3f2440bb 100644 --- a/.code-samples.meilisearch.yaml +++ b/.code-samples.meilisearch.yaml @@ -884,10 +884,6 @@ date_guide_sort_1: |- }, }) search_parameter_reference_ranking_score_threshold_1: |- - curl \ - -X POST 'http://localhost:7700/indexes/INDEX_NAME/search' \ - -H 'Content-Type: application/json' \ - --data-binary '{ - "q": "badman", - 
"rankingScoreThreshold": 0.2 - }' + resp, err := client.Index("INDEX_NAME").Search("badman", &meilisearch.SearchRequest{ + RankingScoreThreshold: 0.2, + }) From 7946372b305d89d531927691b1c00dfe582aff0c Mon Sep 17 00:00:00 2001 From: Javad Date: Mon, 29 Jul 2024 22:53:39 +0330 Subject: [PATCH 11/43] fix: change index name example to books --- .code-samples.meilisearch.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.code-samples.meilisearch.yaml b/.code-samples.meilisearch.yaml index 3f2440bb..dbe3f636 100644 --- a/.code-samples.meilisearch.yaml +++ b/.code-samples.meilisearch.yaml @@ -884,6 +884,6 @@ date_guide_sort_1: |- }, }) search_parameter_reference_ranking_score_threshold_1: |- - resp, err := client.Index("INDEX_NAME").Search("badman", &meilisearch.SearchRequest{ + resp, err := client.Index("books").Search("badman", &meilisearch.SearchRequest{ RankingScoreThreshold: 0.2, }) From 81b4eafe4124131d71857027a8b97ef299b61ab3 Mon Sep 17 00:00:00 2001 From: Javad Date: Tue, 30 Jul 2024 10:17:04 +0330 Subject: [PATCH 12/43] Fix allow typo tolerance to be disabled for an index --- index_settings_test.go | 28 +++++++++++++++++++++++----- types.go | 2 +- types_easyjson.go | 24 ++++-------------------- 3 files changed, 28 insertions(+), 26 deletions(-) diff --git a/index_settings_test.go b/index_settings_test.go index d2e31d0e..218e0320 100644 --- a/index_settings_test.go +++ b/index_settings_test.go @@ -2696,6 +2696,28 @@ func TestIndex_UpdateTypoTolerance(t *testing.T) { }, wantResp: &defaultTypoTolerance, }, + { + name: "TestIndexDisableTypoTolerance", + args: args{ + UID: "indexUID", + client: defaultClient, + request: TypoTolerance{ + Enabled: false, + MinWordSizeForTypos: MinWordSizeForTypos{ + OneTypo: 5, + TwoTypos: 9, + }, + DisableOnWords: []string{}, + DisableOnAttributes: []string{}, + }, + }, + wantTask: &TaskInfo{ + TaskUID: 1, + }, + wantResp: &TypoTolerance{ + Enabled: false, + }, + }, } for _, tt := range tests { t.Run(tt.name, 
func(t *testing.T) { @@ -2704,16 +2726,12 @@ func TestIndex_UpdateTypoTolerance(t *testing.T) { i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) - gotResp, err := i.GetTypoTolerance() - require.NoError(t, err) - require.Equal(t, tt.wantResp, gotResp) - gotTask, err := i.UpdateTypoTolerance(&tt.args.request) require.NoError(t, err) require.GreaterOrEqual(t, gotTask.TaskUID, tt.wantTask.TaskUID) testWaitForTask(t, i, gotTask) - gotResp, err = i.GetTypoTolerance() + gotResp, err := i.GetTypoTolerance() require.NoError(t, err) require.Equal(t, &tt.args.request, gotResp) }) diff --git a/types.go b/types.go index fe7b3a4e..ff96a33a 100644 --- a/types.go +++ b/types.go @@ -57,7 +57,7 @@ type Settings struct { // TypoTolerance is the type that represents the typo tolerance setting in Meilisearch type TypoTolerance struct { - Enabled bool `json:"enabled,omitempty"` + Enabled bool `json:"enabled"` MinWordSizeForTypos MinWordSizeForTypos `json:"minWordSizeForTypos,omitempty"` DisableOnWords []string `json:"disableOnWords,omitempty"` DisableOnAttributes []string `json:"disableOnAttributes,omitempty"` diff --git a/types_easyjson.go b/types_easyjson.go index d7408675..f3af9b77 100644 --- a/types_easyjson.go +++ b/types_easyjson.go @@ -247,30 +247,19 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo2(out *jwriter.Write out.RawByte('{') first := true _ = first - if in.Enabled { + { const prefix string = ",\"enabled\":" - first = false out.RawString(prefix[1:]) out.Bool(bool(in.Enabled)) } if true { const prefix string = ",\"minWordSizeForTypos\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } + out.RawString(prefix) (in.MinWordSizeForTypos).MarshalEasyJSON(out) } if len(in.DisableOnWords) != 0 { const prefix string = ",\"disableOnWords\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } + out.RawString(prefix) { out.RawByte('[') for v3, v4 := range in.DisableOnWords { @@ 
-284,12 +273,7 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo2(out *jwriter.Write } if len(in.DisableOnAttributes) != 0 { const prefix string = ",\"disableOnAttributes\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } + out.RawString(prefix) { out.RawByte('[') for v5, v6 := range in.DisableOnAttributes { From 9691a7387fded301994d2e69b96a33e65a322de1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine?= Date: Tue, 30 Jul 2024 09:48:44 +0200 Subject: [PATCH 13/43] Update .code-samples.meilisearch.yaml --- .code-samples.meilisearch.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.code-samples.meilisearch.yaml b/.code-samples.meilisearch.yaml index dbe3f636..696ddb96 100644 --- a/.code-samples.meilisearch.yaml +++ b/.code-samples.meilisearch.yaml @@ -884,6 +884,6 @@ date_guide_sort_1: |- }, }) search_parameter_reference_ranking_score_threshold_1: |- - resp, err := client.Index("books").Search("badman", &meilisearch.SearchRequest{ + client.Index("INDEX_NAME").Search("badman", &meilisearch.SearchRequest{ RankingScoreThreshold: 0.2, }) From 966203d4f00f3820d27af7e0345376e36746c177 Mon Sep 17 00:00:00 2001 From: meili-bot <74670311+meili-bot@users.noreply.github.com> Date: Tue, 30 Jul 2024 09:58:11 +0200 Subject: [PATCH 14/43] Update version.go --- version.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.go b/version.go index 3cab5aac..2f4623a6 100644 --- a/version.go +++ b/version.go @@ -2,7 +2,7 @@ package meilisearch import "fmt" -const VERSION = "0.27.0" +const VERSION = "0.27.1" func GetQualifiedVersion() (qualifiedVersion string) { return getQualifiedVersion(VERSION) From b52fd49a674b4c2f401455b90686469a8f7f0063 Mon Sep 17 00:00:00 2001 From: Javad Date: Wed, 31 Jul 2024 09:17:16 +0330 Subject: [PATCH 15/43] Feat supported searchCutoffMs setting --- .code-samples.meilisearch.yaml | 6 +++++ index_settings.go | 46 ++++++++++++++++++++++++++++++++++ 
index_settings_test.go | 31 +++++++++++++++++++++++ types.go | 1 + types_easyjson.go | 12 +++++++++ 5 files changed, 96 insertions(+) diff --git a/.code-samples.meilisearch.yaml b/.code-samples.meilisearch.yaml index 59ec19b7..f10db3c8 100644 --- a/.code-samples.meilisearch.yaml +++ b/.code-samples.meilisearch.yaml @@ -883,3 +883,9 @@ date_guide_sort_1: |- "release_timestamp:desc", }, }) +get_search_cutoff_1: |- + client.Index("movies").GetSearchCutoffMs() +update_search_cutoff_1: |- + client.Index("movies").UpdateSearchCutoffMs(150) +reset_search_cutoff_1: |- + client.Index("books").ResetSearchCutoffMs() diff --git a/index_settings.go b/index_settings.go index 6958b025..552786f5 100644 --- a/index_settings.go +++ b/index_settings.go @@ -638,3 +638,49 @@ func (i Index) ResetEmbedders() (resp *TaskInfo, err error) { } return resp, nil } + +func (i Index) GetSearchCutoffMs() (resp int64, err error) { + req := internalRequest{ + endpoint: "/indexes/" + i.UID + "/settings/search-cutoff-ms", + method: http.MethodGet, + withRequest: nil, + withResponse: &resp, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "GetSearchCutoffMs", + } + if err = i.client.executeRequest(req); err != nil { + return 0, err + } + return resp, nil +} + +func (i Index) UpdateSearchCutoffMs(request int64) (resp *TaskInfo, err error) { + req := internalRequest{ + endpoint: "/indexes/" + i.UID + "/settings/search-cutoff-ms", + method: http.MethodPut, + contentType: contentTypeJSON, + withRequest: &request, + withResponse: &resp, + acceptedStatusCodes: []int{http.StatusAccepted}, + functionName: "UpdateSearchCutoffMs", + } + if err = i.client.executeRequest(req); err != nil { + return nil, err + } + return resp, nil +} + +func (i Index) ResetSearchCutoffMs() (resp *TaskInfo, err error) { + req := internalRequest{ + endpoint: "/indexes/" + i.UID + "/settings/search-cutoff-ms", + method: http.MethodDelete, + withRequest: nil, + withResponse: &resp, + acceptedStatusCodes: 
[]int{http.StatusAccepted}, + functionName: "ResetSearchCutoffMs", + } + if err = i.client.executeRequest(req); err != nil { + return nil, err + } + return resp, nil +} diff --git a/index_settings_test.go b/index_settings_test.go index d2e31d0e..bfabad25 100644 --- a/index_settings_test.go +++ b/index_settings_test.go @@ -225,6 +225,7 @@ func TestIndex_GetSettings(t *testing.T) { RankingRules: defaultRankingRules, DistinctAttribute: (*string)(nil), SearchableAttributes: []string{"*"}, + SearchCutoffMs: 0, DisplayedAttributes: []string{"*"}, StopWords: []string{}, Synonyms: map[string][]string(nil), @@ -245,6 +246,7 @@ func TestIndex_GetSettings(t *testing.T) { RankingRules: defaultRankingRules, DistinctAttribute: (*string)(nil), SearchableAttributes: []string{"*"}, + SearchCutoffMs: 0, DisplayedAttributes: []string{"*"}, StopWords: []string{}, Synonyms: map[string][]string(nil), @@ -3057,3 +3059,32 @@ func TestIndex_ResetEmbedders(t *testing.T) { require.NoError(t, err) require.Empty(t, got) } + +func Test_SearchCutoffMs(t *testing.T) { + c := defaultClient + t.Cleanup(cleanup(c)) + + indexID := "newIndexUID" + i := c.Index(indexID) + taskInfo, err := c.CreateIndex(&IndexConfig{Uid: indexID}) + require.NoError(t, err) + testWaitForTask(t, i, taskInfo) + + n := int64(250) + + task, err := i.UpdateSearchCutoffMs(n) + require.NoError(t, err) + testWaitForTask(t, i, task) + + got, err := i.GetSearchCutoffMs() + require.NoError(t, err) + require.Equal(t, n, got) + + task, err = i.ResetSearchCutoffMs() + require.NoError(t, err) + testWaitForTask(t, i, task) + + got, err = i.GetSearchCutoffMs() + require.NoError(t, err) + require.Equal(t, int64(0), got) +} diff --git a/types.go b/types.go index fe7b3a4e..03697f22 100644 --- a/types.go +++ b/types.go @@ -44,6 +44,7 @@ type Settings struct { RankingRules []string `json:"rankingRules,omitempty"` DistinctAttribute *string `json:"distinctAttribute,omitempty"` SearchableAttributes []string 
`json:"searchableAttributes,omitempty"` + SearchCutoffMs int64 `json:"searchCutoffMs,omitempty"` DisplayedAttributes []string `json:"displayedAttributes,omitempty"` StopWords []string `json:"stopWords,omitempty"` Synonyms map[string][]string `json:"synonyms,omitempty"` diff --git a/types_easyjson.go b/types_easyjson.go index d7408675..931e7370 100644 --- a/types_easyjson.go +++ b/types_easyjson.go @@ -1719,6 +1719,8 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, } in.Delim(']') } + case "searchCutoffMs": + out.SearchCutoffMs = int64(in.Int64()) case "displayedAttributes": if in.IsNull() { in.Skip() @@ -1960,6 +1962,16 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ out.RawByte(']') } } + if in.SearchCutoffMs != 0 { + const prefix string = ",\"searchCutoffMs\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.SearchCutoffMs)) + } if len(in.DisplayedAttributes) != 0 { const prefix string = ",\"displayedAttributes\":" if first { From c3f14b362476b2557ba7d914ece8d2df6509108a Mon Sep 17 00:00:00 2001 From: Javad Date: Wed, 31 Jul 2024 09:25:09 +0330 Subject: [PATCH 16/43] fix: add search cut off ms to index interface --- index.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/index.go b/index.go index 58236392..698b6556 100644 --- a/index.go +++ b/index.go @@ -74,6 +74,9 @@ type IndexInterface interface { GetFilterableAttributes() (resp *[]string, err error) UpdateFilterableAttributes(request *[]string) (resp *TaskInfo, err error) ResetFilterableAttributes() (resp *TaskInfo, err error) + GetSearchCutoffMs() (resp int64, err error) + UpdateSearchCutoffMs(request int64) (resp *TaskInfo, err error) + ResetSearchCutoffMs() (resp *TaskInfo, err error) WaitForTask(taskUID int64, options ...WaitParams) (*Task, error) } From 522f1794fd0a14a9f1b1c86c46705d3f7f682cc0 Mon Sep 17 00:00:00 2001 From: Javad Date: Wed, 31 Jul 2024 10:16:03 
+0330 Subject: [PATCH 17/43] Feat accept distinct search parameter --- .code-samples.meilisearch.yaml | 15 ++++++++++++++ index_search.go | 3 +++ index_search_test.go | 32 ++++++++++++++++++++++++++++ main_test.go | 38 ++++++++++++++++++++++++++++++++++ types.go | 1 + types_easyjson.go | 7 +++++++ 6 files changed, 96 insertions(+) diff --git a/.code-samples.meilisearch.yaml b/.code-samples.meilisearch.yaml index 59ec19b7..9560295e 100644 --- a/.code-samples.meilisearch.yaml +++ b/.code-samples.meilisearch.yaml @@ -883,3 +883,18 @@ date_guide_sort_1: |- "release_timestamp:desc", }, }) +search_parameter_reference_distinct_1: |- + client.Index("INDEX_NAME").Search("QUERY TERMS", &meilisearch.SearchRequest{ + Distinct: "ATTRIBUTE_A", + }) +distinct_attribute_guide_distinct_parameter_1: |- + client.Index("products").Search("white shirt", &meilisearch.SearchRequest{ + Distinct: "sku", + }) +distinct_attribute_guide_filterable_1: |- + filterableAttributes := []string{ + "product_id", + "sku", + "url", + } + client.Index("products").UpdateFilterableAttributes(&filterableAttributes) diff --git a/index_search.go b/index_search.go index c12ca990..b20a14dd 100644 --- a/index_search.go +++ b/index_search.go @@ -84,6 +84,9 @@ func searchPostRequestParams(query string, request *SearchRequest) map[string]in if !request.PlaceholderSearch { params["q"] = query } + if request.Distinct != "" { + params["distinct"] = request.Distinct + } if request.IndexUID != "" { params["indexUid"] = request.IndexUID } diff --git a/index_search_test.go b/index_search_test.go index af9e9299..006d57c1 100644 --- a/index_search_test.go +++ b/index_search_test.go @@ -1662,3 +1662,35 @@ func TestIndex_SearchWithVectorStore(t *testing.T) { require.NotNil(t, hit["_vectors"]) } } + +func TestIndex_SearchWithDistinct(t *testing.T) { + tests := []struct { + UID string + PrimaryKey string + client *Client + query string + request SearchRequest + }{ + { + UID: "indexUID", + client: defaultClient, + query: "white 
shirt", + request: SearchRequest{ + Distinct: "sku", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.UID, func(t *testing.T) { + SetUpDistinctIndex(tt.UID) + c := tt.client + t.Cleanup(cleanup(c)) + i := c.Index(tt.UID) + + got, err := i.Search(tt.query, &tt.request) + require.NoError(t, err) + require.NotNil(t, got.Hits) + }) + } +} diff --git a/main_test.go b/main_test.go index 01d3f63c..cf9426cb 100644 --- a/main_test.go +++ b/main_test.go @@ -214,6 +214,44 @@ func SetUpBasicIndex(indexUID string) { } } +func SetUpDistinctIndex(indexUID string) { + client := NewClient(ClientConfig{ + Host: getenv("MEILISEARCH_URL", "http://localhost:7700"), + APIKey: masterKey, + }) + index := client.Index(indexUID) + + atters := []string{"product_id", "title", "sku", "url"} + task, err := index.UpdateFilterableAttributes(&atters) + if err != nil { + fmt.Println(err) + os.Exit(1) + } + + finalTask, _ := index.WaitForTask(task.TaskUID) + if finalTask.Status != "succeeded" { + os.Exit(1) + } + + documents := []map[string]interface{}{ + {"product_id": 123, "title": "white shirt", "sku": "sku1234", "url": "https://example.com/products/p123"}, + {"product_id": 456, "title": "red shirt", "sku": "sku213", "url": "https://example.com/products/p456"}, + {"product_id": 1, "title": "green shirt", "sku": "sku876", "url": "https://example.com/products/p1"}, + {"product_id": 1344, "title": "blue shirt", "sku": "sku963", "url": "https://example.com/products/p1344"}, + {"product_id": 4, "title": "yellow shirt", "sku": "sku9064", "url": "https://example.com/products/p4"}, + {"product_id": 42, "title": "gray shirt", "sku": "sku964", "url": "https://example.com/products/p42"}, + } + task, err = index.AddDocuments(documents) + if err != nil { + fmt.Println(err) + os.Exit(1) + } + finalTask, _ = index.WaitForTask(task.TaskUID) + if finalTask.Status != "succeeded" { + os.Exit(1) + } +} + func SetUpIndexWithNestedFields(indexUID string) { client := NewClient(ClientConfig{ Host: 
getenv("MEILISEARCH_URL", "http://localhost:7700"), diff --git a/types.go b/types.go index fe7b3a4e..c24e1e22 100644 --- a/types.go +++ b/types.go @@ -360,6 +360,7 @@ type SearchRequest struct { Page int64 IndexUID string Query string + Distinct string Hybrid *SearchRequestHybrid RetrieveVectors bool } diff --git a/types_easyjson.go b/types_easyjson.go index d7408675..e901741c 100644 --- a/types_easyjson.go +++ b/types_easyjson.go @@ -2660,6 +2660,8 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out.IndexUID = string(in.String()) case "Query": out.Query = string(in.String()) + case "Distinct": + out.Distinct = string(in.String()) case "Hybrid": if in.IsNull() { in.Skip() @@ -2884,6 +2886,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ out.RawString(prefix) out.String(string(in.Query)) } + { + const prefix string = ",\"Distinct\":" + out.RawString(prefix) + out.String(string(in.Distinct)) + } { const prefix string = ",\"Hybrid\":" out.RawString(prefix) From 8b4382e7e5c5ae63bb8e0084446d0c1e63af1888 Mon Sep 17 00:00:00 2001 From: Javad Date: Wed, 31 Jul 2024 15:22:36 +0330 Subject: [PATCH 18/43] fix: add searchCutoffMs in code sample and update settings --- .code-samples.meilisearch.yaml | 1 + index_settings_test.go | 10 +++++----- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.code-samples.meilisearch.yaml b/.code-samples.meilisearch.yaml index 4cd0d620..6507ea27 100644 --- a/.code-samples.meilisearch.yaml +++ b/.code-samples.meilisearch.yaml @@ -247,6 +247,7 @@ update_settings_1: |- Faceting: &meilisearch.Faceting{ MaxValuesPerFacet: 200, }, + SearchCutoffMs: 150, } client.Index("movies").UpdateSettings(&settings) reset_settings_1: |- diff --git a/index_settings_test.go b/index_settings_test.go index 5795455f..47918570 100644 --- a/index_settings_test.go +++ b/index_settings_test.go @@ -1533,6 +1533,7 @@ func TestIndex_UpdateSettings(t *testing.T) { Faceting: &Faceting{ 
MaxValuesPerFacet: 200, }, + SearchCutoffMs: 150, }, }, wantTask: &TaskInfo{ @@ -1550,6 +1551,7 @@ func TestIndex_UpdateSettings(t *testing.T) { TypoTolerance: &defaultTypoTolerance, Pagination: &defaultPagination, Faceting: &defaultFaceting, + SearchCutoffMs: 150, }, }, { @@ -1595,6 +1597,7 @@ func TestIndex_UpdateSettings(t *testing.T) { Faceting: &Faceting{ MaxValuesPerFacet: 200, }, + SearchCutoffMs: 150, }, }, wantTask: &TaskInfo{ @@ -1612,6 +1615,7 @@ func TestIndex_UpdateSettings(t *testing.T) { TypoTolerance: &defaultTypoTolerance, Pagination: &defaultPagination, Faceting: &defaultFaceting, + SearchCutoffMs: 150, }, }, } @@ -1622,16 +1626,12 @@ func TestIndex_UpdateSettings(t *testing.T) { i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) - gotResp, err := i.GetSettings() - require.NoError(t, err) - require.Equal(t, tt.wantResp, gotResp) - gotTask, err := i.UpdateSettings(&tt.args.request) require.NoError(t, err) require.GreaterOrEqual(t, gotTask.TaskUID, tt.wantTask.TaskUID) testWaitForTask(t, i, gotTask) - gotResp, err = i.GetSettings() + gotResp, err := i.GetSettings() require.NoError(t, err) require.Equal(t, &tt.args.request, gotResp) }) From f792ff591edd48a0f76f951e84a72510009fbf92 Mon Sep 17 00:00:00 2001 From: Javad Date: Wed, 31 Jul 2024 15:55:25 +0330 Subject: [PATCH 19/43] Feat get similar documents --- .code-samples.meilisearch.yaml | 5 + index.go | 1 + index_search.go | 17 + index_search_test.go | 32 + types.go | 23 + types_easyjson.go | 1398 ++++++++++++++++++++------------ 6 files changed, 970 insertions(+), 506 deletions(-) diff --git a/.code-samples.meilisearch.yaml b/.code-samples.meilisearch.yaml index 6ce81f03..24ebd198 100644 --- a/.code-samples.meilisearch.yaml +++ b/.code-samples.meilisearch.yaml @@ -898,6 +898,11 @@ distinct_attribute_guide_filterable_1: |- "url", } client.Index("products").UpdateFilterableAttributes(&filterableAttributes) +get_similar_post_1: |- + resp := new(meilisearch.SimilarDocumentResult) + 
client.Index("INDEX_NAME").SearchSimilarDocuments(&meilisearch.SimilarDocumentQuery{ + Id: "TARGET_DOCUMENT_ID", + }, resp) search_parameter_reference_ranking_score_threshold_1: |- client.Index("INDEX_NAME").Search("badman", &meilisearch.SearchRequest{ RankingScoreThreshold: 0.2, diff --git a/index.go b/index.go index 58236392..94caa37e 100644 --- a/index.go +++ b/index.go @@ -46,6 +46,7 @@ type IndexInterface interface { DeleteAllDocuments() (resp *TaskInfo, err error) Search(query string, request *SearchRequest) (*SearchResponse, error) SearchRaw(query string, request *SearchRequest) (*json.RawMessage, error) + SearchSimilarDocuments(param *SimilarDocumentQuery, resp *SimilarDocumentResult) error GetTask(taskUID int64) (resp *Task, err error) GetTasks(param *TasksQuery) (resp *TaskResult, err error) diff --git a/index_search.go b/index_search.go index af1e45ab..f57dc27c 100644 --- a/index_search.go +++ b/index_search.go @@ -78,6 +78,23 @@ func (i Index) Search(query string, request *SearchRequest) (*SearchResponse, er return resp, nil } +func (i Index) SearchSimilarDocuments(param *SimilarDocumentQuery, resp *SimilarDocumentResult) error { + req := internalRequest{ + endpoint: "/indexes/" + i.UID + "/similar", + method: http.MethodPost, + withRequest: param, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "SearchSimilarDocuments", + contentType: contentTypeJSON, + } + + if err := i.client.executeRequest(req); err != nil { + return err + } + return nil +} + func searchPostRequestParams(query string, request *SearchRequest) map[string]interface{} { params := make(map[string]interface{}, 22) diff --git a/index_search_test.go b/index_search_test.go index 077a12db..f43be55f 100644 --- a/index_search_test.go +++ b/index_search_test.go @@ -1724,3 +1724,35 @@ func TestIndex_SearchWithDistinct(t *testing.T) { }) } } + +func TestIndex_SearchSimilarDocuments(t *testing.T) { + tests := []struct { + UID string + PrimaryKey string + client 
*Client + request *SimilarDocumentQuery + resp *SimilarDocumentResult + }{ + { + UID: "indexUID", + client: defaultClient, + request: &SimilarDocumentQuery{ + Id: "123", + }, + resp: new(SimilarDocumentResult), + }, + } + + for _, tt := range tests { + t.Run(tt.UID, func(t *testing.T) { + i, err := SetUpIndexWithVector(tt.UID) + require.NoError(t, err) + c := tt.client + t.Cleanup(cleanup(c)) + + err = i.SearchSimilarDocuments(tt.request, tt.resp) + require.NoError(t, err) + require.NotNil(t, tt.resp) + }) + } +} diff --git a/types.go b/types.go index e5841115..eac86550 100644 --- a/types.go +++ b/types.go @@ -424,6 +424,29 @@ type DocumentsQuery struct { Filter interface{} `json:"filter,omitempty"` } +// SimilarDocumentQuery is query parameters of similar documents +type SimilarDocumentQuery struct { + Id interface{} `json:"id,omitempty"` + Embedder string `json:"embedder,omitempty"` + AttributesToRetrieve []string `json:"attributesToRetrieve,omitempty"` + Offset int64 `json:"offset,omitempty"` + Limit int64 `json:"limit,omitempty"` + Filter string `json:"filter,omitempty"` + ShowRankingScore bool `json:"showRankingScore,omitempty"` + ShowRankingScoreDetails bool `json:"showRankingScoreDetails,omitempty"` + RankingScoreThreshold float64 `json:"rankingScoreThreshold,omitempty"` + RetrieveVectors bool `json:"retrieveVectors,omitempty"` +} + +type SimilarDocumentResult struct { + Hits []interface{} `json:"hits,omitempty"` + ID string `json:"id,omitempty"` + ProcessingTimeMS int64 `json:"processingTimeMs,omitempty"` + Limit int64 `json:"limit,omitempty"` + Offset int64 `json:"offset,omitempty"` + EstimatedTotalHits int64 `json:"estimatedTotalHits,omitempty"` +} + type CsvDocumentsQuery struct { PrimaryKey string `json:"primaryKey,omitempty"` CsvDelimiter string `json:"csvDelimiter,omitempty"` diff --git a/types_easyjson.go b/types_easyjson.go index 849b40cb..92f919ad 100644 --- a/types_easyjson.go +++ b/types_easyjson.go @@ -1628,7 +1628,393 @@ func (v *Stats) 
UnmarshalJSON(data []byte) error { func (v *Stats) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo12(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, out *Settings) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, out *SimilarDocumentResult) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "hits": + if in.IsNull() { + in.Skip() + out.Hits = nil + } else { + in.Delim('[') + if out.Hits == nil { + if !in.IsDelim(']') { + out.Hits = make([]interface{}, 0, 4) + } else { + out.Hits = []interface{}{} + } + } else { + out.Hits = (out.Hits)[:0] + } + for !in.IsDelim(']') { + var v32 interface{} + if m, ok := v32.(easyjson.Unmarshaler); ok { + m.UnmarshalEasyJSON(in) + } else if m, ok := v32.(json.Unmarshaler); ok { + _ = m.UnmarshalJSON(in.Raw()) + } else { + v32 = in.Interface() + } + out.Hits = append(out.Hits, v32) + in.WantComma() + } + in.Delim(']') + } + case "id": + out.ID = string(in.String()) + case "processingTimeMs": + out.ProcessingTimeMS = int64(in.Int64()) + case "limit": + out.Limit = int64(in.Int64()) + case "offset": + out.Offset = int64(in.Int64()) + case "estimatedTotalHits": + out.EstimatedTotalHits = int64(in.Int64()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writer, in SimilarDocumentResult) { + out.RawByte('{') + first := true + _ = first + if len(in.Hits) != 0 { + const prefix string = ",\"hits\":" + first = false + out.RawString(prefix[1:]) + { + out.RawByte('[') + for v33, v34 := range in.Hits { + if v33 > 0 { + out.RawByte(',') + } + if m, ok 
:= v34.(easyjson.Marshaler); ok { + m.MarshalEasyJSON(out) + } else if m, ok := v34.(json.Marshaler); ok { + out.Raw(m.MarshalJSON()) + } else { + out.Raw(json.Marshal(v34)) + } + } + out.RawByte(']') + } + } + if in.ID != "" { + const prefix string = ",\"id\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.ID)) + } + if in.ProcessingTimeMS != 0 { + const prefix string = ",\"processingTimeMs\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.ProcessingTimeMS)) + } + if in.Limit != 0 { + const prefix string = ",\"limit\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.Limit)) + } + if in.Offset != 0 { + const prefix string = ",\"offset\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.Offset)) + } + if in.EstimatedTotalHits != 0 { + const prefix string = ",\"estimatedTotalHits\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.EstimatedTotalHits)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v SimilarDocumentResult) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v SimilarDocumentResult) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *SimilarDocumentResult) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface 
+func (v *SimilarDocumentResult) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(in *jlexer.Lexer, out *SimilarDocumentQuery) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "id": + if m, ok := out.Id.(easyjson.Unmarshaler); ok { + m.UnmarshalEasyJSON(in) + } else if m, ok := out.Id.(json.Unmarshaler); ok { + _ = m.UnmarshalJSON(in.Raw()) + } else { + out.Id = in.Interface() + } + case "embedder": + out.Embedder = string(in.String()) + case "attributesToRetrieve": + if in.IsNull() { + in.Skip() + out.AttributesToRetrieve = nil + } else { + in.Delim('[') + if out.AttributesToRetrieve == nil { + if !in.IsDelim(']') { + out.AttributesToRetrieve = make([]string, 0, 4) + } else { + out.AttributesToRetrieve = []string{} + } + } else { + out.AttributesToRetrieve = (out.AttributesToRetrieve)[:0] + } + for !in.IsDelim(']') { + var v35 string + v35 = string(in.String()) + out.AttributesToRetrieve = append(out.AttributesToRetrieve, v35) + in.WantComma() + } + in.Delim(']') + } + case "offset": + out.Offset = int64(in.Int64()) + case "limit": + out.Limit = int64(in.Int64()) + case "filter": + out.Filter = string(in.String()) + case "showRankingScore": + out.ShowRankingScore = bool(in.Bool()) + case "showRankingScoreDetails": + out.ShowRankingScoreDetails = bool(in.Bool()) + case "rankingScoreThreshold": + out.RankingScoreThreshold = float64(in.Float64()) + case "retrieveVectors": + out.RetrieveVectors = bool(in.Bool()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(out *jwriter.Writer, in 
SimilarDocumentQuery) { + out.RawByte('{') + first := true + _ = first + if in.Id != nil { + const prefix string = ",\"id\":" + first = false + out.RawString(prefix[1:]) + if m, ok := in.Id.(easyjson.Marshaler); ok { + m.MarshalEasyJSON(out) + } else if m, ok := in.Id.(json.Marshaler); ok { + out.Raw(m.MarshalJSON()) + } else { + out.Raw(json.Marshal(in.Id)) + } + } + if in.Embedder != "" { + const prefix string = ",\"embedder\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.Embedder)) + } + if len(in.AttributesToRetrieve) != 0 { + const prefix string = ",\"attributesToRetrieve\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v36, v37 := range in.AttributesToRetrieve { + if v36 > 0 { + out.RawByte(',') + } + out.String(string(v37)) + } + out.RawByte(']') + } + } + if in.Offset != 0 { + const prefix string = ",\"offset\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.Offset)) + } + if in.Limit != 0 { + const prefix string = ",\"limit\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.Limit)) + } + if in.Filter != "" { + const prefix string = ",\"filter\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.Filter)) + } + if in.ShowRankingScore { + const prefix string = ",\"showRankingScore\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Bool(bool(in.ShowRankingScore)) + } + if in.ShowRankingScoreDetails { + const prefix string = ",\"showRankingScoreDetails\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Bool(bool(in.ShowRankingScoreDetails)) + } + if in.RankingScoreThreshold != 0 { + const 
prefix string = ",\"rankingScoreThreshold\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Float64(float64(in.RankingScoreThreshold)) + } + if in.RetrieveVectors { + const prefix string = ",\"retrieveVectors\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Bool(bool(in.RetrieveVectors)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v SimilarDocumentQuery) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v SimilarDocumentQuery) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *SimilarDocumentQuery) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *SimilarDocumentQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(in *jlexer.Lexer, out *Settings) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -1663,9 +2049,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, out.RankingRules = (out.RankingRules)[:0] } for !in.IsDelim(']') { - var v32 string - v32 = string(in.String()) - out.RankingRules = append(out.RankingRules, v32) + var v38 string + v38 = string(in.String()) + out.RankingRules = append(out.RankingRules, v38) in.WantComma() } in.Delim(']') @@ -1696,9 +2082,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, out.SearchableAttributes 
= (out.SearchableAttributes)[:0] } for !in.IsDelim(']') { - var v33 string - v33 = string(in.String()) - out.SearchableAttributes = append(out.SearchableAttributes, v33) + var v39 string + v39 = string(in.String()) + out.SearchableAttributes = append(out.SearchableAttributes, v39) in.WantComma() } in.Delim(']') @@ -1719,9 +2105,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, out.DisplayedAttributes = (out.DisplayedAttributes)[:0] } for !in.IsDelim(']') { - var v34 string - v34 = string(in.String()) - out.DisplayedAttributes = append(out.DisplayedAttributes, v34) + var v40 string + v40 = string(in.String()) + out.DisplayedAttributes = append(out.DisplayedAttributes, v40) in.WantComma() } in.Delim(']') @@ -1742,9 +2128,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, out.StopWords = (out.StopWords)[:0] } for !in.IsDelim(']') { - var v35 string - v35 = string(in.String()) - out.StopWords = append(out.StopWords, v35) + var v41 string + v41 = string(in.String()) + out.StopWords = append(out.StopWords, v41) in.WantComma() } in.Delim(']') @@ -1762,30 +2148,30 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, for !in.IsDelim('}') { key := string(in.String()) in.WantColon() - var v36 []string + var v42 []string if in.IsNull() { in.Skip() - v36 = nil + v42 = nil } else { in.Delim('[') - if v36 == nil { + if v42 == nil { if !in.IsDelim(']') { - v36 = make([]string, 0, 4) + v42 = make([]string, 0, 4) } else { - v36 = []string{} + v42 = []string{} } } else { - v36 = (v36)[:0] + v42 = (v42)[:0] } for !in.IsDelim(']') { - var v37 string - v37 = string(in.String()) - v36 = append(v36, v37) + var v43 string + v43 = string(in.String()) + v42 = append(v42, v43) in.WantComma() } in.Delim(']') } - (out.Synonyms)[key] = v36 + (out.Synonyms)[key] = v42 in.WantComma() } in.Delim('}') @@ -1806,9 +2192,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in 
*jlexer.Lexer, out.FilterableAttributes = (out.FilterableAttributes)[:0] } for !in.IsDelim(']') { - var v38 string - v38 = string(in.String()) - out.FilterableAttributes = append(out.FilterableAttributes, v38) + var v44 string + v44 = string(in.String()) + out.FilterableAttributes = append(out.FilterableAttributes, v44) in.WantComma() } in.Delim(']') @@ -1829,9 +2215,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, out.SortableAttributes = (out.SortableAttributes)[:0] } for !in.IsDelim(']') { - var v39 string - v39 = string(in.String()) - out.SortableAttributes = append(out.SortableAttributes, v39) + var v45 string + v45 = string(in.String()) + out.SortableAttributes = append(out.SortableAttributes, v45) in.WantComma() } in.Delim(']') @@ -1879,9 +2265,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, for !in.IsDelim('}') { key := string(in.String()) in.WantColon() - var v40 Embedder - (v40).UnmarshalEasyJSON(in) - (out.Embedders)[key] = v40 + var v46 Embedder + (v46).UnmarshalEasyJSON(in) + (out.Embedders)[key] = v46 in.WantComma() } in.Delim('}') @@ -1896,7 +2282,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writer, in Settings) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(out *jwriter.Writer, in Settings) { out.RawByte('{') first := true _ = first @@ -1906,11 +2292,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ out.RawString(prefix[1:]) { out.RawByte('[') - for v41, v42 := range in.RankingRules { - if v41 > 0 { + for v47, v48 := range in.RankingRules { + if v47 > 0 { out.RawByte(',') } - out.String(string(v42)) + out.String(string(v48)) } out.RawByte(']') } @@ -1935,11 +2321,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ } { out.RawByte('[') - for v43, v44 := 
range in.SearchableAttributes { - if v43 > 0 { + for v49, v50 := range in.SearchableAttributes { + if v49 > 0 { out.RawByte(',') } - out.String(string(v44)) + out.String(string(v50)) } out.RawByte(']') } @@ -1954,11 +2340,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ } { out.RawByte('[') - for v45, v46 := range in.DisplayedAttributes { - if v45 > 0 { + for v51, v52 := range in.DisplayedAttributes { + if v51 > 0 { out.RawByte(',') } - out.String(string(v46)) + out.String(string(v52)) } out.RawByte(']') } @@ -1973,11 +2359,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ } { out.RawByte('[') - for v47, v48 := range in.StopWords { - if v47 > 0 { + for v53, v54 := range in.StopWords { + if v53 > 0 { out.RawByte(',') } - out.String(string(v48)) + out.String(string(v54)) } out.RawByte(']') } @@ -1992,24 +2378,24 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ } { out.RawByte('{') - v49First := true - for v49Name, v49Value := range in.Synonyms { - if v49First { - v49First = false + v55First := true + for v55Name, v55Value := range in.Synonyms { + if v55First { + v55First = false } else { out.RawByte(',') } - out.String(string(v49Name)) + out.String(string(v55Name)) out.RawByte(':') - if v49Value == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + if v55Value == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') - for v50, v51 := range v49Value { - if v50 > 0 { + for v56, v57 := range v55Value { + if v56 > 0 { out.RawByte(',') } - out.String(string(v51)) + out.String(string(v57)) } out.RawByte(']') } @@ -2027,11 +2413,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ } { out.RawByte('[') - for v52, v53 := range in.FilterableAttributes { - if v52 > 0 { + for v58, v59 := range in.FilterableAttributes { + if v58 > 0 { out.RawByte(',') } - out.String(string(v53)) + 
out.String(string(v59)) } out.RawByte(']') } @@ -2046,11 +2432,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ } { out.RawByte('[') - for v54, v55 := range in.SortableAttributes { - if v54 > 0 { + for v60, v61 := range in.SortableAttributes { + if v60 > 0 { out.RawByte(',') } - out.String(string(v55)) + out.String(string(v61)) } out.RawByte(']') } @@ -2095,16 +2481,16 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ } { out.RawByte('{') - v56First := true - for v56Name, v56Value := range in.Embedders { - if v56First { - v56First = false + v62First := true + for v62Name, v62Value := range in.Embedders { + if v62First { + v62First = false } else { out.RawByte(',') } - out.String(string(v56Name)) + out.String(string(v62Name)) out.RawByte(':') - (v56Value).MarshalEasyJSON(out) + (v62Value).MarshalEasyJSON(out) } out.RawByte('}') } @@ -2115,27 +2501,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Settings) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Settings) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Settings) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Settings) UnmarshalEasyJSON(l *jlexer.Lexer) { - 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(in *jlexer.Lexer, out *SearchResponse) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out *SearchResponse) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -2170,15 +2556,15 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(in *jlexer.Lexer, out.Hits = (out.Hits)[:0] } for !in.IsDelim(']') { - var v57 interface{} - if m, ok := v57.(easyjson.Unmarshaler); ok { + var v63 interface{} + if m, ok := v63.(easyjson.Unmarshaler); ok { m.UnmarshalEasyJSON(in) - } else if m, ok := v57.(json.Unmarshaler); ok { + } else if m, ok := v63.(json.Unmarshaler); ok { _ = m.UnmarshalJSON(in.Raw()) } else { - v57 = in.Interface() + v63 = in.Interface() } - out.Hits = append(out.Hits, v57) + out.Hits = append(out.Hits, v63) in.WantComma() } in.Delim(']') @@ -2229,7 +2615,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(out *jwriter.Writer, in SearchResponse) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writer, in SearchResponse) { out.RawByte('{') first := true _ = first @@ -2240,16 +2626,16 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v58, v59 := range in.Hits { - if v58 > 0 { + for v64, v65 := range in.Hits { + if v64 > 0 { out.RawByte(',') } - if m, ok := v59.(easyjson.Marshaler); ok { + if m, ok := v65.(easyjson.Marshaler); ok { m.MarshalEasyJSON(out) - } else if m, ok := v59.(json.Marshaler); ok { + } else if m, ok := v65.(json.Marshaler); ok { out.Raw(m.MarshalJSON()) } else { - out.Raw(json.Marshal(v59)) + out.Raw(json.Marshal(v65)) } } out.RawByte(']') @@ -2333,27 +2719,27 @@ 
func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v SearchResponse) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v SearchResponse) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *SearchResponse) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *SearchResponse) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(in *jlexer.Lexer, out *SearchRequestHybrid) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(in *jlexer.Lexer, out *SearchRequestHybrid) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -2386,7 +2772,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(out *jwriter.Writer, in SearchRequestHybrid) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(out *jwriter.Writer, in SearchRequestHybrid) { out.RawByte('{') first := true _ = first @@ -2406,27 +2792,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(out *jwriter.Writ // MarshalJSON supports json.Marshaler 
interface func (v SearchRequestHybrid) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v SearchRequestHybrid) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *SearchRequestHybrid) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *SearchRequestHybrid) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out *SearchRequest) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(in *jlexer.Lexer, out *SearchRequest) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -2465,9 +2851,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out.AttributesToRetrieve = (out.AttributesToRetrieve)[:0] } for !in.IsDelim(']') { - var v60 string - v60 = string(in.String()) - out.AttributesToRetrieve = append(out.AttributesToRetrieve, v60) + var v66 string + v66 = string(in.String()) + out.AttributesToRetrieve = append(out.AttributesToRetrieve, v66) in.WantComma() } in.Delim(']') @@ -2488,9 +2874,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out.AttributesToSearchOn = (out.AttributesToSearchOn)[:0] } for !in.IsDelim(']') { - var v61 string 
- v61 = string(in.String()) - out.AttributesToSearchOn = append(out.AttributesToSearchOn, v61) + var v67 string + v67 = string(in.String()) + out.AttributesToSearchOn = append(out.AttributesToSearchOn, v67) in.WantComma() } in.Delim(']') @@ -2511,9 +2897,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out.AttributesToCrop = (out.AttributesToCrop)[:0] } for !in.IsDelim(']') { - var v62 string - v62 = string(in.String()) - out.AttributesToCrop = append(out.AttributesToCrop, v62) + var v68 string + v68 = string(in.String()) + out.AttributesToCrop = append(out.AttributesToCrop, v68) in.WantComma() } in.Delim(']') @@ -2538,9 +2924,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out.AttributesToHighlight = (out.AttributesToHighlight)[:0] } for !in.IsDelim(']') { - var v63 string - v63 = string(in.String()) - out.AttributesToHighlight = append(out.AttributesToHighlight, v63) + var v69 string + v69 = string(in.String()) + out.AttributesToHighlight = append(out.AttributesToHighlight, v69) in.WantComma() } in.Delim(']') @@ -2581,9 +2967,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out.Facets = (out.Facets)[:0] } for !in.IsDelim(']') { - var v64 string - v64 = string(in.String()) - out.Facets = append(out.Facets, v64) + var v70 string + v70 = string(in.String()) + out.Facets = append(out.Facets, v70) in.WantComma() } in.Delim(']') @@ -2606,9 +2992,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out.Sort = (out.Sort)[:0] } for !in.IsDelim(']') { - var v65 string - v65 = string(in.String()) - out.Sort = append(out.Sort, v65) + var v71 string + v71 = string(in.String()) + out.Sort = append(out.Sort, v71) in.WantComma() } in.Delim(']') @@ -2629,9 +3015,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out.Vector = (out.Vector)[:0] } for !in.IsDelim(']') { - var v66 float32 - v66 = 
float32(in.Float32()) - out.Vector = append(out.Vector, v66) + var v72 float32 + v72 = float32(in.Float32()) + out.Vector = append(out.Vector, v72) in.WantComma() } in.Delim(']') @@ -2670,7 +3056,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writer, in SearchRequest) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(out *jwriter.Writer, in SearchRequest) { out.RawByte('{') first := true _ = first @@ -2691,11 +3077,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v67, v68 := range in.AttributesToRetrieve { - if v67 > 0 { + for v73, v74 := range in.AttributesToRetrieve { + if v73 > 0 { out.RawByte(',') } - out.String(string(v68)) + out.String(string(v74)) } out.RawByte(']') } @@ -2707,11 +3093,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v69, v70 := range in.AttributesToSearchOn { - if v69 > 0 { + for v75, v76 := range in.AttributesToSearchOn { + if v75 > 0 { out.RawByte(',') } - out.String(string(v70)) + out.String(string(v76)) } out.RawByte(']') } @@ -2723,11 +3109,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v71, v72 := range in.AttributesToCrop { - if v71 > 0 { + for v77, v78 := range in.AttributesToCrop { + if v77 > 0 { out.RawByte(',') } - out.String(string(v72)) + out.String(string(v78)) } out.RawByte(']') } @@ -2749,11 +3135,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v73, v74 := range in.AttributesToHighlight { - if v73 > 0 { + for v79, v80 := range in.AttributesToHighlight { + if v79 > 0 { out.RawByte(',') } - out.String(string(v74)) + 
out.String(string(v80)) } out.RawByte(']') } @@ -2806,11 +3192,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v75, v76 := range in.Facets { - if v75 > 0 { + for v81, v82 := range in.Facets { + if v81 > 0 { out.RawByte(',') } - out.String(string(v76)) + out.String(string(v82)) } out.RawByte(']') } @@ -2827,11 +3213,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v77, v78 := range in.Sort { - if v77 > 0 { + for v83, v84 := range in.Sort { + if v83 > 0 { out.RawByte(',') } - out.String(string(v78)) + out.String(string(v84)) } out.RawByte(']') } @@ -2843,11 +3229,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v79, v80 := range in.Vector { - if v79 > 0 { + for v85, v86 := range in.Vector { + if v85 > 0 { out.RawByte(',') } - out.Float32(float32(v80)) + out.Float32(float32(v86)) } out.RawByte(']') } @@ -2902,27 +3288,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v SearchRequest) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v SearchRequest) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *SearchRequest) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(&r, v) + 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *SearchRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(in *jlexer.Lexer, out *Pagination) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(in *jlexer.Lexer, out *Pagination) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -2953,7 +3339,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(out *jwriter.Writer, in Pagination) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(out *jwriter.Writer, in Pagination) { out.RawByte('{') first := true _ = first @@ -2968,27 +3354,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Pagination) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Pagination) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Pagination) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Pagination) 
UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(in *jlexer.Lexer, out *MultiSearchResponse) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(in *jlexer.Lexer, out *MultiSearchResponse) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -3023,9 +3409,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(in *jlexer.Lexer, out.Results = (out.Results)[:0] } for !in.IsDelim(']') { - var v81 SearchResponse - (v81).UnmarshalEasyJSON(in) - out.Results = append(out.Results, v81) + var v87 SearchResponse + (v87).UnmarshalEasyJSON(in) + out.Results = append(out.Results, v87) in.WantComma() } in.Delim(']') @@ -3040,7 +3426,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(out *jwriter.Writer, in MultiSearchResponse) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(out *jwriter.Writer, in MultiSearchResponse) { out.RawByte('{') first := true _ = first @@ -3051,11 +3437,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v82, v83 := range in.Results { - if v82 > 0 { + for v88, v89 := range in.Results { + if v88 > 0 { out.RawByte(',') } - (v83).MarshalEasyJSON(out) + (v89).MarshalEasyJSON(out) } out.RawByte(']') } @@ -3066,27 +3452,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v MultiSearchResponse) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(&w, v) return w.Buffer.BuildBytes(), w.Error } // 
MarshalEasyJSON supports easyjson.Marshaler interface func (v MultiSearchResponse) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *MultiSearchResponse) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *MultiSearchResponse) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(in *jlexer.Lexer, out *MultiSearchRequest) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(in *jlexer.Lexer, out *MultiSearchRequest) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -3121,9 +3507,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(in *jlexer.Lexer, out.Queries = (out.Queries)[:0] } for !in.IsDelim(']') { - var v84 SearchRequest - (v84).UnmarshalEasyJSON(in) - out.Queries = append(out.Queries, v84) + var v90 SearchRequest + (v90).UnmarshalEasyJSON(in) + out.Queries = append(out.Queries, v90) in.WantComma() } in.Delim(']') @@ -3138,7 +3524,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(out *jwriter.Writer, in MultiSearchRequest) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(out *jwriter.Writer, in MultiSearchRequest) { out.RawByte('{') first := true _ = first @@ -3149,11 +3535,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(out *jwriter.Writ out.RawString("null") } else { 
out.RawByte('[') - for v85, v86 := range in.Queries { - if v85 > 0 { + for v91, v92 := range in.Queries { + if v91 > 0 { out.RawByte(',') } - (v86).MarshalEasyJSON(out) + (v92).MarshalEasyJSON(out) } out.RawByte(']') } @@ -3164,27 +3550,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v MultiSearchRequest) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v MultiSearchRequest) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *MultiSearchRequest) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *MultiSearchRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(in *jlexer.Lexer, out *MinWordSizeForTypos) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(in *jlexer.Lexer, out *MinWordSizeForTypos) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -3217,7 +3603,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(out *jwriter.Writer, in MinWordSizeForTypos) { +func 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(out *jwriter.Writer, in MinWordSizeForTypos) { out.RawByte('{') first := true _ = first @@ -3243,27 +3629,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v MinWordSizeForTypos) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v MinWordSizeForTypos) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *MinWordSizeForTypos) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *MinWordSizeForTypos) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(in *jlexer.Lexer, out *KeysResults) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(in *jlexer.Lexer, out *KeysResults) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -3298,9 +3684,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(in *jlexer.Lexer, out.Results = (out.Results)[:0] } for !in.IsDelim(']') { - var v87 Key - (v87).UnmarshalEasyJSON(in) - out.Results = append(out.Results, v87) + var v93 Key + (v93).UnmarshalEasyJSON(in) + out.Results = append(out.Results, v93) in.WantComma() } 
in.Delim(']') @@ -3321,7 +3707,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(out *jwriter.Writer, in KeysResults) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(out *jwriter.Writer, in KeysResults) { out.RawByte('{') first := true _ = first @@ -3332,11 +3718,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v88, v89 := range in.Results { - if v88 > 0 { + for v94, v95 := range in.Results { + if v94 > 0 { out.RawByte(',') } - (v89).MarshalEasyJSON(out) + (v95).MarshalEasyJSON(out) } out.RawByte(']') } @@ -3362,27 +3748,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v KeysResults) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v KeysResults) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *KeysResults) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *KeysResults) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(l, v) } -func 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(in *jlexer.Lexer, out *KeysQuery) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(in *jlexer.Lexer, out *KeysQuery) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -3415,7 +3801,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(out *jwriter.Writer, in KeysQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(out *jwriter.Writer, in KeysQuery) { out.RawByte('{') first := true _ = first @@ -3435,27 +3821,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v KeysQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v KeysQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *KeysQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *KeysQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(in *jlexer.Lexer, out *KeyUpdate) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(in *jlexer.Lexer, out 
*KeyUpdate) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -3488,7 +3874,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(out *jwriter.Writer, in KeyUpdate) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(out *jwriter.Writer, in KeyUpdate) { out.RawByte('{') first := true _ = first @@ -3514,27 +3900,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v KeyUpdate) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v KeyUpdate) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *KeyUpdate) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *KeyUpdate) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(in *jlexer.Lexer, out *KeyParsed) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(in *jlexer.Lexer, out *KeyParsed) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -3575,9 +3961,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(in *jlexer.Lexer, 
out.Actions = (out.Actions)[:0] } for !in.IsDelim(']') { - var v90 string - v90 = string(in.String()) - out.Actions = append(out.Actions, v90) + var v96 string + v96 = string(in.String()) + out.Actions = append(out.Actions, v96) in.WantComma() } in.Delim(']') @@ -3598,9 +3984,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(in *jlexer.Lexer, out.Indexes = (out.Indexes)[:0] } for !in.IsDelim(']') { - var v91 string - v91 = string(in.String()) - out.Indexes = append(out.Indexes, v91) + var v97 string + v97 = string(in.String()) + out.Indexes = append(out.Indexes, v97) in.WantComma() } in.Delim(']') @@ -3625,7 +4011,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(out *jwriter.Writer, in KeyParsed) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(out *jwriter.Writer, in KeyParsed) { out.RawByte('{') first := true _ = first @@ -3649,11 +4035,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(out *jwriter.Writ out.RawString(prefix) { out.RawByte('[') - for v92, v93 := range in.Actions { - if v92 > 0 { + for v98, v99 := range in.Actions { + if v98 > 0 { out.RawByte(',') } - out.String(string(v93)) + out.String(string(v99)) } out.RawByte(']') } @@ -3663,11 +4049,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(out *jwriter.Writ out.RawString(prefix) { out.RawByte('[') - for v94, v95 := range in.Indexes { - if v94 > 0 { + for v100, v101 := range in.Indexes { + if v100 > 0 { out.RawByte(',') } - out.String(string(v95)) + out.String(string(v101)) } out.RawByte(']') } @@ -3687,27 +4073,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v KeyParsed) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(&w, v) + 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v KeyParsed) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *KeyParsed) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *KeyParsed) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(in *jlexer.Lexer, out *Key) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(in *jlexer.Lexer, out *Key) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -3750,9 +4136,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(in *jlexer.Lexer, out.Actions = (out.Actions)[:0] } for !in.IsDelim(']') { - var v96 string - v96 = string(in.String()) - out.Actions = append(out.Actions, v96) + var v102 string + v102 = string(in.String()) + out.Actions = append(out.Actions, v102) in.WantComma() } in.Delim(']') @@ -3773,9 +4159,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(in *jlexer.Lexer, out.Indexes = (out.Indexes)[:0] } for !in.IsDelim(']') { - var v97 string - v97 = string(in.String()) - out.Indexes = append(out.Indexes, v97) + var v103 string + v103 = string(in.String()) + out.Indexes = append(out.Indexes, v103) in.WantComma() } in.Delim(']') @@ -3802,7 +4188,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(in *jlexer.Lexer, in.Consumed() } } 
-func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(out *jwriter.Writer, in Key) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(out *jwriter.Writer, in Key) { out.RawByte('{') first := true _ = first @@ -3831,11 +4217,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(out *jwriter.Writ out.RawString(prefix) { out.RawByte('[') - for v98, v99 := range in.Actions { - if v98 > 0 { + for v104, v105 := range in.Actions { + if v104 > 0 { out.RawByte(',') } - out.String(string(v99)) + out.String(string(v105)) } out.RawByte(']') } @@ -3845,11 +4231,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(out *jwriter.Writ out.RawString(prefix) { out.RawByte('[') - for v100, v101 := range in.Indexes { - if v100 > 0 { + for v106, v107 := range in.Indexes { + if v106 > 0 { out.RawByte(',') } - out.String(string(v101)) + out.String(string(v107)) } out.RawByte(']') } @@ -3875,27 +4261,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Key) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Key) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Key) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Key) UnmarshalEasyJSON(l *jlexer.Lexer) { - 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(in *jlexer.Lexer, out *IndexesResults) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(in *jlexer.Lexer, out *IndexesResults) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -3930,9 +4316,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(in *jlexer.Lexer, out.Results = (out.Results)[:0] } for !in.IsDelim(']') { - var v102 Index - (v102).UnmarshalEasyJSON(in) - out.Results = append(out.Results, v102) + var v108 Index + (v108).UnmarshalEasyJSON(in) + out.Results = append(out.Results, v108) in.WantComma() } in.Delim(']') @@ -3953,7 +4339,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(out *jwriter.Writer, in IndexesResults) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(out *jwriter.Writer, in IndexesResults) { out.RawByte('{') first := true _ = first @@ -3964,11 +4350,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v103, v104 := range in.Results { - if v103 > 0 { + for v109, v110 := range in.Results { + if v109 > 0 { out.RawByte(',') } - (v104).MarshalEasyJSON(out) + (v110).MarshalEasyJSON(out) } out.RawByte(']') } @@ -3994,27 +4380,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v IndexesResults) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v IndexesResults) 
MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *IndexesResults) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *IndexesResults) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(in *jlexer.Lexer, out *IndexesQuery) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(in *jlexer.Lexer, out *IndexesQuery) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4047,7 +4433,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(out *jwriter.Writer, in IndexesQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(out *jwriter.Writer, in IndexesQuery) { out.RawByte('{') first := true _ = first @@ -4067,27 +4453,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v IndexesQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v IndexesQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(w, v) + 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *IndexesQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *IndexesQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(in *jlexer.Lexer, out *Index) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(in *jlexer.Lexer, out *Index) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4128,7 +4514,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(out *jwriter.Writer, in Index) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(out *jwriter.Writer, in Index) { out.RawByte('{') first := true _ = first @@ -4158,27 +4544,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Index) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Index) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Index) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Index) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(in *jlexer.Lexer, out *Health) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(in *jlexer.Lexer, out *Health) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4209,7 +4595,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(out *jwriter.Writer, in Health) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(out *jwriter.Writer, in Health) { out.RawByte('{') first := true _ = first @@ -4224,27 +4610,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Health) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Health) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Health) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler 
interface func (v *Health) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(in *jlexer.Lexer, out *Faceting) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(in *jlexer.Lexer, out *Faceting) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4275,7 +4661,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(out *jwriter.Writer, in Faceting) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(out *jwriter.Writer, in Faceting) { out.RawByte('{') first := true _ = first @@ -4290,27 +4676,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Faceting) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Faceting) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Faceting) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Faceting) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(l, 
v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(in *jlexer.Lexer, out *FacetSearchResponse) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(in *jlexer.Lexer, out *FacetSearchResponse) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4345,15 +4731,15 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(in *jlexer.Lexer, out.FacetHits = (out.FacetHits)[:0] } for !in.IsDelim(']') { - var v105 interface{} - if m, ok := v105.(easyjson.Unmarshaler); ok { + var v111 interface{} + if m, ok := v111.(easyjson.Unmarshaler); ok { m.UnmarshalEasyJSON(in) - } else if m, ok := v105.(json.Unmarshaler); ok { + } else if m, ok := v111.(json.Unmarshaler); ok { _ = m.UnmarshalJSON(in.Raw()) } else { - v105 = in.Interface() + v111 = in.Interface() } - out.FacetHits = append(out.FacetHits, v105) + out.FacetHits = append(out.FacetHits, v111) in.WantComma() } in.Delim(']') @@ -4372,7 +4758,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(out *jwriter.Writer, in FacetSearchResponse) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(out *jwriter.Writer, in FacetSearchResponse) { out.RawByte('{') first := true _ = first @@ -4383,16 +4769,16 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v106, v107 := range in.FacetHits { - if v106 > 0 { + for v112, v113 := range in.FacetHits { + if v112 > 0 { out.RawByte(',') } - if m, ok := v107.(easyjson.Marshaler); ok { + if m, ok := v113.(easyjson.Marshaler); ok { m.MarshalEasyJSON(out) - } else if m, ok := v107.(json.Marshaler); ok { + } else if m, ok := v113.(json.Marshaler); ok { out.Raw(m.MarshalJSON()) } else { - out.Raw(json.Marshal(v107)) + out.Raw(json.Marshal(v113)) } } out.RawByte(']') @@ -4414,27 +4800,27 @@ func 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v FacetSearchResponse) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v FacetSearchResponse) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *FacetSearchResponse) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *FacetSearchResponse) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(in *jlexer.Lexer, out *FacetSearchRequest) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(in *jlexer.Lexer, out *FacetSearchRequest) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4479,9 +4865,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(in *jlexer.Lexer, out.AttributesToSearchOn = (out.AttributesToSearchOn)[:0] } for !in.IsDelim(']') { - var v108 string - v108 = string(in.String()) - out.AttributesToSearchOn = append(out.AttributesToSearchOn, v108) + var v114 string + v114 = string(in.String()) + out.AttributesToSearchOn = append(out.AttributesToSearchOn, v114) in.WantComma() } in.Delim(']') @@ -4496,7 +4882,7 @@ func 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(out *jwriter.Writer, in FacetSearchRequest) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(out *jwriter.Writer, in FacetSearchRequest) { out.RawByte('{') first := true _ = first @@ -4556,11 +4942,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(out *jwriter.Writ } { out.RawByte('[') - for v109, v110 := range in.AttributesToSearchOn { - if v109 > 0 { + for v115, v116 := range in.AttributesToSearchOn { + if v115 > 0 { out.RawByte(',') } - out.String(string(v110)) + out.String(string(v116)) } out.RawByte(']') } @@ -4571,27 +4957,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v FacetSearchRequest) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v FacetSearchRequest) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *FacetSearchRequest) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *FacetSearchRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(l, v) } -func 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(in *jlexer.Lexer, out *Embedder) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, out *Embedder) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4630,7 +5016,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(out *jwriter.Writer, in Embedder) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writer, in Embedder) { out.RawByte('{') first := true _ = first @@ -4665,27 +5051,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Embedder) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Embedder) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Embedder) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Embedder) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(in *jlexer.Lexer, out *DocumentsResult) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(in *jlexer.Lexer, out 
*DocumentsResult) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4720,29 +5106,29 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(in *jlexer.Lexer, out.Results = (out.Results)[:0] } for !in.IsDelim(']') { - var v111 map[string]interface{} + var v117 map[string]interface{} if in.IsNull() { in.Skip() } else { in.Delim('{') - v111 = make(map[string]interface{}) + v117 = make(map[string]interface{}) for !in.IsDelim('}') { key := string(in.String()) in.WantColon() - var v112 interface{} - if m, ok := v112.(easyjson.Unmarshaler); ok { + var v118 interface{} + if m, ok := v118.(easyjson.Unmarshaler); ok { m.UnmarshalEasyJSON(in) - } else if m, ok := v112.(json.Unmarshaler); ok { + } else if m, ok := v118.(json.Unmarshaler); ok { _ = m.UnmarshalJSON(in.Raw()) } else { - v112 = in.Interface() + v118 = in.Interface() } - (v111)[key] = v112 + (v117)[key] = v118 in.WantComma() } in.Delim('}') } - out.Results = append(out.Results, v111) + out.Results = append(out.Results, v117) in.WantComma() } in.Delim(']') @@ -4763,7 +5149,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(out *jwriter.Writer, in DocumentsResult) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(out *jwriter.Writer, in DocumentsResult) { out.RawByte('{') first := true _ = first @@ -4774,29 +5160,29 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v113, v114 := range in.Results { - if v113 > 0 { + for v119, v120 := range in.Results { + if v119 > 0 { out.RawByte(',') } - if v114 == nil && (out.Flags&jwriter.NilMapAsEmpty) == 0 { + if v120 == nil && (out.Flags&jwriter.NilMapAsEmpty) == 0 { out.RawString(`null`) } else { out.RawByte('{') - v115First := true - for v115Name, v115Value := range v114 { - if v115First { - v115First = false + v121First := true + 
for v121Name, v121Value := range v120 { + if v121First { + v121First = false } else { out.RawByte(',') } - out.String(string(v115Name)) + out.String(string(v121Name)) out.RawByte(':') - if m, ok := v115Value.(easyjson.Marshaler); ok { + if m, ok := v121Value.(easyjson.Marshaler); ok { m.MarshalEasyJSON(out) - } else if m, ok := v115Value.(json.Marshaler); ok { + } else if m, ok := v121Value.(json.Marshaler); ok { out.Raw(m.MarshalJSON()) } else { - out.Raw(json.Marshal(v115Value)) + out.Raw(json.Marshal(v121Value)) } } out.RawByte('}') @@ -4826,27 +5212,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v DocumentsResult) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v DocumentsResult) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *DocumentsResult) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *DocumentsResult) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, out *DocumentsQuery) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out *DocumentsQuery) { isTopLevel := in.IsStart() if 
in.IsNull() { if isTopLevel { @@ -4885,9 +5271,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, out.Fields = (out.Fields)[:0] } for !in.IsDelim(']') { - var v116 string - v116 = string(in.String()) - out.Fields = append(out.Fields, v116) + var v122 string + v122 = string(in.String()) + out.Fields = append(out.Fields, v122) in.WantComma() } in.Delim(']') @@ -4910,7 +5296,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writer, in DocumentsQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writer, in DocumentsQuery) { out.RawByte('{') first := true _ = first @@ -4940,11 +5326,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writ } { out.RawByte('[') - for v117, v118 := range in.Fields { - if v117 > 0 { + for v123, v124 := range in.Fields { + if v123 > 0 { out.RawByte(',') } - out.String(string(v118)) + out.String(string(v124)) } out.RawByte(']') } @@ -4971,27 +5357,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v DocumentsQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v DocumentsQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *DocumentsQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(&r, v) + 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *DocumentsQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(in *jlexer.Lexer, out *DocumentQuery) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, out *DocumentQuery) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -5026,9 +5412,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(in *jlexer.Lexer, out.Fields = (out.Fields)[:0] } for !in.IsDelim(']') { - var v119 string - v119 = string(in.String()) - out.Fields = append(out.Fields, v119) + var v125 string + v125 = string(in.String()) + out.Fields = append(out.Fields, v125) in.WantComma() } in.Delim(']') @@ -5043,7 +5429,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(out *jwriter.Writer, in DocumentQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writer, in DocumentQuery) { out.RawByte('{') first := true _ = first @@ -5053,11 +5439,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(out *jwriter.Writ out.RawString(prefix[1:]) { out.RawByte('[') - for v120, v121 := range in.Fields { - if v120 > 0 { + for v126, v127 := range in.Fields { + if v126 > 0 { out.RawByte(',') } - out.String(string(v121)) + out.String(string(v127)) } out.RawByte(']') } @@ -5068,27 +5454,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v DocumentQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(&w, v) + 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v DocumentQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *DocumentQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *DocumentQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out *Details) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(in *jlexer.Lexer, out *Details) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -5133,9 +5519,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out.RankingRules = (out.RankingRules)[:0] } for !in.IsDelim(']') { - var v122 string - v122 = string(in.String()) - out.RankingRules = append(out.RankingRules, v122) + var v128 string + v128 = string(in.String()) + out.RankingRules = append(out.RankingRules, v128) in.WantComma() } in.Delim(']') @@ -5166,9 +5552,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out.SearchableAttributes = (out.SearchableAttributes)[:0] } for !in.IsDelim(']') { - var v123 string - v123 = string(in.String()) - out.SearchableAttributes = append(out.SearchableAttributes, v123) + var v129 string + v129 = string(in.String()) + out.SearchableAttributes = append(out.SearchableAttributes, v129) in.WantComma() } 
in.Delim(']') @@ -5189,9 +5575,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out.DisplayedAttributes = (out.DisplayedAttributes)[:0] } for !in.IsDelim(']') { - var v124 string - v124 = string(in.String()) - out.DisplayedAttributes = append(out.DisplayedAttributes, v124) + var v130 string + v130 = string(in.String()) + out.DisplayedAttributes = append(out.DisplayedAttributes, v130) in.WantComma() } in.Delim(']') @@ -5212,9 +5598,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out.StopWords = (out.StopWords)[:0] } for !in.IsDelim(']') { - var v125 string - v125 = string(in.String()) - out.StopWords = append(out.StopWords, v125) + var v131 string + v131 = string(in.String()) + out.StopWords = append(out.StopWords, v131) in.WantComma() } in.Delim(']') @@ -5232,30 +5618,30 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, for !in.IsDelim('}') { key := string(in.String()) in.WantColon() - var v126 []string + var v132 []string if in.IsNull() { in.Skip() - v126 = nil + v132 = nil } else { in.Delim('[') - if v126 == nil { + if v132 == nil { if !in.IsDelim(']') { - v126 = make([]string, 0, 4) + v132 = make([]string, 0, 4) } else { - v126 = []string{} + v132 = []string{} } } else { - v126 = (v126)[:0] + v132 = (v132)[:0] } for !in.IsDelim(']') { - var v127 string - v127 = string(in.String()) - v126 = append(v126, v127) + var v133 string + v133 = string(in.String()) + v132 = append(v132, v133) in.WantComma() } in.Delim(']') } - (out.Synonyms)[key] = v126 + (out.Synonyms)[key] = v132 in.WantComma() } in.Delim('}') @@ -5276,9 +5662,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out.FilterableAttributes = (out.FilterableAttributes)[:0] } for !in.IsDelim(']') { - var v128 string - v128 = string(in.String()) - out.FilterableAttributes = append(out.FilterableAttributes, v128) + var v134 string + v134 = string(in.String()) + 
out.FilterableAttributes = append(out.FilterableAttributes, v134) in.WantComma() } in.Delim(']') @@ -5299,9 +5685,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out.SortableAttributes = (out.SortableAttributes)[:0] } for !in.IsDelim(']') { - var v129 string - v129 = string(in.String()) - out.SortableAttributes = append(out.SortableAttributes, v129) + var v135 string + v135 = string(in.String()) + out.SortableAttributes = append(out.SortableAttributes, v135) in.WantComma() } in.Delim(']') @@ -5360,9 +5746,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out.Swaps = (out.Swaps)[:0] } for !in.IsDelim(']') { - var v130 SwapIndexesParams - (v130).UnmarshalEasyJSON(in) - out.Swaps = append(out.Swaps, v130) + var v136 SwapIndexesParams + (v136).UnmarshalEasyJSON(in) + out.Swaps = append(out.Swaps, v136) in.WantComma() } in.Delim(']') @@ -5379,7 +5765,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writer, in Details) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(out *jwriter.Writer, in Details) { out.RawByte('{') first := true _ = first @@ -5439,11 +5825,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ } { out.RawByte('[') - for v131, v132 := range in.RankingRules { - if v131 > 0 { + for v137, v138 := range in.RankingRules { + if v137 > 0 { out.RawByte(',') } - out.String(string(v132)) + out.String(string(v138)) } out.RawByte(']') } @@ -5468,11 +5854,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ } { out.RawByte('[') - for v133, v134 := range in.SearchableAttributes { - if v133 > 0 { + for v139, v140 := range in.SearchableAttributes { + if v139 > 0 { out.RawByte(',') } - out.String(string(v134)) + out.String(string(v140)) } out.RawByte(']') } @@ -5487,11 +5873,11 @@ func 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ } { out.RawByte('[') - for v135, v136 := range in.DisplayedAttributes { - if v135 > 0 { + for v141, v142 := range in.DisplayedAttributes { + if v141 > 0 { out.RawByte(',') } - out.String(string(v136)) + out.String(string(v142)) } out.RawByte(']') } @@ -5506,11 +5892,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ } { out.RawByte('[') - for v137, v138 := range in.StopWords { - if v137 > 0 { + for v143, v144 := range in.StopWords { + if v143 > 0 { out.RawByte(',') } - out.String(string(v138)) + out.String(string(v144)) } out.RawByte(']') } @@ -5525,24 +5911,24 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ } { out.RawByte('{') - v139First := true - for v139Name, v139Value := range in.Synonyms { - if v139First { - v139First = false + v145First := true + for v145Name, v145Value := range in.Synonyms { + if v145First { + v145First = false } else { out.RawByte(',') } - out.String(string(v139Name)) + out.String(string(v145Name)) out.RawByte(':') - if v139Value == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + if v145Value == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') - for v140, v141 := range v139Value { - if v140 > 0 { + for v146, v147 := range v145Value { + if v146 > 0 { out.RawByte(',') } - out.String(string(v141)) + out.String(string(v147)) } out.RawByte(']') } @@ -5560,11 +5946,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ } { out.RawByte('[') - for v142, v143 := range in.FilterableAttributes { - if v142 > 0 { + for v148, v149 := range in.FilterableAttributes { + if v148 > 0 { out.RawByte(',') } - out.String(string(v143)) + out.String(string(v149)) } out.RawByte(']') } @@ -5579,11 +5965,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ } { out.RawByte('[') - for v144, v145 := range 
in.SortableAttributes { - if v144 > 0 { + for v150, v151 := range in.SortableAttributes { + if v150 > 0 { out.RawByte(',') } - out.String(string(v145)) + out.String(string(v151)) } out.RawByte(']') } @@ -5668,11 +6054,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ } { out.RawByte('[') - for v146, v147 := range in.Swaps { - if v146 > 0 { + for v152, v153 := range in.Swaps { + if v152 > 0 { out.RawByte(',') } - (v147).MarshalEasyJSON(out) + (v153).MarshalEasyJSON(out) } out.RawByte(']') } @@ -5693,27 +6079,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Details) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Details) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Details) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Details) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, out *DeleteTasksQuery) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(in *jlexer.Lexer, out *DeleteTasksQuery) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -5748,9 
+6134,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, out.UIDS = (out.UIDS)[:0] } for !in.IsDelim(']') { - var v148 int64 - v148 = int64(in.Int64()) - out.UIDS = append(out.UIDS, v148) + var v154 int64 + v154 = int64(in.Int64()) + out.UIDS = append(out.UIDS, v154) in.WantComma() } in.Delim(']') @@ -5771,9 +6157,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, out.IndexUIDS = (out.IndexUIDS)[:0] } for !in.IsDelim(']') { - var v149 string - v149 = string(in.String()) - out.IndexUIDS = append(out.IndexUIDS, v149) + var v155 string + v155 = string(in.String()) + out.IndexUIDS = append(out.IndexUIDS, v155) in.WantComma() } in.Delim(']') @@ -5794,9 +6180,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, out.Statuses = (out.Statuses)[:0] } for !in.IsDelim(']') { - var v150 TaskStatus - v150 = TaskStatus(in.String()) - out.Statuses = append(out.Statuses, v150) + var v156 TaskStatus + v156 = TaskStatus(in.String()) + out.Statuses = append(out.Statuses, v156) in.WantComma() } in.Delim(']') @@ -5817,9 +6203,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, out.Types = (out.Types)[:0] } for !in.IsDelim(']') { - var v151 TaskType - v151 = TaskType(in.String()) - out.Types = append(out.Types, v151) + var v157 TaskType + v157 = TaskType(in.String()) + out.Types = append(out.Types, v157) in.WantComma() } in.Delim(']') @@ -5840,9 +6226,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, out.CanceledBy = (out.CanceledBy)[:0] } for !in.IsDelim(']') { - var v152 int64 - v152 = int64(in.Int64()) - out.CanceledBy = append(out.CanceledBy, v152) + var v158 int64 + v158 = int64(in.Int64()) + out.CanceledBy = append(out.CanceledBy, v158) in.WantComma() } in.Delim(']') @@ -5881,7 +6267,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, in.Consumed() } } -func 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writer, in DeleteTasksQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(out *jwriter.Writer, in DeleteTasksQuery) { out.RawByte('{') first := true _ = first @@ -5892,11 +6278,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v153, v154 := range in.UIDS { - if v153 > 0 { + for v159, v160 := range in.UIDS { + if v159 > 0 { out.RawByte(',') } - out.Int64(int64(v154)) + out.Int64(int64(v160)) } out.RawByte(']') } @@ -5908,11 +6294,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v155, v156 := range in.IndexUIDS { - if v155 > 0 { + for v161, v162 := range in.IndexUIDS { + if v161 > 0 { out.RawByte(',') } - out.String(string(v156)) + out.String(string(v162)) } out.RawByte(']') } @@ -5924,11 +6310,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v157, v158 := range in.Statuses { - if v157 > 0 { + for v163, v164 := range in.Statuses { + if v163 > 0 { out.RawByte(',') } - out.String(string(v158)) + out.String(string(v164)) } out.RawByte(']') } @@ -5940,11 +6326,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v159, v160 := range in.Types { - if v159 > 0 { + for v165, v166 := range in.Types { + if v165 > 0 { out.RawByte(',') } - out.String(string(v160)) + out.String(string(v166)) } out.RawByte(']') } @@ -5956,11 +6342,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v161, v162 := range in.CanceledBy { - if v161 > 0 { + for v167, v168 := range in.CanceledBy { + if v167 > 0 { out.RawByte(',') } - out.Int64(int64(v162)) + 
out.Int64(int64(v168)) } out.RawByte(']') } @@ -6001,27 +6387,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v DeleteTasksQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v DeleteTasksQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *DeleteTasksQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *DeleteTasksQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(in *jlexer.Lexer, out *CsvDocumentsQuery) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(in *jlexer.Lexer, out *CsvDocumentsQuery) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -6054,7 +6440,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(out *jwriter.Writer, in CsvDocumentsQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(out *jwriter.Writer, in CsvDocumentsQuery) { out.RawByte('{') first := true _ = first @@ -6080,27 +6466,27 @@ func 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v CsvDocumentsQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v CsvDocumentsQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *CsvDocumentsQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *CsvDocumentsQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(in *jlexer.Lexer, out *CreateIndexRequest) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(in *jlexer.Lexer, out *CreateIndexRequest) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -6133,7 +6519,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(out *jwriter.Writer, in CreateIndexRequest) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(out *jwriter.Writer, in CreateIndexRequest) { out.RawByte('{') first := true _ = first @@ -6159,27 +6545,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(out *jwriter.Writ // MarshalJSON supports json.Marshaler 
interface func (v CreateIndexRequest) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v CreateIndexRequest) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *CreateIndexRequest) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *CreateIndexRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(in *jlexer.Lexer, out *Client) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo43(in *jlexer.Lexer, out *Client) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -6208,7 +6594,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(out *jwriter.Writer, in Client) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo43(out *jwriter.Writer, in Client) { out.RawByte('{') first := true _ = first @@ -6218,27 +6604,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Client) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(&w, v) + 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo43(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Client) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo43(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Client) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo43(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Client) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo43(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(in *jlexer.Lexer, out *CancelTasksQuery) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo44(in *jlexer.Lexer, out *CancelTasksQuery) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -6273,9 +6659,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(in *jlexer.Lexer, out.UIDS = (out.UIDS)[:0] } for !in.IsDelim(']') { - var v163 int64 - v163 = int64(in.Int64()) - out.UIDS = append(out.UIDS, v163) + var v169 int64 + v169 = int64(in.Int64()) + out.UIDS = append(out.UIDS, v169) in.WantComma() } in.Delim(']') @@ -6296,9 +6682,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(in *jlexer.Lexer, out.IndexUIDS = (out.IndexUIDS)[:0] } for !in.IsDelim(']') { - var v164 string - v164 = string(in.String()) - out.IndexUIDS = append(out.IndexUIDS, v164) + var v170 string + v170 = string(in.String()) + out.IndexUIDS = append(out.IndexUIDS, v170) in.WantComma() } in.Delim(']') @@ -6319,9 +6705,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(in *jlexer.Lexer, 
out.Statuses = (out.Statuses)[:0] } for !in.IsDelim(']') { - var v165 TaskStatus - v165 = TaskStatus(in.String()) - out.Statuses = append(out.Statuses, v165) + var v171 TaskStatus + v171 = TaskStatus(in.String()) + out.Statuses = append(out.Statuses, v171) in.WantComma() } in.Delim(']') @@ -6342,9 +6728,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(in *jlexer.Lexer, out.Types = (out.Types)[:0] } for !in.IsDelim(']') { - var v166 TaskType - v166 = TaskType(in.String()) - out.Types = append(out.Types, v166) + var v172 TaskType + v172 = TaskType(in.String()) + out.Types = append(out.Types, v172) in.WantComma() } in.Delim(']') @@ -6375,7 +6761,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(out *jwriter.Writer, in CancelTasksQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo44(out *jwriter.Writer, in CancelTasksQuery) { out.RawByte('{') first := true _ = first @@ -6386,11 +6772,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v167, v168 := range in.UIDS { - if v167 > 0 { + for v173, v174 := range in.UIDS { + if v173 > 0 { out.RawByte(',') } - out.Int64(int64(v168)) + out.Int64(int64(v174)) } out.RawByte(']') } @@ -6402,11 +6788,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v169, v170 := range in.IndexUIDS { - if v169 > 0 { + for v175, v176 := range in.IndexUIDS { + if v175 > 0 { out.RawByte(',') } - out.String(string(v170)) + out.String(string(v176)) } out.RawByte(']') } @@ -6418,11 +6804,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v171, v172 := range in.Statuses { - if v171 > 0 { + for v177, v178 := range in.Statuses { + if v177 > 
0 { out.RawByte(',') } - out.String(string(v172)) + out.String(string(v178)) } out.RawByte(']') } @@ -6434,11 +6820,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v173, v174 := range in.Types { - if v173 > 0 { + for v179, v180 := range in.Types { + if v179 > 0 { out.RawByte(',') } - out.String(string(v174)) + out.String(string(v180)) } out.RawByte(']') } @@ -6469,23 +6855,23 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v CancelTasksQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo44(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v CancelTasksQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo44(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *CancelTasksQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo44(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *CancelTasksQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo44(l, v) } From 726e14aa900eb1368be19746d315f2e6e71d4f69 Mon Sep 17 00:00:00 2001 From: Javad Date: Thu, 1 Aug 2024 12:40:30 +0330 Subject: [PATCH 20/43] Fix add json tag on search request --- client.go | 11 +- client_test.go | 7 +- index_search.go | 110 ++----------- index_search_test.go | 67 +++++--- types.go | 68 ++++---- 
types_easyjson.go | 371 +++++++++++++++++++++++++++---------------- 6 files changed, 336 insertions(+), 298 deletions(-) diff --git a/client.go b/client.go index 3487e693..4b4a9602 100644 --- a/client.go +++ b/client.go @@ -260,20 +260,15 @@ func (c *Client) CreateDump() (resp *TaskInfo, err error) { func (c *Client) MultiSearch(queries *MultiSearchRequest) (*MultiSearchResponse, error) { resp := &MultiSearchResponse{} - searchPostQueries := make(map[string][]map[string]interface{}, 1) - for i := 0; i < len(queries.Queries); i++ { - if queries.Queries[i].Limit == 0 { - queries.Queries[i].Limit = DefaultLimit - } - searchPostQueries["queries"] = append(searchPostQueries["queries"], searchPostRequestParams(queries.Queries[i].Query, &queries.Queries[i])) + queries.Queries[i].validate() } req := internalRequest{ endpoint: "/multi-search", method: http.MethodPost, contentType: contentTypeJSON, - withRequest: searchPostQueries, + withRequest: queries, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: "MultiSearch", @@ -484,7 +479,7 @@ func (c *Client) WaitForTask(taskUID int64, options ...WaitParams) (*Task, error // ExpiresAt options is a time.Time when the key will expire. Note that if an ExpiresAt value is included it should be in UTC time. // ApiKey options is the API key parent of the token. If you leave it empty the client API Key will be used. 
func (c *Client) GenerateTenantToken(APIKeyUID string, SearchRules map[string]interface{}, Options *TenantTokenOptions) (resp string, err error) { - // Validate the arguments + // validate the arguments if SearchRules == nil { return "", fmt.Errorf("GenerateTenantToken: The search rules added in the token generation must be of type array or object") } diff --git a/client_test.go b/client_test.go index 1f7eb27e..3d6451da 100644 --- a/client_test.go +++ b/client_test.go @@ -1902,7 +1902,7 @@ func TestClient_MultiSearch(t *testing.T) { args: args{ client: defaultClient, queries: &MultiSearchRequest{ - []SearchRequest{ + []*SearchRequest{ { IndexUID: "TestClientMultiSearchOneIndex", Query: "wonder", @@ -1934,7 +1934,7 @@ func TestClient_MultiSearch(t *testing.T) { args: args{ client: defaultClient, queries: &MultiSearchRequest{ - []SearchRequest{ + []*SearchRequest{ { IndexUID: "TestClientMultiSearchOnTwoIndexes1", Query: "wonder", @@ -1987,7 +1987,7 @@ func TestClient_MultiSearch(t *testing.T) { args: args{ client: defaultClient, queries: &MultiSearchRequest{ - []SearchRequest{ + []*SearchRequest{ { Query: "", }, @@ -2011,6 +2011,7 @@ func TestClient_MultiSearch(t *testing.T) { if tt.wantErr { require.Error(t, err) } else { + require.NotNil(t, got) for i := 0; i < len(tt.want.Results); i++ { if !reflect.DeepEqual(got.Results[i].Hits, tt.want.Results[i].Hits) { t.Errorf("Client.MultiSearch() = %v, want %v", got.Results[i].Hits, tt.want.Results[i].Hits) diff --git a/index_search.go b/index_search.go index af1e45ab..4f3f3099 100644 --- a/index_search.go +++ b/index_search.go @@ -20,11 +20,15 @@ func (i Index) SearchRaw(query string, request *SearchRequest) (*json.RawMessage return nil, ErrNoSearchRequest } - if request.Limit == 0 { - request.Limit = DefaultLimit + if query != "" { + request.Query = query } - searchPostRequestParams := searchPostRequestParams(query, request) + if request.IndexUID != "" { + request.IndexUID = "" + } + + request.validate() resp := 
&json.RawMessage{} @@ -32,7 +36,7 @@ func (i Index) SearchRaw(query string, request *SearchRequest) (*json.RawMessage endpoint: "/indexes/" + i.UID + "/search", method: http.MethodPost, contentType: contentTypeJSON, - withRequest: searchPostRequestParams, + withRequest: request, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: "SearchRaw", @@ -50,14 +54,15 @@ func (i Index) Search(query string, request *SearchRequest) (*SearchResponse, er return nil, ErrNoSearchRequest } - if request.Limit == 0 { - request.Limit = DefaultLimit + if query != "" { + request.Query = query } + if request.IndexUID != "" { request.IndexUID = "" } - searchPostRequestParams := searchPostRequestParams(query, request) + request.validate() resp := &SearchResponse{} @@ -65,7 +70,7 @@ func (i Index) Search(query string, request *SearchRequest) (*SearchResponse, er endpoint: "/indexes/" + i.UID + "/search", method: http.MethodPost, contentType: contentTypeJSON, - withRequest: searchPostRequestParams, + withRequest: request, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: "Search", @@ -77,92 +82,3 @@ func (i Index) Search(query string, request *SearchRequest) (*SearchResponse, er return resp, nil } - -func searchPostRequestParams(query string, request *SearchRequest) map[string]interface{} { - params := make(map[string]interface{}, 22) - - if !request.PlaceholderSearch { - params["q"] = query - } - if request.Distinct != "" { - params["distinct"] = request.Distinct - } - if request.IndexUID != "" { - params["indexUid"] = request.IndexUID - } - if request.Limit != DefaultLimit { - params["limit"] = request.Limit - } - if request.ShowMatchesPosition { - params["showMatchesPosition"] = request.ShowMatchesPosition - } - if request.ShowRankingScore { - params["showRankingScore"] = request.ShowRankingScore - } - if request.ShowRankingScoreDetails { - params["showRankingScoreDetails"] = request.ShowRankingScoreDetails - } - if request.Filter != 
nil { - params["filter"] = request.Filter - } - if request.Offset != 0 { - params["offset"] = request.Offset - } - if request.CropLength != 0 { - params["cropLength"] = request.CropLength - } - if request.HitsPerPage != 0 { - params["hitsPerPage"] = request.HitsPerPage - } - if request.Page != 0 { - params["page"] = request.Page - } - if request.CropMarker != "" { - params["cropMarker"] = request.CropMarker - } - if request.HighlightPreTag != "" { - params["highlightPreTag"] = request.HighlightPreTag - } - if request.HighlightPostTag != "" { - params["highlightPostTag"] = request.HighlightPostTag - } - if request.MatchingStrategy != "" { - params["matchingStrategy"] = request.MatchingStrategy - } - if len(request.AttributesToRetrieve) != 0 { - params["attributesToRetrieve"] = request.AttributesToRetrieve - } - if len(request.AttributesToSearchOn) != 0 { - params["attributesToSearchOn"] = request.AttributesToSearchOn - } - if len(request.AttributesToCrop) != 0 { - params["attributesToCrop"] = request.AttributesToCrop - } - if len(request.AttributesToHighlight) != 0 { - params["attributesToHighlight"] = request.AttributesToHighlight - } - if len(request.Facets) != 0 { - params["facets"] = request.Facets - } - if len(request.Sort) != 0 { - params["sort"] = request.Sort - } - if request.Vector != nil && len(request.Vector) > 0 { - params["vector"] = request.Vector - } - if request.Hybrid != nil { - hybrid := make(map[string]interface{}, 2) - hybrid["embedder"] = request.Hybrid.Embedder - hybrid["semanticRatio"] = request.Hybrid.SemanticRatio - params["hybrid"] = hybrid - } - if request.RetrieveVectors { - params["retrieveVectors"] = request.RetrieveVectors - } - - if request.RankingScoreThreshold != 0 { - params["rankingScoreThreshold"] = request.RankingScoreThreshold - } - - return params -} diff --git a/index_search_test.go b/index_search_test.go index 077a12db..a7d0ea69 100644 --- a/index_search_test.go +++ b/index_search_test.go @@ -182,8 +182,7 @@ func 
TestIndex_Search(t *testing.T) { UID: "indexUID", client: defaultClient, request: &SearchRequest{ - PlaceholderSearch: true, - Limit: 1, + Limit: 1, }, }, want: &SearchResponse{ @@ -1656,40 +1655,60 @@ func TestIndex_SearchWithShowRankingScoreDetails(t *testing.T) { } func TestIndex_SearchWithVectorStore(t *testing.T) { - type args struct { + tests := []struct { + name string UID string PrimaryKey string client *Client query string request SearchRequest - } - testArg := args{ - UID: "indexUID", - client: defaultClient, - query: "Pride and Prejudice", - request: SearchRequest{ - Hybrid: &SearchRequestHybrid{ - SemanticRatio: 0.5, - Embedder: "default", + }{ + { + name: "basic hybrid test", + UID: "indexUID", + client: defaultClient, + query: "Pride and Prejudice", + request: SearchRequest{ + Hybrid: &SearchRequestHybrid{ + SemanticRatio: 0.5, + Embedder: "default", + }, + RetrieveVectors: true, + }, + }, + { + name: "empty Embedder", + UID: "indexUID", + client: defaultClient, + query: "Pride and Prejudice", + request: SearchRequest{ + Hybrid: &SearchRequestHybrid{ + SemanticRatio: 0.5, + Embedder: "", + }, + RetrieveVectors: true, }, - RetrieveVectors: true, }, } - i, err := SetUpIndexWithVector(testArg.UID) - if err != nil { - t.Fatal(err) - } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + i, err := SetUpIndexWithVector(tt.UID) + if err != nil { + t.Fatal(err) + } - c := testArg.client - t.Cleanup(cleanup(c)) + c := tt.client + t.Cleanup(cleanup(c)) - got, err := i.Search(testArg.query, &testArg.request) - require.NoError(t, err) + got, err := i.Search(tt.query, &tt.request) + require.NoError(t, err) - for _, hit := range got.Hits { - hit := hit.(map[string]interface{}) - require.NotNil(t, hit["_vectors"]) + for _, hit := range got.Hits { + hit := hit.(map[string]interface{}) + require.NotNil(t, hit["_vectors"]) + } + }) } } diff --git a/types.go b/types.go index 7c15d39e..78195344 100644 --- a/types.go +++ b/types.go @@ -338,42 +338,41 @@ 
type CreateIndexRequest struct { // // Documentation: https://www.meilisearch.com/docs/reference/api/search#search-parameters type SearchRequest struct { - Offset int64 - Limit int64 - AttributesToRetrieve []string - AttributesToSearchOn []string - AttributesToCrop []string - CropLength int64 - CropMarker string - AttributesToHighlight []string - HighlightPreTag string - HighlightPostTag string - MatchingStrategy string - Filter interface{} - ShowMatchesPosition bool - ShowRankingScore bool - ShowRankingScoreDetails bool - Facets []string - PlaceholderSearch bool - Sort []string - Vector []float32 - HitsPerPage int64 - Page int64 - IndexUID string - Query string - Distinct string - Hybrid *SearchRequestHybrid - RetrieveVectors bool - RankingScoreThreshold float64 + Offset int64 `json:"offset,omitempty"` + Limit int64 `json:"limit,omitempty"` + AttributesToRetrieve []string `json:"attributesToRetrieve,omitempty"` + AttributesToSearchOn []string `json:"attributesToSearchOn,omitempty"` + AttributesToCrop []string `json:"attributesToCrop,omitempty"` + CropLength int64 `json:"cropLength,omitempty"` + CropMarker string `json:"cropMarker,omitempty"` + AttributesToHighlight []string `json:"attributesToHighlight,omitempty"` + HighlightPreTag string `json:"highlightPreTag,omitempty"` + HighlightPostTag string `json:"highlightPostTag,omitempty"` + MatchingStrategy string `json:"matchingStrategy,omitempty"` + Filter interface{} `json:"filter,omitempty"` + ShowMatchesPosition bool `json:"showMatchesPosition,omitempty"` + ShowRankingScore bool `json:"showRankingScore,omitempty"` + ShowRankingScoreDetails bool `json:"showRankingScoreDetails,omitempty"` + Facets []string `json:"facets,omitempty"` + Sort []string `json:"sort,omitempty"` + Vector []float32 `json:"vector,omitempty"` + HitsPerPage int64 `json:"hitsPerPage,omitempty"` + Page int64 `json:"page,omitempty"` + IndexUID string `json:"indexUid,omitempty"` + Query string `json:"q"` + Distinct string 
`json:"distinct,omitempty"` + Hybrid *SearchRequestHybrid `json:"hybrid,omitempty"` + RetrieveVectors bool `json:"retrieveVectors,omitempty"` + RankingScoreThreshold float64 `json:"rankingScoreThreshold,omitempty"` } type SearchRequestHybrid struct { - SemanticRatio float64 - Embedder string + SemanticRatio float64 `json:"semanticRatio,omitempty"` + Embedder string `json:"embedder,omitempty"` } type MultiSearchRequest struct { - Queries []SearchRequest `json:"queries"` + Queries []*SearchRequest `json:"queries"` } // SearchResponse is the response body for search method @@ -467,3 +466,12 @@ func (b *RawType) UnmarshalJSON(data []byte) error { func (b RawType) MarshalJSON() ([]byte, error) { return b, nil } + +func (s *SearchRequest) validate() { + if s.Limit == 0 { + s.Limit = DefaultLimit + } + if s.Hybrid != nil && s.Hybrid.Embedder == "" { + s.Hybrid.Embedder = "default" + } +} diff --git a/types_easyjson.go b/types_easyjson.go index fc223f77..6722f496 100644 --- a/types_easyjson.go +++ b/types_easyjson.go @@ -2384,9 +2384,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(in *jlexer.Lexer, continue } switch key { - case "SemanticRatio": + case "semanticRatio": out.SemanticRatio = float64(in.Float64()) - case "Embedder": + case "embedder": out.Embedder = string(in.String()) default: in.SkipRecursive() @@ -2402,14 +2402,20 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(out *jwriter.Writ out.RawByte('{') first := true _ = first - { - const prefix string = ",\"SemanticRatio\":" + if in.SemanticRatio != 0 { + const prefix string = ",\"semanticRatio\":" + first = false out.RawString(prefix[1:]) out.Float64(float64(in.SemanticRatio)) } - { - const prefix string = ",\"Embedder\":" - out.RawString(prefix) + if in.Embedder != "" { + const prefix string = ",\"embedder\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } out.String(string(in.Embedder)) } out.RawByte('}') @@ -2457,11 +2463,11 
@@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, continue } switch key { - case "Offset": + case "offset": out.Offset = int64(in.Int64()) - case "Limit": + case "limit": out.Limit = int64(in.Int64()) - case "AttributesToRetrieve": + case "attributesToRetrieve": if in.IsNull() { in.Skip() out.AttributesToRetrieve = nil @@ -2484,7 +2490,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, } in.Delim(']') } - case "AttributesToSearchOn": + case "attributesToSearchOn": if in.IsNull() { in.Skip() out.AttributesToSearchOn = nil @@ -2507,7 +2513,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, } in.Delim(']') } - case "AttributesToCrop": + case "attributesToCrop": if in.IsNull() { in.Skip() out.AttributesToCrop = nil @@ -2530,11 +2536,11 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, } in.Delim(']') } - case "CropLength": + case "cropLength": out.CropLength = int64(in.Int64()) - case "CropMarker": + case "cropMarker": out.CropMarker = string(in.String()) - case "AttributesToHighlight": + case "attributesToHighlight": if in.IsNull() { in.Skip() out.AttributesToHighlight = nil @@ -2557,13 +2563,13 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, } in.Delim(']') } - case "HighlightPreTag": + case "highlightPreTag": out.HighlightPreTag = string(in.String()) - case "HighlightPostTag": + case "highlightPostTag": out.HighlightPostTag = string(in.String()) - case "MatchingStrategy": + case "matchingStrategy": out.MatchingStrategy = string(in.String()) - case "Filter": + case "filter": if m, ok := out.Filter.(easyjson.Unmarshaler); ok { m.UnmarshalEasyJSON(in) } else if m, ok := out.Filter.(json.Unmarshaler); ok { @@ -2571,13 +2577,13 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, } else { out.Filter = in.Interface() } - case "ShowMatchesPosition": + case 
"showMatchesPosition": out.ShowMatchesPosition = bool(in.Bool()) - case "ShowRankingScore": + case "showRankingScore": out.ShowRankingScore = bool(in.Bool()) - case "ShowRankingScoreDetails": + case "showRankingScoreDetails": out.ShowRankingScoreDetails = bool(in.Bool()) - case "Facets": + case "facets": if in.IsNull() { in.Skip() out.Facets = nil @@ -2600,9 +2606,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, } in.Delim(']') } - case "PlaceholderSearch": - out.PlaceholderSearch = bool(in.Bool()) - case "Sort": + case "sort": if in.IsNull() { in.Skip() out.Sort = nil @@ -2625,7 +2629,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, } in.Delim(']') } - case "Vector": + case "vector": if in.IsNull() { in.Skip() out.Vector = nil @@ -2648,17 +2652,17 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, } in.Delim(']') } - case "HitsPerPage": + case "hitsPerPage": out.HitsPerPage = int64(in.Int64()) - case "Page": + case "page": out.Page = int64(in.Int64()) - case "IndexUID": + case "indexUid": out.IndexUID = string(in.String()) - case "Query": + case "q": out.Query = string(in.String()) - case "Distinct": + case "distinct": out.Distinct = string(in.String()) - case "Hybrid": + case "hybrid": if in.IsNull() { in.Skip() out.Hybrid = nil @@ -2668,9 +2672,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, } (*out.Hybrid).UnmarshalEasyJSON(in) } - case "RetrieveVectors": + case "retrieveVectors": out.RetrieveVectors = bool(in.Bool()) - case "RankingScoreThreshold": + case "rankingScoreThreshold": out.RankingScoreThreshold = float64(in.Float64()) default: in.SkipRecursive() @@ -2686,22 +2690,31 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ out.RawByte('{') first := true _ = first - { - const prefix string = ",\"Offset\":" + if in.Offset != 0 { + const prefix string = ",\"offset\":" + first = false 
out.RawString(prefix[1:]) out.Int64(int64(in.Offset)) } - { - const prefix string = ",\"Limit\":" - out.RawString(prefix) + if in.Limit != 0 { + const prefix string = ",\"limit\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } out.Int64(int64(in.Limit)) } - { - const prefix string = ",\"AttributesToRetrieve\":" - out.RawString(prefix) - if in.AttributesToRetrieve == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") + if len(in.AttributesToRetrieve) != 0 { + const prefix string = ",\"attributesToRetrieve\":" + if first { + first = false + out.RawString(prefix[1:]) } else { + out.RawString(prefix) + } + { out.RawByte('[') for v67, v68 := range in.AttributesToRetrieve { if v67 > 0 { @@ -2712,12 +2725,15 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ out.RawByte(']') } } - { - const prefix string = ",\"AttributesToSearchOn\":" - out.RawString(prefix) - if in.AttributesToSearchOn == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") + if len(in.AttributesToSearchOn) != 0 { + const prefix string = ",\"attributesToSearchOn\":" + if first { + first = false + out.RawString(prefix[1:]) } else { + out.RawString(prefix) + } + { out.RawByte('[') for v69, v70 := range in.AttributesToSearchOn { if v69 > 0 { @@ -2728,12 +2744,15 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ out.RawByte(']') } } - { - const prefix string = ",\"AttributesToCrop\":" - out.RawString(prefix) - if in.AttributesToCrop == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") + if len(in.AttributesToCrop) != 0 { + const prefix string = ",\"attributesToCrop\":" + if first { + first = false + out.RawString(prefix[1:]) } else { + out.RawString(prefix) + } + { out.RawByte('[') for v71, v72 := range in.AttributesToCrop { if v71 > 0 { @@ -2744,22 +2763,35 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out 
*jwriter.Writ out.RawByte(']') } } - { - const prefix string = ",\"CropLength\":" - out.RawString(prefix) + if in.CropLength != 0 { + const prefix string = ",\"cropLength\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } out.Int64(int64(in.CropLength)) } - { - const prefix string = ",\"CropMarker\":" - out.RawString(prefix) + if in.CropMarker != "" { + const prefix string = ",\"cropMarker\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } out.String(string(in.CropMarker)) } - { - const prefix string = ",\"AttributesToHighlight\":" - out.RawString(prefix) - if in.AttributesToHighlight == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") + if len(in.AttributesToHighlight) != 0 { + const prefix string = ",\"attributesToHighlight\":" + if first { + first = false + out.RawString(prefix[1:]) } else { + out.RawString(prefix) + } + { out.RawByte('[') for v73, v74 := range in.AttributesToHighlight { if v73 > 0 { @@ -2770,24 +2802,44 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ out.RawByte(']') } } - { - const prefix string = ",\"HighlightPreTag\":" - out.RawString(prefix) + if in.HighlightPreTag != "" { + const prefix string = ",\"highlightPreTag\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } out.String(string(in.HighlightPreTag)) } - { - const prefix string = ",\"HighlightPostTag\":" - out.RawString(prefix) + if in.HighlightPostTag != "" { + const prefix string = ",\"highlightPostTag\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } out.String(string(in.HighlightPostTag)) } - { - const prefix string = ",\"MatchingStrategy\":" - out.RawString(prefix) + if in.MatchingStrategy != "" { + const prefix string = ",\"matchingStrategy\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + 
out.RawString(prefix) + } out.String(string(in.MatchingStrategy)) } - { - const prefix string = ",\"Filter\":" - out.RawString(prefix) + if in.Filter != nil { + const prefix string = ",\"filter\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } if m, ok := in.Filter.(easyjson.Marshaler); ok { m.MarshalEasyJSON(out) } else if m, ok := in.Filter.(json.Marshaler); ok { @@ -2796,27 +2848,45 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ out.Raw(json.Marshal(in.Filter)) } } - { - const prefix string = ",\"ShowMatchesPosition\":" - out.RawString(prefix) + if in.ShowMatchesPosition { + const prefix string = ",\"showMatchesPosition\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } out.Bool(bool(in.ShowMatchesPosition)) } - { - const prefix string = ",\"ShowRankingScore\":" - out.RawString(prefix) + if in.ShowRankingScore { + const prefix string = ",\"showRankingScore\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } out.Bool(bool(in.ShowRankingScore)) } - { - const prefix string = ",\"ShowRankingScoreDetails\":" - out.RawString(prefix) + if in.ShowRankingScoreDetails { + const prefix string = ",\"showRankingScoreDetails\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } out.Bool(bool(in.ShowRankingScoreDetails)) } - { - const prefix string = ",\"Facets\":" - out.RawString(prefix) - if in.Facets == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") + if len(in.Facets) != 0 { + const prefix string = ",\"facets\":" + if first { + first = false + out.RawString(prefix[1:]) } else { + out.RawString(prefix) + } + { out.RawByte('[') for v75, v76 := range in.Facets { if v75 > 0 { @@ -2827,17 +2897,15 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ out.RawByte(']') } } - { - const prefix string 
= ",\"PlaceholderSearch\":" - out.RawString(prefix) - out.Bool(bool(in.PlaceholderSearch)) - } - { - const prefix string = ",\"Sort\":" - out.RawString(prefix) - if in.Sort == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") + if len(in.Sort) != 0 { + const prefix string = ",\"sort\":" + if first { + first = false + out.RawString(prefix[1:]) } else { + out.RawString(prefix) + } + { out.RawByte('[') for v77, v78 := range in.Sort { if v77 > 0 { @@ -2848,12 +2916,15 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ out.RawByte(']') } } - { - const prefix string = ",\"Vector\":" - out.RawString(prefix) - if in.Vector == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") + if len(in.Vector) != 0 { + const prefix string = ",\"vector\":" + if first { + first = false + out.RawString(prefix[1:]) } else { + out.RawString(prefix) + } + { out.RawByte('[') for v79, v80 := range in.Vector { if v79 > 0 { @@ -2864,47 +2935,63 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ out.RawByte(']') } } - { - const prefix string = ",\"HitsPerPage\":" - out.RawString(prefix) + if in.HitsPerPage != 0 { + const prefix string = ",\"hitsPerPage\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } out.Int64(int64(in.HitsPerPage)) } - { - const prefix string = ",\"Page\":" - out.RawString(prefix) + if in.Page != 0 { + const prefix string = ",\"page\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } out.Int64(int64(in.Page)) } - { - const prefix string = ",\"IndexUID\":" - out.RawString(prefix) + if in.IndexUID != "" { + const prefix string = ",\"indexUid\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } out.String(string(in.IndexUID)) } { - const prefix string = ",\"Query\":" - out.RawString(prefix) + const prefix string = ",\"q\":" + if 
first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } out.String(string(in.Query)) } - { - const prefix string = ",\"Distinct\":" + if in.Distinct != "" { + const prefix string = ",\"distinct\":" out.RawString(prefix) out.String(string(in.Distinct)) } - { - const prefix string = ",\"Hybrid\":" + if in.Hybrid != nil { + const prefix string = ",\"hybrid\":" out.RawString(prefix) - if in.Hybrid == nil { - out.RawString("null") - } else { - (*in.Hybrid).MarshalEasyJSON(out) - } + (*in.Hybrid).MarshalEasyJSON(out) } - { - const prefix string = ",\"RetrieveVectors\":" + if in.RetrieveVectors { + const prefix string = ",\"retrieveVectors\":" out.RawString(prefix) out.Bool(bool(in.RetrieveVectors)) } - { - const prefix string = ",\"RankingScoreThreshold\":" + if in.RankingScoreThreshold != 0 { + const prefix string = ",\"rankingScoreThreshold\":" out.RawString(prefix) out.Float64(float64(in.RankingScoreThreshold)) } @@ -3125,16 +3212,24 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(in *jlexer.Lexer, in.Delim('[') if out.Queries == nil { if !in.IsDelim(']') { - out.Queries = make([]SearchRequest, 0, 0) + out.Queries = make([]*SearchRequest, 0, 8) } else { - out.Queries = []SearchRequest{} + out.Queries = []*SearchRequest{} } } else { out.Queries = (out.Queries)[:0] } for !in.IsDelim(']') { - var v84 SearchRequest - (v84).UnmarshalEasyJSON(in) + var v84 *SearchRequest + if in.IsNull() { + in.Skip() + v84 = nil + } else { + if v84 == nil { + v84 = new(SearchRequest) + } + (*v84).UnmarshalEasyJSON(in) + } out.Queries = append(out.Queries, v84) in.WantComma() } @@ -3165,7 +3260,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(out *jwriter.Writ if v85 > 0 { out.RawByte(',') } - (v86).MarshalEasyJSON(out) + if v86 == nil { + out.RawString("null") + } else { + (*v86).MarshalEasyJSON(out) + } } out.RawByte(']') } From 52de46a1986079364bc79df144a08609cf1341ce Mon Sep 17 00:00:00 2001 From: Javad Date: Thu, 
1 Aug 2024 17:46:14 +0330 Subject: [PATCH 21/43] fix: conflicts on new changes --- index_search.go | 17 + types_easyjson.go | 1406 +++++++++++++++++++++++++++++---------------- 2 files changed, 913 insertions(+), 510 deletions(-) diff --git a/index_search.go b/index_search.go index 4f3f3099..091d97d3 100644 --- a/index_search.go +++ b/index_search.go @@ -82,3 +82,20 @@ func (i Index) Search(query string, request *SearchRequest) (*SearchResponse, er return resp, nil } + +func (i Index) SearchSimilarDocuments(param *SimilarDocumentQuery, resp *SimilarDocumentResult) error { + req := internalRequest{ + endpoint: "/indexes/" + i.UID + "/similar", + method: http.MethodPost, + withRequest: param, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "SearchSimilarDocuments", + contentType: contentTypeJSON, + } + + if err := i.client.executeRequest(req); err != nil { + return err + } + return nil +} diff --git a/types_easyjson.go b/types_easyjson.go index 6722f496..af3c37b2 100644 --- a/types_easyjson.go +++ b/types_easyjson.go @@ -1628,7 +1628,393 @@ func (v *Stats) UnmarshalJSON(data []byte) error { func (v *Stats) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo12(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, out *Settings) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, out *SimilarDocumentResult) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "hits": + if in.IsNull() { + in.Skip() + out.Hits = nil + } else { + in.Delim('[') + if out.Hits == nil { + if !in.IsDelim(']') { + out.Hits = make([]interface{}, 0, 4) + } else { + out.Hits = []interface{}{} + } + } else { + out.Hits = 
(out.Hits)[:0] + } + for !in.IsDelim(']') { + var v32 interface{} + if m, ok := v32.(easyjson.Unmarshaler); ok { + m.UnmarshalEasyJSON(in) + } else if m, ok := v32.(json.Unmarshaler); ok { + _ = m.UnmarshalJSON(in.Raw()) + } else { + v32 = in.Interface() + } + out.Hits = append(out.Hits, v32) + in.WantComma() + } + in.Delim(']') + } + case "id": + out.ID = string(in.String()) + case "processingTimeMs": + out.ProcessingTimeMS = int64(in.Int64()) + case "limit": + out.Limit = int64(in.Int64()) + case "offset": + out.Offset = int64(in.Int64()) + case "estimatedTotalHits": + out.EstimatedTotalHits = int64(in.Int64()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writer, in SimilarDocumentResult) { + out.RawByte('{') + first := true + _ = first + if len(in.Hits) != 0 { + const prefix string = ",\"hits\":" + first = false + out.RawString(prefix[1:]) + { + out.RawByte('[') + for v33, v34 := range in.Hits { + if v33 > 0 { + out.RawByte(',') + } + if m, ok := v34.(easyjson.Marshaler); ok { + m.MarshalEasyJSON(out) + } else if m, ok := v34.(json.Marshaler); ok { + out.Raw(m.MarshalJSON()) + } else { + out.Raw(json.Marshal(v34)) + } + } + out.RawByte(']') + } + } + if in.ID != "" { + const prefix string = ",\"id\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.ID)) + } + if in.ProcessingTimeMS != 0 { + const prefix string = ",\"processingTimeMs\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.ProcessingTimeMS)) + } + if in.Limit != 0 { + const prefix string = ",\"limit\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.Limit)) + } + if in.Offset != 0 { + const prefix string = ",\"offset\":" + if first { + first = false + 
out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.Offset)) + } + if in.EstimatedTotalHits != 0 { + const prefix string = ",\"estimatedTotalHits\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.EstimatedTotalHits)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v SimilarDocumentResult) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v SimilarDocumentResult) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *SimilarDocumentResult) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *SimilarDocumentResult) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(in *jlexer.Lexer, out *SimilarDocumentQuery) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "id": + if m, ok := out.Id.(easyjson.Unmarshaler); ok { + m.UnmarshalEasyJSON(in) + } else if m, ok := out.Id.(json.Unmarshaler); ok { + _ = m.UnmarshalJSON(in.Raw()) + } else { + out.Id = in.Interface() + } + case "embedder": + out.Embedder = string(in.String()) + case "attributesToRetrieve": + if in.IsNull() { + in.Skip() + out.AttributesToRetrieve = 
nil + } else { + in.Delim('[') + if out.AttributesToRetrieve == nil { + if !in.IsDelim(']') { + out.AttributesToRetrieve = make([]string, 0, 4) + } else { + out.AttributesToRetrieve = []string{} + } + } else { + out.AttributesToRetrieve = (out.AttributesToRetrieve)[:0] + } + for !in.IsDelim(']') { + var v35 string + v35 = string(in.String()) + out.AttributesToRetrieve = append(out.AttributesToRetrieve, v35) + in.WantComma() + } + in.Delim(']') + } + case "offset": + out.Offset = int64(in.Int64()) + case "limit": + out.Limit = int64(in.Int64()) + case "filter": + out.Filter = string(in.String()) + case "showRankingScore": + out.ShowRankingScore = bool(in.Bool()) + case "showRankingScoreDetails": + out.ShowRankingScoreDetails = bool(in.Bool()) + case "rankingScoreThreshold": + out.RankingScoreThreshold = float64(in.Float64()) + case "retrieveVectors": + out.RetrieveVectors = bool(in.Bool()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(out *jwriter.Writer, in SimilarDocumentQuery) { + out.RawByte('{') + first := true + _ = first + if in.Id != nil { + const prefix string = ",\"id\":" + first = false + out.RawString(prefix[1:]) + if m, ok := in.Id.(easyjson.Marshaler); ok { + m.MarshalEasyJSON(out) + } else if m, ok := in.Id.(json.Marshaler); ok { + out.Raw(m.MarshalJSON()) + } else { + out.Raw(json.Marshal(in.Id)) + } + } + if in.Embedder != "" { + const prefix string = ",\"embedder\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.Embedder)) + } + if len(in.AttributesToRetrieve) != 0 { + const prefix string = ",\"attributesToRetrieve\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v36, v37 := range in.AttributesToRetrieve { + if v36 > 0 { + out.RawByte(',') + } + out.String(string(v37)) 
+ } + out.RawByte(']') + } + } + if in.Offset != 0 { + const prefix string = ",\"offset\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.Offset)) + } + if in.Limit != 0 { + const prefix string = ",\"limit\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.Limit)) + } + if in.Filter != "" { + const prefix string = ",\"filter\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.Filter)) + } + if in.ShowRankingScore { + const prefix string = ",\"showRankingScore\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Bool(bool(in.ShowRankingScore)) + } + if in.ShowRankingScoreDetails { + const prefix string = ",\"showRankingScoreDetails\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Bool(bool(in.ShowRankingScoreDetails)) + } + if in.RankingScoreThreshold != 0 { + const prefix string = ",\"rankingScoreThreshold\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Float64(float64(in.RankingScoreThreshold)) + } + if in.RetrieveVectors { + const prefix string = ",\"retrieveVectors\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Bool(bool(in.RetrieveVectors)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v SimilarDocumentQuery) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v SimilarDocumentQuery) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(w, v) +} + +// UnmarshalJSON 
supports json.Unmarshaler interface +func (v *SimilarDocumentQuery) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *SimilarDocumentQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(in *jlexer.Lexer, out *Settings) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -1663,9 +2049,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, out.RankingRules = (out.RankingRules)[:0] } for !in.IsDelim(']') { - var v32 string - v32 = string(in.String()) - out.RankingRules = append(out.RankingRules, v32) + var v38 string + v38 = string(in.String()) + out.RankingRules = append(out.RankingRules, v38) in.WantComma() } in.Delim(']') @@ -1696,9 +2082,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, out.SearchableAttributes = (out.SearchableAttributes)[:0] } for !in.IsDelim(']') { - var v33 string - v33 = string(in.String()) - out.SearchableAttributes = append(out.SearchableAttributes, v33) + var v39 string + v39 = string(in.String()) + out.SearchableAttributes = append(out.SearchableAttributes, v39) in.WantComma() } in.Delim(']') @@ -1721,9 +2107,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, out.DisplayedAttributes = (out.DisplayedAttributes)[:0] } for !in.IsDelim(']') { - var v34 string - v34 = string(in.String()) - out.DisplayedAttributes = append(out.DisplayedAttributes, v34) + var v40 string + v40 = string(in.String()) + out.DisplayedAttributes = append(out.DisplayedAttributes, v40) in.WantComma() } in.Delim(']') @@ -1744,9 +2130,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, out.StopWords = (out.StopWords)[:0] } for 
!in.IsDelim(']') { - var v35 string - v35 = string(in.String()) - out.StopWords = append(out.StopWords, v35) + var v41 string + v41 = string(in.String()) + out.StopWords = append(out.StopWords, v41) in.WantComma() } in.Delim(']') @@ -1764,30 +2150,30 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, for !in.IsDelim('}') { key := string(in.String()) in.WantColon() - var v36 []string + var v42 []string if in.IsNull() { in.Skip() - v36 = nil + v42 = nil } else { in.Delim('[') - if v36 == nil { + if v42 == nil { if !in.IsDelim(']') { - v36 = make([]string, 0, 4) + v42 = make([]string, 0, 4) } else { - v36 = []string{} + v42 = []string{} } } else { - v36 = (v36)[:0] + v42 = (v42)[:0] } for !in.IsDelim(']') { - var v37 string - v37 = string(in.String()) - v36 = append(v36, v37) + var v43 string + v43 = string(in.String()) + v42 = append(v42, v43) in.WantComma() } in.Delim(']') } - (out.Synonyms)[key] = v36 + (out.Synonyms)[key] = v42 in.WantComma() } in.Delim('}') @@ -1808,9 +2194,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, out.FilterableAttributes = (out.FilterableAttributes)[:0] } for !in.IsDelim(']') { - var v38 string - v38 = string(in.String()) - out.FilterableAttributes = append(out.FilterableAttributes, v38) + var v44 string + v44 = string(in.String()) + out.FilterableAttributes = append(out.FilterableAttributes, v44) in.WantComma() } in.Delim(']') @@ -1831,9 +2217,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, out.SortableAttributes = (out.SortableAttributes)[:0] } for !in.IsDelim(']') { - var v39 string - v39 = string(in.String()) - out.SortableAttributes = append(out.SortableAttributes, v39) + var v45 string + v45 = string(in.String()) + out.SortableAttributes = append(out.SortableAttributes, v45) in.WantComma() } in.Delim(']') @@ -1881,9 +2267,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, for 
!in.IsDelim('}') { key := string(in.String()) in.WantColon() - var v40 Embedder - (v40).UnmarshalEasyJSON(in) - (out.Embedders)[key] = v40 + var v46 Embedder + (v46).UnmarshalEasyJSON(in) + (out.Embedders)[key] = v46 in.WantComma() } in.Delim('}') @@ -1898,7 +2284,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writer, in Settings) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(out *jwriter.Writer, in Settings) { out.RawByte('{') first := true _ = first @@ -1908,11 +2294,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ out.RawString(prefix[1:]) { out.RawByte('[') - for v41, v42 := range in.RankingRules { - if v41 > 0 { + for v47, v48 := range in.RankingRules { + if v47 > 0 { out.RawByte(',') } - out.String(string(v42)) + out.String(string(v48)) } out.RawByte(']') } @@ -1937,11 +2323,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ } { out.RawByte('[') - for v43, v44 := range in.SearchableAttributes { - if v43 > 0 { + for v49, v50 := range in.SearchableAttributes { + if v49 > 0 { out.RawByte(',') } - out.String(string(v44)) + out.String(string(v50)) } out.RawByte(']') } @@ -1966,11 +2352,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ } { out.RawByte('[') - for v45, v46 := range in.DisplayedAttributes { - if v45 > 0 { + for v51, v52 := range in.DisplayedAttributes { + if v51 > 0 { out.RawByte(',') } - out.String(string(v46)) + out.String(string(v52)) } out.RawByte(']') } @@ -1985,11 +2371,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ } { out.RawByte('[') - for v47, v48 := range in.StopWords { - if v47 > 0 { + for v53, v54 := range in.StopWords { + if v53 > 0 { out.RawByte(',') } - out.String(string(v48)) + out.String(string(v54)) } out.RawByte(']') } @@ -2004,24 
+2390,24 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ } { out.RawByte('{') - v49First := true - for v49Name, v49Value := range in.Synonyms { - if v49First { - v49First = false + v55First := true + for v55Name, v55Value := range in.Synonyms { + if v55First { + v55First = false } else { out.RawByte(',') } - out.String(string(v49Name)) + out.String(string(v55Name)) out.RawByte(':') - if v49Value == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + if v55Value == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') - for v50, v51 := range v49Value { - if v50 > 0 { + for v56, v57 := range v55Value { + if v56 > 0 { out.RawByte(',') } - out.String(string(v51)) + out.String(string(v57)) } out.RawByte(']') } @@ -2039,11 +2425,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ } { out.RawByte('[') - for v52, v53 := range in.FilterableAttributes { - if v52 > 0 { + for v58, v59 := range in.FilterableAttributes { + if v58 > 0 { out.RawByte(',') } - out.String(string(v53)) + out.String(string(v59)) } out.RawByte(']') } @@ -2058,11 +2444,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ } { out.RawByte('[') - for v54, v55 := range in.SortableAttributes { - if v54 > 0 { + for v60, v61 := range in.SortableAttributes { + if v60 > 0 { out.RawByte(',') } - out.String(string(v55)) + out.String(string(v61)) } out.RawByte(']') } @@ -2107,16 +2493,16 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ } { out.RawByte('{') - v56First := true - for v56Name, v56Value := range in.Embedders { - if v56First { - v56First = false + v62First := true + for v62Name, v62Value := range in.Embedders { + if v62First { + v62First = false } else { out.RawByte(',') } - out.String(string(v56Name)) + out.String(string(v62Name)) out.RawByte(':') - (v56Value).MarshalEasyJSON(out) + (v62Value).MarshalEasyJSON(out) } 
out.RawByte('}') } @@ -2127,27 +2513,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Settings) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Settings) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Settings) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Settings) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(in *jlexer.Lexer, out *SearchResponse) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out *SearchResponse) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -2182,15 +2568,15 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(in *jlexer.Lexer, out.Hits = (out.Hits)[:0] } for !in.IsDelim(']') { - var v57 interface{} - if m, ok := v57.(easyjson.Unmarshaler); ok { + var v63 interface{} + if m, ok := v63.(easyjson.Unmarshaler); ok { m.UnmarshalEasyJSON(in) - } else if m, ok := v57.(json.Unmarshaler); ok { + } else if m, ok := v63.(json.Unmarshaler); ok { _ = m.UnmarshalJSON(in.Raw()) } else { - v57 = in.Interface() + v63 = in.Interface() } - out.Hits = 
append(out.Hits, v57) + out.Hits = append(out.Hits, v63) in.WantComma() } in.Delim(']') @@ -2241,7 +2627,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(out *jwriter.Writer, in SearchResponse) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writer, in SearchResponse) { out.RawByte('{') first := true _ = first @@ -2252,16 +2638,16 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v58, v59 := range in.Hits { - if v58 > 0 { + for v64, v65 := range in.Hits { + if v64 > 0 { out.RawByte(',') } - if m, ok := v59.(easyjson.Marshaler); ok { + if m, ok := v65.(easyjson.Marshaler); ok { m.MarshalEasyJSON(out) - } else if m, ok := v59.(json.Marshaler); ok { + } else if m, ok := v65.(json.Marshaler); ok { out.Raw(m.MarshalJSON()) } else { - out.Raw(json.Marshal(v59)) + out.Raw(json.Marshal(v65)) } } out.RawByte(']') @@ -2345,27 +2731,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v SearchResponse) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v SearchResponse) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *SearchResponse) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(&r, v) return 
r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *SearchResponse) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(in *jlexer.Lexer, out *SearchRequestHybrid) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(in *jlexer.Lexer, out *SearchRequestHybrid) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -2398,7 +2784,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(out *jwriter.Writer, in SearchRequestHybrid) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(out *jwriter.Writer, in SearchRequestHybrid) { out.RawByte('{') first := true _ = first @@ -2424,27 +2810,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v SearchRequestHybrid) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v SearchRequestHybrid) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *SearchRequestHybrid) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *SearchRequestHybrid) 
UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out *SearchRequest) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(in *jlexer.Lexer, out *SearchRequest) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -2483,9 +2869,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out.AttributesToRetrieve = (out.AttributesToRetrieve)[:0] } for !in.IsDelim(']') { - var v60 string - v60 = string(in.String()) - out.AttributesToRetrieve = append(out.AttributesToRetrieve, v60) + var v66 string + v66 = string(in.String()) + out.AttributesToRetrieve = append(out.AttributesToRetrieve, v66) in.WantComma() } in.Delim(']') @@ -2506,9 +2892,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out.AttributesToSearchOn = (out.AttributesToSearchOn)[:0] } for !in.IsDelim(']') { - var v61 string - v61 = string(in.String()) - out.AttributesToSearchOn = append(out.AttributesToSearchOn, v61) + var v67 string + v67 = string(in.String()) + out.AttributesToSearchOn = append(out.AttributesToSearchOn, v67) in.WantComma() } in.Delim(']') @@ -2529,9 +2915,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out.AttributesToCrop = (out.AttributesToCrop)[:0] } for !in.IsDelim(']') { - var v62 string - v62 = string(in.String()) - out.AttributesToCrop = append(out.AttributesToCrop, v62) + var v68 string + v68 = string(in.String()) + out.AttributesToCrop = append(out.AttributesToCrop, v68) in.WantComma() } in.Delim(']') @@ -2556,9 +2942,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out.AttributesToHighlight = (out.AttributesToHighlight)[:0] } for !in.IsDelim(']') { - var v63 string - v63 = string(in.String()) - out.AttributesToHighlight 
= append(out.AttributesToHighlight, v63) + var v69 string + v69 = string(in.String()) + out.AttributesToHighlight = append(out.AttributesToHighlight, v69) in.WantComma() } in.Delim(']') @@ -2599,9 +2985,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out.Facets = (out.Facets)[:0] } for !in.IsDelim(']') { - var v64 string - v64 = string(in.String()) - out.Facets = append(out.Facets, v64) + var v70 string + v70 = string(in.String()) + out.Facets = append(out.Facets, v70) in.WantComma() } in.Delim(']') @@ -2622,9 +3008,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out.Sort = (out.Sort)[:0] } for !in.IsDelim(']') { - var v65 string - v65 = string(in.String()) - out.Sort = append(out.Sort, v65) + var v71 string + v71 = string(in.String()) + out.Sort = append(out.Sort, v71) in.WantComma() } in.Delim(']') @@ -2645,9 +3031,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out.Vector = (out.Vector)[:0] } for !in.IsDelim(']') { - var v66 float32 - v66 = float32(in.Float32()) - out.Vector = append(out.Vector, v66) + var v72 float32 + v72 = float32(in.Float32()) + out.Vector = append(out.Vector, v72) in.WantComma() } in.Delim(']') @@ -2686,7 +3072,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writer, in SearchRequest) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(out *jwriter.Writer, in SearchRequest) { out.RawByte('{') first := true _ = first @@ -2716,11 +3102,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ } { out.RawByte('[') - for v67, v68 := range in.AttributesToRetrieve { - if v67 > 0 { + for v73, v74 := range in.AttributesToRetrieve { + if v73 > 0 { out.RawByte(',') } - out.String(string(v68)) + out.String(string(v74)) } out.RawByte(']') } @@ -2735,11 +3121,11 @@ func 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ } { out.RawByte('[') - for v69, v70 := range in.AttributesToSearchOn { - if v69 > 0 { + for v75, v76 := range in.AttributesToSearchOn { + if v75 > 0 { out.RawByte(',') } - out.String(string(v70)) + out.String(string(v76)) } out.RawByte(']') } @@ -2754,11 +3140,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ } { out.RawByte('[') - for v71, v72 := range in.AttributesToCrop { - if v71 > 0 { + for v77, v78 := range in.AttributesToCrop { + if v77 > 0 { out.RawByte(',') } - out.String(string(v72)) + out.String(string(v78)) } out.RawByte(']') } @@ -2793,11 +3179,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ } { out.RawByte('[') - for v73, v74 := range in.AttributesToHighlight { - if v73 > 0 { + for v79, v80 := range in.AttributesToHighlight { + if v79 > 0 { out.RawByte(',') } - out.String(string(v74)) + out.String(string(v80)) } out.RawByte(']') } @@ -2888,11 +3274,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ } { out.RawByte('[') - for v75, v76 := range in.Facets { - if v75 > 0 { + for v81, v82 := range in.Facets { + if v81 > 0 { out.RawByte(',') } - out.String(string(v76)) + out.String(string(v82)) } out.RawByte(']') } @@ -2907,11 +3293,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ } { out.RawByte('[') - for v77, v78 := range in.Sort { - if v77 > 0 { + for v83, v84 := range in.Sort { + if v83 > 0 { out.RawByte(',') } - out.String(string(v78)) + out.String(string(v84)) } out.RawByte(']') } @@ -2926,11 +3312,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ } { out.RawByte('[') - for v79, v80 := range in.Vector { - if v79 > 0 { + for v85, v86 := range in.Vector { + if v85 > 0 { out.RawByte(',') } - out.Float32(float32(v80)) + out.Float32(float32(v86)) } out.RawByte(']') } @@ -3001,27 +3387,27 @@ func 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v SearchRequest) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v SearchRequest) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *SearchRequest) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *SearchRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(in *jlexer.Lexer, out *Pagination) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(in *jlexer.Lexer, out *Pagination) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -3052,7 +3438,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(out *jwriter.Writer, in Pagination) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(out *jwriter.Writer, in Pagination) { out.RawByte('{') first := true _ = first @@ -3067,27 +3453,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Pagination) MarshalJSON() 
([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Pagination) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Pagination) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Pagination) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(in *jlexer.Lexer, out *MultiSearchResponse) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(in *jlexer.Lexer, out *MultiSearchResponse) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -3122,9 +3508,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(in *jlexer.Lexer, out.Results = (out.Results)[:0] } for !in.IsDelim(']') { - var v81 SearchResponse - (v81).UnmarshalEasyJSON(in) - out.Results = append(out.Results, v81) + var v87 SearchResponse + (v87).UnmarshalEasyJSON(in) + out.Results = append(out.Results, v87) in.WantComma() } in.Delim(']') @@ -3139,7 +3525,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(out *jwriter.Writer, in MultiSearchResponse) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(out *jwriter.Writer, in 
MultiSearchResponse) { out.RawByte('{') first := true _ = first @@ -3150,11 +3536,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v82, v83 := range in.Results { - if v82 > 0 { + for v88, v89 := range in.Results { + if v88 > 0 { out.RawByte(',') } - (v83).MarshalEasyJSON(out) + (v89).MarshalEasyJSON(out) } out.RawByte(']') } @@ -3165,27 +3551,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v MultiSearchResponse) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v MultiSearchResponse) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *MultiSearchResponse) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *MultiSearchResponse) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(in *jlexer.Lexer, out *MultiSearchRequest) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(in *jlexer.Lexer, out *MultiSearchRequest) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -3220,17 +3606,17 @@ func 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(in *jlexer.Lexer, out.Queries = (out.Queries)[:0] } for !in.IsDelim(']') { - var v84 *SearchRequest + var v90 *SearchRequest if in.IsNull() { in.Skip() - v84 = nil + v90 = nil } else { - if v84 == nil { - v84 = new(SearchRequest) + if v90 == nil { + v90 = new(SearchRequest) } - (*v84).UnmarshalEasyJSON(in) + (*v90).UnmarshalEasyJSON(in) } - out.Queries = append(out.Queries, v84) + out.Queries = append(out.Queries, v90) in.WantComma() } in.Delim(']') @@ -3245,7 +3631,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(out *jwriter.Writer, in MultiSearchRequest) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(out *jwriter.Writer, in MultiSearchRequest) { out.RawByte('{') first := true _ = first @@ -3256,14 +3642,14 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v85, v86 := range in.Queries { - if v85 > 0 { + for v91, v92 := range in.Queries { + if v91 > 0 { out.RawByte(',') } - if v86 == nil { + if v92 == nil { out.RawString("null") } else { - (*v86).MarshalEasyJSON(out) + (*v92).MarshalEasyJSON(out) } } out.RawByte(']') @@ -3275,27 +3661,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v MultiSearchRequest) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v MultiSearchRequest) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(w, v) } // 
UnmarshalJSON supports json.Unmarshaler interface func (v *MultiSearchRequest) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *MultiSearchRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(in *jlexer.Lexer, out *MinWordSizeForTypos) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(in *jlexer.Lexer, out *MinWordSizeForTypos) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -3328,7 +3714,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(out *jwriter.Writer, in MinWordSizeForTypos) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(out *jwriter.Writer, in MinWordSizeForTypos) { out.RawByte('{') first := true _ = first @@ -3354,27 +3740,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v MinWordSizeForTypos) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v MinWordSizeForTypos) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *MinWordSizeForTypos) UnmarshalJSON(data []byte) 
error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *MinWordSizeForTypos) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(in *jlexer.Lexer, out *KeysResults) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(in *jlexer.Lexer, out *KeysResults) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -3409,9 +3795,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(in *jlexer.Lexer, out.Results = (out.Results)[:0] } for !in.IsDelim(']') { - var v87 Key - (v87).UnmarshalEasyJSON(in) - out.Results = append(out.Results, v87) + var v93 Key + (v93).UnmarshalEasyJSON(in) + out.Results = append(out.Results, v93) in.WantComma() } in.Delim(']') @@ -3432,7 +3818,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(out *jwriter.Writer, in KeysResults) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(out *jwriter.Writer, in KeysResults) { out.RawByte('{') first := true _ = first @@ -3443,11 +3829,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v88, v89 := range in.Results { - if v88 > 0 { + for v94, v95 := range in.Results { + if v94 > 0 { out.RawByte(',') } - (v89).MarshalEasyJSON(out) + (v95).MarshalEasyJSON(out) } out.RawByte(']') } @@ -3473,27 +3859,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v KeysResults) MarshalJSON() ([]byte, 
error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v KeysResults) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *KeysResults) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *KeysResults) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(in *jlexer.Lexer, out *KeysQuery) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(in *jlexer.Lexer, out *KeysQuery) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -3526,7 +3912,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(out *jwriter.Writer, in KeysQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(out *jwriter.Writer, in KeysQuery) { out.RawByte('{') first := true _ = first @@ -3546,27 +3932,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v KeysQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(&w, v) return 
w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v KeysQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *KeysQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *KeysQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(in *jlexer.Lexer, out *KeyUpdate) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(in *jlexer.Lexer, out *KeyUpdate) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -3599,7 +3985,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(out *jwriter.Writer, in KeyUpdate) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(out *jwriter.Writer, in KeyUpdate) { out.RawByte('{') first := true _ = first @@ -3625,27 +4011,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v KeyUpdate) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v KeyUpdate) MarshalEasyJSON(w *jwriter.Writer) { - 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *KeyUpdate) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *KeyUpdate) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(in *jlexer.Lexer, out *KeyParsed) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(in *jlexer.Lexer, out *KeyParsed) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -3686,9 +4072,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(in *jlexer.Lexer, out.Actions = (out.Actions)[:0] } for !in.IsDelim(']') { - var v90 string - v90 = string(in.String()) - out.Actions = append(out.Actions, v90) + var v96 string + v96 = string(in.String()) + out.Actions = append(out.Actions, v96) in.WantComma() } in.Delim(']') @@ -3709,9 +4095,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(in *jlexer.Lexer, out.Indexes = (out.Indexes)[:0] } for !in.IsDelim(']') { - var v91 string - v91 = string(in.String()) - out.Indexes = append(out.Indexes, v91) + var v97 string + v97 = string(in.String()) + out.Indexes = append(out.Indexes, v97) in.WantComma() } in.Delim(']') @@ -3736,7 +4122,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(out *jwriter.Writer, in KeyParsed) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(out *jwriter.Writer, in KeyParsed) { 
out.RawByte('{') first := true _ = first @@ -3760,11 +4146,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(out *jwriter.Writ out.RawString(prefix) { out.RawByte('[') - for v92, v93 := range in.Actions { - if v92 > 0 { + for v98, v99 := range in.Actions { + if v98 > 0 { out.RawByte(',') } - out.String(string(v93)) + out.String(string(v99)) } out.RawByte(']') } @@ -3774,11 +4160,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(out *jwriter.Writ out.RawString(prefix) { out.RawByte('[') - for v94, v95 := range in.Indexes { - if v94 > 0 { + for v100, v101 := range in.Indexes { + if v100 > 0 { out.RawByte(',') } - out.String(string(v95)) + out.String(string(v101)) } out.RawByte(']') } @@ -3798,27 +4184,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v KeyParsed) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v KeyParsed) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *KeyParsed) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *KeyParsed) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(in 
*jlexer.Lexer, out *Key) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(in *jlexer.Lexer, out *Key) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -3861,9 +4247,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(in *jlexer.Lexer, out.Actions = (out.Actions)[:0] } for !in.IsDelim(']') { - var v96 string - v96 = string(in.String()) - out.Actions = append(out.Actions, v96) + var v102 string + v102 = string(in.String()) + out.Actions = append(out.Actions, v102) in.WantComma() } in.Delim(']') @@ -3884,9 +4270,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(in *jlexer.Lexer, out.Indexes = (out.Indexes)[:0] } for !in.IsDelim(']') { - var v97 string - v97 = string(in.String()) - out.Indexes = append(out.Indexes, v97) + var v103 string + v103 = string(in.String()) + out.Indexes = append(out.Indexes, v103) in.WantComma() } in.Delim(']') @@ -3913,7 +4299,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(out *jwriter.Writer, in Key) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(out *jwriter.Writer, in Key) { out.RawByte('{') first := true _ = first @@ -3942,11 +4328,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(out *jwriter.Writ out.RawString(prefix) { out.RawByte('[') - for v98, v99 := range in.Actions { - if v98 > 0 { + for v104, v105 := range in.Actions { + if v104 > 0 { out.RawByte(',') } - out.String(string(v99)) + out.String(string(v105)) } out.RawByte(']') } @@ -3956,11 +4342,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(out *jwriter.Writ out.RawString(prefix) { out.RawByte('[') - for v100, v101 := range in.Indexes { - if v100 > 0 { + for v106, v107 := range in.Indexes { + if v106 > 0 { out.RawByte(',') } - out.String(string(v101)) + out.String(string(v107)) } out.RawByte(']') } @@ -3986,27 +4372,27 @@ func 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Key) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Key) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Key) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Key) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(in *jlexer.Lexer, out *IndexesResults) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(in *jlexer.Lexer, out *IndexesResults) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4041,9 +4427,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(in *jlexer.Lexer, out.Results = (out.Results)[:0] } for !in.IsDelim(']') { - var v102 Index - (v102).UnmarshalEasyJSON(in) - out.Results = append(out.Results, v102) + var v108 Index + (v108).UnmarshalEasyJSON(in) + out.Results = append(out.Results, v108) in.WantComma() } in.Delim(']') @@ -4064,7 +4450,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(out *jwriter.Writer, in 
IndexesResults) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(out *jwriter.Writer, in IndexesResults) { out.RawByte('{') first := true _ = first @@ -4075,11 +4461,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v103, v104 := range in.Results { - if v103 > 0 { + for v109, v110 := range in.Results { + if v109 > 0 { out.RawByte(',') } - (v104).MarshalEasyJSON(out) + (v110).MarshalEasyJSON(out) } out.RawByte(']') } @@ -4105,27 +4491,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v IndexesResults) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v IndexesResults) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *IndexesResults) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *IndexesResults) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(in *jlexer.Lexer, out *IndexesQuery) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(in *jlexer.Lexer, out *IndexesQuery) { isTopLevel := in.IsStart() if in.IsNull() { if 
isTopLevel { @@ -4158,7 +4544,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(out *jwriter.Writer, in IndexesQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(out *jwriter.Writer, in IndexesQuery) { out.RawByte('{') first := true _ = first @@ -4178,27 +4564,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v IndexesQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v IndexesQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *IndexesQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *IndexesQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(in *jlexer.Lexer, out *Index) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(in *jlexer.Lexer, out *Index) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4239,7 +4625,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(in *jlexer.Lexer, in.Consumed() } } -func 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(out *jwriter.Writer, in Index) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(out *jwriter.Writer, in Index) { out.RawByte('{') first := true _ = first @@ -4269,27 +4655,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Index) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Index) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Index) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Index) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(in *jlexer.Lexer, out *Health) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(in *jlexer.Lexer, out *Health) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4320,7 +4706,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(out *jwriter.Writer, in Health) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(out *jwriter.Writer, in Health) { out.RawByte('{') first 
:= true _ = first @@ -4335,27 +4721,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Health) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Health) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Health) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Health) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(in *jlexer.Lexer, out *Faceting) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(in *jlexer.Lexer, out *Faceting) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4386,7 +4772,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(out *jwriter.Writer, in Faceting) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(out *jwriter.Writer, in Faceting) { out.RawByte('{') first := true _ = first @@ -4401,27 +4787,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Faceting) 
MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Faceting) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Faceting) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Faceting) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(in *jlexer.Lexer, out *FacetSearchResponse) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(in *jlexer.Lexer, out *FacetSearchResponse) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4456,15 +4842,15 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(in *jlexer.Lexer, out.FacetHits = (out.FacetHits)[:0] } for !in.IsDelim(']') { - var v105 interface{} - if m, ok := v105.(easyjson.Unmarshaler); ok { + var v111 interface{} + if m, ok := v111.(easyjson.Unmarshaler); ok { m.UnmarshalEasyJSON(in) - } else if m, ok := v105.(json.Unmarshaler); ok { + } else if m, ok := v111.(json.Unmarshaler); ok { _ = m.UnmarshalJSON(in.Raw()) } else { - v105 = in.Interface() + v111 = in.Interface() } - out.FacetHits = append(out.FacetHits, v105) + out.FacetHits = append(out.FacetHits, v111) in.WantComma() } in.Delim(']') @@ -4483,7 +4869,7 @@ func 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(out *jwriter.Writer, in FacetSearchResponse) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(out *jwriter.Writer, in FacetSearchResponse) { out.RawByte('{') first := true _ = first @@ -4494,16 +4880,16 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v106, v107 := range in.FacetHits { - if v106 > 0 { + for v112, v113 := range in.FacetHits { + if v112 > 0 { out.RawByte(',') } - if m, ok := v107.(easyjson.Marshaler); ok { + if m, ok := v113.(easyjson.Marshaler); ok { m.MarshalEasyJSON(out) - } else if m, ok := v107.(json.Marshaler); ok { + } else if m, ok := v113.(json.Marshaler); ok { out.Raw(m.MarshalJSON()) } else { - out.Raw(json.Marshal(v107)) + out.Raw(json.Marshal(v113)) } } out.RawByte(']') @@ -4525,27 +4911,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v FacetSearchResponse) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v FacetSearchResponse) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *FacetSearchResponse) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler 
interface func (v *FacetSearchResponse) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(in *jlexer.Lexer, out *FacetSearchRequest) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(in *jlexer.Lexer, out *FacetSearchRequest) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4590,9 +4976,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(in *jlexer.Lexer, out.AttributesToSearchOn = (out.AttributesToSearchOn)[:0] } for !in.IsDelim(']') { - var v108 string - v108 = string(in.String()) - out.AttributesToSearchOn = append(out.AttributesToSearchOn, v108) + var v114 string + v114 = string(in.String()) + out.AttributesToSearchOn = append(out.AttributesToSearchOn, v114) in.WantComma() } in.Delim(']') @@ -4607,7 +4993,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(out *jwriter.Writer, in FacetSearchRequest) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(out *jwriter.Writer, in FacetSearchRequest) { out.RawByte('{') first := true _ = first @@ -4667,11 +5053,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(out *jwriter.Writ } { out.RawByte('[') - for v109, v110 := range in.AttributesToSearchOn { - if v109 > 0 { + for v115, v116 := range in.AttributesToSearchOn { + if v115 > 0 { out.RawByte(',') } - out.String(string(v110)) + out.String(string(v116)) } out.RawByte(']') } @@ -4682,27 +5068,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v FacetSearchRequest) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(&w, v) + 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v FacetSearchRequest) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *FacetSearchRequest) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *FacetSearchRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(in *jlexer.Lexer, out *Embedder) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, out *Embedder) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4741,7 +5127,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(out *jwriter.Writer, in Embedder) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writer, in Embedder) { out.RawByte('{') first := true _ = first @@ -4776,27 +5162,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Embedder) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface 
func (v Embedder) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Embedder) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Embedder) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(in *jlexer.Lexer, out *DocumentsResult) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(in *jlexer.Lexer, out *DocumentsResult) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4831,29 +5217,29 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(in *jlexer.Lexer, out.Results = (out.Results)[:0] } for !in.IsDelim(']') { - var v111 map[string]interface{} + var v117 map[string]interface{} if in.IsNull() { in.Skip() } else { in.Delim('{') - v111 = make(map[string]interface{}) + v117 = make(map[string]interface{}) for !in.IsDelim('}') { key := string(in.String()) in.WantColon() - var v112 interface{} - if m, ok := v112.(easyjson.Unmarshaler); ok { + var v118 interface{} + if m, ok := v118.(easyjson.Unmarshaler); ok { m.UnmarshalEasyJSON(in) - } else if m, ok := v112.(json.Unmarshaler); ok { + } else if m, ok := v118.(json.Unmarshaler); ok { _ = m.UnmarshalJSON(in.Raw()) } else { - v112 = in.Interface() + v118 = in.Interface() } - (v111)[key] = v112 + (v117)[key] = v118 in.WantComma() } in.Delim('}') } - out.Results = append(out.Results, v111) + out.Results = append(out.Results, v117) in.WantComma() } in.Delim(']') @@ -4874,7 +5260,7 @@ func 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(out *jwriter.Writer, in DocumentsResult) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(out *jwriter.Writer, in DocumentsResult) { out.RawByte('{') first := true _ = first @@ -4885,29 +5271,29 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v113, v114 := range in.Results { - if v113 > 0 { + for v119, v120 := range in.Results { + if v119 > 0 { out.RawByte(',') } - if v114 == nil && (out.Flags&jwriter.NilMapAsEmpty) == 0 { + if v120 == nil && (out.Flags&jwriter.NilMapAsEmpty) == 0 { out.RawString(`null`) } else { out.RawByte('{') - v115First := true - for v115Name, v115Value := range v114 { - if v115First { - v115First = false + v121First := true + for v121Name, v121Value := range v120 { + if v121First { + v121First = false } else { out.RawByte(',') } - out.String(string(v115Name)) + out.String(string(v121Name)) out.RawByte(':') - if m, ok := v115Value.(easyjson.Marshaler); ok { + if m, ok := v121Value.(easyjson.Marshaler); ok { m.MarshalEasyJSON(out) - } else if m, ok := v115Value.(json.Marshaler); ok { + } else if m, ok := v121Value.(json.Marshaler); ok { out.Raw(m.MarshalJSON()) } else { - out.Raw(json.Marshal(v115Value)) + out.Raw(json.Marshal(v121Value)) } } out.RawByte('}') @@ -4937,27 +5323,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v DocumentsResult) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v DocumentsResult) MarshalEasyJSON(w *jwriter.Writer) { - 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *DocumentsResult) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *DocumentsResult) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, out *DocumentsQuery) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out *DocumentsQuery) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -4996,9 +5382,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, out.Fields = (out.Fields)[:0] } for !in.IsDelim(']') { - var v116 string - v116 = string(in.String()) - out.Fields = append(out.Fields, v116) + var v122 string + v122 = string(in.String()) + out.Fields = append(out.Fields, v122) in.WantComma() } in.Delim(']') @@ -5021,7 +5407,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writer, in DocumentsQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writer, in DocumentsQuery) { out.RawByte('{') first := true _ = first @@ -5051,11 +5437,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writ } { out.RawByte('[') - for v117, v118 := range in.Fields { - if v117 > 0 { + for v123, v124 := range in.Fields { + if v123 > 0 { out.RawByte(',') } - out.String(string(v118)) + 
out.String(string(v124)) } out.RawByte(']') } @@ -5082,27 +5468,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v DocumentsQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v DocumentsQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *DocumentsQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *DocumentsQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(in *jlexer.Lexer, out *DocumentQuery) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, out *DocumentQuery) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -5137,9 +5523,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(in *jlexer.Lexer, out.Fields = (out.Fields)[:0] } for !in.IsDelim(']') { - var v119 string - v119 = string(in.String()) - out.Fields = append(out.Fields, v119) + var v125 string + v125 = string(in.String()) + out.Fields = append(out.Fields, v125) in.WantComma() } in.Delim(']') @@ -5154,7 +5540,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(in *jlexer.Lexer, 
in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(out *jwriter.Writer, in DocumentQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writer, in DocumentQuery) { out.RawByte('{') first := true _ = first @@ -5164,11 +5550,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(out *jwriter.Writ out.RawString(prefix[1:]) { out.RawByte('[') - for v120, v121 := range in.Fields { - if v120 > 0 { + for v126, v127 := range in.Fields { + if v126 > 0 { out.RawByte(',') } - out.String(string(v121)) + out.String(string(v127)) } out.RawByte(']') } @@ -5179,27 +5565,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v DocumentQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v DocumentQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *DocumentQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *DocumentQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out *Details) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(in 
*jlexer.Lexer, out *Details) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -5244,9 +5630,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out.RankingRules = (out.RankingRules)[:0] } for !in.IsDelim(']') { - var v122 string - v122 = string(in.String()) - out.RankingRules = append(out.RankingRules, v122) + var v128 string + v128 = string(in.String()) + out.RankingRules = append(out.RankingRules, v128) in.WantComma() } in.Delim(']') @@ -5277,9 +5663,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out.SearchableAttributes = (out.SearchableAttributes)[:0] } for !in.IsDelim(']') { - var v123 string - v123 = string(in.String()) - out.SearchableAttributes = append(out.SearchableAttributes, v123) + var v129 string + v129 = string(in.String()) + out.SearchableAttributes = append(out.SearchableAttributes, v129) in.WantComma() } in.Delim(']') @@ -5300,9 +5686,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out.DisplayedAttributes = (out.DisplayedAttributes)[:0] } for !in.IsDelim(']') { - var v124 string - v124 = string(in.String()) - out.DisplayedAttributes = append(out.DisplayedAttributes, v124) + var v130 string + v130 = string(in.String()) + out.DisplayedAttributes = append(out.DisplayedAttributes, v130) in.WantComma() } in.Delim(']') @@ -5323,9 +5709,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out.StopWords = (out.StopWords)[:0] } for !in.IsDelim(']') { - var v125 string - v125 = string(in.String()) - out.StopWords = append(out.StopWords, v125) + var v131 string + v131 = string(in.String()) + out.StopWords = append(out.StopWords, v131) in.WantComma() } in.Delim(']') @@ -5343,30 +5729,30 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, for !in.IsDelim('}') { key := string(in.String()) in.WantColon() - var v126 []string + var v132 []string if in.IsNull() { in.Skip() - 
v126 = nil + v132 = nil } else { in.Delim('[') - if v126 == nil { + if v132 == nil { if !in.IsDelim(']') { - v126 = make([]string, 0, 4) + v132 = make([]string, 0, 4) } else { - v126 = []string{} + v132 = []string{} } } else { - v126 = (v126)[:0] + v132 = (v132)[:0] } for !in.IsDelim(']') { - var v127 string - v127 = string(in.String()) - v126 = append(v126, v127) + var v133 string + v133 = string(in.String()) + v132 = append(v132, v133) in.WantComma() } in.Delim(']') } - (out.Synonyms)[key] = v126 + (out.Synonyms)[key] = v132 in.WantComma() } in.Delim('}') @@ -5387,9 +5773,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out.FilterableAttributes = (out.FilterableAttributes)[:0] } for !in.IsDelim(']') { - var v128 string - v128 = string(in.String()) - out.FilterableAttributes = append(out.FilterableAttributes, v128) + var v134 string + v134 = string(in.String()) + out.FilterableAttributes = append(out.FilterableAttributes, v134) in.WantComma() } in.Delim(']') @@ -5410,9 +5796,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out.SortableAttributes = (out.SortableAttributes)[:0] } for !in.IsDelim(']') { - var v129 string - v129 = string(in.String()) - out.SortableAttributes = append(out.SortableAttributes, v129) + var v135 string + v135 = string(in.String()) + out.SortableAttributes = append(out.SortableAttributes, v135) in.WantComma() } in.Delim(']') @@ -5471,9 +5857,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out.Swaps = (out.Swaps)[:0] } for !in.IsDelim(']') { - var v130 SwapIndexesParams - (v130).UnmarshalEasyJSON(in) - out.Swaps = append(out.Swaps, v130) + var v136 SwapIndexesParams + (v136).UnmarshalEasyJSON(in) + out.Swaps = append(out.Swaps, v136) in.WantComma() } in.Delim(']') @@ -5490,7 +5876,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, in.Consumed() } } -func 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writer, in Details) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(out *jwriter.Writer, in Details) { out.RawByte('{') first := true _ = first @@ -5550,11 +5936,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ } { out.RawByte('[') - for v131, v132 := range in.RankingRules { - if v131 > 0 { + for v137, v138 := range in.RankingRules { + if v137 > 0 { out.RawByte(',') } - out.String(string(v132)) + out.String(string(v138)) } out.RawByte(']') } @@ -5579,11 +5965,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ } { out.RawByte('[') - for v133, v134 := range in.SearchableAttributes { - if v133 > 0 { + for v139, v140 := range in.SearchableAttributes { + if v139 > 0 { out.RawByte(',') } - out.String(string(v134)) + out.String(string(v140)) } out.RawByte(']') } @@ -5598,11 +5984,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ } { out.RawByte('[') - for v135, v136 := range in.DisplayedAttributes { - if v135 > 0 { + for v141, v142 := range in.DisplayedAttributes { + if v141 > 0 { out.RawByte(',') } - out.String(string(v136)) + out.String(string(v142)) } out.RawByte(']') } @@ -5617,11 +6003,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ } { out.RawByte('[') - for v137, v138 := range in.StopWords { - if v137 > 0 { + for v143, v144 := range in.StopWords { + if v143 > 0 { out.RawByte(',') } - out.String(string(v138)) + out.String(string(v144)) } out.RawByte(']') } @@ -5636,24 +6022,24 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ } { out.RawByte('{') - v139First := true - for v139Name, v139Value := range in.Synonyms { - if v139First { - v139First = false + v145First := true + for v145Name, v145Value := range in.Synonyms { + if v145First { + v145First = false } else { out.RawByte(',') } - 
out.String(string(v139Name)) + out.String(string(v145Name)) out.RawByte(':') - if v139Value == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + if v145Value == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') - for v140, v141 := range v139Value { - if v140 > 0 { + for v146, v147 := range v145Value { + if v146 > 0 { out.RawByte(',') } - out.String(string(v141)) + out.String(string(v147)) } out.RawByte(']') } @@ -5671,11 +6057,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ } { out.RawByte('[') - for v142, v143 := range in.FilterableAttributes { - if v142 > 0 { + for v148, v149 := range in.FilterableAttributes { + if v148 > 0 { out.RawByte(',') } - out.String(string(v143)) + out.String(string(v149)) } out.RawByte(']') } @@ -5690,11 +6076,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ } { out.RawByte('[') - for v144, v145 := range in.SortableAttributes { - if v144 > 0 { + for v150, v151 := range in.SortableAttributes { + if v150 > 0 { out.RawByte(',') } - out.String(string(v145)) + out.String(string(v151)) } out.RawByte(']') } @@ -5779,11 +6165,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ } { out.RawByte('[') - for v146, v147 := range in.Swaps { - if v146 > 0 { + for v152, v153 := range in.Swaps { + if v152 > 0 { out.RawByte(',') } - (v147).MarshalEasyJSON(out) + (v153).MarshalEasyJSON(out) } out.RawByte(']') } @@ -5804,27 +6190,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Details) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Details) MarshalEasyJSON(w 
*jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Details) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Details) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, out *DeleteTasksQuery) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(in *jlexer.Lexer, out *DeleteTasksQuery) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -5859,9 +6245,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, out.UIDS = (out.UIDS)[:0] } for !in.IsDelim(']') { - var v148 int64 - v148 = int64(in.Int64()) - out.UIDS = append(out.UIDS, v148) + var v154 int64 + v154 = int64(in.Int64()) + out.UIDS = append(out.UIDS, v154) in.WantComma() } in.Delim(']') @@ -5882,9 +6268,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, out.IndexUIDS = (out.IndexUIDS)[:0] } for !in.IsDelim(']') { - var v149 string - v149 = string(in.String()) - out.IndexUIDS = append(out.IndexUIDS, v149) + var v155 string + v155 = string(in.String()) + out.IndexUIDS = append(out.IndexUIDS, v155) in.WantComma() } in.Delim(']') @@ -5905,9 +6291,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, out.Statuses = (out.Statuses)[:0] } for !in.IsDelim(']') { - var v150 TaskStatus - v150 = TaskStatus(in.String()) - out.Statuses = append(out.Statuses, v150) + var v156 TaskStatus + v156 = 
TaskStatus(in.String()) + out.Statuses = append(out.Statuses, v156) in.WantComma() } in.Delim(']') @@ -5928,9 +6314,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, out.Types = (out.Types)[:0] } for !in.IsDelim(']') { - var v151 TaskType - v151 = TaskType(in.String()) - out.Types = append(out.Types, v151) + var v157 TaskType + v157 = TaskType(in.String()) + out.Types = append(out.Types, v157) in.WantComma() } in.Delim(']') @@ -5951,9 +6337,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, out.CanceledBy = (out.CanceledBy)[:0] } for !in.IsDelim(']') { - var v152 int64 - v152 = int64(in.Int64()) - out.CanceledBy = append(out.CanceledBy, v152) + var v158 int64 + v158 = int64(in.Int64()) + out.CanceledBy = append(out.CanceledBy, v158) in.WantComma() } in.Delim(']') @@ -5992,7 +6378,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writer, in DeleteTasksQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(out *jwriter.Writer, in DeleteTasksQuery) { out.RawByte('{') first := true _ = first @@ -6003,11 +6389,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v153, v154 := range in.UIDS { - if v153 > 0 { + for v159, v160 := range in.UIDS { + if v159 > 0 { out.RawByte(',') } - out.Int64(int64(v154)) + out.Int64(int64(v160)) } out.RawByte(']') } @@ -6019,11 +6405,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v155, v156 := range in.IndexUIDS { - if v155 > 0 { + for v161, v162 := range in.IndexUIDS { + if v161 > 0 { out.RawByte(',') } - out.String(string(v156)) + out.String(string(v162)) } out.RawByte(']') } @@ -6035,11 +6421,11 @@ func 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v157, v158 := range in.Statuses { - if v157 > 0 { + for v163, v164 := range in.Statuses { + if v163 > 0 { out.RawByte(',') } - out.String(string(v158)) + out.String(string(v164)) } out.RawByte(']') } @@ -6051,11 +6437,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v159, v160 := range in.Types { - if v159 > 0 { + for v165, v166 := range in.Types { + if v165 > 0 { out.RawByte(',') } - out.String(string(v160)) + out.String(string(v166)) } out.RawByte(']') } @@ -6067,11 +6453,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v161, v162 := range in.CanceledBy { - if v161 > 0 { + for v167, v168 := range in.CanceledBy { + if v167 > 0 { out.RawByte(',') } - out.Int64(int64(v162)) + out.Int64(int64(v168)) } out.RawByte(']') } @@ -6112,27 +6498,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v DeleteTasksQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v DeleteTasksQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *DeleteTasksQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(&r, v) return r.Error() 
} // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *DeleteTasksQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(in *jlexer.Lexer, out *CsvDocumentsQuery) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(in *jlexer.Lexer, out *CsvDocumentsQuery) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -6165,7 +6551,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(out *jwriter.Writer, in CsvDocumentsQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(out *jwriter.Writer, in CsvDocumentsQuery) { out.RawByte('{') first := true _ = first @@ -6191,27 +6577,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v CsvDocumentsQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v CsvDocumentsQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *CsvDocumentsQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *CsvDocumentsQuery) UnmarshalEasyJSON(l 
*jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(in *jlexer.Lexer, out *CreateIndexRequest) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(in *jlexer.Lexer, out *CreateIndexRequest) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -6244,7 +6630,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(out *jwriter.Writer, in CreateIndexRequest) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(out *jwriter.Writer, in CreateIndexRequest) { out.RawByte('{') first := true _ = first @@ -6270,27 +6656,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v CreateIndexRequest) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v CreateIndexRequest) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *CreateIndexRequest) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *CreateIndexRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(l, v) + 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(in *jlexer.Lexer, out *Client) { +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo43(in *jlexer.Lexer, out *Client) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -6319,7 +6705,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(out *jwriter.Writer, in Client) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo43(out *jwriter.Writer, in Client) { out.RawByte('{') first := true _ = first @@ -6329,27 +6715,27 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v Client) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo43(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v Client) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo43(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *Client) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo43(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *Client) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo43(l, v) } -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(in *jlexer.Lexer, out *CancelTasksQuery) { +func 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo44(in *jlexer.Lexer, out *CancelTasksQuery) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { @@ -6384,9 +6770,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(in *jlexer.Lexer, out.UIDS = (out.UIDS)[:0] } for !in.IsDelim(']') { - var v163 int64 - v163 = int64(in.Int64()) - out.UIDS = append(out.UIDS, v163) + var v169 int64 + v169 = int64(in.Int64()) + out.UIDS = append(out.UIDS, v169) in.WantComma() } in.Delim(']') @@ -6407,9 +6793,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(in *jlexer.Lexer, out.IndexUIDS = (out.IndexUIDS)[:0] } for !in.IsDelim(']') { - var v164 string - v164 = string(in.String()) - out.IndexUIDS = append(out.IndexUIDS, v164) + var v170 string + v170 = string(in.String()) + out.IndexUIDS = append(out.IndexUIDS, v170) in.WantComma() } in.Delim(']') @@ -6430,9 +6816,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(in *jlexer.Lexer, out.Statuses = (out.Statuses)[:0] } for !in.IsDelim(']') { - var v165 TaskStatus - v165 = TaskStatus(in.String()) - out.Statuses = append(out.Statuses, v165) + var v171 TaskStatus + v171 = TaskStatus(in.String()) + out.Statuses = append(out.Statuses, v171) in.WantComma() } in.Delim(']') @@ -6453,9 +6839,9 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(in *jlexer.Lexer, out.Types = (out.Types)[:0] } for !in.IsDelim(']') { - var v166 TaskType - v166 = TaskType(in.String()) - out.Types = append(out.Types, v166) + var v172 TaskType + v172 = TaskType(in.String()) + out.Types = append(out.Types, v172) in.WantComma() } in.Delim(']') @@ -6486,7 +6872,7 @@ func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(in *jlexer.Lexer, in.Consumed() } } -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(out *jwriter.Writer, in CancelTasksQuery) { +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo44(out *jwriter.Writer, in CancelTasksQuery) { out.RawByte('{') 
first := true _ = first @@ -6497,11 +6883,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v167, v168 := range in.UIDS { - if v167 > 0 { + for v173, v174 := range in.UIDS { + if v173 > 0 { out.RawByte(',') } - out.Int64(int64(v168)) + out.Int64(int64(v174)) } out.RawByte(']') } @@ -6513,11 +6899,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v169, v170 := range in.IndexUIDS { - if v169 > 0 { + for v175, v176 := range in.IndexUIDS { + if v175 > 0 { out.RawByte(',') } - out.String(string(v170)) + out.String(string(v176)) } out.RawByte(']') } @@ -6529,11 +6915,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v171, v172 := range in.Statuses { - if v171 > 0 { + for v177, v178 := range in.Statuses { + if v177 > 0 { out.RawByte(',') } - out.String(string(v172)) + out.String(string(v178)) } out.RawByte(']') } @@ -6545,11 +6931,11 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(out *jwriter.Writ out.RawString("null") } else { out.RawByte('[') - for v173, v174 := range in.Types { - if v173 > 0 { + for v179, v180 := range in.Types { + if v179 > 0 { out.RawByte(',') } - out.String(string(v174)) + out.String(string(v180)) } out.RawByte(']') } @@ -6580,23 +6966,23 @@ func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(out *jwriter.Writ // MarshalJSON supports json.Marshaler interface func (v CancelTasksQuery) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(&w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo44(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v CancelTasksQuery) MarshalEasyJSON(w *jwriter.Writer) { - 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(w, v) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo44(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *CancelTasksQuery) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(&r, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo44(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *CancelTasksQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(l, v) + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo44(l, v) } From c9e06f872fda16be25c1aa62fef75ecd873d8d41 Mon Sep 17 00:00:00 2001 From: Javad Date: Thu, 1 Aug 2024 18:39:05 +0330 Subject: [PATCH 22/43] fix: improve test coverage in index_search --- index_search_test.go | 46 ++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 42 insertions(+), 4 deletions(-) diff --git a/index_search_test.go b/index_search_test.go index 133ba7f8..c2fa058a 100644 --- a/index_search_test.go +++ b/index_search_test.go @@ -25,10 +25,12 @@ func TestIndex_SearchRaw(t *testing.T) { { name: "TestIndexBasicSearch", args: args{ - UID: "indexUID", - client: defaultClient, - query: "prince", - request: &SearchRequest{}, + UID: "indexUID", + client: defaultClient, + query: "prince", + request: &SearchRequest{ + IndexUID: "foobar", + }, }, want: &SearchResponse{ Hits: []interface{}{ @@ -45,6 +47,17 @@ func TestIndex_SearchRaw(t *testing.T) { }, wantErr: false, }, + { + name: "TestNullRequestInSearchRow", + args: args{ + UID: "indexUID", + client: defaultClient, + query: "prince", + request: nil, + }, + want: nil, + wantErr: true, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ -131,6 +144,31 @@ func TestIndex_Search(t *testing.T) { }, wantErr: false, }, + { + name: "TestIndexBasicSearchWithIndexUIDInRequest", + args: args{ + UID: "indexUID", + 
client: defaultClient, + query: "prince", + request: &SearchRequest{ + IndexUID: "foobar", + }, + }, + want: &SearchResponse{ + Hits: []interface{}{ + map[string]interface{}{ + "book_id": float64(456), "title": "Le Petit Prince", + }, + map[string]interface{}{ + "Tag": "Epic fantasy", "book_id": float64(4), "title": "Harry Potter and the Half-Blood Prince", + }, + }, + EstimatedTotalHits: 2, + Offset: 0, + Limit: 20, + }, + wantErr: false, + }, { name: "TestIndexSearchWithCustomClient", args: args{ From a60bb2d6a5e5281d92033cf71b815964b2e567b7 Mon Sep 17 00:00:00 2001 From: Javad Date: Thu, 1 Aug 2024 18:57:17 +0330 Subject: [PATCH 23/43] fix: add test for types methods --- types_test.go | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 types_test.go diff --git a/types_test.go b/types_test.go new file mode 100644 index 00000000..2ae781e2 --- /dev/null +++ b/types_test.go @@ -0,0 +1,43 @@ +package meilisearch + +import ( + "encoding/json" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "testing" +) + +func TestTypes_UnmarshalJSON(t *testing.T) { + var raw RawType + data := []byte(`"some data"`) + + err := json.Unmarshal(data, &raw) + require.NoError(t, err) + + expected := RawType(data) + require.Equal(t, expected, raw) +} + +func TestTypes_MarshalJSON(t *testing.T) { + raw := RawType(`"some data"`) + + data, err := json.Marshal(raw) + require.NoError(t, err) + + expected := []byte(`"some data"`) + require.Equal(t, data, expected) +} + +func TestTypes_ValidateSearchRequest(t *testing.T) { + req := &SearchRequest{ + Limit: 0, + Hybrid: &SearchRequestHybrid{ + Embedder: "", + }, + } + + req.validate() + + assert.Equal(t, req.Limit, DefaultLimit) + assert.Equal(t, req.Hybrid.Embedder, "default") +} From 8e19813d51c20a8e333583d0de96882a9df2a6c0 Mon Sep 17 00:00:00 2001 From: Javad Date: Sun, 4 Aug 2024 08:57:11 +0330 Subject: [PATCH 24/43] docs: add package documentation --- 
client_request.go | 203 -- doc.go | 18 + types_easyjson.go | 6988 --------------------------------------------- 3 files changed, 18 insertions(+), 7191 deletions(-) delete mode 100644 client_request.go create mode 100644 doc.go delete mode 100644 types_easyjson.go diff --git a/client_request.go b/client_request.go deleted file mode 100644 index 29b694b6..00000000 --- a/client_request.go +++ /dev/null @@ -1,203 +0,0 @@ -package meilisearch - -import ( - "fmt" - "io" - "net/http" - "net/url" - - "github.com/valyala/fasthttp" - - "encoding/json" -) - -const ( - contentTypeJSON string = "application/json" - contentTypeNDJSON string = "application/x-ndjson" - contentTypeCSV string = "text/csv" -) - -type internalRequest struct { - endpoint string - method string - contentType string - - withRequest interface{} - withResponse interface{} - withQueryParams map[string]string - - acceptedStatusCodes []int - - functionName string -} - -func (c *Client) executeRequest(req internalRequest) error { - internalError := &Error{ - Endpoint: req.endpoint, - Method: req.method, - Function: req.functionName, - RequestToString: "empty request", - ResponseToString: "empty response", - MeilisearchApiError: meilisearchApiError{ - Message: "empty Meilisearch message", - }, - StatusCodeExpected: req.acceptedStatusCodes, - } - - response := fasthttp.AcquireResponse() - defer fasthttp.ReleaseResponse(response) - err := c.sendRequest(&req, internalError, response) - if err != nil { - return err - } - internalError.StatusCode = response.StatusCode() - - err = c.handleStatusCode(&req, response, internalError) - if err != nil { - return err - } - - err = c.handleResponse(&req, response, internalError) - if err != nil { - return err - } - return nil -} - -func (c *Client) sendRequest(req *internalRequest, internalError *Error, response *fasthttp.Response) error { - var ( - request *fasthttp.Request - - err error - ) - - // Setup URL - requestURL, err := url.Parse(c.config.Host + req.endpoint) - if 
err != nil { - return fmt.Errorf("unable to parse url: %w", err) - } - - // Build query parameters - if req.withQueryParams != nil { - query := requestURL.Query() - for key, value := range req.withQueryParams { - query.Set(key, value) - } - - requestURL.RawQuery = query.Encode() - } - - request = fasthttp.AcquireRequest() - defer fasthttp.ReleaseRequest(request) - - request.SetRequestURI(requestURL.String()) - request.Header.SetMethod(req.method) - - if req.withRequest != nil { - if req.method == http.MethodGet || req.method == http.MethodHead { - return fmt.Errorf("sendRequest: request body is not expected for GET and HEAD requests") - } - if req.contentType == "" { - return fmt.Errorf("sendRequest: request body without Content-Type is not allowed") - } - - rawRequest := req.withRequest - if bytes, ok := rawRequest.([]byte); ok { - // If the request body is already a []byte then use it directly - request.SetBody(bytes) - } else if reader, ok := rawRequest.(io.Reader); ok { - // If the request body is an io.Reader then stream it directly until io.EOF - // NOTE: Avoid using this, due to problems with streamed request bodies - request.SetBodyStream(reader, -1) - } else { - // Otherwise convert it to JSON - var ( - data []byte - err error - ) - if marshaler, ok := rawRequest.(json.Marshaler); ok { - data, err = marshaler.MarshalJSON() - } else { - data, err = json.Marshal(rawRequest) - } - internalError.RequestToString = string(data) - if err != nil { - return internalError.WithErrCode(ErrCodeMarshalRequest, err) - } - request.SetBody(data) - } - } - - // adding request headers - if req.contentType != "" { - request.Header.Set("Content-Type", req.contentType) - } - if c.config.APIKey != "" { - request.Header.Set("Authorization", "Bearer "+c.config.APIKey) - } - - request.Header.Set("User-Agent", GetQualifiedVersion()) - - // request is sent - if c.config.Timeout != 0 { - err = c.httpClient.DoTimeout(request, response, c.config.Timeout) - } else { - err = 
c.httpClient.Do(request, response) - } - - // request execution timeout - if err == fasthttp.ErrTimeout { - return internalError.WithErrCode(MeilisearchTimeoutError, err) - } - // request execution fail - if err != nil { - return internalError.WithErrCode(MeilisearchCommunicationError, err) - } - - return nil -} - -func (c *Client) handleStatusCode(req *internalRequest, response *fasthttp.Response, internalError *Error) error { - if req.acceptedStatusCodes != nil { - - // A successful status code is required so check if the response status code is in the - // expected status code list. - for _, acceptedCode := range req.acceptedStatusCodes { - if response.StatusCode() == acceptedCode { - return nil - } - } - // At this point the response status code is a failure. - rawBody := response.Body() - - internalError.ErrorBody(rawBody) - - if internalError.MeilisearchApiError.Code == "" { - return internalError.WithErrCode(MeilisearchApiErrorWithoutMessage) - } - return internalError.WithErrCode(MeilisearchApiError) - } - - return nil -} - -func (c *Client) handleResponse(req *internalRequest, response *fasthttp.Response, internalError *Error) (err error) { - if req.withResponse != nil { - - // A json response is mandatory, so the response interface{} need to be unmarshal from the response payload. - rawBody := response.Body() - internalError.ResponseToString = string(rawBody) - - var err error - if resp, ok := req.withResponse.(json.Unmarshaler); ok { - err = resp.UnmarshalJSON(rawBody) - req.withResponse = resp - } else { - err = json.Unmarshal(rawBody, req.withResponse) - } - if err != nil { - return internalError.WithErrCode(ErrCodeResponseUnmarshalBody, err) - } - } - return nil -} diff --git a/doc.go b/doc.go new file mode 100644 index 00000000..b3f90523 --- /dev/null +++ b/doc.go @@ -0,0 +1,18 @@ +// Package meilisearch is the official Meilisearch SDK for the Go programming language. 
+// +// The meilisearch-go SDK for Go provides APIs and utilities that developers can use to +// build Go applications that use meilisearch service. +// +// See the meilisearch package documentation for more information. +// https://www.meilisearch.com/docs/reference +// +// Example: +// +// sv, err := New("http://localhost:7700", WithAPIKey("foobar")) +// if err != nil { +// fmt.Println(err) +// return +// } +// +// fmt.Println(sv.IsHealthy(context.Background())) +package meilisearch diff --git a/types_easyjson.go b/types_easyjson.go deleted file mode 100644 index af3c37b2..00000000 --- a/types_easyjson.go +++ /dev/null @@ -1,6988 +0,0 @@ -// Code generated by easyjson for marshaling/unmarshaling. DO NOT EDIT. - -package meilisearch - -import ( - json "encoding/json" - _v4 "github.com/golang-jwt/jwt/v4" - easyjson "github.com/mailru/easyjson" - jlexer "github.com/mailru/easyjson/jlexer" - jwriter "github.com/mailru/easyjson/jwriter" -) - -// suppress unused package warning -var ( - _ *json.RawMessage - _ *jlexer.Lexer - _ *jwriter.Writer - _ easyjson.Marshaler -) - -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo(in *jlexer.Lexer, out *Version) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "commitSha": - out.CommitSha = string(in.String()) - case "commitDate": - out.CommitDate = string(in.String()) - case "pkgVersion": - out.PkgVersion = string(in.String()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo(out *jwriter.Writer, in Version) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"commitSha\":" - out.RawString(prefix[1:]) - 
out.String(string(in.CommitSha)) - } - { - const prefix string = ",\"commitDate\":" - out.RawString(prefix) - out.String(string(in.CommitDate)) - } - { - const prefix string = ",\"pkgVersion\":" - out.RawString(prefix) - out.String(string(in.PkgVersion)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v Version) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v Version) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *Version) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *Version) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo1(in *jlexer.Lexer, out *UpdateIndexRequest) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "primaryKey": - out.PrimaryKey = string(in.String()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo1(out *jwriter.Writer, in UpdateIndexRequest) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"primaryKey\":" - out.RawString(prefix[1:]) - out.String(string(in.PrimaryKey)) - } - out.RawByte('}') -} - -// MarshalJSON supports 
json.Marshaler interface -func (v UpdateIndexRequest) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo1(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v UpdateIndexRequest) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo1(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *UpdateIndexRequest) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo1(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *UpdateIndexRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo1(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo2(in *jlexer.Lexer, out *TypoTolerance) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "enabled": - out.Enabled = bool(in.Bool()) - case "minWordSizeForTypos": - (out.MinWordSizeForTypos).UnmarshalEasyJSON(in) - case "disableOnWords": - if in.IsNull() { - in.Skip() - out.DisableOnWords = nil - } else { - in.Delim('[') - if out.DisableOnWords == nil { - if !in.IsDelim(']') { - out.DisableOnWords = make([]string, 0, 4) - } else { - out.DisableOnWords = []string{} - } - } else { - out.DisableOnWords = (out.DisableOnWords)[:0] - } - for !in.IsDelim(']') { - var v1 string - v1 = string(in.String()) - out.DisableOnWords = append(out.DisableOnWords, v1) - in.WantComma() - } - in.Delim(']') - } - case "disableOnAttributes": - if in.IsNull() { - in.Skip() - out.DisableOnAttributes = nil - } else { - in.Delim('[') - if 
out.DisableOnAttributes == nil { - if !in.IsDelim(']') { - out.DisableOnAttributes = make([]string, 0, 4) - } else { - out.DisableOnAttributes = []string{} - } - } else { - out.DisableOnAttributes = (out.DisableOnAttributes)[:0] - } - for !in.IsDelim(']') { - var v2 string - v2 = string(in.String()) - out.DisableOnAttributes = append(out.DisableOnAttributes, v2) - in.WantComma() - } - in.Delim(']') - } - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo2(out *jwriter.Writer, in TypoTolerance) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"enabled\":" - out.RawString(prefix[1:]) - out.Bool(bool(in.Enabled)) - } - if true { - const prefix string = ",\"minWordSizeForTypos\":" - out.RawString(prefix) - (in.MinWordSizeForTypos).MarshalEasyJSON(out) - } - if len(in.DisableOnWords) != 0 { - const prefix string = ",\"disableOnWords\":" - out.RawString(prefix) - { - out.RawByte('[') - for v3, v4 := range in.DisableOnWords { - if v3 > 0 { - out.RawByte(',') - } - out.String(string(v4)) - } - out.RawByte(']') - } - } - if len(in.DisableOnAttributes) != 0 { - const prefix string = ",\"disableOnAttributes\":" - out.RawString(prefix) - { - out.RawByte('[') - for v5, v6 := range in.DisableOnAttributes { - if v5 > 0 { - out.RawByte(',') - } - out.String(string(v6)) - } - out.RawByte(']') - } - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v TypoTolerance) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo2(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v TypoTolerance) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo2(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *TypoTolerance) 
UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo2(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *TypoTolerance) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo2(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo3(in *jlexer.Lexer, out *TenantTokenOptions) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "APIKey": - out.APIKey = string(in.String()) - case "ExpiresAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.ExpiresAt).UnmarshalJSON(data)) - } - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo3(out *jwriter.Writer, in TenantTokenOptions) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"APIKey\":" - out.RawString(prefix[1:]) - out.String(string(in.APIKey)) - } - { - const prefix string = ",\"ExpiresAt\":" - out.RawString(prefix) - out.Raw((in.ExpiresAt).MarshalJSON()) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v TenantTokenOptions) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo3(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v TenantTokenOptions) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo3(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *TenantTokenOptions) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: 
data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo3(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *TenantTokenOptions) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo3(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo4(in *jlexer.Lexer, out *TenantTokenClaims) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "apiKeyUid": - out.APIKeyUID = string(in.String()) - case "searchRules": - if m, ok := out.SearchRules.(easyjson.Unmarshaler); ok { - m.UnmarshalEasyJSON(in) - } else if m, ok := out.SearchRules.(json.Unmarshaler); ok { - _ = m.UnmarshalJSON(in.Raw()) - } else { - out.SearchRules = in.Interface() - } - case "iss": - out.Issuer = string(in.String()) - case "sub": - out.Subject = string(in.String()) - case "aud": - if data := in.Raw(); in.Ok() { - in.AddError((out.Audience).UnmarshalJSON(data)) - } - case "exp": - if in.IsNull() { - in.Skip() - out.ExpiresAt = nil - } else { - if out.ExpiresAt == nil { - out.ExpiresAt = new(_v4.NumericDate) - } - if data := in.Raw(); in.Ok() { - in.AddError((*out.ExpiresAt).UnmarshalJSON(data)) - } - } - case "nbf": - if in.IsNull() { - in.Skip() - out.NotBefore = nil - } else { - if out.NotBefore == nil { - out.NotBefore = new(_v4.NumericDate) - } - if data := in.Raw(); in.Ok() { - in.AddError((*out.NotBefore).UnmarshalJSON(data)) - } - } - case "iat": - if in.IsNull() { - in.Skip() - out.IssuedAt = nil - } else { - if out.IssuedAt == nil { - out.IssuedAt = new(_v4.NumericDate) - } - if data := in.Raw(); in.Ok() { - in.AddError((*out.IssuedAt).UnmarshalJSON(data)) - } - } - case "jti": - out.ID = string(in.String()) - default: - in.SkipRecursive() - } - 
in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo4(out *jwriter.Writer, in TenantTokenClaims) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"apiKeyUid\":" - out.RawString(prefix[1:]) - out.String(string(in.APIKeyUID)) - } - { - const prefix string = ",\"searchRules\":" - out.RawString(prefix) - if m, ok := in.SearchRules.(easyjson.Marshaler); ok { - m.MarshalEasyJSON(out) - } else if m, ok := in.SearchRules.(json.Marshaler); ok { - out.Raw(m.MarshalJSON()) - } else { - out.Raw(json.Marshal(in.SearchRules)) - } - } - if in.Issuer != "" { - const prefix string = ",\"iss\":" - out.RawString(prefix) - out.String(string(in.Issuer)) - } - if in.Subject != "" { - const prefix string = ",\"sub\":" - out.RawString(prefix) - out.String(string(in.Subject)) - } - if len(in.Audience) != 0 { - const prefix string = ",\"aud\":" - out.RawString(prefix) - out.Raw((in.Audience).MarshalJSON()) - } - if in.ExpiresAt != nil { - const prefix string = ",\"exp\":" - out.RawString(prefix) - out.Raw((*in.ExpiresAt).MarshalJSON()) - } - if in.NotBefore != nil { - const prefix string = ",\"nbf\":" - out.RawString(prefix) - out.Raw((*in.NotBefore).MarshalJSON()) - } - if in.IssuedAt != nil { - const prefix string = ",\"iat\":" - out.RawString(prefix) - out.Raw((*in.IssuedAt).MarshalJSON()) - } - if in.ID != "" { - const prefix string = ",\"jti\":" - out.RawString(prefix) - out.String(string(in.ID)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v TenantTokenClaims) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo4(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v TenantTokenClaims) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo4(w, v) -} - -// UnmarshalJSON 
supports json.Unmarshaler interface -func (v *TenantTokenClaims) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo4(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *TenantTokenClaims) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo4(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo5(in *jlexer.Lexer, out *TasksQuery) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "UIDS": - if in.IsNull() { - in.Skip() - out.UIDS = nil - } else { - in.Delim('[') - if out.UIDS == nil { - if !in.IsDelim(']') { - out.UIDS = make([]int64, 0, 8) - } else { - out.UIDS = []int64{} - } - } else { - out.UIDS = (out.UIDS)[:0] - } - for !in.IsDelim(']') { - var v7 int64 - v7 = int64(in.Int64()) - out.UIDS = append(out.UIDS, v7) - in.WantComma() - } - in.Delim(']') - } - case "Limit": - out.Limit = int64(in.Int64()) - case "From": - out.From = int64(in.Int64()) - case "IndexUIDS": - if in.IsNull() { - in.Skip() - out.IndexUIDS = nil - } else { - in.Delim('[') - if out.IndexUIDS == nil { - if !in.IsDelim(']') { - out.IndexUIDS = make([]string, 0, 4) - } else { - out.IndexUIDS = []string{} - } - } else { - out.IndexUIDS = (out.IndexUIDS)[:0] - } - for !in.IsDelim(']') { - var v8 string - v8 = string(in.String()) - out.IndexUIDS = append(out.IndexUIDS, v8) - in.WantComma() - } - in.Delim(']') - } - case "Statuses": - if in.IsNull() { - in.Skip() - out.Statuses = nil - } else { - in.Delim('[') - if out.Statuses == nil { - if !in.IsDelim(']') { - out.Statuses = make([]TaskStatus, 0, 4) - } else { - out.Statuses = []TaskStatus{} - } - } else { - out.Statuses = 
(out.Statuses)[:0] - } - for !in.IsDelim(']') { - var v9 TaskStatus - v9 = TaskStatus(in.String()) - out.Statuses = append(out.Statuses, v9) - in.WantComma() - } - in.Delim(']') - } - case "Types": - if in.IsNull() { - in.Skip() - out.Types = nil - } else { - in.Delim('[') - if out.Types == nil { - if !in.IsDelim(']') { - out.Types = make([]TaskType, 0, 4) - } else { - out.Types = []TaskType{} - } - } else { - out.Types = (out.Types)[:0] - } - for !in.IsDelim(']') { - var v10 TaskType - v10 = TaskType(in.String()) - out.Types = append(out.Types, v10) - in.WantComma() - } - in.Delim(']') - } - case "CanceledBy": - if in.IsNull() { - in.Skip() - out.CanceledBy = nil - } else { - in.Delim('[') - if out.CanceledBy == nil { - if !in.IsDelim(']') { - out.CanceledBy = make([]int64, 0, 8) - } else { - out.CanceledBy = []int64{} - } - } else { - out.CanceledBy = (out.CanceledBy)[:0] - } - for !in.IsDelim(']') { - var v11 int64 - v11 = int64(in.Int64()) - out.CanceledBy = append(out.CanceledBy, v11) - in.WantComma() - } - in.Delim(']') - } - case "BeforeEnqueuedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.BeforeEnqueuedAt).UnmarshalJSON(data)) - } - case "AfterEnqueuedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.AfterEnqueuedAt).UnmarshalJSON(data)) - } - case "BeforeStartedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.BeforeStartedAt).UnmarshalJSON(data)) - } - case "AfterStartedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.AfterStartedAt).UnmarshalJSON(data)) - } - case "BeforeFinishedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.BeforeFinishedAt).UnmarshalJSON(data)) - } - case "AfterFinishedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.AfterFinishedAt).UnmarshalJSON(data)) - } - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo5(out *jwriter.Writer, in TasksQuery) { - 
out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"UIDS\":" - out.RawString(prefix[1:]) - if in.UIDS == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v12, v13 := range in.UIDS { - if v12 > 0 { - out.RawByte(',') - } - out.Int64(int64(v13)) - } - out.RawByte(']') - } - } - { - const prefix string = ",\"Limit\":" - out.RawString(prefix) - out.Int64(int64(in.Limit)) - } - { - const prefix string = ",\"From\":" - out.RawString(prefix) - out.Int64(int64(in.From)) - } - { - const prefix string = ",\"IndexUIDS\":" - out.RawString(prefix) - if in.IndexUIDS == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v14, v15 := range in.IndexUIDS { - if v14 > 0 { - out.RawByte(',') - } - out.String(string(v15)) - } - out.RawByte(']') - } - } - { - const prefix string = ",\"Statuses\":" - out.RawString(prefix) - if in.Statuses == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v16, v17 := range in.Statuses { - if v16 > 0 { - out.RawByte(',') - } - out.String(string(v17)) - } - out.RawByte(']') - } - } - { - const prefix string = ",\"Types\":" - out.RawString(prefix) - if in.Types == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v18, v19 := range in.Types { - if v18 > 0 { - out.RawByte(',') - } - out.String(string(v19)) - } - out.RawByte(']') - } - } - { - const prefix string = ",\"CanceledBy\":" - out.RawString(prefix) - if in.CanceledBy == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v20, v21 := range in.CanceledBy { - if v20 > 0 { - out.RawByte(',') - } - out.Int64(int64(v21)) - } - out.RawByte(']') - } - } - { - const prefix string = ",\"BeforeEnqueuedAt\":" - out.RawString(prefix) - out.Raw((in.BeforeEnqueuedAt).MarshalJSON()) - } - { - const 
prefix string = ",\"AfterEnqueuedAt\":" - out.RawString(prefix) - out.Raw((in.AfterEnqueuedAt).MarshalJSON()) - } - { - const prefix string = ",\"BeforeStartedAt\":" - out.RawString(prefix) - out.Raw((in.BeforeStartedAt).MarshalJSON()) - } - { - const prefix string = ",\"AfterStartedAt\":" - out.RawString(prefix) - out.Raw((in.AfterStartedAt).MarshalJSON()) - } - { - const prefix string = ",\"BeforeFinishedAt\":" - out.RawString(prefix) - out.Raw((in.BeforeFinishedAt).MarshalJSON()) - } - { - const prefix string = ",\"AfterFinishedAt\":" - out.RawString(prefix) - out.Raw((in.AfterFinishedAt).MarshalJSON()) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v TasksQuery) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo5(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v TasksQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo5(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *TasksQuery) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo5(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *TasksQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo5(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo6(in *jlexer.Lexer, out *TaskResult) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "results": - if in.IsNull() { - in.Skip() - out.Results = nil - } else { - in.Delim('[') - if out.Results == nil { - if 
!in.IsDelim(']') { - out.Results = make([]Task, 0, 0) - } else { - out.Results = []Task{} - } - } else { - out.Results = (out.Results)[:0] - } - for !in.IsDelim(']') { - var v22 Task - (v22).UnmarshalEasyJSON(in) - out.Results = append(out.Results, v22) - in.WantComma() - } - in.Delim(']') - } - case "limit": - out.Limit = int64(in.Int64()) - case "from": - out.From = int64(in.Int64()) - case "next": - out.Next = int64(in.Int64()) - case "total": - out.Total = int64(in.Int64()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo6(out *jwriter.Writer, in TaskResult) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"results\":" - out.RawString(prefix[1:]) - if in.Results == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v23, v24 := range in.Results { - if v23 > 0 { - out.RawByte(',') - } - (v24).MarshalEasyJSON(out) - } - out.RawByte(']') - } - } - { - const prefix string = ",\"limit\":" - out.RawString(prefix) - out.Int64(int64(in.Limit)) - } - { - const prefix string = ",\"from\":" - out.RawString(prefix) - out.Int64(int64(in.From)) - } - { - const prefix string = ",\"next\":" - out.RawString(prefix) - out.Int64(int64(in.Next)) - } - { - const prefix string = ",\"total\":" - out.RawString(prefix) - out.Int64(int64(in.Total)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v TaskResult) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo6(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v TaskResult) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo6(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *TaskResult) 
UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo6(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *TaskResult) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo6(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo7(in *jlexer.Lexer, out *TaskInfo) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "status": - out.Status = TaskStatus(in.String()) - case "taskUid": - out.TaskUID = int64(in.Int64()) - case "indexUid": - out.IndexUID = string(in.String()) - case "type": - out.Type = TaskType(in.String()) - case "enqueuedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.EnqueuedAt).UnmarshalJSON(data)) - } - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo7(out *jwriter.Writer, in TaskInfo) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"status\":" - out.RawString(prefix[1:]) - out.String(string(in.Status)) - } - { - const prefix string = ",\"taskUid\":" - out.RawString(prefix) - out.Int64(int64(in.TaskUID)) - } - { - const prefix string = ",\"indexUid\":" - out.RawString(prefix) - out.String(string(in.IndexUID)) - } - { - const prefix string = ",\"type\":" - out.RawString(prefix) - out.String(string(in.Type)) - } - { - const prefix string = ",\"enqueuedAt\":" - out.RawString(prefix) - out.Raw((in.EnqueuedAt).MarshalJSON()) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v TaskInfo) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo7(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v TaskInfo) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo7(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *TaskInfo) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo7(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *TaskInfo) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo7(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo8(in *jlexer.Lexer, out *Task) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "status": - out.Status = TaskStatus(in.String()) - case "uid": - out.UID = int64(in.Int64()) - case "taskUid": - out.TaskUID = int64(in.Int64()) - case "indexUid": - out.IndexUID = string(in.String()) - case "type": - out.Type = TaskType(in.String()) - case "error": - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo9(in, &out.Error) - case "duration": - out.Duration = string(in.String()) - case "enqueuedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.EnqueuedAt).UnmarshalJSON(data)) - } - case "startedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.StartedAt).UnmarshalJSON(data)) - } - case "finishedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.FinishedAt).UnmarshalJSON(data)) - } - case "details": - (out.Details).UnmarshalEasyJSON(in) - case "canceledBy": - out.CanceledBy = int64(in.Int64()) - default: - in.SkipRecursive() - } - in.WantComma() - } - 
in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo8(out *jwriter.Writer, in Task) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"status\":" - out.RawString(prefix[1:]) - out.String(string(in.Status)) - } - if in.UID != 0 { - const prefix string = ",\"uid\":" - out.RawString(prefix) - out.Int64(int64(in.UID)) - } - if in.TaskUID != 0 { - const prefix string = ",\"taskUid\":" - out.RawString(prefix) - out.Int64(int64(in.TaskUID)) - } - { - const prefix string = ",\"indexUid\":" - out.RawString(prefix) - out.String(string(in.IndexUID)) - } - { - const prefix string = ",\"type\":" - out.RawString(prefix) - out.String(string(in.Type)) - } - if true { - const prefix string = ",\"error\":" - out.RawString(prefix) - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo9(out, in.Error) - } - if in.Duration != "" { - const prefix string = ",\"duration\":" - out.RawString(prefix) - out.String(string(in.Duration)) - } - { - const prefix string = ",\"enqueuedAt\":" - out.RawString(prefix) - out.Raw((in.EnqueuedAt).MarshalJSON()) - } - if true { - const prefix string = ",\"startedAt\":" - out.RawString(prefix) - out.Raw((in.StartedAt).MarshalJSON()) - } - if true { - const prefix string = ",\"finishedAt\":" - out.RawString(prefix) - out.Raw((in.FinishedAt).MarshalJSON()) - } - if true { - const prefix string = ",\"details\":" - out.RawString(prefix) - (in.Details).MarshalEasyJSON(out) - } - if in.CanceledBy != 0 { - const prefix string = ",\"canceledBy\":" - out.RawString(prefix) - out.Int64(int64(in.CanceledBy)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v Task) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo8(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v Task) MarshalEasyJSON(w *jwriter.Writer) { - 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo8(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *Task) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo8(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *Task) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo8(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo9(in *jlexer.Lexer, out *meilisearchApiError) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "message": - out.Message = string(in.String()) - case "code": - out.Code = string(in.String()) - case "type": - out.Type = string(in.String()) - case "link": - out.Link = string(in.String()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo9(out *jwriter.Writer, in meilisearchApiError) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"message\":" - out.RawString(prefix[1:]) - out.String(string(in.Message)) - } - { - const prefix string = ",\"code\":" - out.RawString(prefix) - out.String(string(in.Code)) - } - { - const prefix string = ",\"type\":" - out.RawString(prefix) - out.String(string(in.Type)) - } - { - const prefix string = ",\"link\":" - out.RawString(prefix) - out.String(string(in.Link)) - } - out.RawByte('}') -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo10(in *jlexer.Lexer, out *SwapIndexesParams) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - 
in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "indexes": - if in.IsNull() { - in.Skip() - out.Indexes = nil - } else { - in.Delim('[') - if out.Indexes == nil { - if !in.IsDelim(']') { - out.Indexes = make([]string, 0, 4) - } else { - out.Indexes = []string{} - } - } else { - out.Indexes = (out.Indexes)[:0] - } - for !in.IsDelim(']') { - var v25 string - v25 = string(in.String()) - out.Indexes = append(out.Indexes, v25) - in.WantComma() - } - in.Delim(']') - } - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo10(out *jwriter.Writer, in SwapIndexesParams) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"indexes\":" - out.RawString(prefix[1:]) - if in.Indexes == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v26, v27 := range in.Indexes { - if v26 > 0 { - out.RawByte(',') - } - out.String(string(v27)) - } - out.RawByte(']') - } - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v SwapIndexesParams) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo10(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v SwapIndexesParams) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo10(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *SwapIndexesParams) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo10(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *SwapIndexesParams) 
UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo10(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo11(in *jlexer.Lexer, out *StatsIndex) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "numberOfDocuments": - out.NumberOfDocuments = int64(in.Int64()) - case "isIndexing": - out.IsIndexing = bool(in.Bool()) - case "fieldDistribution": - if in.IsNull() { - in.Skip() - } else { - in.Delim('{') - out.FieldDistribution = make(map[string]int64) - for !in.IsDelim('}') { - key := string(in.String()) - in.WantColon() - var v28 int64 - v28 = int64(in.Int64()) - (out.FieldDistribution)[key] = v28 - in.WantComma() - } - in.Delim('}') - } - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo11(out *jwriter.Writer, in StatsIndex) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"numberOfDocuments\":" - out.RawString(prefix[1:]) - out.Int64(int64(in.NumberOfDocuments)) - } - { - const prefix string = ",\"isIndexing\":" - out.RawString(prefix) - out.Bool(bool(in.IsIndexing)) - } - { - const prefix string = ",\"fieldDistribution\":" - out.RawString(prefix) - if in.FieldDistribution == nil && (out.Flags&jwriter.NilMapAsEmpty) == 0 { - out.RawString(`null`) - } else { - out.RawByte('{') - v29First := true - for v29Name, v29Value := range in.FieldDistribution { - if v29First { - v29First = false - } else { - out.RawByte(',') - } - out.String(string(v29Name)) - out.RawByte(':') - out.Int64(int64(v29Value)) - } - out.RawByte('}') - } - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v StatsIndex) 
MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo11(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v StatsIndex) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo11(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *StatsIndex) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo11(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *StatsIndex) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo11(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo12(in *jlexer.Lexer, out *Stats) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "databaseSize": - out.DatabaseSize = int64(in.Int64()) - case "lastUpdate": - if data := in.Raw(); in.Ok() { - in.AddError((out.LastUpdate).UnmarshalJSON(data)) - } - case "indexes": - if in.IsNull() { - in.Skip() - } else { - in.Delim('{') - out.Indexes = make(map[string]StatsIndex) - for !in.IsDelim('}') { - key := string(in.String()) - in.WantColon() - var v30 StatsIndex - (v30).UnmarshalEasyJSON(in) - (out.Indexes)[key] = v30 - in.WantComma() - } - in.Delim('}') - } - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo12(out *jwriter.Writer, in Stats) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"databaseSize\":" - out.RawString(prefix[1:]) - 
out.Int64(int64(in.DatabaseSize)) - } - { - const prefix string = ",\"lastUpdate\":" - out.RawString(prefix) - out.Raw((in.LastUpdate).MarshalJSON()) - } - { - const prefix string = ",\"indexes\":" - out.RawString(prefix) - if in.Indexes == nil && (out.Flags&jwriter.NilMapAsEmpty) == 0 { - out.RawString(`null`) - } else { - out.RawByte('{') - v31First := true - for v31Name, v31Value := range in.Indexes { - if v31First { - v31First = false - } else { - out.RawByte(',') - } - out.String(string(v31Name)) - out.RawByte(':') - (v31Value).MarshalEasyJSON(out) - } - out.RawByte('}') - } - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v Stats) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo12(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v Stats) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo12(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *Stats) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo12(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *Stats) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo12(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, out *SimilarDocumentResult) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "hits": - if in.IsNull() { - in.Skip() - out.Hits = nil - } else { - in.Delim('[') - if out.Hits == nil { - if !in.IsDelim(']') { - out.Hits = 
make([]interface{}, 0, 4) - } else { - out.Hits = []interface{}{} - } - } else { - out.Hits = (out.Hits)[:0] - } - for !in.IsDelim(']') { - var v32 interface{} - if m, ok := v32.(easyjson.Unmarshaler); ok { - m.UnmarshalEasyJSON(in) - } else if m, ok := v32.(json.Unmarshaler); ok { - _ = m.UnmarshalJSON(in.Raw()) - } else { - v32 = in.Interface() - } - out.Hits = append(out.Hits, v32) - in.WantComma() - } - in.Delim(']') - } - case "id": - out.ID = string(in.String()) - case "processingTimeMs": - out.ProcessingTimeMS = int64(in.Int64()) - case "limit": - out.Limit = int64(in.Int64()) - case "offset": - out.Offset = int64(in.Int64()) - case "estimatedTotalHits": - out.EstimatedTotalHits = int64(in.Int64()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writer, in SimilarDocumentResult) { - out.RawByte('{') - first := true - _ = first - if len(in.Hits) != 0 { - const prefix string = ",\"hits\":" - first = false - out.RawString(prefix[1:]) - { - out.RawByte('[') - for v33, v34 := range in.Hits { - if v33 > 0 { - out.RawByte(',') - } - if m, ok := v34.(easyjson.Marshaler); ok { - m.MarshalEasyJSON(out) - } else if m, ok := v34.(json.Marshaler); ok { - out.Raw(m.MarshalJSON()) - } else { - out.Raw(json.Marshal(v34)) - } - } - out.RawByte(']') - } - } - if in.ID != "" { - const prefix string = ",\"id\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.ID)) - } - if in.ProcessingTimeMS != 0 { - const prefix string = ",\"processingTimeMs\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.ProcessingTimeMS)) - } - if in.Limit != 0 { - const prefix string = ",\"limit\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.Limit)) - 
} - if in.Offset != 0 { - const prefix string = ",\"offset\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.Offset)) - } - if in.EstimatedTotalHits != 0 { - const prefix string = ",\"estimatedTotalHits\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.EstimatedTotalHits)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v SimilarDocumentResult) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v SimilarDocumentResult) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *SimilarDocumentResult) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *SimilarDocumentResult) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(in *jlexer.Lexer, out *SimilarDocumentQuery) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "id": - if m, ok := out.Id.(easyjson.Unmarshaler); ok { - m.UnmarshalEasyJSON(in) - } else if m, ok := out.Id.(json.Unmarshaler); ok { - _ = m.UnmarshalJSON(in.Raw()) - } else { - out.Id = in.Interface() - } - case "embedder": - out.Embedder = 
string(in.String()) - case "attributesToRetrieve": - if in.IsNull() { - in.Skip() - out.AttributesToRetrieve = nil - } else { - in.Delim('[') - if out.AttributesToRetrieve == nil { - if !in.IsDelim(']') { - out.AttributesToRetrieve = make([]string, 0, 4) - } else { - out.AttributesToRetrieve = []string{} - } - } else { - out.AttributesToRetrieve = (out.AttributesToRetrieve)[:0] - } - for !in.IsDelim(']') { - var v35 string - v35 = string(in.String()) - out.AttributesToRetrieve = append(out.AttributesToRetrieve, v35) - in.WantComma() - } - in.Delim(']') - } - case "offset": - out.Offset = int64(in.Int64()) - case "limit": - out.Limit = int64(in.Int64()) - case "filter": - out.Filter = string(in.String()) - case "showRankingScore": - out.ShowRankingScore = bool(in.Bool()) - case "showRankingScoreDetails": - out.ShowRankingScoreDetails = bool(in.Bool()) - case "rankingScoreThreshold": - out.RankingScoreThreshold = float64(in.Float64()) - case "retrieveVectors": - out.RetrieveVectors = bool(in.Bool()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(out *jwriter.Writer, in SimilarDocumentQuery) { - out.RawByte('{') - first := true - _ = first - if in.Id != nil { - const prefix string = ",\"id\":" - first = false - out.RawString(prefix[1:]) - if m, ok := in.Id.(easyjson.Marshaler); ok { - m.MarshalEasyJSON(out) - } else if m, ok := in.Id.(json.Marshaler); ok { - out.Raw(m.MarshalJSON()) - } else { - out.Raw(json.Marshal(in.Id)) - } - } - if in.Embedder != "" { - const prefix string = ",\"embedder\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.Embedder)) - } - if len(in.AttributesToRetrieve) != 0 { - const prefix string = ",\"attributesToRetrieve\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - 
for v36, v37 := range in.AttributesToRetrieve { - if v36 > 0 { - out.RawByte(',') - } - out.String(string(v37)) - } - out.RawByte(']') - } - } - if in.Offset != 0 { - const prefix string = ",\"offset\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.Offset)) - } - if in.Limit != 0 { - const prefix string = ",\"limit\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.Limit)) - } - if in.Filter != "" { - const prefix string = ",\"filter\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.Filter)) - } - if in.ShowRankingScore { - const prefix string = ",\"showRankingScore\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Bool(bool(in.ShowRankingScore)) - } - if in.ShowRankingScoreDetails { - const prefix string = ",\"showRankingScoreDetails\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Bool(bool(in.ShowRankingScoreDetails)) - } - if in.RankingScoreThreshold != 0 { - const prefix string = ",\"rankingScoreThreshold\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Float64(float64(in.RankingScoreThreshold)) - } - if in.RetrieveVectors { - const prefix string = ",\"retrieveVectors\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Bool(bool(in.RetrieveVectors)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v SimilarDocumentQuery) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v SimilarDocumentQuery) MarshalEasyJSON(w 
*jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *SimilarDocumentQuery) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *SimilarDocumentQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(in *jlexer.Lexer, out *Settings) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "rankingRules": - if in.IsNull() { - in.Skip() - out.RankingRules = nil - } else { - in.Delim('[') - if out.RankingRules == nil { - if !in.IsDelim(']') { - out.RankingRules = make([]string, 0, 4) - } else { - out.RankingRules = []string{} - } - } else { - out.RankingRules = (out.RankingRules)[:0] - } - for !in.IsDelim(']') { - var v38 string - v38 = string(in.String()) - out.RankingRules = append(out.RankingRules, v38) - in.WantComma() - } - in.Delim(']') - } - case "distinctAttribute": - if in.IsNull() { - in.Skip() - out.DistinctAttribute = nil - } else { - if out.DistinctAttribute == nil { - out.DistinctAttribute = new(string) - } - *out.DistinctAttribute = string(in.String()) - } - case "searchableAttributes": - if in.IsNull() { - in.Skip() - out.SearchableAttributes = nil - } else { - in.Delim('[') - if out.SearchableAttributes == nil { - if !in.IsDelim(']') { - out.SearchableAttributes = make([]string, 0, 4) - } else { - out.SearchableAttributes = []string{} - } - } else { - out.SearchableAttributes = (out.SearchableAttributes)[:0] - } - for 
!in.IsDelim(']') { - var v39 string - v39 = string(in.String()) - out.SearchableAttributes = append(out.SearchableAttributes, v39) - in.WantComma() - } - in.Delim(']') - } - case "searchCutoffMs": - out.SearchCutoffMs = int64(in.Int64()) - case "displayedAttributes": - if in.IsNull() { - in.Skip() - out.DisplayedAttributes = nil - } else { - in.Delim('[') - if out.DisplayedAttributes == nil { - if !in.IsDelim(']') { - out.DisplayedAttributes = make([]string, 0, 4) - } else { - out.DisplayedAttributes = []string{} - } - } else { - out.DisplayedAttributes = (out.DisplayedAttributes)[:0] - } - for !in.IsDelim(']') { - var v40 string - v40 = string(in.String()) - out.DisplayedAttributes = append(out.DisplayedAttributes, v40) - in.WantComma() - } - in.Delim(']') - } - case "stopWords": - if in.IsNull() { - in.Skip() - out.StopWords = nil - } else { - in.Delim('[') - if out.StopWords == nil { - if !in.IsDelim(']') { - out.StopWords = make([]string, 0, 4) - } else { - out.StopWords = []string{} - } - } else { - out.StopWords = (out.StopWords)[:0] - } - for !in.IsDelim(']') { - var v41 string - v41 = string(in.String()) - out.StopWords = append(out.StopWords, v41) - in.WantComma() - } - in.Delim(']') - } - case "synonyms": - if in.IsNull() { - in.Skip() - } else { - in.Delim('{') - if !in.IsDelim('}') { - out.Synonyms = make(map[string][]string) - } else { - out.Synonyms = nil - } - for !in.IsDelim('}') { - key := string(in.String()) - in.WantColon() - var v42 []string - if in.IsNull() { - in.Skip() - v42 = nil - } else { - in.Delim('[') - if v42 == nil { - if !in.IsDelim(']') { - v42 = make([]string, 0, 4) - } else { - v42 = []string{} - } - } else { - v42 = (v42)[:0] - } - for !in.IsDelim(']') { - var v43 string - v43 = string(in.String()) - v42 = append(v42, v43) - in.WantComma() - } - in.Delim(']') - } - (out.Synonyms)[key] = v42 - in.WantComma() - } - in.Delim('}') - } - case "filterableAttributes": - if in.IsNull() { - in.Skip() - out.FilterableAttributes = nil - } 
else { - in.Delim('[') - if out.FilterableAttributes == nil { - if !in.IsDelim(']') { - out.FilterableAttributes = make([]string, 0, 4) - } else { - out.FilterableAttributes = []string{} - } - } else { - out.FilterableAttributes = (out.FilterableAttributes)[:0] - } - for !in.IsDelim(']') { - var v44 string - v44 = string(in.String()) - out.FilterableAttributes = append(out.FilterableAttributes, v44) - in.WantComma() - } - in.Delim(']') - } - case "sortableAttributes": - if in.IsNull() { - in.Skip() - out.SortableAttributes = nil - } else { - in.Delim('[') - if out.SortableAttributes == nil { - if !in.IsDelim(']') { - out.SortableAttributes = make([]string, 0, 4) - } else { - out.SortableAttributes = []string{} - } - } else { - out.SortableAttributes = (out.SortableAttributes)[:0] - } - for !in.IsDelim(']') { - var v45 string - v45 = string(in.String()) - out.SortableAttributes = append(out.SortableAttributes, v45) - in.WantComma() - } - in.Delim(']') - } - case "typoTolerance": - if in.IsNull() { - in.Skip() - out.TypoTolerance = nil - } else { - if out.TypoTolerance == nil { - out.TypoTolerance = new(TypoTolerance) - } - (*out.TypoTolerance).UnmarshalEasyJSON(in) - } - case "pagination": - if in.IsNull() { - in.Skip() - out.Pagination = nil - } else { - if out.Pagination == nil { - out.Pagination = new(Pagination) - } - (*out.Pagination).UnmarshalEasyJSON(in) - } - case "faceting": - if in.IsNull() { - in.Skip() - out.Faceting = nil - } else { - if out.Faceting == nil { - out.Faceting = new(Faceting) - } - (*out.Faceting).UnmarshalEasyJSON(in) - } - case "embedders": - if in.IsNull() { - in.Skip() - } else { - in.Delim('{') - if !in.IsDelim('}') { - out.Embedders = make(map[string]Embedder) - } else { - out.Embedders = nil - } - for !in.IsDelim('}') { - key := string(in.String()) - in.WantColon() - var v46 Embedder - (v46).UnmarshalEasyJSON(in) - (out.Embedders)[key] = v46 - in.WantComma() - } - in.Delim('}') - } - default: - in.SkipRecursive() - } - 
in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(out *jwriter.Writer, in Settings) { - out.RawByte('{') - first := true - _ = first - if len(in.RankingRules) != 0 { - const prefix string = ",\"rankingRules\":" - first = false - out.RawString(prefix[1:]) - { - out.RawByte('[') - for v47, v48 := range in.RankingRules { - if v47 > 0 { - out.RawByte(',') - } - out.String(string(v48)) - } - out.RawByte(']') - } - } - if in.DistinctAttribute != nil { - const prefix string = ",\"distinctAttribute\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(*in.DistinctAttribute)) - } - if len(in.SearchableAttributes) != 0 { - const prefix string = ",\"searchableAttributes\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v49, v50 := range in.SearchableAttributes { - if v49 > 0 { - out.RawByte(',') - } - out.String(string(v50)) - } - out.RawByte(']') - } - } - if in.SearchCutoffMs != 0 { - const prefix string = ",\"searchCutoffMs\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.SearchCutoffMs)) - } - if len(in.DisplayedAttributes) != 0 { - const prefix string = ",\"displayedAttributes\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v51, v52 := range in.DisplayedAttributes { - if v51 > 0 { - out.RawByte(',') - } - out.String(string(v52)) - } - out.RawByte(']') - } - } - if len(in.StopWords) != 0 { - const prefix string = ",\"stopWords\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v53, v54 := range in.StopWords { - if v53 > 0 { - out.RawByte(',') - } - out.String(string(v54)) - } - out.RawByte(']') - } - } - if 
len(in.Synonyms) != 0 { - const prefix string = ",\"synonyms\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('{') - v55First := true - for v55Name, v55Value := range in.Synonyms { - if v55First { - v55First = false - } else { - out.RawByte(',') - } - out.String(string(v55Name)) - out.RawByte(':') - if v55Value == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v56, v57 := range v55Value { - if v56 > 0 { - out.RawByte(',') - } - out.String(string(v57)) - } - out.RawByte(']') - } - } - out.RawByte('}') - } - } - if len(in.FilterableAttributes) != 0 { - const prefix string = ",\"filterableAttributes\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v58, v59 := range in.FilterableAttributes { - if v58 > 0 { - out.RawByte(',') - } - out.String(string(v59)) - } - out.RawByte(']') - } - } - if len(in.SortableAttributes) != 0 { - const prefix string = ",\"sortableAttributes\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v60, v61 := range in.SortableAttributes { - if v60 > 0 { - out.RawByte(',') - } - out.String(string(v61)) - } - out.RawByte(']') - } - } - if in.TypoTolerance != nil { - const prefix string = ",\"typoTolerance\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - (*in.TypoTolerance).MarshalEasyJSON(out) - } - if in.Pagination != nil { - const prefix string = ",\"pagination\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - (*in.Pagination).MarshalEasyJSON(out) - } - if in.Faceting != nil { - const prefix string = ",\"faceting\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - (*in.Faceting).MarshalEasyJSON(out) - } - if 
len(in.Embedders) != 0 { - const prefix string = ",\"embedders\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('{') - v62First := true - for v62Name, v62Value := range in.Embedders { - if v62First { - v62First = false - } else { - out.RawByte(',') - } - out.String(string(v62Name)) - out.RawByte(':') - (v62Value).MarshalEasyJSON(out) - } - out.RawByte('}') - } - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v Settings) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v Settings) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *Settings) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *Settings) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out *SearchResponse) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "hits": - if in.IsNull() { - in.Skip() - out.Hits = nil - } else { - in.Delim('[') - if out.Hits == nil { - if !in.IsDelim(']') { - out.Hits = make([]interface{}, 0, 4) - } else { - out.Hits = []interface{}{} - } - } else { - out.Hits = (out.Hits)[:0] - } - for !in.IsDelim(']') { - var v63 interface{} 
- if m, ok := v63.(easyjson.Unmarshaler); ok { - m.UnmarshalEasyJSON(in) - } else if m, ok := v63.(json.Unmarshaler); ok { - _ = m.UnmarshalJSON(in.Raw()) - } else { - v63 = in.Interface() - } - out.Hits = append(out.Hits, v63) - in.WantComma() - } - in.Delim(']') - } - case "estimatedTotalHits": - out.EstimatedTotalHits = int64(in.Int64()) - case "offset": - out.Offset = int64(in.Int64()) - case "limit": - out.Limit = int64(in.Int64()) - case "processingTimeMs": - out.ProcessingTimeMs = int64(in.Int64()) - case "query": - out.Query = string(in.String()) - case "facetDistribution": - if m, ok := out.FacetDistribution.(easyjson.Unmarshaler); ok { - m.UnmarshalEasyJSON(in) - } else if m, ok := out.FacetDistribution.(json.Unmarshaler); ok { - _ = m.UnmarshalJSON(in.Raw()) - } else { - out.FacetDistribution = in.Interface() - } - case "totalHits": - out.TotalHits = int64(in.Int64()) - case "hitsPerPage": - out.HitsPerPage = int64(in.Int64()) - case "page": - out.Page = int64(in.Int64()) - case "totalPages": - out.TotalPages = int64(in.Int64()) - case "facetStats": - if m, ok := out.FacetStats.(easyjson.Unmarshaler); ok { - m.UnmarshalEasyJSON(in) - } else if m, ok := out.FacetStats.(json.Unmarshaler); ok { - _ = m.UnmarshalJSON(in.Raw()) - } else { - out.FacetStats = in.Interface() - } - case "indexUid": - out.IndexUID = string(in.String()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writer, in SearchResponse) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"hits\":" - out.RawString(prefix[1:]) - if in.Hits == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v64, v65 := range in.Hits { - if v64 > 0 { - out.RawByte(',') - } - if m, ok := v65.(easyjson.Marshaler); ok { - m.MarshalEasyJSON(out) - } else if m, ok := v65.(json.Marshaler); 
ok { - out.Raw(m.MarshalJSON()) - } else { - out.Raw(json.Marshal(v65)) - } - } - out.RawByte(']') - } - } - if in.EstimatedTotalHits != 0 { - const prefix string = ",\"estimatedTotalHits\":" - out.RawString(prefix) - out.Int64(int64(in.EstimatedTotalHits)) - } - if in.Offset != 0 { - const prefix string = ",\"offset\":" - out.RawString(prefix) - out.Int64(int64(in.Offset)) - } - if in.Limit != 0 { - const prefix string = ",\"limit\":" - out.RawString(prefix) - out.Int64(int64(in.Limit)) - } - { - const prefix string = ",\"processingTimeMs\":" - out.RawString(prefix) - out.Int64(int64(in.ProcessingTimeMs)) - } - { - const prefix string = ",\"query\":" - out.RawString(prefix) - out.String(string(in.Query)) - } - if in.FacetDistribution != nil { - const prefix string = ",\"facetDistribution\":" - out.RawString(prefix) - if m, ok := in.FacetDistribution.(easyjson.Marshaler); ok { - m.MarshalEasyJSON(out) - } else if m, ok := in.FacetDistribution.(json.Marshaler); ok { - out.Raw(m.MarshalJSON()) - } else { - out.Raw(json.Marshal(in.FacetDistribution)) - } - } - if in.TotalHits != 0 { - const prefix string = ",\"totalHits\":" - out.RawString(prefix) - out.Int64(int64(in.TotalHits)) - } - if in.HitsPerPage != 0 { - const prefix string = ",\"hitsPerPage\":" - out.RawString(prefix) - out.Int64(int64(in.HitsPerPage)) - } - if in.Page != 0 { - const prefix string = ",\"page\":" - out.RawString(prefix) - out.Int64(int64(in.Page)) - } - if in.TotalPages != 0 { - const prefix string = ",\"totalPages\":" - out.RawString(prefix) - out.Int64(int64(in.TotalPages)) - } - if in.FacetStats != nil { - const prefix string = ",\"facetStats\":" - out.RawString(prefix) - if m, ok := in.FacetStats.(easyjson.Marshaler); ok { - m.MarshalEasyJSON(out) - } else if m, ok := in.FacetStats.(json.Marshaler); ok { - out.Raw(m.MarshalJSON()) - } else { - out.Raw(json.Marshal(in.FacetStats)) - } - } - if in.IndexUID != "" { - const prefix string = ",\"indexUid\":" - out.RawString(prefix) - 
out.String(string(in.IndexUID)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v SearchResponse) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v SearchResponse) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *SearchResponse) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *SearchResponse) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(in *jlexer.Lexer, out *SearchRequestHybrid) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "semanticRatio": - out.SemanticRatio = float64(in.Float64()) - case "embedder": - out.Embedder = string(in.String()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(out *jwriter.Writer, in SearchRequestHybrid) { - out.RawByte('{') - first := true - _ = first - if in.SemanticRatio != 0 { - const prefix string = ",\"semanticRatio\":" - first = false - out.RawString(prefix[1:]) - out.Float64(float64(in.SemanticRatio)) - } - if in.Embedder != "" { - const prefix string = ",\"embedder\":" - if first { - first = false - 
out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.Embedder)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v SearchRequestHybrid) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v SearchRequestHybrid) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *SearchRequestHybrid) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *SearchRequestHybrid) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(in *jlexer.Lexer, out *SearchRequest) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "offset": - out.Offset = int64(in.Int64()) - case "limit": - out.Limit = int64(in.Int64()) - case "attributesToRetrieve": - if in.IsNull() { - in.Skip() - out.AttributesToRetrieve = nil - } else { - in.Delim('[') - if out.AttributesToRetrieve == nil { - if !in.IsDelim(']') { - out.AttributesToRetrieve = make([]string, 0, 4) - } else { - out.AttributesToRetrieve = []string{} - } - } else { - out.AttributesToRetrieve = (out.AttributesToRetrieve)[:0] - } - for !in.IsDelim(']') { - var v66 string - v66 = string(in.String()) - out.AttributesToRetrieve = append(out.AttributesToRetrieve, v66) 
- in.WantComma() - } - in.Delim(']') - } - case "attributesToSearchOn": - if in.IsNull() { - in.Skip() - out.AttributesToSearchOn = nil - } else { - in.Delim('[') - if out.AttributesToSearchOn == nil { - if !in.IsDelim(']') { - out.AttributesToSearchOn = make([]string, 0, 4) - } else { - out.AttributesToSearchOn = []string{} - } - } else { - out.AttributesToSearchOn = (out.AttributesToSearchOn)[:0] - } - for !in.IsDelim(']') { - var v67 string - v67 = string(in.String()) - out.AttributesToSearchOn = append(out.AttributesToSearchOn, v67) - in.WantComma() - } - in.Delim(']') - } - case "attributesToCrop": - if in.IsNull() { - in.Skip() - out.AttributesToCrop = nil - } else { - in.Delim('[') - if out.AttributesToCrop == nil { - if !in.IsDelim(']') { - out.AttributesToCrop = make([]string, 0, 4) - } else { - out.AttributesToCrop = []string{} - } - } else { - out.AttributesToCrop = (out.AttributesToCrop)[:0] - } - for !in.IsDelim(']') { - var v68 string - v68 = string(in.String()) - out.AttributesToCrop = append(out.AttributesToCrop, v68) - in.WantComma() - } - in.Delim(']') - } - case "cropLength": - out.CropLength = int64(in.Int64()) - case "cropMarker": - out.CropMarker = string(in.String()) - case "attributesToHighlight": - if in.IsNull() { - in.Skip() - out.AttributesToHighlight = nil - } else { - in.Delim('[') - if out.AttributesToHighlight == nil { - if !in.IsDelim(']') { - out.AttributesToHighlight = make([]string, 0, 4) - } else { - out.AttributesToHighlight = []string{} - } - } else { - out.AttributesToHighlight = (out.AttributesToHighlight)[:0] - } - for !in.IsDelim(']') { - var v69 string - v69 = string(in.String()) - out.AttributesToHighlight = append(out.AttributesToHighlight, v69) - in.WantComma() - } - in.Delim(']') - } - case "highlightPreTag": - out.HighlightPreTag = string(in.String()) - case "highlightPostTag": - out.HighlightPostTag = string(in.String()) - case "matchingStrategy": - out.MatchingStrategy = string(in.String()) - case "filter": - if m, 
ok := out.Filter.(easyjson.Unmarshaler); ok { - m.UnmarshalEasyJSON(in) - } else if m, ok := out.Filter.(json.Unmarshaler); ok { - _ = m.UnmarshalJSON(in.Raw()) - } else { - out.Filter = in.Interface() - } - case "showMatchesPosition": - out.ShowMatchesPosition = bool(in.Bool()) - case "showRankingScore": - out.ShowRankingScore = bool(in.Bool()) - case "showRankingScoreDetails": - out.ShowRankingScoreDetails = bool(in.Bool()) - case "facets": - if in.IsNull() { - in.Skip() - out.Facets = nil - } else { - in.Delim('[') - if out.Facets == nil { - if !in.IsDelim(']') { - out.Facets = make([]string, 0, 4) - } else { - out.Facets = []string{} - } - } else { - out.Facets = (out.Facets)[:0] - } - for !in.IsDelim(']') { - var v70 string - v70 = string(in.String()) - out.Facets = append(out.Facets, v70) - in.WantComma() - } - in.Delim(']') - } - case "sort": - if in.IsNull() { - in.Skip() - out.Sort = nil - } else { - in.Delim('[') - if out.Sort == nil { - if !in.IsDelim(']') { - out.Sort = make([]string, 0, 4) - } else { - out.Sort = []string{} - } - } else { - out.Sort = (out.Sort)[:0] - } - for !in.IsDelim(']') { - var v71 string - v71 = string(in.String()) - out.Sort = append(out.Sort, v71) - in.WantComma() - } - in.Delim(']') - } - case "vector": - if in.IsNull() { - in.Skip() - out.Vector = nil - } else { - in.Delim('[') - if out.Vector == nil { - if !in.IsDelim(']') { - out.Vector = make([]float32, 0, 16) - } else { - out.Vector = []float32{} - } - } else { - out.Vector = (out.Vector)[:0] - } - for !in.IsDelim(']') { - var v72 float32 - v72 = float32(in.Float32()) - out.Vector = append(out.Vector, v72) - in.WantComma() - } - in.Delim(']') - } - case "hitsPerPage": - out.HitsPerPage = int64(in.Int64()) - case "page": - out.Page = int64(in.Int64()) - case "indexUid": - out.IndexUID = string(in.String()) - case "q": - out.Query = string(in.String()) - case "distinct": - out.Distinct = string(in.String()) - case "hybrid": - if in.IsNull() { - in.Skip() - out.Hybrid = nil 
- } else { - if out.Hybrid == nil { - out.Hybrid = new(SearchRequestHybrid) - } - (*out.Hybrid).UnmarshalEasyJSON(in) - } - case "retrieveVectors": - out.RetrieveVectors = bool(in.Bool()) - case "rankingScoreThreshold": - out.RankingScoreThreshold = float64(in.Float64()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(out *jwriter.Writer, in SearchRequest) { - out.RawByte('{') - first := true - _ = first - if in.Offset != 0 { - const prefix string = ",\"offset\":" - first = false - out.RawString(prefix[1:]) - out.Int64(int64(in.Offset)) - } - if in.Limit != 0 { - const prefix string = ",\"limit\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.Limit)) - } - if len(in.AttributesToRetrieve) != 0 { - const prefix string = ",\"attributesToRetrieve\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v73, v74 := range in.AttributesToRetrieve { - if v73 > 0 { - out.RawByte(',') - } - out.String(string(v74)) - } - out.RawByte(']') - } - } - if len(in.AttributesToSearchOn) != 0 { - const prefix string = ",\"attributesToSearchOn\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v75, v76 := range in.AttributesToSearchOn { - if v75 > 0 { - out.RawByte(',') - } - out.String(string(v76)) - } - out.RawByte(']') - } - } - if len(in.AttributesToCrop) != 0 { - const prefix string = ",\"attributesToCrop\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v77, v78 := range in.AttributesToCrop { - if v77 > 0 { - out.RawByte(',') - } - out.String(string(v78)) - } - out.RawByte(']') - } - } - if in.CropLength != 0 { - const prefix string = ",\"cropLength\":" - 
if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.CropLength)) - } - if in.CropMarker != "" { - const prefix string = ",\"cropMarker\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.CropMarker)) - } - if len(in.AttributesToHighlight) != 0 { - const prefix string = ",\"attributesToHighlight\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v79, v80 := range in.AttributesToHighlight { - if v79 > 0 { - out.RawByte(',') - } - out.String(string(v80)) - } - out.RawByte(']') - } - } - if in.HighlightPreTag != "" { - const prefix string = ",\"highlightPreTag\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.HighlightPreTag)) - } - if in.HighlightPostTag != "" { - const prefix string = ",\"highlightPostTag\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.HighlightPostTag)) - } - if in.MatchingStrategy != "" { - const prefix string = ",\"matchingStrategy\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.MatchingStrategy)) - } - if in.Filter != nil { - const prefix string = ",\"filter\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - if m, ok := in.Filter.(easyjson.Marshaler); ok { - m.MarshalEasyJSON(out) - } else if m, ok := in.Filter.(json.Marshaler); ok { - out.Raw(m.MarshalJSON()) - } else { - out.Raw(json.Marshal(in.Filter)) - } - } - if in.ShowMatchesPosition { - const prefix string = ",\"showMatchesPosition\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Bool(bool(in.ShowMatchesPosition)) - } - if in.ShowRankingScore { - const 
prefix string = ",\"showRankingScore\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Bool(bool(in.ShowRankingScore)) - } - if in.ShowRankingScoreDetails { - const prefix string = ",\"showRankingScoreDetails\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Bool(bool(in.ShowRankingScoreDetails)) - } - if len(in.Facets) != 0 { - const prefix string = ",\"facets\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v81, v82 := range in.Facets { - if v81 > 0 { - out.RawByte(',') - } - out.String(string(v82)) - } - out.RawByte(']') - } - } - if len(in.Sort) != 0 { - const prefix string = ",\"sort\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v83, v84 := range in.Sort { - if v83 > 0 { - out.RawByte(',') - } - out.String(string(v84)) - } - out.RawByte(']') - } - } - if len(in.Vector) != 0 { - const prefix string = ",\"vector\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v85, v86 := range in.Vector { - if v85 > 0 { - out.RawByte(',') - } - out.Float32(float32(v86)) - } - out.RawByte(']') - } - } - if in.HitsPerPage != 0 { - const prefix string = ",\"hitsPerPage\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.HitsPerPage)) - } - if in.Page != 0 { - const prefix string = ",\"page\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.Page)) - } - if in.IndexUID != "" { - const prefix string = ",\"indexUid\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.IndexUID)) - } - { - const prefix string = ",\"q\":" - if first { 
- first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.Query)) - } - if in.Distinct != "" { - const prefix string = ",\"distinct\":" - out.RawString(prefix) - out.String(string(in.Distinct)) - } - if in.Hybrid != nil { - const prefix string = ",\"hybrid\":" - out.RawString(prefix) - (*in.Hybrid).MarshalEasyJSON(out) - } - if in.RetrieveVectors { - const prefix string = ",\"retrieveVectors\":" - out.RawString(prefix) - out.Bool(bool(in.RetrieveVectors)) - } - if in.RankingScoreThreshold != 0 { - const prefix string = ",\"rankingScoreThreshold\":" - out.RawString(prefix) - out.Float64(float64(in.RankingScoreThreshold)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v SearchRequest) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v SearchRequest) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *SearchRequest) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *SearchRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(in *jlexer.Lexer, out *Pagination) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "maxTotalHits": - out.MaxTotalHits = 
int64(in.Int64()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(out *jwriter.Writer, in Pagination) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"maxTotalHits\":" - out.RawString(prefix[1:]) - out.Int64(int64(in.MaxTotalHits)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v Pagination) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v Pagination) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *Pagination) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *Pagination) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(in *jlexer.Lexer, out *MultiSearchResponse) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "results": - if in.IsNull() { - in.Skip() - out.Results = nil - } else { - in.Delim('[') - if out.Results == nil { - if !in.IsDelim(']') { - out.Results = make([]SearchResponse, 0, 0) - } else { - out.Results = []SearchResponse{} - } - } else { - out.Results = (out.Results)[:0] - } - for !in.IsDelim(']') { - var v87 
SearchResponse - (v87).UnmarshalEasyJSON(in) - out.Results = append(out.Results, v87) - in.WantComma() - } - in.Delim(']') - } - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(out *jwriter.Writer, in MultiSearchResponse) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"results\":" - out.RawString(prefix[1:]) - if in.Results == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v88, v89 := range in.Results { - if v88 > 0 { - out.RawByte(',') - } - (v89).MarshalEasyJSON(out) - } - out.RawByte(']') - } - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v MultiSearchResponse) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v MultiSearchResponse) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *MultiSearchResponse) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *MultiSearchResponse) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(in *jlexer.Lexer, out *MultiSearchRequest) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() 
- continue - } - switch key { - case "queries": - if in.IsNull() { - in.Skip() - out.Queries = nil - } else { - in.Delim('[') - if out.Queries == nil { - if !in.IsDelim(']') { - out.Queries = make([]*SearchRequest, 0, 8) - } else { - out.Queries = []*SearchRequest{} - } - } else { - out.Queries = (out.Queries)[:0] - } - for !in.IsDelim(']') { - var v90 *SearchRequest - if in.IsNull() { - in.Skip() - v90 = nil - } else { - if v90 == nil { - v90 = new(SearchRequest) - } - (*v90).UnmarshalEasyJSON(in) - } - out.Queries = append(out.Queries, v90) - in.WantComma() - } - in.Delim(']') - } - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(out *jwriter.Writer, in MultiSearchRequest) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"queries\":" - out.RawString(prefix[1:]) - if in.Queries == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v91, v92 := range in.Queries { - if v91 > 0 { - out.RawByte(',') - } - if v92 == nil { - out.RawString("null") - } else { - (*v92).MarshalEasyJSON(out) - } - } - out.RawByte(']') - } - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v MultiSearchRequest) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v MultiSearchRequest) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *MultiSearchRequest) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports 
easyjson.Unmarshaler interface -func (v *MultiSearchRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(in *jlexer.Lexer, out *MinWordSizeForTypos) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "oneTypo": - out.OneTypo = int64(in.Int64()) - case "twoTypos": - out.TwoTypos = int64(in.Int64()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(out *jwriter.Writer, in MinWordSizeForTypos) { - out.RawByte('{') - first := true - _ = first - if in.OneTypo != 0 { - const prefix string = ",\"oneTypo\":" - first = false - out.RawString(prefix[1:]) - out.Int64(int64(in.OneTypo)) - } - if in.TwoTypos != 0 { - const prefix string = ",\"twoTypos\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.TwoTypos)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v MinWordSizeForTypos) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v MinWordSizeForTypos) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *MinWordSizeForTypos) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(&r, v) - return r.Error() -} - -// 
UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *MinWordSizeForTypos) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(in *jlexer.Lexer, out *KeysResults) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "results": - if in.IsNull() { - in.Skip() - out.Results = nil - } else { - in.Delim('[') - if out.Results == nil { - if !in.IsDelim(']') { - out.Results = make([]Key, 0, 0) - } else { - out.Results = []Key{} - } - } else { - out.Results = (out.Results)[:0] - } - for !in.IsDelim(']') { - var v93 Key - (v93).UnmarshalEasyJSON(in) - out.Results = append(out.Results, v93) - in.WantComma() - } - in.Delim(']') - } - case "offset": - out.Offset = int64(in.Int64()) - case "limit": - out.Limit = int64(in.Int64()) - case "total": - out.Total = int64(in.Int64()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(out *jwriter.Writer, in KeysResults) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"results\":" - out.RawString(prefix[1:]) - if in.Results == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v94, v95 := range in.Results { - if v94 > 0 { - out.RawByte(',') - } - (v95).MarshalEasyJSON(out) - } - out.RawByte(']') - } - } - { - const prefix string = ",\"offset\":" - out.RawString(prefix) - out.Int64(int64(in.Offset)) - } - { - const prefix string = ",\"limit\":" - out.RawString(prefix) - out.Int64(int64(in.Limit)) - } - { - const prefix string = ",\"total\":" - 
out.RawString(prefix) - out.Int64(int64(in.Total)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v KeysResults) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v KeysResults) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *KeysResults) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *KeysResults) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(in *jlexer.Lexer, out *KeysQuery) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "Limit": - out.Limit = int64(in.Int64()) - case "Offset": - out.Offset = int64(in.Int64()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(out *jwriter.Writer, in KeysQuery) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"Limit\":" - out.RawString(prefix[1:]) - out.Int64(int64(in.Limit)) - } - { - const prefix string = ",\"Offset\":" - out.RawString(prefix) - out.Int64(int64(in.Offset)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v KeysQuery) MarshalJSON() ([]byte, 
error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v KeysQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *KeysQuery) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *KeysQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(in *jlexer.Lexer, out *KeyUpdate) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "name": - out.Name = string(in.String()) - case "description": - out.Description = string(in.String()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(out *jwriter.Writer, in KeyUpdate) { - out.RawByte('{') - first := true - _ = first - if in.Name != "" { - const prefix string = ",\"name\":" - first = false - out.RawString(prefix[1:]) - out.String(string(in.Name)) - } - if in.Description != "" { - const prefix string = ",\"description\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.Description)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v KeyUpdate) MarshalJSON() ([]byte, error) { - w := 
jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v KeyUpdate) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *KeyUpdate) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *KeyUpdate) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(in *jlexer.Lexer, out *KeyParsed) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "name": - out.Name = string(in.String()) - case "description": - out.Description = string(in.String()) - case "uid": - out.UID = string(in.String()) - case "actions": - if in.IsNull() { - in.Skip() - out.Actions = nil - } else { - in.Delim('[') - if out.Actions == nil { - if !in.IsDelim(']') { - out.Actions = make([]string, 0, 4) - } else { - out.Actions = []string{} - } - } else { - out.Actions = (out.Actions)[:0] - } - for !in.IsDelim(']') { - var v96 string - v96 = string(in.String()) - out.Actions = append(out.Actions, v96) - in.WantComma() - } - in.Delim(']') - } - case "indexes": - if in.IsNull() { - in.Skip() - out.Indexes = nil - } else { - in.Delim('[') - if out.Indexes == nil { - if !in.IsDelim(']') { - out.Indexes = make([]string, 0, 4) - } else { - out.Indexes = []string{} - } - } else { - out.Indexes = (out.Indexes)[:0] - } - for !in.IsDelim(']') { 
- var v97 string - v97 = string(in.String()) - out.Indexes = append(out.Indexes, v97) - in.WantComma() - } - in.Delim(']') - } - case "expiresAt": - if in.IsNull() { - in.Skip() - out.ExpiresAt = nil - } else { - if out.ExpiresAt == nil { - out.ExpiresAt = new(string) - } - *out.ExpiresAt = string(in.String()) - } - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(out *jwriter.Writer, in KeyParsed) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"name\":" - out.RawString(prefix[1:]) - out.String(string(in.Name)) - } - { - const prefix string = ",\"description\":" - out.RawString(prefix) - out.String(string(in.Description)) - } - if in.UID != "" { - const prefix string = ",\"uid\":" - out.RawString(prefix) - out.String(string(in.UID)) - } - if len(in.Actions) != 0 { - const prefix string = ",\"actions\":" - out.RawString(prefix) - { - out.RawByte('[') - for v98, v99 := range in.Actions { - if v98 > 0 { - out.RawByte(',') - } - out.String(string(v99)) - } - out.RawByte(']') - } - } - if len(in.Indexes) != 0 { - const prefix string = ",\"indexes\":" - out.RawString(prefix) - { - out.RawByte('[') - for v100, v101 := range in.Indexes { - if v100 > 0 { - out.RawByte(',') - } - out.String(string(v101)) - } - out.RawByte(']') - } - } - { - const prefix string = ",\"expiresAt\":" - out.RawString(prefix) - if in.ExpiresAt == nil { - out.RawString("null") - } else { - out.String(string(*in.ExpiresAt)) - } - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v KeyParsed) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v KeyParsed) MarshalEasyJSON(w *jwriter.Writer) { - 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *KeyParsed) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *KeyParsed) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(in *jlexer.Lexer, out *Key) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "name": - out.Name = string(in.String()) - case "description": - out.Description = string(in.String()) - case "key": - out.Key = string(in.String()) - case "uid": - out.UID = string(in.String()) - case "actions": - if in.IsNull() { - in.Skip() - out.Actions = nil - } else { - in.Delim('[') - if out.Actions == nil { - if !in.IsDelim(']') { - out.Actions = make([]string, 0, 4) - } else { - out.Actions = []string{} - } - } else { - out.Actions = (out.Actions)[:0] - } - for !in.IsDelim(']') { - var v102 string - v102 = string(in.String()) - out.Actions = append(out.Actions, v102) - in.WantComma() - } - in.Delim(']') - } - case "indexes": - if in.IsNull() { - in.Skip() - out.Indexes = nil - } else { - in.Delim('[') - if out.Indexes == nil { - if !in.IsDelim(']') { - out.Indexes = make([]string, 0, 4) - } else { - out.Indexes = []string{} - } - } else { - out.Indexes = (out.Indexes)[:0] - } - for !in.IsDelim(']') { - var v103 string - v103 = string(in.String()) - out.Indexes = append(out.Indexes, v103) - in.WantComma() - } - in.Delim(']') - } - case "createdAt": - if data := in.Raw(); in.Ok() { - 
in.AddError((out.CreatedAt).UnmarshalJSON(data)) - } - case "updatedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.UpdatedAt).UnmarshalJSON(data)) - } - case "expiresAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.ExpiresAt).UnmarshalJSON(data)) - } - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(out *jwriter.Writer, in Key) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"name\":" - out.RawString(prefix[1:]) - out.String(string(in.Name)) - } - { - const prefix string = ",\"description\":" - out.RawString(prefix) - out.String(string(in.Description)) - } - if in.Key != "" { - const prefix string = ",\"key\":" - out.RawString(prefix) - out.String(string(in.Key)) - } - if in.UID != "" { - const prefix string = ",\"uid\":" - out.RawString(prefix) - out.String(string(in.UID)) - } - if len(in.Actions) != 0 { - const prefix string = ",\"actions\":" - out.RawString(prefix) - { - out.RawByte('[') - for v104, v105 := range in.Actions { - if v104 > 0 { - out.RawByte(',') - } - out.String(string(v105)) - } - out.RawByte(']') - } - } - if len(in.Indexes) != 0 { - const prefix string = ",\"indexes\":" - out.RawString(prefix) - { - out.RawByte('[') - for v106, v107 := range in.Indexes { - if v106 > 0 { - out.RawByte(',') - } - out.String(string(v107)) - } - out.RawByte(']') - } - } - if true { - const prefix string = ",\"createdAt\":" - out.RawString(prefix) - out.Raw((in.CreatedAt).MarshalJSON()) - } - if true { - const prefix string = ",\"updatedAt\":" - out.RawString(prefix) - out.Raw((in.UpdatedAt).MarshalJSON()) - } - { - const prefix string = ",\"expiresAt\":" - out.RawString(prefix) - out.Raw((in.ExpiresAt).MarshalJSON()) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v Key) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v Key) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *Key) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *Key) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(in *jlexer.Lexer, out *IndexesResults) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "results": - if in.IsNull() { - in.Skip() - out.Results = nil - } else { - in.Delim('[') - if out.Results == nil { - if !in.IsDelim(']') { - out.Results = make([]Index, 0, 0) - } else { - out.Results = []Index{} - } - } else { - out.Results = (out.Results)[:0] - } - for !in.IsDelim(']') { - var v108 Index - (v108).UnmarshalEasyJSON(in) - out.Results = append(out.Results, v108) - in.WantComma() - } - in.Delim(']') - } - case "offset": - out.Offset = int64(in.Int64()) - case "limit": - out.Limit = int64(in.Int64()) - case "total": - out.Total = int64(in.Int64()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(out *jwriter.Writer, in IndexesResults) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"results\":" - out.RawString(prefix[1:]) 
- if in.Results == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v109, v110 := range in.Results { - if v109 > 0 { - out.RawByte(',') - } - (v110).MarshalEasyJSON(out) - } - out.RawByte(']') - } - } - { - const prefix string = ",\"offset\":" - out.RawString(prefix) - out.Int64(int64(in.Offset)) - } - { - const prefix string = ",\"limit\":" - out.RawString(prefix) - out.Int64(int64(in.Limit)) - } - { - const prefix string = ",\"total\":" - out.RawString(prefix) - out.Int64(int64(in.Total)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v IndexesResults) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v IndexesResults) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *IndexesResults) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *IndexesResults) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(in *jlexer.Lexer, out *IndexesQuery) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "Limit": - out.Limit = int64(in.Int64()) - case "Offset": - out.Offset = int64(in.Int64()) - default: - in.SkipRecursive() - } - in.WantComma() - } - 
in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(out *jwriter.Writer, in IndexesQuery) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"Limit\":" - out.RawString(prefix[1:]) - out.Int64(int64(in.Limit)) - } - { - const prefix string = ",\"Offset\":" - out.RawString(prefix) - out.Int64(int64(in.Offset)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v IndexesQuery) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v IndexesQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *IndexesQuery) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *IndexesQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(in *jlexer.Lexer, out *Index) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "uid": - out.UID = string(in.String()) - case "createdAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.CreatedAt).UnmarshalJSON(data)) - } - case "updatedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.UpdatedAt).UnmarshalJSON(data)) - } - case "primaryKey": - out.PrimaryKey = string(in.String()) - 
default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(out *jwriter.Writer, in Index) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"uid\":" - out.RawString(prefix[1:]) - out.String(string(in.UID)) - } - { - const prefix string = ",\"createdAt\":" - out.RawString(prefix) - out.Raw((in.CreatedAt).MarshalJSON()) - } - { - const prefix string = ",\"updatedAt\":" - out.RawString(prefix) - out.Raw((in.UpdatedAt).MarshalJSON()) - } - if in.PrimaryKey != "" { - const prefix string = ",\"primaryKey\":" - out.RawString(prefix) - out.String(string(in.PrimaryKey)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v Index) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v Index) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *Index) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *Index) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(in *jlexer.Lexer, out *Health) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "status": - out.Status = 
string(in.String()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(out *jwriter.Writer, in Health) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"status\":" - out.RawString(prefix[1:]) - out.String(string(in.Status)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v Health) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v Health) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *Health) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *Health) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(in *jlexer.Lexer, out *Faceting) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "maxValuesPerFacet": - out.MaxValuesPerFacet = int64(in.Int64()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(out *jwriter.Writer, in Faceting) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = 
",\"maxValuesPerFacet\":" - out.RawString(prefix[1:]) - out.Int64(int64(in.MaxValuesPerFacet)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v Faceting) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v Faceting) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *Faceting) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *Faceting) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(in *jlexer.Lexer, out *FacetSearchResponse) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "facetHits": - if in.IsNull() { - in.Skip() - out.FacetHits = nil - } else { - in.Delim('[') - if out.FacetHits == nil { - if !in.IsDelim(']') { - out.FacetHits = make([]interface{}, 0, 4) - } else { - out.FacetHits = []interface{}{} - } - } else { - out.FacetHits = (out.FacetHits)[:0] - } - for !in.IsDelim(']') { - var v111 interface{} - if m, ok := v111.(easyjson.Unmarshaler); ok { - m.UnmarshalEasyJSON(in) - } else if m, ok := v111.(json.Unmarshaler); ok { - _ = m.UnmarshalJSON(in.Raw()) - } else { - v111 = in.Interface() - } - out.FacetHits = append(out.FacetHits, v111) - in.WantComma() - } - in.Delim(']') - } - case 
"facetQuery": - out.FacetQuery = string(in.String()) - case "processingTimeMs": - out.ProcessingTimeMs = int64(in.Int64()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(out *jwriter.Writer, in FacetSearchResponse) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"facetHits\":" - out.RawString(prefix[1:]) - if in.FacetHits == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v112, v113 := range in.FacetHits { - if v112 > 0 { - out.RawByte(',') - } - if m, ok := v113.(easyjson.Marshaler); ok { - m.MarshalEasyJSON(out) - } else if m, ok := v113.(json.Marshaler); ok { - out.Raw(m.MarshalJSON()) - } else { - out.Raw(json.Marshal(v113)) - } - } - out.RawByte(']') - } - } - { - const prefix string = ",\"facetQuery\":" - out.RawString(prefix) - out.String(string(in.FacetQuery)) - } - { - const prefix string = ",\"processingTimeMs\":" - out.RawString(prefix) - out.Int64(int64(in.ProcessingTimeMs)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v FacetSearchResponse) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v FacetSearchResponse) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *FacetSearchResponse) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *FacetSearchResponse) UnmarshalEasyJSON(l *jlexer.Lexer) { - 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(in *jlexer.Lexer, out *FacetSearchRequest) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "facetName": - out.FacetName = string(in.String()) - case "facetQuery": - out.FacetQuery = string(in.String()) - case "q": - out.Q = string(in.String()) - case "filter": - out.Filter = string(in.String()) - case "matchingStrategy": - out.MatchingStrategy = string(in.String()) - case "attributesToSearchOn": - if in.IsNull() { - in.Skip() - out.AttributesToSearchOn = nil - } else { - in.Delim('[') - if out.AttributesToSearchOn == nil { - if !in.IsDelim(']') { - out.AttributesToSearchOn = make([]string, 0, 4) - } else { - out.AttributesToSearchOn = []string{} - } - } else { - out.AttributesToSearchOn = (out.AttributesToSearchOn)[:0] - } - for !in.IsDelim(']') { - var v114 string - v114 = string(in.String()) - out.AttributesToSearchOn = append(out.AttributesToSearchOn, v114) - in.WantComma() - } - in.Delim(']') - } - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(out *jwriter.Writer, in FacetSearchRequest) { - out.RawByte('{') - first := true - _ = first - if in.FacetName != "" { - const prefix string = ",\"facetName\":" - first = false - out.RawString(prefix[1:]) - out.String(string(in.FacetName)) - } - if in.FacetQuery != "" { - const prefix string = ",\"facetQuery\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.FacetQuery)) - } - if in.Q != "" { - const prefix string = ",\"q\":" - if first { - first = false - 
out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.Q)) - } - if in.Filter != "" { - const prefix string = ",\"filter\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.Filter)) - } - if in.MatchingStrategy != "" { - const prefix string = ",\"matchingStrategy\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.MatchingStrategy)) - } - if len(in.AttributesToSearchOn) != 0 { - const prefix string = ",\"attributesToSearchOn\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v115, v116 := range in.AttributesToSearchOn { - if v115 > 0 { - out.RawByte(',') - } - out.String(string(v116)) - } - out.RawByte(']') - } - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v FacetSearchRequest) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v FacetSearchRequest) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *FacetSearchRequest) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *FacetSearchRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, out *Embedder) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - 
in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "source": - out.Source = string(in.String()) - case "apiKey": - out.ApiKey = string(in.String()) - case "model": - out.Model = string(in.String()) - case "dimensions": - out.Dimensions = int(in.Int()) - case "documentTemplate": - out.DocumentTemplate = string(in.String()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writer, in Embedder) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"source\":" - out.RawString(prefix[1:]) - out.String(string(in.Source)) - } - if in.ApiKey != "" { - const prefix string = ",\"apiKey\":" - out.RawString(prefix) - out.String(string(in.ApiKey)) - } - if in.Model != "" { - const prefix string = ",\"model\":" - out.RawString(prefix) - out.String(string(in.Model)) - } - if in.Dimensions != 0 { - const prefix string = ",\"dimensions\":" - out.RawString(prefix) - out.Int(int(in.Dimensions)) - } - if in.DocumentTemplate != "" { - const prefix string = ",\"documentTemplate\":" - out.RawString(prefix) - out.String(string(in.DocumentTemplate)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v Embedder) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v Embedder) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *Embedder) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(&r, v) - return 
r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *Embedder) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(in *jlexer.Lexer, out *DocumentsResult) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "results": - if in.IsNull() { - in.Skip() - out.Results = nil - } else { - in.Delim('[') - if out.Results == nil { - if !in.IsDelim(']') { - out.Results = make([]map[string]interface{}, 0, 8) - } else { - out.Results = []map[string]interface{}{} - } - } else { - out.Results = (out.Results)[:0] - } - for !in.IsDelim(']') { - var v117 map[string]interface{} - if in.IsNull() { - in.Skip() - } else { - in.Delim('{') - v117 = make(map[string]interface{}) - for !in.IsDelim('}') { - key := string(in.String()) - in.WantColon() - var v118 interface{} - if m, ok := v118.(easyjson.Unmarshaler); ok { - m.UnmarshalEasyJSON(in) - } else if m, ok := v118.(json.Unmarshaler); ok { - _ = m.UnmarshalJSON(in.Raw()) - } else { - v118 = in.Interface() - } - (v117)[key] = v118 - in.WantComma() - } - in.Delim('}') - } - out.Results = append(out.Results, v117) - in.WantComma() - } - in.Delim(']') - } - case "limit": - out.Limit = int64(in.Int64()) - case "offset": - out.Offset = int64(in.Int64()) - case "total": - out.Total = int64(in.Int64()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(out *jwriter.Writer, in DocumentsResult) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"results\":" - out.RawString(prefix[1:]) - if in.Results == nil && 
(out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v119, v120 := range in.Results { - if v119 > 0 { - out.RawByte(',') - } - if v120 == nil && (out.Flags&jwriter.NilMapAsEmpty) == 0 { - out.RawString(`null`) - } else { - out.RawByte('{') - v121First := true - for v121Name, v121Value := range v120 { - if v121First { - v121First = false - } else { - out.RawByte(',') - } - out.String(string(v121Name)) - out.RawByte(':') - if m, ok := v121Value.(easyjson.Marshaler); ok { - m.MarshalEasyJSON(out) - } else if m, ok := v121Value.(json.Marshaler); ok { - out.Raw(m.MarshalJSON()) - } else { - out.Raw(json.Marshal(v121Value)) - } - } - out.RawByte('}') - } - } - out.RawByte(']') - } - } - { - const prefix string = ",\"limit\":" - out.RawString(prefix) - out.Int64(int64(in.Limit)) - } - { - const prefix string = ",\"offset\":" - out.RawString(prefix) - out.Int64(int64(in.Offset)) - } - { - const prefix string = ",\"total\":" - out.RawString(prefix) - out.Int64(int64(in.Total)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v DocumentsResult) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v DocumentsResult) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *DocumentsResult) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *DocumentsResult) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in 
*jlexer.Lexer, out *DocumentsQuery) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "offset": - out.Offset = int64(in.Int64()) - case "limit": - out.Limit = int64(in.Int64()) - case "fields": - if in.IsNull() { - in.Skip() - out.Fields = nil - } else { - in.Delim('[') - if out.Fields == nil { - if !in.IsDelim(']') { - out.Fields = make([]string, 0, 4) - } else { - out.Fields = []string{} - } - } else { - out.Fields = (out.Fields)[:0] - } - for !in.IsDelim(']') { - var v122 string - v122 = string(in.String()) - out.Fields = append(out.Fields, v122) - in.WantComma() - } - in.Delim(']') - } - case "filter": - if m, ok := out.Filter.(easyjson.Unmarshaler); ok { - m.UnmarshalEasyJSON(in) - } else if m, ok := out.Filter.(json.Unmarshaler); ok { - _ = m.UnmarshalJSON(in.Raw()) - } else { - out.Filter = in.Interface() - } - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writer, in DocumentsQuery) { - out.RawByte('{') - first := true - _ = first - if in.Offset != 0 { - const prefix string = ",\"offset\":" - first = false - out.RawString(prefix[1:]) - out.Int64(int64(in.Offset)) - } - if in.Limit != 0 { - const prefix string = ",\"limit\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.Limit)) - } - if len(in.Fields) != 0 { - const prefix string = ",\"fields\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v123, v124 := range in.Fields { - if v123 > 0 { - out.RawByte(',') - } - out.String(string(v124)) - } - out.RawByte(']') - } - } - if in.Filter 
!= nil { - const prefix string = ",\"filter\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - if m, ok := in.Filter.(easyjson.Marshaler); ok { - m.MarshalEasyJSON(out) - } else if m, ok := in.Filter.(json.Marshaler); ok { - out.Raw(m.MarshalJSON()) - } else { - out.Raw(json.Marshal(in.Filter)) - } - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v DocumentsQuery) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v DocumentsQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *DocumentsQuery) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *DocumentsQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, out *DocumentQuery) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "fields": - if in.IsNull() { - in.Skip() - out.Fields = nil - } else { - in.Delim('[') - if out.Fields == nil { - if !in.IsDelim(']') { - out.Fields = make([]string, 0, 4) - } else { - out.Fields = []string{} - } - } else { - out.Fields = (out.Fields)[:0] - } - for !in.IsDelim(']') { - var v125 string - v125 = string(in.String()) - out.Fields = 
append(out.Fields, v125) - in.WantComma() - } - in.Delim(']') - } - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writer, in DocumentQuery) { - out.RawByte('{') - first := true - _ = first - if len(in.Fields) != 0 { - const prefix string = ",\"fields\":" - first = false - out.RawString(prefix[1:]) - { - out.RawByte('[') - for v126, v127 := range in.Fields { - if v126 > 0 { - out.RawByte(',') - } - out.String(string(v127)) - } - out.RawByte(']') - } - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v DocumentQuery) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v DocumentQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *DocumentQuery) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *DocumentQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(in *jlexer.Lexer, out *Details) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "receivedDocuments": - out.ReceivedDocuments = int64(in.Int64()) - case "indexedDocuments": - out.IndexedDocuments = 
int64(in.Int64()) - case "deletedDocuments": - out.DeletedDocuments = int64(in.Int64()) - case "primaryKey": - out.PrimaryKey = string(in.String()) - case "providedIds": - out.ProvidedIds = int64(in.Int64()) - case "rankingRules": - if in.IsNull() { - in.Skip() - out.RankingRules = nil - } else { - in.Delim('[') - if out.RankingRules == nil { - if !in.IsDelim(']') { - out.RankingRules = make([]string, 0, 4) - } else { - out.RankingRules = []string{} - } - } else { - out.RankingRules = (out.RankingRules)[:0] - } - for !in.IsDelim(']') { - var v128 string - v128 = string(in.String()) - out.RankingRules = append(out.RankingRules, v128) - in.WantComma() - } - in.Delim(']') - } - case "distinctAttribute": - if in.IsNull() { - in.Skip() - out.DistinctAttribute = nil - } else { - if out.DistinctAttribute == nil { - out.DistinctAttribute = new(string) - } - *out.DistinctAttribute = string(in.String()) - } - case "searchableAttributes": - if in.IsNull() { - in.Skip() - out.SearchableAttributes = nil - } else { - in.Delim('[') - if out.SearchableAttributes == nil { - if !in.IsDelim(']') { - out.SearchableAttributes = make([]string, 0, 4) - } else { - out.SearchableAttributes = []string{} - } - } else { - out.SearchableAttributes = (out.SearchableAttributes)[:0] - } - for !in.IsDelim(']') { - var v129 string - v129 = string(in.String()) - out.SearchableAttributes = append(out.SearchableAttributes, v129) - in.WantComma() - } - in.Delim(']') - } - case "displayedAttributes": - if in.IsNull() { - in.Skip() - out.DisplayedAttributes = nil - } else { - in.Delim('[') - if out.DisplayedAttributes == nil { - if !in.IsDelim(']') { - out.DisplayedAttributes = make([]string, 0, 4) - } else { - out.DisplayedAttributes = []string{} - } - } else { - out.DisplayedAttributes = (out.DisplayedAttributes)[:0] - } - for !in.IsDelim(']') { - var v130 string - v130 = string(in.String()) - out.DisplayedAttributes = append(out.DisplayedAttributes, v130) - in.WantComma() - } - in.Delim(']') - } - 
case "stopWords": - if in.IsNull() { - in.Skip() - out.StopWords = nil - } else { - in.Delim('[') - if out.StopWords == nil { - if !in.IsDelim(']') { - out.StopWords = make([]string, 0, 4) - } else { - out.StopWords = []string{} - } - } else { - out.StopWords = (out.StopWords)[:0] - } - for !in.IsDelim(']') { - var v131 string - v131 = string(in.String()) - out.StopWords = append(out.StopWords, v131) - in.WantComma() - } - in.Delim(']') - } - case "synonyms": - if in.IsNull() { - in.Skip() - } else { - in.Delim('{') - if !in.IsDelim('}') { - out.Synonyms = make(map[string][]string) - } else { - out.Synonyms = nil - } - for !in.IsDelim('}') { - key := string(in.String()) - in.WantColon() - var v132 []string - if in.IsNull() { - in.Skip() - v132 = nil - } else { - in.Delim('[') - if v132 == nil { - if !in.IsDelim(']') { - v132 = make([]string, 0, 4) - } else { - v132 = []string{} - } - } else { - v132 = (v132)[:0] - } - for !in.IsDelim(']') { - var v133 string - v133 = string(in.String()) - v132 = append(v132, v133) - in.WantComma() - } - in.Delim(']') - } - (out.Synonyms)[key] = v132 - in.WantComma() - } - in.Delim('}') - } - case "filterableAttributes": - if in.IsNull() { - in.Skip() - out.FilterableAttributes = nil - } else { - in.Delim('[') - if out.FilterableAttributes == nil { - if !in.IsDelim(']') { - out.FilterableAttributes = make([]string, 0, 4) - } else { - out.FilterableAttributes = []string{} - } - } else { - out.FilterableAttributes = (out.FilterableAttributes)[:0] - } - for !in.IsDelim(']') { - var v134 string - v134 = string(in.String()) - out.FilterableAttributes = append(out.FilterableAttributes, v134) - in.WantComma() - } - in.Delim(']') - } - case "sortableAttributes": - if in.IsNull() { - in.Skip() - out.SortableAttributes = nil - } else { - in.Delim('[') - if out.SortableAttributes == nil { - if !in.IsDelim(']') { - out.SortableAttributes = make([]string, 0, 4) - } else { - out.SortableAttributes = []string{} - } - } else { - 
out.SortableAttributes = (out.SortableAttributes)[:0] - } - for !in.IsDelim(']') { - var v135 string - v135 = string(in.String()) - out.SortableAttributes = append(out.SortableAttributes, v135) - in.WantComma() - } - in.Delim(']') - } - case "typoTolerance": - if in.IsNull() { - in.Skip() - out.TypoTolerance = nil - } else { - if out.TypoTolerance == nil { - out.TypoTolerance = new(TypoTolerance) - } - (*out.TypoTolerance).UnmarshalEasyJSON(in) - } - case "pagination": - if in.IsNull() { - in.Skip() - out.Pagination = nil - } else { - if out.Pagination == nil { - out.Pagination = new(Pagination) - } - (*out.Pagination).UnmarshalEasyJSON(in) - } - case "faceting": - if in.IsNull() { - in.Skip() - out.Faceting = nil - } else { - if out.Faceting == nil { - out.Faceting = new(Faceting) - } - (*out.Faceting).UnmarshalEasyJSON(in) - } - case "matchedTasks": - out.MatchedTasks = int64(in.Int64()) - case "canceledTasks": - out.CanceledTasks = int64(in.Int64()) - case "deletedTasks": - out.DeletedTasks = int64(in.Int64()) - case "originalFilter": - out.OriginalFilter = string(in.String()) - case "swaps": - if in.IsNull() { - in.Skip() - out.Swaps = nil - } else { - in.Delim('[') - if out.Swaps == nil { - if !in.IsDelim(']') { - out.Swaps = make([]SwapIndexesParams, 0, 2) - } else { - out.Swaps = []SwapIndexesParams{} - } - } else { - out.Swaps = (out.Swaps)[:0] - } - for !in.IsDelim(']') { - var v136 SwapIndexesParams - (v136).UnmarshalEasyJSON(in) - out.Swaps = append(out.Swaps, v136) - in.WantComma() - } - in.Delim(']') - } - case "dumpUid": - out.DumpUid = string(in.String()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(out *jwriter.Writer, in Details) { - out.RawByte('{') - first := true - _ = first - if in.ReceivedDocuments != 0 { - const prefix string = ",\"receivedDocuments\":" - first = false - out.RawString(prefix[1:]) - 
out.Int64(int64(in.ReceivedDocuments)) - } - if in.IndexedDocuments != 0 { - const prefix string = ",\"indexedDocuments\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.IndexedDocuments)) - } - if in.DeletedDocuments != 0 { - const prefix string = ",\"deletedDocuments\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.DeletedDocuments)) - } - if in.PrimaryKey != "" { - const prefix string = ",\"primaryKey\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.PrimaryKey)) - } - if in.ProvidedIds != 0 { - const prefix string = ",\"providedIds\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.ProvidedIds)) - } - if len(in.RankingRules) != 0 { - const prefix string = ",\"rankingRules\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v137, v138 := range in.RankingRules { - if v137 > 0 { - out.RawByte(',') - } - out.String(string(v138)) - } - out.RawByte(']') - } - } - if in.DistinctAttribute != nil { - const prefix string = ",\"distinctAttribute\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(*in.DistinctAttribute)) - } - if len(in.SearchableAttributes) != 0 { - const prefix string = ",\"searchableAttributes\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v139, v140 := range in.SearchableAttributes { - if v139 > 0 { - out.RawByte(',') - } - out.String(string(v140)) - } - out.RawByte(']') - } - } - if len(in.DisplayedAttributes) != 0 { - const prefix string = ",\"displayedAttributes\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - 
out.RawString(prefix) - } - { - out.RawByte('[') - for v141, v142 := range in.DisplayedAttributes { - if v141 > 0 { - out.RawByte(',') - } - out.String(string(v142)) - } - out.RawByte(']') - } - } - if len(in.StopWords) != 0 { - const prefix string = ",\"stopWords\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v143, v144 := range in.StopWords { - if v143 > 0 { - out.RawByte(',') - } - out.String(string(v144)) - } - out.RawByte(']') - } - } - if len(in.Synonyms) != 0 { - const prefix string = ",\"synonyms\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('{') - v145First := true - for v145Name, v145Value := range in.Synonyms { - if v145First { - v145First = false - } else { - out.RawByte(',') - } - out.String(string(v145Name)) - out.RawByte(':') - if v145Value == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v146, v147 := range v145Value { - if v146 > 0 { - out.RawByte(',') - } - out.String(string(v147)) - } - out.RawByte(']') - } - } - out.RawByte('}') - } - } - if len(in.FilterableAttributes) != 0 { - const prefix string = ",\"filterableAttributes\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v148, v149 := range in.FilterableAttributes { - if v148 > 0 { - out.RawByte(',') - } - out.String(string(v149)) - } - out.RawByte(']') - } - } - if len(in.SortableAttributes) != 0 { - const prefix string = ",\"sortableAttributes\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v150, v151 := range in.SortableAttributes { - if v150 > 0 { - out.RawByte(',') - } - out.String(string(v151)) - } - out.RawByte(']') - } - } - if in.TypoTolerance != nil { - const prefix string = ",\"typoTolerance\":" - if first { - 
first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - (*in.TypoTolerance).MarshalEasyJSON(out) - } - if in.Pagination != nil { - const prefix string = ",\"pagination\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - (*in.Pagination).MarshalEasyJSON(out) - } - if in.Faceting != nil { - const prefix string = ",\"faceting\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - (*in.Faceting).MarshalEasyJSON(out) - } - if in.MatchedTasks != 0 { - const prefix string = ",\"matchedTasks\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.MatchedTasks)) - } - if in.CanceledTasks != 0 { - const prefix string = ",\"canceledTasks\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.CanceledTasks)) - } - if in.DeletedTasks != 0 { - const prefix string = ",\"deletedTasks\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.Int64(int64(in.DeletedTasks)) - } - if in.OriginalFilter != "" { - const prefix string = ",\"originalFilter\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.OriginalFilter)) - } - if len(in.Swaps) != 0 { - const prefix string = ",\"swaps\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - { - out.RawByte('[') - for v152, v153 := range in.Swaps { - if v152 > 0 { - out.RawByte(',') - } - (v153).MarshalEasyJSON(out) - } - out.RawByte(']') - } - } - if in.DumpUid != "" { - const prefix string = ",\"dumpUid\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.DumpUid)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v 
Details) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v Details) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *Details) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *Details) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(in *jlexer.Lexer, out *DeleteTasksQuery) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "UIDS": - if in.IsNull() { - in.Skip() - out.UIDS = nil - } else { - in.Delim('[') - if out.UIDS == nil { - if !in.IsDelim(']') { - out.UIDS = make([]int64, 0, 8) - } else { - out.UIDS = []int64{} - } - } else { - out.UIDS = (out.UIDS)[:0] - } - for !in.IsDelim(']') { - var v154 int64 - v154 = int64(in.Int64()) - out.UIDS = append(out.UIDS, v154) - in.WantComma() - } - in.Delim(']') - } - case "IndexUIDS": - if in.IsNull() { - in.Skip() - out.IndexUIDS = nil - } else { - in.Delim('[') - if out.IndexUIDS == nil { - if !in.IsDelim(']') { - out.IndexUIDS = make([]string, 0, 4) - } else { - out.IndexUIDS = []string{} - } - } else { - out.IndexUIDS = (out.IndexUIDS)[:0] - } - for !in.IsDelim(']') { - var v155 string - v155 = string(in.String()) - out.IndexUIDS = append(out.IndexUIDS, v155) - in.WantComma() - } - 
in.Delim(']') - } - case "Statuses": - if in.IsNull() { - in.Skip() - out.Statuses = nil - } else { - in.Delim('[') - if out.Statuses == nil { - if !in.IsDelim(']') { - out.Statuses = make([]TaskStatus, 0, 4) - } else { - out.Statuses = []TaskStatus{} - } - } else { - out.Statuses = (out.Statuses)[:0] - } - for !in.IsDelim(']') { - var v156 TaskStatus - v156 = TaskStatus(in.String()) - out.Statuses = append(out.Statuses, v156) - in.WantComma() - } - in.Delim(']') - } - case "Types": - if in.IsNull() { - in.Skip() - out.Types = nil - } else { - in.Delim('[') - if out.Types == nil { - if !in.IsDelim(']') { - out.Types = make([]TaskType, 0, 4) - } else { - out.Types = []TaskType{} - } - } else { - out.Types = (out.Types)[:0] - } - for !in.IsDelim(']') { - var v157 TaskType - v157 = TaskType(in.String()) - out.Types = append(out.Types, v157) - in.WantComma() - } - in.Delim(']') - } - case "CanceledBy": - if in.IsNull() { - in.Skip() - out.CanceledBy = nil - } else { - in.Delim('[') - if out.CanceledBy == nil { - if !in.IsDelim(']') { - out.CanceledBy = make([]int64, 0, 8) - } else { - out.CanceledBy = []int64{} - } - } else { - out.CanceledBy = (out.CanceledBy)[:0] - } - for !in.IsDelim(']') { - var v158 int64 - v158 = int64(in.Int64()) - out.CanceledBy = append(out.CanceledBy, v158) - in.WantComma() - } - in.Delim(']') - } - case "BeforeEnqueuedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.BeforeEnqueuedAt).UnmarshalJSON(data)) - } - case "AfterEnqueuedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.AfterEnqueuedAt).UnmarshalJSON(data)) - } - case "BeforeStartedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.BeforeStartedAt).UnmarshalJSON(data)) - } - case "AfterStartedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.AfterStartedAt).UnmarshalJSON(data)) - } - case "BeforeFinishedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.BeforeFinishedAt).UnmarshalJSON(data)) - } - case "AfterFinishedAt": - if data := in.Raw(); 
in.Ok() { - in.AddError((out.AfterFinishedAt).UnmarshalJSON(data)) - } - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(out *jwriter.Writer, in DeleteTasksQuery) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"UIDS\":" - out.RawString(prefix[1:]) - if in.UIDS == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v159, v160 := range in.UIDS { - if v159 > 0 { - out.RawByte(',') - } - out.Int64(int64(v160)) - } - out.RawByte(']') - } - } - { - const prefix string = ",\"IndexUIDS\":" - out.RawString(prefix) - if in.IndexUIDS == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v161, v162 := range in.IndexUIDS { - if v161 > 0 { - out.RawByte(',') - } - out.String(string(v162)) - } - out.RawByte(']') - } - } - { - const prefix string = ",\"Statuses\":" - out.RawString(prefix) - if in.Statuses == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v163, v164 := range in.Statuses { - if v163 > 0 { - out.RawByte(',') - } - out.String(string(v164)) - } - out.RawByte(']') - } - } - { - const prefix string = ",\"Types\":" - out.RawString(prefix) - if in.Types == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v165, v166 := range in.Types { - if v165 > 0 { - out.RawByte(',') - } - out.String(string(v166)) - } - out.RawByte(']') - } - } - { - const prefix string = ",\"CanceledBy\":" - out.RawString(prefix) - if in.CanceledBy == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v167, v168 := range in.CanceledBy { - if v167 > 0 { - out.RawByte(',') - } - out.Int64(int64(v168)) - } - out.RawByte(']') - } - } - { - const prefix 
string = ",\"BeforeEnqueuedAt\":" - out.RawString(prefix) - out.Raw((in.BeforeEnqueuedAt).MarshalJSON()) - } - { - const prefix string = ",\"AfterEnqueuedAt\":" - out.RawString(prefix) - out.Raw((in.AfterEnqueuedAt).MarshalJSON()) - } - { - const prefix string = ",\"BeforeStartedAt\":" - out.RawString(prefix) - out.Raw((in.BeforeStartedAt).MarshalJSON()) - } - { - const prefix string = ",\"AfterStartedAt\":" - out.RawString(prefix) - out.Raw((in.AfterStartedAt).MarshalJSON()) - } - { - const prefix string = ",\"BeforeFinishedAt\":" - out.RawString(prefix) - out.Raw((in.BeforeFinishedAt).MarshalJSON()) - } - { - const prefix string = ",\"AfterFinishedAt\":" - out.RawString(prefix) - out.Raw((in.AfterFinishedAt).MarshalJSON()) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v DeleteTasksQuery) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v DeleteTasksQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *DeleteTasksQuery) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *DeleteTasksQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(in *jlexer.Lexer, out *CsvDocumentsQuery) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - 
continue - } - switch key { - case "primaryKey": - out.PrimaryKey = string(in.String()) - case "csvDelimiter": - out.CsvDelimiter = string(in.String()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(out *jwriter.Writer, in CsvDocumentsQuery) { - out.RawByte('{') - first := true - _ = first - if in.PrimaryKey != "" { - const prefix string = ",\"primaryKey\":" - first = false - out.RawString(prefix[1:]) - out.String(string(in.PrimaryKey)) - } - if in.CsvDelimiter != "" { - const prefix string = ",\"csvDelimiter\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.CsvDelimiter)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v CsvDocumentsQuery) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v CsvDocumentsQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *CsvDocumentsQuery) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *CsvDocumentsQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(in *jlexer.Lexer, out *CreateIndexRequest) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - 
in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "uid": - out.UID = string(in.String()) - case "primaryKey": - out.PrimaryKey = string(in.String()) - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(out *jwriter.Writer, in CreateIndexRequest) { - out.RawByte('{') - first := true - _ = first - if in.UID != "" { - const prefix string = ",\"uid\":" - first = false - out.RawString(prefix[1:]) - out.String(string(in.UID)) - } - if in.PrimaryKey != "" { - const prefix string = ",\"primaryKey\":" - if first { - first = false - out.RawString(prefix[1:]) - } else { - out.RawString(prefix) - } - out.String(string(in.PrimaryKey)) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v CreateIndexRequest) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v CreateIndexRequest) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *CreateIndexRequest) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *CreateIndexRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo43(in *jlexer.Lexer, out *Client) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) 
- in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo43(out *jwriter.Writer, in Client) { - out.RawByte('{') - first := true - _ = first - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v Client) MarshalJSON() ([]byte, error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo43(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v Client) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo43(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *Client) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo43(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *Client) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo43(l, v) -} -func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo44(in *jlexer.Lexer, out *CancelTasksQuery) { - isTopLevel := in.IsStart() - if in.IsNull() { - if isTopLevel { - in.Consumed() - } - in.Skip() - return - } - in.Delim('{') - for !in.IsDelim('}') { - key := in.UnsafeFieldName(false) - in.WantColon() - if in.IsNull() { - in.Skip() - in.WantComma() - continue - } - switch key { - case "UIDS": - if in.IsNull() { - in.Skip() - out.UIDS = nil - } else { - in.Delim('[') - if out.UIDS == nil { - if !in.IsDelim(']') { - out.UIDS = make([]int64, 0, 8) - } else { - out.UIDS = []int64{} - } - } else { - out.UIDS = (out.UIDS)[:0] - } - for !in.IsDelim(']') { - var v169 int64 - v169 = int64(in.Int64()) - out.UIDS = append(out.UIDS, v169) - in.WantComma() - } - in.Delim(']') - } - 
case "IndexUIDS": - if in.IsNull() { - in.Skip() - out.IndexUIDS = nil - } else { - in.Delim('[') - if out.IndexUIDS == nil { - if !in.IsDelim(']') { - out.IndexUIDS = make([]string, 0, 4) - } else { - out.IndexUIDS = []string{} - } - } else { - out.IndexUIDS = (out.IndexUIDS)[:0] - } - for !in.IsDelim(']') { - var v170 string - v170 = string(in.String()) - out.IndexUIDS = append(out.IndexUIDS, v170) - in.WantComma() - } - in.Delim(']') - } - case "Statuses": - if in.IsNull() { - in.Skip() - out.Statuses = nil - } else { - in.Delim('[') - if out.Statuses == nil { - if !in.IsDelim(']') { - out.Statuses = make([]TaskStatus, 0, 4) - } else { - out.Statuses = []TaskStatus{} - } - } else { - out.Statuses = (out.Statuses)[:0] - } - for !in.IsDelim(']') { - var v171 TaskStatus - v171 = TaskStatus(in.String()) - out.Statuses = append(out.Statuses, v171) - in.WantComma() - } - in.Delim(']') - } - case "Types": - if in.IsNull() { - in.Skip() - out.Types = nil - } else { - in.Delim('[') - if out.Types == nil { - if !in.IsDelim(']') { - out.Types = make([]TaskType, 0, 4) - } else { - out.Types = []TaskType{} - } - } else { - out.Types = (out.Types)[:0] - } - for !in.IsDelim(']') { - var v172 TaskType - v172 = TaskType(in.String()) - out.Types = append(out.Types, v172) - in.WantComma() - } - in.Delim(']') - } - case "BeforeEnqueuedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.BeforeEnqueuedAt).UnmarshalJSON(data)) - } - case "AfterEnqueuedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.AfterEnqueuedAt).UnmarshalJSON(data)) - } - case "BeforeStartedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.BeforeStartedAt).UnmarshalJSON(data)) - } - case "AfterStartedAt": - if data := in.Raw(); in.Ok() { - in.AddError((out.AfterStartedAt).UnmarshalJSON(data)) - } - default: - in.SkipRecursive() - } - in.WantComma() - } - in.Delim('}') - if isTopLevel { - in.Consumed() - } -} -func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo44(out *jwriter.Writer, in 
CancelTasksQuery) { - out.RawByte('{') - first := true - _ = first - { - const prefix string = ",\"UIDS\":" - out.RawString(prefix[1:]) - if in.UIDS == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v173, v174 := range in.UIDS { - if v173 > 0 { - out.RawByte(',') - } - out.Int64(int64(v174)) - } - out.RawByte(']') - } - } - { - const prefix string = ",\"IndexUIDS\":" - out.RawString(prefix) - if in.IndexUIDS == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v175, v176 := range in.IndexUIDS { - if v175 > 0 { - out.RawByte(',') - } - out.String(string(v176)) - } - out.RawByte(']') - } - } - { - const prefix string = ",\"Statuses\":" - out.RawString(prefix) - if in.Statuses == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v177, v178 := range in.Statuses { - if v177 > 0 { - out.RawByte(',') - } - out.String(string(v178)) - } - out.RawByte(']') - } - } - { - const prefix string = ",\"Types\":" - out.RawString(prefix) - if in.Types == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { - out.RawString("null") - } else { - out.RawByte('[') - for v179, v180 := range in.Types { - if v179 > 0 { - out.RawByte(',') - } - out.String(string(v180)) - } - out.RawByte(']') - } - } - { - const prefix string = ",\"BeforeEnqueuedAt\":" - out.RawString(prefix) - out.Raw((in.BeforeEnqueuedAt).MarshalJSON()) - } - { - const prefix string = ",\"AfterEnqueuedAt\":" - out.RawString(prefix) - out.Raw((in.AfterEnqueuedAt).MarshalJSON()) - } - { - const prefix string = ",\"BeforeStartedAt\":" - out.RawString(prefix) - out.Raw((in.BeforeStartedAt).MarshalJSON()) - } - { - const prefix string = ",\"AfterStartedAt\":" - out.RawString(prefix) - out.Raw((in.AfterStartedAt).MarshalJSON()) - } - out.RawByte('}') -} - -// MarshalJSON supports json.Marshaler interface -func (v CancelTasksQuery) MarshalJSON() ([]byte, 
error) { - w := jwriter.Writer{} - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo44(&w, v) - return w.Buffer.BuildBytes(), w.Error -} - -// MarshalEasyJSON supports easyjson.Marshaler interface -func (v CancelTasksQuery) MarshalEasyJSON(w *jwriter.Writer) { - easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo44(w, v) -} - -// UnmarshalJSON supports json.Unmarshaler interface -func (v *CancelTasksQuery) UnmarshalJSON(data []byte) error { - r := jlexer.Lexer{Data: data} - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo44(&r, v) - return r.Error() -} - -// UnmarshalEasyJSON supports easyjson.Unmarshaler interface -func (v *CancelTasksQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { - easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo44(l, v) -} From 2544334db68a151167a8330337ab62099f681732 Mon Sep 17 00:00:00 2001 From: Javad Date: Sun, 4 Aug 2024 08:57:35 +0330 Subject: [PATCH 25/43] fix: refactor fasthttp client to http client --- client.go | 688 ++++------------ client_test.go | 2056 ++---------------------------------------------- 2 files changed, 218 insertions(+), 2526 deletions(-) diff --git a/client.go b/client.go index 4b4a9602..55d977eb 100644 --- a/client.go +++ b/client.go @@ -1,598 +1,228 @@ package meilisearch import ( + "bytes" "context" + "encoding/json" + "errors" "fmt" + "io" "net/http" - "regexp" - "strconv" - "strings" - "time" - - "github.com/golang-jwt/jwt/v4" - "github.com/valyala/fasthttp" + "net/url" + "sync" ) -// ClientConfig configure the Client -type ClientConfig struct { - // Host is the host of your Meilisearch database - // Example: 'http://localhost:7700' - Host string - - // APIKey is optional - APIKey string - - // Timeout is optional - Timeout time.Duration -} - -type WaitParams struct { - Context context.Context - Interval time.Duration -} - -// ClientInterface is interface for all Meilisearch client -type ClientInterface interface { - Index(uid string) *Index - GetIndex(indexID string) (resp *Index, err 
error) - GetRawIndex(uid string) (resp map[string]interface{}, err error) - GetIndexes(param *IndexesQuery) (resp *IndexesResults, err error) - GetRawIndexes(param *IndexesQuery) (resp map[string]interface{}, err error) - CreateIndex(config *IndexConfig) (resp *TaskInfo, err error) - DeleteIndex(uid string) (resp *TaskInfo, err error) - CreateKey(request *Key) (resp *Key, err error) - MultiSearch(queries *MultiSearchRequest) (*MultiSearchResponse, error) - GetKey(identifier string) (resp *Key, err error) - GetKeys(param *KeysQuery) (resp *KeysResults, err error) - UpdateKey(keyOrUID string, request *Key) (resp *Key, err error) - DeleteKey(keyOrUID string) (resp bool, err error) - GetStats() (resp *Stats, err error) - CreateDump() (resp *TaskInfo, err error) - Version() (*Version, error) - GetVersion() (resp *Version, err error) - Health() (*Health, error) - IsHealthy() bool - GetTask(taskUID int64) (resp *Task, err error) - GetTasks(param *TasksQuery) (resp *TaskResult, err error) - CancelTasks(param *CancelTasksQuery) (resp *TaskInfo, err error) - DeleteTasks(param *DeleteTasksQuery) (resp *TaskInfo, err error) - SwapIndexes(param []SwapIndexesParams) (resp *TaskInfo, err error) - WaitForTask(taskUID int64, options ...WaitParams) (*Task, error) - GenerateTenantToken(APIKeyUID string, searchRules map[string]interface{}, options *TenantTokenOptions) (resp string, err error) -} - -var _ ClientInterface = &Client{} - -const ( - defaultWaitForTaskTimeout = 5 * time.Second - defaultWaitForTaskInterval = 50 * time.Millisecond -) - -// NewFastHTTPCustomClient creates Meilisearch with custom fasthttp.Client -func NewFastHTTPCustomClient(config ClientConfig, client *fasthttp.Client) *Client { - c := &Client{ - config: config, - httpClient: client, - } - return c -} - -// NewClient creates Meilisearch with default fasthttp.Client -func NewClient(config ClientConfig) *Client { - client := &fasthttp.Client{ - Name: "meilisearch-client", - // Reuse the most recently-used idle 
connection. - ConnPoolStrategy: fasthttp.LIFO, - } - c := &Client{ - config: config, - httpClient: client, - } - return c -} - -func (c *Client) Version() (resp *Version, err error) { - resp = &Version{} - req := internalRequest{ - endpoint: "/version", - method: http.MethodGet, - withRequest: nil, - withResponse: resp, - acceptedStatusCodes: []int{http.StatusOK}, - functionName: "Version", - } - if err := c.executeRequest(req); err != nil { - return nil, err - } - return resp, nil -} - -func (c *Client) GetVersion() (resp *Version, err error) { - return c.Version() -} - -func (c *Client) GetStats() (resp *Stats, err error) { - resp = &Stats{} - req := internalRequest{ - endpoint: "/stats", - method: http.MethodGet, - withRequest: nil, - withResponse: resp, - acceptedStatusCodes: []int{http.StatusOK}, - functionName: "GetStats", - } - if err := c.executeRequest(req); err != nil { - return nil, err - } - return resp, nil -} - -func (c *Client) CreateKey(request *Key) (resp *Key, err error) { - parsedRequest := convertKeyToParsedKey(*request) - resp = &Key{} - req := internalRequest{ - endpoint: "/keys", - method: http.MethodPost, - contentType: contentTypeJSON, - withRequest: &parsedRequest, - withResponse: resp, - acceptedStatusCodes: []int{http.StatusCreated}, - functionName: "CreateKey", - } - if err := c.executeRequest(req); err != nil { - return nil, err - } - return resp, nil +type client struct { + client *http.Client + host string + apiKey string + bufferPool *sync.Pool } -func (c *Client) GetKey(identifier string) (resp *Key, err error) { - resp = &Key{} - req := internalRequest{ - endpoint: "/keys/" + identifier, - method: http.MethodGet, - withRequest: nil, - withResponse: resp, - acceptedStatusCodes: []int{http.StatusOK}, - functionName: "GetKey", - } - if err := c.executeRequest(req); err != nil { - return nil, err - } - return resp, nil -} +type internalRequest struct { + endpoint string + method string + contentType string -func (c *Client) 
GetKeys(param *KeysQuery) (resp *KeysResults, err error) { - resp = &KeysResults{} - req := internalRequest{ - endpoint: "/keys", - method: http.MethodGet, - withRequest: nil, - withResponse: resp, - withQueryParams: map[string]string{}, - acceptedStatusCodes: []int{http.StatusOK}, - functionName: "GetKeys", - } - if param != nil && param.Limit != 0 { - req.withQueryParams["limit"] = strconv.FormatInt(param.Limit, 10) - } - if param != nil && param.Offset != 0 { - req.withQueryParams["offset"] = strconv.FormatInt(param.Offset, 10) - } - if err := c.executeRequest(req); err != nil { - return nil, err - } - return resp, nil -} + withRequest interface{} + withResponse interface{} + withQueryParams map[string]string -func (c *Client) UpdateKey(keyOrUID string, request *Key) (resp *Key, err error) { - parsedRequest := KeyUpdate{Name: request.Name, Description: request.Description} - resp = &Key{} - req := internalRequest{ - endpoint: "/keys/" + keyOrUID, - method: http.MethodPatch, - contentType: contentTypeJSON, - withRequest: &parsedRequest, - withResponse: resp, - acceptedStatusCodes: []int{http.StatusOK}, - functionName: "UpdateKey", - } - if err := c.executeRequest(req); err != nil { - return nil, err - } - return resp, nil -} + acceptedStatusCodes []int -func (c *Client) DeleteKey(keyOrUID string) (resp bool, err error) { - req := internalRequest{ - endpoint: "/keys/" + keyOrUID, - method: http.MethodDelete, - withRequest: nil, - withResponse: nil, - acceptedStatusCodes: []int{http.StatusNoContent}, - functionName: "DeleteKey", - } - if err := c.executeRequest(req); err != nil { - return false, err - } - return true, nil + functionName string } -func (c *Client) Health() (resp *Health, err error) { - resp = &Health{} - req := internalRequest{ - endpoint: "/health", - method: http.MethodGet, - withRequest: nil, - withResponse: resp, - acceptedStatusCodes: []int{http.StatusOK}, - functionName: "Health", - } - if err := c.executeRequest(req); err != nil { - return 
nil, err +func newClient(cli *http.Client, host, apiKey string) *client { + return &client{ + client: cli, + host: host, + apiKey: apiKey, + bufferPool: &sync.Pool{ + New: func() interface{} { + return new(bytes.Buffer) + }, + }, } - return resp, nil } -func (c *Client) IsHealthy() bool { - if _, err := c.Health(); err != nil { - return false +func (c *client) executeRequest(ctx context.Context, req *internalRequest) error { + internalError := &Error{ + Endpoint: req.endpoint, + Method: req.method, + Function: req.functionName, + RequestToString: "empty request", + ResponseToString: "empty response", + MeilisearchApiError: meilisearchApiError{ + Message: "empty meilisearch message", + }, + StatusCodeExpected: req.acceptedStatusCodes, } - return true -} -func (c *Client) CreateDump() (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/dumps", - method: http.MethodPost, - contentType: contentTypeJSON, - withRequest: nil, - withResponse: resp, - acceptedStatusCodes: []int{http.StatusAccepted}, - functionName: "CreateDump", - } - if err := c.executeRequest(req); err != nil { - return nil, err + resp, err := c.sendRequest(ctx, req, internalError) + if err != nil { + return err } - return resp, nil -} -func (c *Client) MultiSearch(queries *MultiSearchRequest) (*MultiSearchResponse, error) { - resp := &MultiSearchResponse{} + defer func() { + _ = resp.Body.Close() + }() - for i := 0; i < len(queries.Queries); i++ { - queries.Queries[i].validate() - } + internalError.StatusCode = resp.StatusCode - req := internalRequest{ - endpoint: "/multi-search", - method: http.MethodPost, - contentType: contentTypeJSON, - withRequest: queries, - withResponse: resp, - acceptedStatusCodes: []int{http.StatusOK}, - functionName: "MultiSearch", + b, err := io.ReadAll(resp.Body) + if err != nil { + return err } - if err := c.executeRequest(req); err != nil { - return nil, err + err = c.handleStatusCode(req, resp.StatusCode, b, internalError) + if err != 
nil { + return err } - return resp, nil -} - -func (c *Client) GetTask(taskUID int64) (resp *Task, err error) { - resp = &Task{} - req := internalRequest{ - endpoint: "/tasks/" + strconv.FormatInt(taskUID, 10), - method: http.MethodGet, - withRequest: nil, - withResponse: resp, - acceptedStatusCodes: []int{http.StatusOK}, - functionName: "GetTask", - } - if err := c.executeRequest(req); err != nil { - return nil, err + err = c.handleResponse(req, b, internalError) + if err != nil { + return err } - return resp, nil + return nil } -func (c *Client) GetTasks(param *TasksQuery) (resp *TaskResult, err error) { - resp = &TaskResult{} - req := internalRequest{ - endpoint: "/tasks", - method: http.MethodGet, - withRequest: nil, - withResponse: &resp, - withQueryParams: map[string]string{}, - acceptedStatusCodes: []int{http.StatusOK}, - functionName: "GetTasks", - } - if param != nil { - encodeTasksQuery(param, &req) - } - if err := c.executeRequest(req); err != nil { - return nil, err - } - return resp, nil -} +func (c *client) sendRequest( + ctx context.Context, + req *internalRequest, + internalError *Error, +) (*http.Response, error) { -func (c *Client) CancelTasks(param *CancelTasksQuery) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/tasks/cancel", - method: http.MethodPost, - withRequest: nil, - withResponse: &resp, - withQueryParams: map[string]string{}, - acceptedStatusCodes: []int{http.StatusOK}, - functionName: "CancelTasks", - } - if param != nil { - paramToSend := &TasksQuery{ - UIDS: param.UIDS, - IndexUIDS: param.IndexUIDS, - Statuses: param.Statuses, - Types: param.Types, - BeforeEnqueuedAt: param.BeforeEnqueuedAt, - AfterEnqueuedAt: param.AfterEnqueuedAt, - BeforeStartedAt: param.BeforeStartedAt, - AfterStartedAt: param.AfterStartedAt, - } - encodeTasksQuery(paramToSend, &req) - } - if err := c.executeRequest(req); err != nil { - return nil, err + apiURL, err := url.Parse(c.host + req.endpoint) + if err != nil { 
+ return nil, fmt.Errorf("unable to parse url: %w", err) } - return resp, nil -} -func (c *Client) DeleteTasks(param *DeleteTasksQuery) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/tasks", - method: http.MethodDelete, - withRequest: nil, - withResponse: &resp, - withQueryParams: map[string]string{}, - acceptedStatusCodes: []int{http.StatusOK}, - functionName: "DeleteTasks", - } - if param != nil { - paramToSend := &TasksQuery{ - UIDS: param.UIDS, - IndexUIDS: param.IndexUIDS, - Statuses: param.Statuses, - Types: param.Types, - CanceledBy: param.CanceledBy, - BeforeEnqueuedAt: param.BeforeEnqueuedAt, - AfterEnqueuedAt: param.AfterEnqueuedAt, - BeforeStartedAt: param.BeforeStartedAt, - AfterStartedAt: param.AfterStartedAt, - BeforeFinishedAt: param.BeforeFinishedAt, - AfterFinishedAt: param.AfterFinishedAt, + if req.withQueryParams != nil { + query := apiURL.Query() + for key, value := range req.withQueryParams { + query.Set(key, value) } - encodeTasksQuery(paramToSend, &req) - } - if err := c.executeRequest(req); err != nil { - return nil, err - } - return resp, nil -} -func (c *Client) SwapIndexes(param []SwapIndexesParams) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/swap-indexes", - method: http.MethodPost, - contentType: contentTypeJSON, - withRequest: param, - withResponse: &resp, - acceptedStatusCodes: []int{http.StatusAccepted}, - functionName: "SwapIndexes", + apiURL.RawQuery = query.Encode() } - if err := c.executeRequest(req); err != nil { - return nil, err - } - return resp, nil -} -// WaitForTask waits for a task to be processed -// -// The function will check by regular interval provided in parameter interval -// the TaskStatus. -// If no ctx and interval are provided WaitForTask will check each 50ms the -// status of a task. 
-func (c *Client) WaitForTask(taskUID int64, options ...WaitParams) (*Task, error) { - // extract closure to get the task and check the status first before the ticker - fn := func() (*Task, error) { - getTask, err := c.GetTask(taskUID) - if err != nil { - return nil, err + // Create request body + var body io.Reader = nil + if req.withRequest != nil { + if req.method == http.MethodGet || req.method == http.MethodHead { + return nil, ErrInvalidRequestMethod + } + if req.contentType == "" { + return nil, ErrRequestBodyWithoutContentType } - if getTask.Status != TaskStatusEnqueued && getTask.Status != TaskStatusProcessing { - return getTask, nil + rawRequest := req.withRequest + if b, ok := rawRequest.([]byte); ok { + // If the request body is already a []byte then use it directly + buf := c.bufferPool.Get().(*bytes.Buffer) + buf.Reset() + buf.Write(b) + body = buf + } else if reader, ok := rawRequest.(io.Reader); ok { + // If the request body is an io.Reader then stream it directly + body = reader + } else { + // Otherwise convert it to JSON + var ( + data []byte + err error + ) + if marshaler, ok := rawRequest.(json.Marshaler); ok { + data, err = marshaler.MarshalJSON() + if err != nil { + return nil, internalError.WithErrCode(ErrCodeMarshalRequest, fmt.Errorf("failed to marshal with MarshalJSON: %w", err)) + } + if data == nil { + return nil, internalError.WithErrCode(ErrCodeMarshalRequest, errors.New("MarshalJSON returned nil data")) + } + } else { + data, err = json.Marshal(rawRequest) + if err != nil { + return nil, internalError.WithErrCode(ErrCodeMarshalRequest, fmt.Errorf("failed to marshal with json.Marshal: %w", err)) + } + } + buf := c.bufferPool.Get().(*bytes.Buffer) + buf.Reset() + buf.Write(data) + body = buf } - return nil, nil } - // run first before the ticker, we do not want to wait for the first interval - task, err := fn() + // Create the HTTP request + request, err := http.NewRequestWithContext(ctx, req.method, apiURL.String(), body) if err != 
nil { - // Return error if it exists - return nil, err + return nil, fmt.Errorf("unable to create request: %w", err) } - // Return task if it exists - if task != nil { - return task, nil + // adding request headers + if req.contentType != "" { + request.Header.Set("Content-Type", req.contentType) } - - // Check if options are provided, if not create a default context and interval - if options == nil { - ctx, cancelFunc := context.WithTimeout(context.Background(), defaultWaitForTaskTimeout) - defer cancelFunc() - options = []WaitParams{ - { - Context: ctx, - Interval: defaultWaitForTaskInterval, - }, - } + if c.apiKey != "" { + request.Header.Set("Authorization", "Bearer "+c.apiKey) } - // Create a ticker to check the task status, because our initial check was not successful - ticker := time.NewTicker(options[0].Interval) - - // Defer the stop of the ticker, help GC to cleanup - defer func() { - // we might want to revist this, go.mod now is 1.16 - // however I still encouter the issue on go 1.22.2 - // there are 2 issues regarding tickers - // https://go-review.googlesource.com/c/go/+/512355 - // https://github.com/golang/go/issues/61542 - ticker.Stop() - ticker = nil - }() + request.Header.Set("User-Agent", GetQualifiedVersion()) - for { - select { - case <-options[0].Context.Done(): - return nil, options[0].Context.Err() - case <-ticker.C: - task, err := fn() - if err != nil { - return nil, err - } + resp, err := c.client.Do(request) + if err != nil { + if errors.Is(err, context.DeadlineExceeded) { + return nil, internalError.WithErrCode(MeilisearchTimeoutError, err) + } + return nil, internalError.WithErrCode(MeilisearchCommunicationError, err) + } - if task != nil { - return task, nil - } + if body != nil { + if buf, ok := body.(*bytes.Buffer); ok { + c.bufferPool.Put(buf) } } + return resp, nil } -// Generate a JWT token for the use of multitenancy -// -// SearchRules parameters is mandatory and should contains the rules to be enforced at search time for all 
or specific -// accessible indexes for the signing API Key. -// ExpiresAt options is a time.Time when the key will expire. Note that if an ExpiresAt value is included it should be in UTC time. -// ApiKey options is the API key parent of the token. If you leave it empty the client API Key will be used. -func (c *Client) GenerateTenantToken(APIKeyUID string, SearchRules map[string]interface{}, Options *TenantTokenOptions) (resp string, err error) { - // validate the arguments - if SearchRules == nil { - return "", fmt.Errorf("GenerateTenantToken: The search rules added in the token generation must be of type array or object") - } - if (Options == nil || Options.APIKey == "") && c.config.APIKey == "" { - return "", fmt.Errorf("GenerateTenantToken: The API key used for the token generation must exist and be a valid Meilisearch key") - } - if APIKeyUID == "" || !IsValidUUID(APIKeyUID) { - return "", fmt.Errorf("GenerateTenantToken: The uid used for the token generation must exist and comply to uuid4 format") - } - if Options != nil && !Options.ExpiresAt.IsZero() && Options.ExpiresAt.Before(time.Now()) { - return "", fmt.Errorf("GenerateTenantToken: When the expiresAt field in the token generation has a value, it must be a date set in the future") - } +func (c *client) handleStatusCode(req *internalRequest, statusCode int, body []byte, internalError *Error) error { + if req.acceptedStatusCodes != nil { - var secret string - if Options == nil || Options.APIKey == "" { - secret = c.config.APIKey - } else { - secret = Options.APIKey - } + // A successful status code is required so check if the response status code is in the + // expected status code list. 
+ for _, acceptedCode := range req.acceptedStatusCodes { + if statusCode == acceptedCode { + return nil + } + } - // For HMAC signing method, the key should be any []byte - hmacSampleSecret := []byte(secret) + internalError.ErrorBody(body) - // Create the claims - claims := TenantTokenClaims{} - if Options != nil && !Options.ExpiresAt.IsZero() { - claims.RegisteredClaims = jwt.RegisteredClaims{ - ExpiresAt: jwt.NewNumericDate(Options.ExpiresAt), + if internalError.MeilisearchApiError.Code == "" { + return internalError.WithErrCode(MeilisearchApiErrorWithoutMessage) } + return internalError.WithErrCode(MeilisearchApiError) } - claims.APIKeyUID = APIKeyUID - claims.SearchRules = SearchRules - - // Create a new token object, specifying signing method and the claims - token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) - // Sign and get the complete encoded token as a string using the secret - tokenString, err := token.SignedString(hmacSampleSecret) - - return tokenString, err + return nil } -// This function allows the user to create a Key with an ExpiresAt in time.Time -// and transform the Key structure into a KeyParsed structure to send the time format -// managed by Meilisearch -func convertKeyToParsedKey(key Key) (resp KeyParsed) { - resp = KeyParsed{Name: key.Name, Description: key.Description, UID: key.UID, Actions: key.Actions, Indexes: key.Indexes} - - // Convert time.Time to *string to feat the exact ISO-8601 - // format of Meilisearch - if !key.ExpiresAt.IsZero() { - resp.ExpiresAt = formatDate(key.ExpiresAt, true) - } - return resp -} +func (c *client) handleResponse(req *internalRequest, body []byte, internalError *Error) (err error) { + if req.withResponse != nil { -func IsValidUUID(uuid string) bool { - r := regexp.MustCompile("^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-4[a-fA-F0-9]{3}-[8|9|aA|bB][a-fA-F0-9]{3}-[a-fA-F0-9]{12}$") - return r.MatchString(uuid) -} + internalError.ResponseToString = string(body) -func encodeTasksQuery(param *TasksQuery, req 
*internalRequest) { - if param.Limit != 0 { - req.withQueryParams["limit"] = strconv.FormatInt(param.Limit, 10) - } - if param.From != 0 { - req.withQueryParams["from"] = strconv.FormatInt(param.From, 10) - } - if len(param.Statuses) != 0 { - var statuses []string - for _, status := range param.Statuses { - statuses = append(statuses, string(status)) + var err error + if resp, ok := req.withResponse.(json.Unmarshaler); ok { + err = resp.UnmarshalJSON(body) + req.withResponse = resp + } else { + err = json.Unmarshal(body, req.withResponse) } - req.withQueryParams["statuses"] = strings.Join(statuses, ",") - } - if len(param.Types) != 0 { - var types []string - for _, t := range param.Types { - types = append(types, string(t)) + if err != nil { + return internalError.WithErrCode(ErrCodeResponseUnmarshalBody, err) } - req.withQueryParams["types"] = strings.Join(types, ",") - } - if len(param.IndexUIDS) != 0 { - req.withQueryParams["indexUids"] = strings.Join(param.IndexUIDS, ",") - } - if len(param.UIDS) != 0 { - req.withQueryParams["uids"] = strings.Trim(strings.Join(strings.Fields(fmt.Sprint(param.UIDS)), ","), "[]") - } - if len(param.CanceledBy) != 0 { - req.withQueryParams["canceledBy"] = strings.Trim(strings.Join(strings.Fields(fmt.Sprint(param.CanceledBy)), ","), "[]") } - if !param.BeforeEnqueuedAt.IsZero() { - req.withQueryParams["beforeEnqueuedAt"] = *formatDate(param.BeforeEnqueuedAt, false) - } - if !param.AfterEnqueuedAt.IsZero() { - req.withQueryParams["afterEnqueuedAt"] = *formatDate(param.AfterEnqueuedAt, false) - } - if !param.BeforeStartedAt.IsZero() { - req.withQueryParams["beforeStartedAt"] = *formatDate(param.BeforeStartedAt, false) - } - if !param.AfterStartedAt.IsZero() { - req.withQueryParams["afterStartedAt"] = *formatDate(param.AfterStartedAt, false) - } - if !param.BeforeFinishedAt.IsZero() { - req.withQueryParams["beforeFinishedAt"] = *formatDate(param.BeforeFinishedAt, false) - } - if !param.AfterFinishedAt.IsZero() { - 
req.withQueryParams["afterFinishedAt"] = *formatDate(param.AfterFinishedAt, false) - } -} - -func formatDate(date time.Time, key bool) *string { - const format = "2006-01-02T15:04:05Z" - timeParsedToString := date.Format(format) - return &timeParsedToString + return nil } diff --git a/client_test.go b/client_test.go index 3d6451da..2322de6b 100644 --- a/client_test.go +++ b/client_test.go @@ -2,2026 +2,88 @@ package meilisearch import ( "context" - "math" - "reflect" - "strings" - "sync" - "testing" - "time" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/valyala/fasthttp" + "net/http" + "net/http/httptest" + "testing" ) -func TestClient_Version(t *testing.T) { - tests := []struct { - name string - client *Client - }{ - { - name: "TestVersion", - client: defaultClient, - }, - { - name: "TestVersionWithCustomClient", - client: customClient, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - gotResp, err := tt.client.GetVersion() - require.NoError(t, err) - require.NotNil(t, gotResp, "Version() should not return nil value") - }) - } -} - -func TestClient_TimeoutError(t *testing.T) { - tests := []struct { - name string - client *Client - expectedError Error - }{ - { - name: "TestTimeoutError", - client: timeoutClient, - expectedError: Error{ - MeilisearchApiError: meilisearchApiError{}, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - gotResp, err := tt.client.GetVersion() - require.Error(t, err) - require.Nil(t, gotResp) - require.Equal(t, tt.expectedError.MeilisearchApiError.Code, - err.(*Error).MeilisearchApiError.Code) - }) - } -} - -func TestClient_GetStats(t *testing.T) { - tests := []struct { - name string - client *Client - }{ - { - name: "TestGetStats", - client: defaultClient, - }, - { - name: "TestGetStatsWithCustomClient", - client: customClient, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - gotResp, err := 
tt.client.GetStats() - require.NoError(t, err) - require.NotNil(t, gotResp, "GetStats() should not return nil value") - }) - } -} - -func TestClient_GetKey(t *testing.T) { - tests := []struct { - name string - client *Client - }{ - { - name: "TestGetKey", - client: defaultClient, - }, - { - name: "TestGetKeyWithCustomClient", - client: customClient, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - gotResp, err := tt.client.GetKeys(nil) - require.NoError(t, err) - - gotKey, err := tt.client.GetKey(gotResp.Results[0].Key) - require.NoError(t, err) - require.NotNil(t, gotKey.ExpiresAt) - require.NotNil(t, gotKey.CreatedAt) - require.NotNil(t, gotKey.UpdatedAt) - }) - } -} - -func TestClient_GetKeys(t *testing.T) { - type args struct { - client *Client - request *KeysQuery - } - tests := []struct { - name string - args args - }{ - { - name: "TestBasicGetKeys", - args: args{ - client: defaultClient, - request: nil, - }, - }, - { - name: "TestGetKeysWithCustomClient", - args: args{ - client: customClient, - request: nil, - }, - }, - { - name: "TestGetKeysWithEmptyParam", - args: args{ - client: defaultClient, - request: &KeysQuery{}, - }, - }, - { - name: "TestGetKeysWithLimit", - args: args{ - client: defaultClient, - request: &KeysQuery{ - Limit: 1, - }, - }, - }, - { - name: "TestGetKeysWithOffset", - args: args{ - client: defaultClient, - request: &KeysQuery{ - Limit: 2, - Offset: 1, - }, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - gotResp, err := tt.args.client.GetKeys(tt.args.request) - - require.NoError(t, err) - require.NotNil(t, gotResp, "GetKeys() should not return nil value") - switch { - case tt.args.request != nil && tt.args.request.Limit != 0 && tt.args.request.Offset == 0: - require.Equal(t, tt.args.request.Limit, int64(len(gotResp.Results))) - case tt.args.request != nil && tt.args.request.Limit == 2 && tt.args.request.Offset == 1: - require.GreaterOrEqual(t, len(gotResp.Results), 
int(tt.args.request.Limit-tt.args.request.Offset)) - default: - require.GreaterOrEqual(t, len(gotResp.Results), 2) - } - }) - } -} - -func TestClient_CreateKey(t *testing.T) { - tests := []struct { - name string - client *Client - key Key - }{ - { - name: "TestCreateBasicKey", - client: defaultClient, - key: Key{ - Actions: []string{"*"}, - Indexes: []string{"*"}, - }, - }, - { - name: "TestCreateKeyWithCustomClient", - client: customClient, - key: Key{ - Actions: []string{"*"}, - Indexes: []string{"*"}, - }, - }, - { - name: "TestCreateKeyWithExpirationAt", - client: defaultClient, - key: Key{ - Actions: []string{"*"}, - Indexes: []string{"*"}, - ExpiresAt: time.Now().Add(time.Hour * 10), - }, - }, - { - name: "TestCreateKeyWithDescription", - client: defaultClient, - key: Key{ - Name: "TestCreateKeyWithDescription", - Description: "TestCreateKeyWithDescription", - Actions: []string{"*"}, - Indexes: []string{"*"}, - }, - }, - { - name: "TestCreateKeyWithActions", - client: defaultClient, - key: Key{ - Name: "TestCreateKeyWithActions", - Description: "TestCreateKeyWithActions", - Actions: []string{"documents.add", "documents.delete"}, - Indexes: []string{"*"}, - }, - }, - { - name: "TestCreateKeyWithIndexes", - client: defaultClient, - key: Key{ - Name: "TestCreateKeyWithIndexes", - Description: "TestCreateKeyWithIndexes", - Actions: []string{"*"}, - Indexes: []string{"movies", "games"}, - }, - }, - { - name: "TestCreateKeyWithWildcardedAction", - client: defaultClient, - key: Key{ - Name: "TestCreateKeyWithWildcardedAction", - Description: "TestCreateKeyWithWildcardedAction", - Actions: []string{"documents.*"}, - Indexes: []string{"movies", "games"}, - }, - }, - { - name: "TestCreateKeyWithUID", - client: defaultClient, - key: Key{ - Name: "TestCreateKeyWithUID", - UID: "9aec34f4-e44c-4917-86c2-9c9403abb3b6", - Actions: []string{"*"}, - Indexes: []string{"*"}, - }, - }, - { - name: "TestCreateKeyWithAllOptions", - client: defaultClient, - key: Key{ - Name: 
"TestCreateKeyWithAllOptions", - Description: "TestCreateKeyWithAllOptions", - UID: "9aec34f4-e44c-4917-86c2-9c9403abb3b6", - Actions: []string{"documents.add", "documents.delete"}, - Indexes: []string{"movies", "games"}, - ExpiresAt: time.Now().Add(time.Hour * 10), - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - const Format = "2006-01-02T15:04:05" - c := tt.client - t.Cleanup(cleanup(c)) - - gotResp, err := c.CreateKey(&tt.key) - require.NoError(t, err) - - gotKey, err := c.GetKey(gotResp.Key) - require.NoError(t, err) - require.Equal(t, tt.key.Name, gotKey.Name) - require.Equal(t, tt.key.Description, gotKey.Description) - if tt.key.UID != "" { - require.Equal(t, tt.key.UID, gotKey.UID) - } - require.Equal(t, tt.key.Actions, gotKey.Actions) - require.Equal(t, tt.key.Indexes, gotKey.Indexes) - if !tt.key.ExpiresAt.IsZero() { - require.Equal(t, tt.key.ExpiresAt.Format(Format), gotKey.ExpiresAt.Format(Format)) - } - }) - } -} - -func TestClient_UpdateKey(t *testing.T) { - tests := []struct { - name string - client *Client - keyToCreate Key - keyToUpdate Key - }{ - { - name: "TestUpdateKeyWithDescription", - client: defaultClient, - keyToCreate: Key{ - Actions: []string{"*"}, - Indexes: []string{"*"}, - }, - keyToUpdate: Key{ - Description: "TestUpdateKeyWithDescription", - }, - }, - { - name: "TestUpdateKeyWithCustomClientWithDescription", - client: customClient, - keyToCreate: Key{ - Actions: []string{"*"}, - Indexes: []string{"TestUpdateKeyWithCustomClientWithDescription"}, - }, - keyToUpdate: Key{ - Description: "TestUpdateKeyWithCustomClientWithDescription", - }, - }, - { - name: "TestUpdateKeyWithName", - client: defaultClient, - keyToCreate: Key{ - Actions: []string{"*"}, - Indexes: []string{"TestUpdateKeyWithName"}, - }, - keyToUpdate: Key{ - Name: "TestUpdateKeyWithName", - }, - }, - { - name: "TestUpdateKeyWithNameAndAction", - client: defaultClient, - keyToCreate: Key{ - Actions: []string{"search"}, - Indexes: 
[]string{"*"}, - }, - keyToUpdate: Key{ - Name: "TestUpdateKeyWithName", - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - const Format = "2006-01-02T15:04:05" - c := tt.client - t.Cleanup(cleanup(c)) - - gotResp, err := c.CreateKey(&tt.keyToCreate) - require.NoError(t, err) - - if tt.keyToCreate.Description != "" { - require.Equal(t, tt.keyToCreate.Description, gotResp.Description) - } - if len(tt.keyToCreate.Actions) != 0 { - require.Equal(t, tt.keyToCreate.Actions, gotResp.Actions) - } - if len(tt.keyToCreate.Indexes) != 0 { - require.Equal(t, tt.keyToCreate.Indexes, gotResp.Indexes) - } - if !tt.keyToCreate.ExpiresAt.IsZero() { - require.Equal(t, tt.keyToCreate.ExpiresAt.Format(Format), gotResp.ExpiresAt.Format(Format)) - } - - gotKey, err := c.UpdateKey(gotResp.Key, &tt.keyToUpdate) - require.NoError(t, err) - - if tt.keyToUpdate.Description != "" { - require.Equal(t, tt.keyToUpdate.Description, gotKey.Description) - } - if len(tt.keyToUpdate.Actions) != 0 { - require.Equal(t, tt.keyToUpdate.Actions, gotKey.Actions) - } - if len(tt.keyToUpdate.Indexes) != 0 { - require.Equal(t, tt.keyToUpdate.Indexes, gotKey.Indexes) - } - if tt.keyToUpdate.Description != "" { - require.Equal(t, tt.keyToUpdate.Name, gotKey.Name) - } - }) - } -} - -func TestClient_DeleteKey(t *testing.T) { - tests := []struct { - name string - client *Client - key Key - }{ - { - name: "TestDeleteBasicKey", - client: defaultClient, - key: Key{ - Actions: []string{"*"}, - Indexes: []string{"*"}, - }, - }, - { - name: "TestDeleteKeyWithCustomClient", - client: customClient, - key: Key{ - Actions: []string{"*"}, - Indexes: []string{"*"}, - }, - }, - { - name: "TestDeleteKeyWithExpirationAt", - client: defaultClient, - key: Key{ - Actions: []string{"*"}, - Indexes: []string{"*"}, - ExpiresAt: time.Now().Add(time.Hour * 10), - }, - }, - { - name: "TestDeleteKeyWithDescription", - client: defaultClient, - key: Key{ - Description: "TestDeleteKeyWithDescription", - 
Actions: []string{"*"}, - Indexes: []string{"*"}, - }, - }, - { - name: "TestDeleteKeyWithActions", - client: defaultClient, - key: Key{ - Description: "TestDeleteKeyWithActions", - Actions: []string{"documents.add", "documents.delete"}, - Indexes: []string{"*"}, - }, - }, - { - name: "TestDeleteKeyWithIndexes", - client: defaultClient, - key: Key{ - Description: "TestDeleteKeyWithIndexes", - Actions: []string{"*"}, - Indexes: []string{"movies", "games"}, - }, - }, - { - name: "TestDeleteKeyWithAllOptions", - client: defaultClient, - key: Key{ - Description: "TestDeleteKeyWithAllOptions", - Actions: []string{"documents.add", "documents.delete"}, - Indexes: []string{"movies", "games"}, - ExpiresAt: time.Now().Add(time.Hour * 10), - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.client - - gotKey, err := c.CreateKey(&tt.key) - require.NoError(t, err) - - gotResp, err := c.DeleteKey(gotKey.Key) - require.NoError(t, err) - require.True(t, gotResp) - - gotResp, err = c.DeleteKey(gotKey.Key) - require.Error(t, err) - require.False(t, gotResp) - }) - } -} - -func TestClient_Health(t *testing.T) { - tests := []struct { - name string - client *Client - wantResp *Health - wantErr bool - }{ - { - name: "TestHealth", - client: defaultClient, - wantResp: &Health{ - Status: "available", - }, - wantErr: false, - }, - { - name: "TestHealthWithCustomClient", - client: customClient, - wantResp: &Health{ - Status: "available", - }, - wantErr: false, - }, - { - name: "TestHealthWithBadUrl", - client: &Client{ - config: ClientConfig{ - Host: "http://wrongurl:1234", - APIKey: masterKey, - }, - httpClient: &fasthttp.Client{ - Name: "meilisearch-client", - }, - }, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - gotResp, err := tt.client.Health() - if tt.wantErr { - require.Error(t, err) - } else { - require.NoError(t, err) - require.Equal(t, tt.wantResp, gotResp, "Health() got response %v, want %v", 
gotResp, tt.wantResp) - } - }) - } -} - -func TestClient_IsHealthy(t *testing.T) { - tests := []struct { - name string - client *Client - want bool - }{ - { - name: "TestIsHealthy", - client: defaultClient, - want: true, - }, - { - name: "TestIsHealthyWithCustomClient", - client: customClient, - want: true, - }, - { - name: "TestIsHealthyWIthBadUrl", - client: &Client{ - config: ClientConfig{ - Host: "http://wrongurl:1234", - APIKey: masterKey, - }, - httpClient: &fasthttp.Client{ - Name: "meilisearch-client", - }, - }, - want: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got := tt.client.IsHealthy() - require.Equal(t, tt.want, got, "IsHealthy() got response %v, want %v", got, tt.want) - }) - } +// Mock structures for testing +type mockResponse struct { + Message string `json:"message"` } -func TestClient_CreateDump(t *testing.T) { - tests := []struct { - name string - client *Client - wantResp *Task - }{ - { - name: "TestCreateDump", - client: defaultClient, - wantResp: &Task{ - Status: "enqueued", - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.client - - task, err := c.CreateDump() - require.NoError(t, err) - if assert.NotNil(t, task, "CreateDump() should not return nil value") { - require.Equal(t, tt.wantResp.Status, task.Status, "CreateDump() got response status %v, want: %v", task.Status, tt.wantResp.Status) - } - - taskInfo, err := c.WaitForTask(task.TaskUID) +func TestExecuteRequest(t *testing.T) { + // Create a mock server + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method == http.MethodGet && r.URL.Path == "/test-get" { + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(`{"message":"get successful"}`)) + } else if r.Method == http.MethodPost && r.URL.Path == "/test-post" { + w.WriteHeader(http.StatusCreated) + _, _ = w.Write([]byte(`{"message":"post successful"}`)) + } else { + w.WriteHeader(http.StatusNotFound) + } 
+ })) + defer ts.Close() - require.NoError(t, err) - require.NotNil(t, taskInfo) - require.NotNil(t, taskInfo.Details) - require.Equal(t, TaskStatusSucceeded, taskInfo.Status) - require.NotEmpty(t, taskInfo.Details.DumpUid) - }) - } -} + client := newClient(&http.Client{}, ts.URL, "testApiKey") -func TestClient_GetTask(t *testing.T) { - type args struct { - UID string - client *Client - taskUID int64 - document []docTest - } tests := []struct { - name string - args args + name string + internalReq *internalRequest + expectedResp interface{} + expectedErr error }{ { - name: "TestBasicGetTask", - args: args{ - UID: "TestBasicGetTask", - client: defaultClient, - taskUID: 0, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, + name: "Successful GET request", + internalReq: &internalRequest{ + endpoint: "/test-get", + method: http.MethodGet, + withResponse: &mockResponse{}, + acceptedStatusCodes: []int{http.StatusOK}, }, + expectedResp: &mockResponse{Message: "get successful"}, + expectedErr: nil, }, { - name: "TestGetTaskWithCustomClient", - args: args{ - UID: "TestGetTaskWithCustomClient", - client: customClient, - taskUID: 1, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, + name: "Successful POST request", + internalReq: &internalRequest{ + endpoint: "/test-post", + method: http.MethodPost, + withRequest: map[string]string{"key": "value"}, + contentType: contentTypeJSON, + withResponse: &mockResponse{}, + acceptedStatusCodes: []int{http.StatusCreated}, }, + expectedResp: &mockResponse{Message: "post successful"}, + expectedErr: nil, }, { - name: "TestGetTask", - args: args{ - UID: "TestGetTask", - client: defaultClient, - taskUID: 2, - document: []docTest{ - {ID: "456", Name: "Le Petit Prince"}, - {ID: "1", Name: "Alice In Wonderland"}, - }, + name: "404 Not Found", + internalReq: &internalRequest{ + endpoint: "/not-found", + method: http.MethodGet, + withResponse: &mockResponse{}, + acceptedStatusCodes: 
[]int{http.StatusOK}, }, + expectedResp: nil, + expectedErr: &Error{StatusCode: http.StatusNotFound}, }, } - t.Cleanup(cleanup(defaultClient)) - for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - c := tt.args.client - i := c.Index(tt.args.UID) - t.Cleanup(cleanup(c)) - - task, err := i.AddDocuments(tt.args.document) - require.NoError(t, err) - - _, err = c.WaitForTask(task.TaskUID) - require.NoError(t, err) - - gotResp, err := c.GetTask(task.TaskUID) - require.NoError(t, err) - require.NotNil(t, gotResp) - require.NotNil(t, gotResp.Details) - require.GreaterOrEqual(t, gotResp.UID, tt.args.taskUID) - require.Equal(t, tt.args.UID, gotResp.IndexUID) - require.Equal(t, TaskStatusSucceeded, gotResp.Status) - require.Equal(t, int64(len(tt.args.document)), gotResp.Details.ReceivedDocuments) - require.Equal(t, int64(len(tt.args.document)), gotResp.Details.IndexedDocuments) - - // Make sure that timestamps are also retrieved - require.NotZero(t, gotResp.EnqueuedAt) - require.NotZero(t, gotResp.StartedAt) - require.NotZero(t, gotResp.FinishedAt) - }) - } -} - -func TestClient_GetTasks(t *testing.T) { - type args struct { - UID string - client *Client - document []docTest - query *TasksQuery - } - tests := []struct { - name string - args args - }{ - { - name: "TestBasicGetTasks", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: nil, - }, - }, - { - name: "TestGetTasksWithCustomClient", - args: args{ - UID: "indexUID", - client: customClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: nil, - }, - }, - { - name: "TestGetTasksWithLimit", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: &TasksQuery{ - Limit: 1, - }, - }, - }, - { - name: "TestGetTasksWithLimit", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: 
"123", Name: "Pride and Prejudice"}, - }, - query: &TasksQuery{ - Limit: 1, - }, - }, - }, - { - name: "TestGetTasksWithFrom", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: &TasksQuery{ - From: 0, - }, - }, - }, - { - name: "TestGetTasksWithParameters", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: &TasksQuery{ - Limit: 1, - From: 0, - IndexUIDS: []string{"indexUID"}, - }, - }, - }, - { - name: "TestGetTasksWithUidFilter", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: &TasksQuery{ - Limit: 1, - UIDS: []int64{1}, - }, - }, - }, - { - name: "TestGetTasksWithDateFilter", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: &TasksQuery{ - Limit: 1, - BeforeEnqueuedAt: time.Now(), - }, - }, - }, - { - name: "TestGetTasksWithCanceledByFilter", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: &TasksQuery{ - Limit: 1, - CanceledBy: []int64{1}, - }, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.args.client - i := c.Index(tt.args.UID) - t.Cleanup(cleanup(c)) - - task, err := i.AddDocuments(tt.args.document) - require.NoError(t, err) - - _, err = c.WaitForTask(task.TaskUID) - require.NoError(t, err) - - gotResp, err := i.GetTasks(tt.args.query) - require.NoError(t, err) - require.NotNil(t, (*gotResp).Results[0].Status) - require.NotZero(t, (*gotResp).Results[0].UID) - require.NotNil(t, (*gotResp).Results[0].Type) - if tt.args.query != nil { - if tt.args.query.Limit != 0 { - require.Equal(t, tt.args.query.Limit, (*gotResp).Limit) - } else { - require.Equal(t, int64(20), 
(*gotResp).Limit) - } - if tt.args.query.From != 0 { - require.Equal(t, tt.args.query.From, (*gotResp).From) - } - } - }) - } -} - -func TestClient_GetTasksUsingClient(t *testing.T) { - type args struct { - UID string - client *Client - document []docTest - query *TasksQuery - expectedResults int - } - - tests := []struct { - name string - args args - }{ - { - name: "TestBasicGetTasks", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: nil, - expectedResults: 1, - }, - }, - { - name: "TestGetTasksWithCustomClient", - args: args{ - UID: "indexUID", - client: customClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: nil, - expectedResults: 1, - }, - }, - { - name: "TestGetTasksWithLimit", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: &TasksQuery{ - Limit: 1, - }, - expectedResults: 1, - }, - }, - { - name: "TestGetTasksWithLimit", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: &TasksQuery{ - Limit: 1, - }, - expectedResults: 1, - }, - }, - { - name: "TestGetTasksWithFrom", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: &TasksQuery{ - From: 0, - }, - expectedResults: 1, - }, - }, - { - name: "TestGetTasksWithFrom_1", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: &TasksQuery{ - From: 1, - }, - expectedResults: 0, - }, - }, - { - name: "TestGetTasksWithParameters", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: &TasksQuery{ - Limit: 1, - From: 0, - IndexUIDS: []string{"indexUID"}, - }, - 
expectedResults: 1, - }, - }, - { - name: "TestGetTasksWithDateFilter", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: &TasksQuery{ - Limit: 1, - BeforeEnqueuedAt: time.Now(), - }, - expectedResults: 1, - }, - }, - - { - name: "TestGetTasksWithBeforeStartedAt", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: &TasksQuery{ - Limit: 1, - BeforeStartedAt: time.Now(), - }, - expectedResults: 1, - }, - }, - { - name: "TestGetTasksWithAfterStartedAt", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: &TasksQuery{ - Limit: 1, - AfterStartedAt: time.Now().Add(-time.Hour), - }, - expectedResults: 0, - }, - }, - { - name: "TestGetTasksWithBeforeFinishedAt", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: &TasksQuery{ - Limit: 1, - BeforeFinishedAt: time.Now().Add(time.Hour), - }, - expectedResults: 1, - }, - }, - { - name: "TestGetTasksWithAfterFinishedAt", - args: args{ - UID: "indexUID", - client: defaultClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: &TasksQuery{ - Limit: 1, - AfterFinishedAt: time.Now().Add(-time.Hour), - }, - expectedResults: 0, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.args.client - i := c.Index(tt.args.UID) - - t.Cleanup(cleanup(c)) - - task, err := i.AddDocuments(tt.args.document) - require.NoError(t, err) - - _, err = c.WaitForTask(task.TaskUID) - require.NoError(t, err) - - gotResp, err := c.GetTasks(tt.args.query) - require.NoError(t, err) - require.NotNil(t, gotResp) - // require.Equal(t, tt.args.expectedResults, len((*gotResp).Results)) - - if tt.args.expectedResults > 0 { - require.NotNil(t, 
(*gotResp).Results[0].Status) - require.NotZero(t, (*gotResp).Results[0].UID) - require.NotNil(t, (*gotResp).Results[0].Type) - } - if tt.args.query != nil { - if tt.args.query.Limit != 0 { - require.Equal(t, tt.args.query.Limit, (*gotResp).Limit) - } else { - require.Equal(t, int64(20), (*gotResp).Limit) - } - if tt.args.query.From != 0 && tt.args.expectedResults > 0 { - require.Equal(t, tt.args.query.From, (*gotResp).From) - } - } - }) - } -} - -func TestClient_GetTasksUsingClientAllFailures(t *testing.T) { - type args struct { - UID string - client *Client - document []docTest - query *TasksQuery - expectedResults int - } - - tests := []struct { - name string - args args - }{ - { - name: "TestBasicGetTasks", - args: args{ - UID: "indexUID", - client: brokenClient, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - }, - query: nil, - expectedResults: 1, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.args.client - t.Cleanup(cleanup(c)) - i := c.Index("NOT_EXISTS") - - _, err := c.DeleteIndex("NOT_EXISTS") - require.Error(t, err) - - _, err = c.WaitForTask(math.MaxInt32) - require.Error(t, err) - - _, err = i.AddDocuments(tt.args.document) - require.Error(t, err) - - _, err = c.GetTasks(tt.args.query) - require.Error(t, err) - - _, err = c.GetStats() - require.Error(t, err) - - _, err = c.CreateKey(&Key{ - Name: "Wrong", - }) - require.Error(t, err) - - _, err = c.GetKey("Wrong") - require.Error(t, err) - - _, err = c.UpdateKey("Wrong", &Key{ - Name: "Wrong", - }) - require.Error(t, err) - - _, err = c.CreateDump() - require.Error(t, err) - - _, err = c.GetTask(1) - require.Error(t, err) - - _, err = c.DeleteTasks(nil) - require.Error(t, err) - - _, err = c.SwapIndexes([]SwapIndexesParams{ - {Indexes: []string{"Wrong", "Worse"}}, - }) - require.Error(t, err) - }) - } -} - -func TestClient_CancelTasks(t *testing.T) { - type args struct { - UID string - client *Client - query *CancelTasksQuery - } - tests 
:= []struct { - name string - args args - want string - }{ - { - name: "TestCancelTasksWithNoFilters", - args: args{ - UID: "indexUID", - client: defaultClient, - query: nil, - }, - want: "", - }, - { - name: "TestCancelTasksWithStatutes", - args: args{ - UID: "indexUID", - client: defaultClient, - query: &CancelTasksQuery{ - Statuses: []TaskStatus{TaskStatusSucceeded}, - }, - }, - want: "?statuses=succeeded", - }, - { - name: "TestCancelTasksWithIndexUIDFilter", - args: args{ - UID: "indexUID", - client: defaultClient, - query: &CancelTasksQuery{ - IndexUIDS: []string{"0"}, - }, - }, - want: "?indexUids=0", - }, - { - name: "TestCancelTasksWithMultipleIndexUIDsFilter", - args: args{ - UID: "indexUID", - client: defaultClient, - query: &CancelTasksQuery{ - IndexUIDS: []string{"0", "1"}, - }, - }, - want: "?indexUids=0%2C1", - }, - { - name: "TestCancelTasksWithUidFilter", - args: args{ - UID: "indexUID", - client: defaultClient, - query: &CancelTasksQuery{ - UIDS: []int64{0}, - }, - }, - want: "?uids=0", - }, - { - name: "TestCancelTasksWithMultipleUidsFilter", - args: args{ - UID: "indexUID", - client: defaultClient, - query: &CancelTasksQuery{ - UIDS: []int64{0, 1}, - }, - }, - want: "?uids=0%2C1", - }, - { - name: "TestCancelTasksWithDateFilter", - args: args{ - UID: "indexUID", - client: defaultClient, - query: &CancelTasksQuery{ - BeforeEnqueuedAt: time.Now(), - }, - }, - want: strings.NewReplacer(":", "%3A").Replace("?beforeEnqueuedAt=" + time.Now().Format("2006-01-02T15:04:05Z")), - }, - { - name: "TestCancelTasksWithParameters", - args: args{ - UID: "indexUID", - client: defaultClient, - query: &CancelTasksQuery{ - Statuses: []TaskStatus{TaskStatusEnqueued}, - Types: []TaskType{TaskTypeDocumentAdditionOrUpdate}, - IndexUIDS: []string{"indexUID"}, - UIDS: []int64{1}, - AfterEnqueuedAt: time.Now(), - }, - }, - want: "?afterEnqueuedAt=" + strings.NewReplacer(":", "%3A").Replace(time.Now().Format("2006-01-02T15:04:05Z")) + 
"&indexUids=indexUID&statuses=enqueued&types=documentAdditionOrUpdate&uids=1", - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.args.client - t.Cleanup(cleanup(c)) - - gotResp, err := c.CancelTasks(tt.args.query) - if tt.args.query == nil { - require.Error(t, err) - require.Equal(t, "missing_task_filters", - err.(*Error).MeilisearchApiError.Code) - } else { - require.NoError(t, err) - - _, err = c.WaitForTask(gotResp.TaskUID) - require.NoError(t, err) - - gotTask, err := c.GetTask(gotResp.TaskUID) - require.NoError(t, err) - - require.NotNil(t, gotResp.Status) - require.NotNil(t, gotResp.Type) - require.NotNil(t, gotResp.TaskUID) - require.NotNil(t, gotResp.EnqueuedAt) - require.Equal(t, "", gotResp.IndexUID) - require.Equal(t, TaskTypeTaskCancelation, gotResp.Type) - require.Equal(t, tt.want, gotTask.Details.OriginalFilter) - } - }) - } -} - -func TestClient_DeleteTasks(t *testing.T) { - type args struct { - UID string - client *Client - query *DeleteTasksQuery - } - tests := []struct { - name string - args args - want string - }{ - { - name: "TestBasicDeleteTasks", - args: args{ - UID: "indexUID", - client: defaultClient, - query: &DeleteTasksQuery{ - Statuses: []TaskStatus{TaskStatusEnqueued}, - }, - }, - want: "?statuses=enqueued", - }, - { - name: "TestDeleteTasksWithUidFilter", - args: args{ - UID: "indexUID", - client: defaultClient, - query: &DeleteTasksQuery{ - UIDS: []int64{1}, - }, - }, - want: "?uids=1", - }, - { - name: "TestDeleteTasksWithMultipleUidsFilter", - args: args{ - UID: "indexUID", - client: defaultClient, - query: &DeleteTasksQuery{ - UIDS: []int64{0, 1}, - }, - }, - want: "?uids=0%2C1", - }, - { - name: "TestDeleteTasksWithIndexUIDFilter", - args: args{ - UID: "indexUID", - client: defaultClient, - query: &DeleteTasksQuery{ - IndexUIDS: []string{"0"}, - }, - }, - want: "?indexUids=0", - }, - { - name: "TestDeleteTasksWithMultipleIndexUIDsFilter", - args: args{ - UID: "indexUID", - client: defaultClient, 
- query: &DeleteTasksQuery{ - IndexUIDS: []string{"0", "1"}, - }, - }, - want: "?indexUids=0%2C1", - }, - { - name: "TestDeleteTasksWithDateFilter", - args: args{ - UID: "indexUID", - client: defaultClient, - query: &DeleteTasksQuery{ - BeforeEnqueuedAt: time.Now(), - }, - }, - want: strings.NewReplacer(":", "%3A").Replace("?beforeEnqueuedAt=" + time.Now().Format("2006-01-02T15:04:05Z")), - }, - { - name: "TestDeleteTasksWithCanceledByFilter", - args: args{ - UID: "indexUID", - client: defaultClient, - query: &DeleteTasksQuery{ - CanceledBy: []int64{1}, - }, - }, - want: "?canceledBy=1", - }, - { - name: "TestDeleteTasksWithParameters", - args: args{ - UID: "indexUID", - client: defaultClient, - query: &DeleteTasksQuery{ - Statuses: []TaskStatus{TaskStatusEnqueued}, - IndexUIDS: []string{"indexUID"}, - UIDS: []int64{1}, - AfterEnqueuedAt: time.Now(), - }, - }, - want: "?afterEnqueuedAt=" + strings.NewReplacer(":", "%3A").Replace(time.Now().Format("2006-01-02T15:04:05Z")) + "&indexUids=indexUID&statuses=enqueued&uids=1", - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.args.client - t.Cleanup(cleanup(c)) - - gotResp, err := c.DeleteTasks(tt.args.query) - require.NoError(t, err) - - _, err = c.WaitForTask(gotResp.TaskUID) - require.NoError(t, err) - - gotTask, err := c.GetTask(gotResp.TaskUID) - require.NoError(t, err) - - require.NotNil(t, gotResp.Status) - require.NotNil(t, gotResp.Type) - require.NotNil(t, gotResp.TaskUID) - require.NotNil(t, gotResp.EnqueuedAt) - require.Equal(t, "", gotResp.IndexUID) - require.Equal(t, TaskTypeTaskDeletion, gotResp.Type) - require.NotNil(t, gotTask.Details.OriginalFilter) - require.Equal(t, tt.want, gotTask.Details.OriginalFilter) - }) - } -} - -func TestClient_SwapIndexes(t *testing.T) { - type args struct { - UID string - client *Client - query []SwapIndexesParams - } - tests := []struct { - name string - args args - }{ - { - name: "TestBasicSwapIndexes", - args: args{ - UID: "indexUID", - 
client: defaultClient, - query: []SwapIndexesParams{ - {Indexes: []string{"IndexA", "IndexB"}}, - }, - }, - }, - { - name: "TestSwapIndexesWithMultipleIndexes", - args: args{ - UID: "indexUID", - client: defaultClient, - query: []SwapIndexesParams{ - {Indexes: []string{"IndexA", "IndexB"}}, - {Indexes: []string{"Index1", "Index2"}}, - }, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.args.client - t.Cleanup(cleanup(c)) - - gotResp, err := c.SwapIndexes(tt.args.query) - require.NoError(t, err) - - _, err = c.WaitForTask(gotResp.TaskUID) - require.NoError(t, err) - - gotTask, err := c.GetTask(gotResp.TaskUID) - require.NoError(t, err) - - require.NotNil(t, gotResp.Status) - require.NotNil(t, gotResp.Type) - require.NotNil(t, gotResp.TaskUID) - require.NotNil(t, gotResp.EnqueuedAt) - require.Equal(t, "", gotResp.IndexUID) - require.Equal(t, TaskTypeIndexSwap, gotResp.Type) - require.Equal(t, tt.args.query, gotTask.Details.Swaps) - }) - } -} - -func TestClient_DefaultWaitForTask(t *testing.T) { - type args struct { - UID string - client *Client - taskUID *Task - document []docTest - } - tests := []struct { - name string - args args - want TaskStatus - }{ - { - name: "TestDefaultWaitForTask", - args: args{ - UID: "TestDefaultWaitForTask", - client: defaultClient, - taskUID: &Task{ - UID: 0, - }, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - {ID: "456", Name: "Le Petit Prince"}, - {ID: "1", Name: "Alice In Wonderland"}, - }, - }, - want: "succeeded", - }, - { - name: "TestDefaultWaitForTaskWithCustomClient", - args: args{ - UID: "TestDefaultWaitForTaskWithCustomClient", - client: customClient, - taskUID: &Task{ - UID: 0, - }, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - {ID: "456", Name: "Le Petit Prince"}, - {ID: "1", Name: "Alice In Wonderland"}, - }, - }, - want: "succeeded", - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.args.client - 
t.Cleanup(cleanup(c)) - - task, err := c.Index(tt.args.UID).AddDocuments(tt.args.document) - require.NoError(t, err) - - gotTask, err := c.WaitForTask(task.TaskUID) - require.NoError(t, err) - require.Equal(t, tt.want, gotTask.Status) - }) - } -} - -func TestClient_WaitForTaskWithContext(t *testing.T) { - type args struct { - UID string - client *Client - interval time.Duration - timeout time.Duration - taskUID *Task - document []docTest - } - tests := []struct { - name string - args args - want TaskStatus - }{ - { - name: "TestWaitForTask50", - args: args{ - UID: "TestWaitForTask50", - client: defaultClient, - interval: time.Millisecond * 50, - timeout: time.Second * 5, - taskUID: &Task{ - UID: 0, - }, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - {ID: "456", Name: "Le Petit Prince"}, - {ID: "1", Name: "Alice In Wonderland"}, - }, - }, - want: "succeeded", - }, - { - name: "TestWaitForTask50WithCustomClient", - args: args{ - UID: "TestWaitForTask50WithCustomClient", - client: customClient, - interval: time.Millisecond * 50, - timeout: time.Second * 5, - taskUID: &Task{ - UID: 0, - }, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - {ID: "456", Name: "Le Petit Prince"}, - {ID: "1", Name: "Alice In Wonderland"}, - }, - }, - want: "succeeded", - }, - { - name: "TestWaitForTask10", - args: args{ - UID: "TestWaitForTask10", - client: defaultClient, - interval: time.Millisecond * 10, - timeout: time.Second * 5, - taskUID: &Task{ - UID: 1, - }, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - {ID: "456", Name: "Le Petit Prince"}, - {ID: "1", Name: "Alice In Wonderland"}, - }, - }, - want: "succeeded", - }, - { - name: "TestWaitForTaskWithTimeout", - args: args{ - UID: "TestWaitForTaskWithTimeout", - client: defaultClient, - interval: time.Millisecond * 50, - timeout: time.Millisecond * 10, - taskUID: &Task{ - UID: 1, - }, - document: []docTest{ - {ID: "123", Name: "Pride and Prejudice"}, - {ID: "456", Name: 
"Le Petit Prince"}, - {ID: "1", Name: "Alice In Wonderland"}, - }, - }, - want: "succeeded", - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.args.client - t.Cleanup(cleanup(c)) - - task, err := c.Index(tt.args.UID).AddDocuments(tt.args.document) - require.NoError(t, err) - - ctx, cancelFunc := context.WithTimeout(context.Background(), tt.args.timeout) - defer cancelFunc() - - gotTask, err := c.WaitForTask(task.TaskUID, WaitParams{Context: ctx, Interval: tt.args.interval}) - if tt.args.timeout < tt.args.interval { - require.Error(t, err) - } else { - require.NoError(t, err) - require.Equal(t, tt.want, gotTask.Status) - } - }) - } -} - -func TestClient_ConnectionCloseByServer(t *testing.T) { - meili := NewClient(ClientConfig{Host: getenv("MEILISEARCH_URL", "http://localhost:7700")}) - - // Simulate 10 clients sending requests. - g := sync.WaitGroup{} - for i := 0; i < 10; i++ { - g.Add(1) - go func() { - defer g.Done() - - _, _ = meili.Index("foo").Search("bar", &SearchRequest{}) - time.Sleep(5 * time.Second) - _, err := meili.Index("foo").Search("bar", &SearchRequest{}) - if e, ok := err.(*Error); ok && e.ErrCode == MeilisearchCommunicationError { - require.NoErrorf(t, e, "unexpected error") - } - }() - } - g.Wait() -} - -func TestClient_GenerateTenantToken(t *testing.T) { - type args struct { - IndexUIDS string - client *Client - APIKeyUID string - searchRules map[string]interface{} - options *TenantTokenOptions - filter []string - } - tests := []struct { - name string - args args - wantErr bool - wantFilter bool - }{ - { - name: "TestDefaultGenerateTenantToken", - args: args{ - IndexUIDS: "TestDefaultGenerateTenantToken", - client: privateClient, - APIKeyUID: GetPrivateUIDKey(), - searchRules: map[string]interface{}{ - "*": map[string]string{}, - }, - options: nil, - filter: nil, - }, - wantErr: false, - wantFilter: false, - }, - { - name: "TestGenerateTenantTokenWithApiKey", - args: args{ - IndexUIDS: 
"TestGenerateTenantTokenWithApiKey", - client: defaultClient, - APIKeyUID: GetPrivateUIDKey(), - searchRules: map[string]interface{}{ - "*": map[string]string{}, - }, - options: &TenantTokenOptions{ - APIKey: GetPrivateKey(), - }, - filter: nil, - }, - wantErr: false, - wantFilter: false, - }, - { - name: "TestGenerateTenantTokenWithOnlyExpiresAt", - args: args{ - IndexUIDS: "TestGenerateTenantTokenWithOnlyExpiresAt", - client: privateClient, - APIKeyUID: GetPrivateUIDKey(), - searchRules: map[string]interface{}{ - "*": map[string]string{}, - }, - options: &TenantTokenOptions{ - ExpiresAt: time.Now().Add(time.Hour * 10), - }, - filter: nil, - }, - wantErr: false, - wantFilter: false, - }, - { - name: "TestGenerateTenantTokenWithApiKeyAndExpiresAt", - args: args{ - IndexUIDS: "TestGenerateTenantTokenWithApiKeyAndExpiresAt", - client: defaultClient, - APIKeyUID: GetPrivateUIDKey(), - searchRules: map[string]interface{}{ - "*": map[string]string{}, - }, - options: &TenantTokenOptions{ - APIKey: GetPrivateKey(), - ExpiresAt: time.Now().Add(time.Hour * 10), - }, - filter: nil, - }, - wantErr: false, - wantFilter: false, - }, - { - name: "TestGenerateTenantTokenWithFilters", - args: args{ - IndexUIDS: "indexUID", - client: privateClient, - APIKeyUID: GetPrivateUIDKey(), - searchRules: map[string]interface{}{ - "*": map[string]string{ - "filter": "book_id > 1000", - }, - }, - options: nil, - filter: []string{ - "book_id", - }, - }, - wantErr: false, - wantFilter: true, - }, - { - name: "TestGenerateTenantTokenWithFilterOnOneINdex", - args: args{ - IndexUIDS: "indexUID", - client: privateClient, - APIKeyUID: GetPrivateUIDKey(), - searchRules: map[string]interface{}{ - "indexUID": map[string]string{ - "filter": "year > 2000", - }, - }, - options: nil, - filter: []string{ - "year", - }, - }, - wantErr: false, - wantFilter: true, - }, - { - name: "TestGenerateTenantTokenWithoutSearchRules", - args: args{ - IndexUIDS: "TestGenerateTenantTokenWithoutSearchRules", - client: 
privateClient, - APIKeyUID: GetPrivateUIDKey(), - searchRules: nil, - options: nil, - filter: nil, - }, - wantErr: true, - wantFilter: false, - }, - { - name: "TestGenerateTenantTokenWithoutApiKey", - args: args{ - IndexUIDS: "TestGenerateTenantTokenWithoutApiKey", - client: NewClient(ClientConfig{ - Host: getenv("MEILISEARCH_URL", "http://localhost:7700"), - APIKey: "", - }), - APIKeyUID: GetPrivateUIDKey(), - searchRules: map[string]interface{}{ - "*": map[string]string{}, - }, - options: nil, - filter: nil, - }, - wantErr: true, - wantFilter: false, - }, - { - name: "TestGenerateTenantTokenWithBadExpiresAt", - args: args{ - IndexUIDS: "TestGenerateTenantTokenWithBadExpiresAt", - client: defaultClient, - APIKeyUID: GetPrivateUIDKey(), - searchRules: map[string]interface{}{ - "*": map[string]string{}, - }, - options: &TenantTokenOptions{ - ExpiresAt: time.Now().Add(-time.Hour * 10), - }, - filter: nil, - }, - wantErr: true, - wantFilter: false, - }, - { - name: "TestGenerateTenantTokenWithBadAPIKeyUID", - args: args{ - IndexUIDS: "TestGenerateTenantTokenWithBadAPIKeyUID", - client: defaultClient, - APIKeyUID: GetPrivateUIDKey() + "1234", - searchRules: map[string]interface{}{ - "*": map[string]string{}, - }, - options: nil, - filter: nil, - }, - wantErr: true, - wantFilter: false, - }, - { - name: "TestGenerateTenantTokenWithEmptyAPIKeyUID", - args: args{ - IndexUIDS: "TestGenerateTenantTokenWithEmptyAPIKeyUID", - client: defaultClient, - APIKeyUID: "", - searchRules: map[string]interface{}{ - "*": map[string]string{}, - }, - options: nil, - filter: nil, - }, - wantErr: true, - wantFilter: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.args.client - t.Cleanup(cleanup(c)) - - token, err := c.GenerateTenantToken(tt.args.APIKeyUID, tt.args.searchRules, tt.args.options) - - if tt.wantErr { - require.Error(t, err) - } else { - require.NoError(t, err) - - if tt.wantFilter { - gotTask, err := 
c.Index(tt.args.IndexUIDS).UpdateFilterableAttributes(&tt.args.filter) - require.NoError(t, err, "UpdateFilterableAttributes() in TestGenerateTenantToken error should be nil") - testWaitForTask(t, c.Index(tt.args.IndexUIDS), gotTask) - } else { - _, err := SetUpEmptyIndex(&IndexConfig{Uid: tt.args.IndexUIDS}) - require.NoError(t, err, "CreateIndex() in TestGenerateTenantToken error should be nil") - } - - client := NewClient(ClientConfig{ - Host: getenv("MEILISEARCH_URL", "http://localhost:7700"), - APIKey: token, - }) - - _, err = client.Index(tt.args.IndexUIDS).Search("", &SearchRequest{}) - - require.NoError(t, err) - } - }) - } -} - -func TestClient_MultiSearch(t *testing.T) { - type args struct { - client *Client - queries *MultiSearchRequest - UIDS []string - } - tests := []struct { - name string - args args - want *MultiSearchResponse - wantErr bool - }{ - { - name: "TestClientMultiSearchOneIndex", - args: args{ - client: defaultClient, - queries: &MultiSearchRequest{ - []*SearchRequest{ - { - IndexUID: "TestClientMultiSearchOneIndex", - Query: "wonder", - }, - }, - }, - UIDS: []string{"TestClientMultiSearchOneIndex"}, - }, - want: &MultiSearchResponse{ - Results: []SearchResponse{ - { - Hits: []interface{}{ - map[string]interface{}{ - "book_id": float64(1), - "title": "Alice In Wonderland", - }, - }, - EstimatedTotalHits: 1, - Offset: 0, - Limit: 20, - Query: "wonder", - IndexUID: "TestClientMultiSearchOneIndex", - }, - }, - }, - }, - { - name: "TestClientMultiSearchOnTwoIndexes", - args: args{ - client: defaultClient, - queries: &MultiSearchRequest{ - []*SearchRequest{ - { - IndexUID: "TestClientMultiSearchOnTwoIndexes1", - Query: "wonder", - }, - { - IndexUID: "TestClientMultiSearchOnTwoIndexes2", - Query: "prince", - }, - }, - }, - UIDS: []string{"TestClientMultiSearchOnTwoIndexes1", "TestClientMultiSearchOnTwoIndexes2"}, - }, - want: &MultiSearchResponse{ - Results: []SearchResponse{ - { - Hits: []interface{}{ - map[string]interface{}{ - "book_id": 
float64(1), - "title": "Alice In Wonderland", - }, - }, - EstimatedTotalHits: 1, - Offset: 0, - Limit: 20, - Query: "wonder", - IndexUID: "TestClientMultiSearchOnTwoIndexes1", - }, - { - Hits: []interface{}{ - map[string]interface{}{ - "book_id": float64(456), - "title": "Le Petit Prince", - }, - map[string]interface{}{ - "book_id": float64(4), - "title": "Harry Potter and the Half-Blood Prince", - }, - }, - EstimatedTotalHits: 2, - Offset: 0, - Limit: 20, - Query: "prince", - IndexUID: "TestClientMultiSearchOnTwoIndexes2", - }, - }, - }, - }, - { - name: "TestClientMultiSearchNoIndex", - args: args{ - client: defaultClient, - queries: &MultiSearchRequest{ - []*SearchRequest{ - { - Query: "", - }, - }, - }, - UIDS: []string{"TestClientMultiSearchNoIndex"}, - }, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - for _, UID := range tt.args.UIDS { - SetUpBasicIndex(UID) - } - c := tt.args.client - t.Cleanup(cleanup(c)) - - got, err := c.MultiSearch(tt.args.queries) - - if tt.wantErr { - require.Error(t, err) + err := client.executeRequest(context.Background(), tt.internalReq) + if tt.expectedErr != nil { + assert.Error(t, err) + var apiErr *Error + assert.ErrorAs(t, err, &apiErr) + assert.Equal(t, tt.expectedErr.(*Error).StatusCode, apiErr.StatusCode) } else { - require.NotNil(t, got) - for i := 0; i < len(tt.want.Results); i++ { - if !reflect.DeepEqual(got.Results[i].Hits, tt.want.Results[i].Hits) { - t.Errorf("Client.MultiSearch() = %v, want %v", got.Results[i].Hits, tt.want.Results[i].Hits) - } - require.Equal(t, tt.want.Results[i].EstimatedTotalHits, got.Results[i].EstimatedTotalHits) - require.Equal(t, tt.want.Results[i].Offset, got.Results[i].Offset) - require.Equal(t, tt.want.Results[i].Limit, got.Results[i].Limit) - require.Equal(t, tt.want.Results[i].Query, got.Results[i].Query) - require.Equal(t, tt.want.Results[i].IndexUID, got.Results[i].IndexUID) - } + assert.NoError(t, err) + assert.Equal(t, tt.expectedResp, 
tt.internalReq.withResponse) } }) } From 6859a7f173305df6ac214fa8a2c85e1d65a83931 Mon Sep 17 00:00:00 2001 From: Javad Date: Sun, 4 Aug 2024 08:58:22 +0330 Subject: [PATCH 26/43] chore: add general errors --- error.go | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/error.go b/error.go index a4498660..4f2e7e06 100644 --- a/error.go +++ b/error.go @@ -2,6 +2,7 @@ package meilisearch import ( "encoding/json" + "errors" "fmt" "strings" ) @@ -16,9 +17,9 @@ const ( ErrCodeMarshalRequest ErrCode = iota + 1 // ErrCodeResponseUnmarshalBody impossible deserialize the response body ErrCodeResponseUnmarshalBody - // MeilisearchApiError send by the Meilisearch api + // MeilisearchApiError send by the meilisearch api MeilisearchApiError - // MeilisearchApiError send by the Meilisearch api + // MeilisearchApiErrorWithoutMessage MeilisearchApiError send by the meilisearch api MeilisearchApiErrorWithoutMessage // MeilisearchTimeoutError MeilisearchTimeoutError @@ -80,8 +81,8 @@ type Error struct { // RequestToString is the raw request into string ('empty response' if not present) ResponseToString string - // Error info from Meilisearch api - // Message is the raw request into string ('empty Meilisearch message' if not present) + // Error info from meilisearch api + // Message is the raw request into string ('empty meilisearch message' if not present) MeilisearchApiError meilisearchApiError // StatusCode of the request @@ -101,7 +102,7 @@ type Error struct { } // Error return a well human formatted message. 
-func (e Error) Error() string { +func (e *Error) Error() string { message := namedSprintf(e.rawMessage, map[string]interface{}{ "endpoint": e.Endpoint, "method": e.Method, @@ -146,9 +147,10 @@ func (e *Error) ErrorBody(body []byte) { } } -// Added a hint to the error message if it may come from a version incompatibility with Meilisearch +// VersionErrorHintMessage a hint to the error message if it may come from a version incompatibility with meilisearch func VersionErrorHintMessage(err error, req *internalRequest) error { - return fmt.Errorf("%w. Hint: It might not be working because you're not up to date with the Meilisearch version that %s call requires.", err, req.functionName) + return fmt.Errorf("%w. Hint: It might not be working because you're not up to date with the "+ + "meilisearch version that %s call requires", err, req.functionName) } func namedSprintf(format string, params map[string]interface{}) string { @@ -157,3 +159,12 @@ func namedSprintf(format string, params map[string]interface{}) string { } return format } + +// General errors +var ( + ErrInvalidRequestMethod = errors.New("request body is not expected for GET and HEAD requests") + ErrRequestBodyWithoutContentType = errors.New("request body without Content-Type is not allowed") + ErrNoSearchRequest = errors.New("no search request provided") + ErrNoFacetSearchRequest = errors.New("no search facet request provided") + ErrConnectingFailed = errors.New("meilisearch is not connected") +) From 9ed1483da32b25adcbee3b32c844c6f64f12e6fe Mon Sep 17 00:00:00 2001 From: Javad Date: Sun, 4 Aug 2024 09:00:17 +0330 Subject: [PATCH 27/43] feat: add option pattern for make optional client --- options.go | 63 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 options.go diff --git a/options.go b/options.go new file mode 100644 index 00000000..c42e31b8 --- /dev/null +++ b/options.go @@ -0,0 +1,63 @@ +package meilisearch + +import ( + "crypto/tls" + "net" + 
"net/http" + "time" +) + +var ( + defaultMeiliOpt = &meiliOpt{ + client: &http.Client{ + Transport: baseTransport(), + }, + } +) + +type meiliOpt struct { + client *http.Client + timeout time.Duration + apiKey string +} + +type Option func(*meiliOpt) + +// WithCustomClient set custom http.Client +func WithCustomClient(client *http.Client) Option { + return func(opt *meiliOpt) { + opt.client = client + } +} + +// WithCustomClientWithTLS client support tls configuration +func WithCustomClientWithTLS(tlsConfig *tls.Config) Option { + return func(opt *meiliOpt) { + trans := baseTransport() + trans.TLSClientConfig = tlsConfig + opt.client = &http.Client{Transport: trans} + } +} + +// WithAPIKey is API key or master key. +// more: https://www.meilisearch.com/docs/reference/api/keys +func WithAPIKey(key string) Option { + return func(opt *meiliOpt) { + opt.apiKey = key + } +} + +func baseTransport() *http.Transport { + return &http.Transport{ + Proxy: http.ProxyFromEnvironment, + DialContext: (&net.Dialer{ + Timeout: 30 * time.Second, + KeepAlive: 30 * time.Second, + }).DialContext, + MaxIdleConns: 100, + MaxIdleConnsPerHost: 100, + IdleConnTimeout: 90 * time.Second, + TLSHandshakeTimeout: 10 * time.Second, + ExpectContinueTimeout: 1 * time.Second, + } +} From 96e9a18feb65379cf5bbe5115126a99f5e37669f Mon Sep 17 00:00:00 2001 From: Javad Date: Sun, 4 Aug 2024 09:03:26 +0330 Subject: [PATCH 28/43] docs: update documentation of types --- types.go | 65 +- types_easyjson.go | 7014 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 7048 insertions(+), 31 deletions(-) create mode 100644 types_easyjson.go diff --git a/types.go b/types.go index 05273bb8..11d798d8 100644 --- a/types.go +++ b/types.go @@ -4,34 +4,37 @@ import ( "time" "github.com/golang-jwt/jwt/v4" - "github.com/valyala/fasthttp" ) -// -// Internal types to Meilisearch -// +const ( + DefaultLimit int64 = 20 + + contentTypeJSON string = "application/json" + contentTypeNDJSON string = 
"application/x-ndjson" + contentTypeCSV string = "text/csv" +) -// Client is a structure that give you the power for interacting with an high-level api with Meilisearch. -type Client struct { - config ClientConfig - httpClient *fasthttp.Client +type IndexConfig struct { + // Uid is the unique identifier of a given index. + Uid string + // PrimaryKey is optional + PrimaryKey string } -// Index is the type that represent an index in Meilisearch -type Index struct { +type IndexResult struct { UID string `json:"uid"` CreatedAt time.Time `json:"createdAt"` UpdatedAt time.Time `json:"updatedAt"` PrimaryKey string `json:"primaryKey,omitempty"` - client *Client + IndexManager } -// Return of multiple indexes is wrap in a IndexesResults +// IndexesResults return of multiple indexes is wrap in a IndexesResults type IndexesResults struct { - Results []Index `json:"results"` - Offset int64 `json:"offset"` - Limit int64 `json:"limit"` - Total int64 `json:"total"` + Results []*IndexResult `json:"results"` + Offset int64 `json:"offset"` + Limit int64 `json:"limit"` + Total int64 `json:"total"` } type IndexesQuery struct { @@ -39,7 +42,7 @@ type IndexesQuery struct { Offset int64 } -// Settings is the type that represents the settings in Meilisearch +// Settings is the type that represents the settings in meilisearch type Settings struct { RankingRules []string `json:"rankingRules,omitempty"` DistinctAttribute *string `json:"distinctAttribute,omitempty"` @@ -56,7 +59,7 @@ type Settings struct { Embedders map[string]Embedder `json:"embedders,omitempty"` } -// TypoTolerance is the type that represents the typo tolerance setting in Meilisearch +// TypoTolerance is the type that represents the typo tolerance setting in meilisearch type TypoTolerance struct { Enabled bool `json:"enabled"` MinWordSizeForTypos MinWordSizeForTypos `json:"minWordSizeForTypos,omitempty"` @@ -64,18 +67,18 @@ type TypoTolerance struct { DisableOnAttributes []string `json:"disableOnAttributes,omitempty"` } -// 
MinWordSizeForTypos is the type that represents the minWordSizeForTypos setting in the typo tolerance setting in Meilisearch +// MinWordSizeForTypos is the type that represents the minWordSizeForTypos setting in the typo tolerance setting in meilisearch type MinWordSizeForTypos struct { OneTypo int64 `json:"oneTypo,omitempty"` TwoTypos int64 `json:"twoTypos,omitempty"` } -// Pagination is the type that represents the pagination setting in Meilisearch +// Pagination is the type that represents the pagination setting in meilisearch type Pagination struct { MaxTotalHits int64 `json:"maxTotalHits"` } -// Faceting is the type that represents the faceting setting in Meilisearch +// Faceting is the type that represents the faceting setting in meilisearch type Faceting struct { MaxValuesPerFacet int64 `json:"maxValuesPerFacet"` } @@ -88,14 +91,14 @@ type Embedder struct { DocumentTemplate string `json:"documentTemplate,omitempty"` } -// Version is the type that represents the versions in Meilisearch +// Version is the type that represents the versions in meilisearch type Version struct { CommitSha string `json:"commitSha"` CommitDate string `json:"commitDate"` PkgVersion string `json:"pkgVersion"` } -// StatsIndex is the type that represent the stats of an index in Meilisearch +// StatsIndex is the type that represent the stats of an index in meilisearch type StatsIndex struct { NumberOfDocuments int64 `json:"numberOfDocuments"` IsIndexing bool `json:"isIndexing"` @@ -253,7 +256,7 @@ type Details struct { DumpUid string `json:"dumpUid,omitempty"` } -// Return of multiple tasks is wrap in a TaskResult +// TaskResult return of multiple tasks is wrap in a TaskResult type TaskResult struct { Results []Task `json:"results"` Limit int64 `json:"limit"` @@ -262,7 +265,7 @@ type TaskResult struct { Total int64 `json:"total"` } -// Keys allow the user to connect to the Meilisearch instance +// Key allow the user to connect to the meilisearch instance // // Documentation: 
https://www.meilisearch.com/docs/learn/security/master_api_keys#protecting-a-meilisearch-instance type Key struct { @@ -277,7 +280,7 @@ type Key struct { ExpiresAt time.Time `json:"expiresAt"` } -// This structure is used to send the exact ISO-8601 time format managed by Meilisearch +// KeyParsed this structure is used to send the exact ISO-8601 time format managed by meilisearch type KeyParsed struct { Name string `json:"name"` Description string `json:"description"` @@ -287,13 +290,13 @@ type KeyParsed struct { ExpiresAt *string `json:"expiresAt"` } -// This structure is used to update a Key +// KeyUpdate this structure is used to update a Key type KeyUpdate struct { Name string `json:"name,omitempty"` Description string `json:"description,omitempty"` } -// Return of multiple keys is wrap in a KeysResults +// KeysResults return of multiple keys is wrap in a KeysResults type KeysResults struct { Results []Key `json:"results"` Offset int64 `json:"offset"` @@ -306,7 +309,7 @@ type KeysQuery struct { Offset int64 } -// Information to create a tenant token +// TenantTokenOptions information to create a tenant token // // ExpiresAt is a time.Time when the key will expire. // Note that if an ExpiresAt value is included it should be in UTC time. 
@@ -316,7 +319,7 @@ type TenantTokenOptions struct { ExpiresAt time.Time } -// Custom Claims structure to create a Tenant Token +// TenantTokenClaims custom Claims structure to create a Tenant Token type TenantTokenClaims struct { APIKeyUID string `json:"apiKeyUid"` SearchRules interface{} `json:"searchRules"` @@ -466,7 +469,7 @@ type SwapIndexesParams struct { // RawType is an alias for raw byte[] type RawType []byte -// Health is the request body for set Meilisearch health +// Health is the request body for set meilisearch health type Health struct { Status string `json:"status"` } diff --git a/types_easyjson.go b/types_easyjson.go new file mode 100644 index 00000000..66f025e0 --- /dev/null +++ b/types_easyjson.go @@ -0,0 +1,7014 @@ +// Code generated by easyjson for marshaling/unmarshaling. DO NOT EDIT. + +package meilisearch + +import ( + json "encoding/json" + _v4 "github.com/golang-jwt/jwt/v4" + easyjson "github.com/mailru/easyjson" + jlexer "github.com/mailru/easyjson/jlexer" + jwriter "github.com/mailru/easyjson/jwriter" +) + +// suppress unused package warning +var ( + _ *json.RawMessage + _ *jlexer.Lexer + _ *jwriter.Writer + _ easyjson.Marshaler +) + +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo(in *jlexer.Lexer, out *Version) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "commitSha": + out.CommitSha = string(in.String()) + case "commitDate": + out.CommitDate = string(in.String()) + case "pkgVersion": + out.PkgVersion = string(in.String()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo(out *jwriter.Writer, in Version) { + out.RawByte('{') + first := true + _ = first + { + 
const prefix string = ",\"commitSha\":" + out.RawString(prefix[1:]) + out.String(string(in.CommitSha)) + } + { + const prefix string = ",\"commitDate\":" + out.RawString(prefix) + out.String(string(in.CommitDate)) + } + { + const prefix string = ",\"pkgVersion\":" + out.RawString(prefix) + out.String(string(in.PkgVersion)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v Version) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v Version) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *Version) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *Version) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo1(in *jlexer.Lexer, out *UpdateIndexRequest) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "primaryKey": + out.PrimaryKey = string(in.String()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo1(out *jwriter.Writer, in UpdateIndexRequest) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"primaryKey\":" + out.RawString(prefix[1:]) + 
out.String(string(in.PrimaryKey)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v UpdateIndexRequest) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo1(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v UpdateIndexRequest) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo1(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *UpdateIndexRequest) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo1(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *UpdateIndexRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo1(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo2(in *jlexer.Lexer, out *TypoTolerance) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "enabled": + out.Enabled = bool(in.Bool()) + case "minWordSizeForTypos": + (out.MinWordSizeForTypos).UnmarshalEasyJSON(in) + case "disableOnWords": + if in.IsNull() { + in.Skip() + out.DisableOnWords = nil + } else { + in.Delim('[') + if out.DisableOnWords == nil { + if !in.IsDelim(']') { + out.DisableOnWords = make([]string, 0, 4) + } else { + out.DisableOnWords = []string{} + } + } else { + out.DisableOnWords = (out.DisableOnWords)[:0] + } + for !in.IsDelim(']') { + var v1 string + v1 = string(in.String()) + out.DisableOnWords = append(out.DisableOnWords, v1) + in.WantComma() + } + in.Delim(']') + } + case "disableOnAttributes": + if in.IsNull() { + 
in.Skip() + out.DisableOnAttributes = nil + } else { + in.Delim('[') + if out.DisableOnAttributes == nil { + if !in.IsDelim(']') { + out.DisableOnAttributes = make([]string, 0, 4) + } else { + out.DisableOnAttributes = []string{} + } + } else { + out.DisableOnAttributes = (out.DisableOnAttributes)[:0] + } + for !in.IsDelim(']') { + var v2 string + v2 = string(in.String()) + out.DisableOnAttributes = append(out.DisableOnAttributes, v2) + in.WantComma() + } + in.Delim(']') + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo2(out *jwriter.Writer, in TypoTolerance) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"enabled\":" + out.RawString(prefix[1:]) + out.Bool(bool(in.Enabled)) + } + if true { + const prefix string = ",\"minWordSizeForTypos\":" + out.RawString(prefix) + (in.MinWordSizeForTypos).MarshalEasyJSON(out) + } + if len(in.DisableOnWords) != 0 { + const prefix string = ",\"disableOnWords\":" + out.RawString(prefix) + { + out.RawByte('[') + for v3, v4 := range in.DisableOnWords { + if v3 > 0 { + out.RawByte(',') + } + out.String(string(v4)) + } + out.RawByte(']') + } + } + if len(in.DisableOnAttributes) != 0 { + const prefix string = ",\"disableOnAttributes\":" + out.RawString(prefix) + { + out.RawByte('[') + for v5, v6 := range in.DisableOnAttributes { + if v5 > 0 { + out.RawByte(',') + } + out.String(string(v6)) + } + out.RawByte(']') + } + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v TypoTolerance) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo2(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v TypoTolerance) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo2(w, v) +} + +// 
UnmarshalJSON supports json.Unmarshaler interface +func (v *TypoTolerance) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo2(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *TypoTolerance) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo2(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo3(in *jlexer.Lexer, out *TenantTokenOptions) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "APIKey": + out.APIKey = string(in.String()) + case "ExpiresAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.ExpiresAt).UnmarshalJSON(data)) + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo3(out *jwriter.Writer, in TenantTokenOptions) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"APIKey\":" + out.RawString(prefix[1:]) + out.String(string(in.APIKey)) + } + { + const prefix string = ",\"ExpiresAt\":" + out.RawString(prefix) + out.Raw((in.ExpiresAt).MarshalJSON()) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v TenantTokenOptions) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo3(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v TenantTokenOptions) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo3(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v 
*TenantTokenOptions) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo3(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *TenantTokenOptions) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo3(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo4(in *jlexer.Lexer, out *TenantTokenClaims) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "apiKeyUid": + out.APIKeyUID = string(in.String()) + case "searchRules": + if m, ok := out.SearchRules.(easyjson.Unmarshaler); ok { + m.UnmarshalEasyJSON(in) + } else if m, ok := out.SearchRules.(json.Unmarshaler); ok { + _ = m.UnmarshalJSON(in.Raw()) + } else { + out.SearchRules = in.Interface() + } + case "iss": + out.Issuer = string(in.String()) + case "sub": + out.Subject = string(in.String()) + case "aud": + if data := in.Raw(); in.Ok() { + in.AddError((out.Audience).UnmarshalJSON(data)) + } + case "exp": + if in.IsNull() { + in.Skip() + out.ExpiresAt = nil + } else { + if out.ExpiresAt == nil { + out.ExpiresAt = new(_v4.NumericDate) + } + if data := in.Raw(); in.Ok() { + in.AddError((*out.ExpiresAt).UnmarshalJSON(data)) + } + } + case "nbf": + if in.IsNull() { + in.Skip() + out.NotBefore = nil + } else { + if out.NotBefore == nil { + out.NotBefore = new(_v4.NumericDate) + } + if data := in.Raw(); in.Ok() { + in.AddError((*out.NotBefore).UnmarshalJSON(data)) + } + } + case "iat": + if in.IsNull() { + in.Skip() + out.IssuedAt = nil + } else { + if out.IssuedAt == nil { + out.IssuedAt = new(_v4.NumericDate) + } + if data := in.Raw(); in.Ok() { + in.AddError((*out.IssuedAt).UnmarshalJSON(data)) + } + } + 
case "jti": + out.ID = string(in.String()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo4(out *jwriter.Writer, in TenantTokenClaims) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"apiKeyUid\":" + out.RawString(prefix[1:]) + out.String(string(in.APIKeyUID)) + } + { + const prefix string = ",\"searchRules\":" + out.RawString(prefix) + if m, ok := in.SearchRules.(easyjson.Marshaler); ok { + m.MarshalEasyJSON(out) + } else if m, ok := in.SearchRules.(json.Marshaler); ok { + out.Raw(m.MarshalJSON()) + } else { + out.Raw(json.Marshal(in.SearchRules)) + } + } + if in.Issuer != "" { + const prefix string = ",\"iss\":" + out.RawString(prefix) + out.String(string(in.Issuer)) + } + if in.Subject != "" { + const prefix string = ",\"sub\":" + out.RawString(prefix) + out.String(string(in.Subject)) + } + if len(in.Audience) != 0 { + const prefix string = ",\"aud\":" + out.RawString(prefix) + out.Raw((in.Audience).MarshalJSON()) + } + if in.ExpiresAt != nil { + const prefix string = ",\"exp\":" + out.RawString(prefix) + out.Raw((*in.ExpiresAt).MarshalJSON()) + } + if in.NotBefore != nil { + const prefix string = ",\"nbf\":" + out.RawString(prefix) + out.Raw((*in.NotBefore).MarshalJSON()) + } + if in.IssuedAt != nil { + const prefix string = ",\"iat\":" + out.RawString(prefix) + out.Raw((*in.IssuedAt).MarshalJSON()) + } + if in.ID != "" { + const prefix string = ",\"jti\":" + out.RawString(prefix) + out.String(string(in.ID)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v TenantTokenClaims) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo4(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v TenantTokenClaims) MarshalEasyJSON(w *jwriter.Writer) { + 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo4(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *TenantTokenClaims) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo4(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *TenantTokenClaims) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo4(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo5(in *jlexer.Lexer, out *TasksQuery) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "UIDS": + if in.IsNull() { + in.Skip() + out.UIDS = nil + } else { + in.Delim('[') + if out.UIDS == nil { + if !in.IsDelim(']') { + out.UIDS = make([]int64, 0, 8) + } else { + out.UIDS = []int64{} + } + } else { + out.UIDS = (out.UIDS)[:0] + } + for !in.IsDelim(']') { + var v7 int64 + v7 = int64(in.Int64()) + out.UIDS = append(out.UIDS, v7) + in.WantComma() + } + in.Delim(']') + } + case "Limit": + out.Limit = int64(in.Int64()) + case "From": + out.From = int64(in.Int64()) + case "IndexUIDS": + if in.IsNull() { + in.Skip() + out.IndexUIDS = nil + } else { + in.Delim('[') + if out.IndexUIDS == nil { + if !in.IsDelim(']') { + out.IndexUIDS = make([]string, 0, 4) + } else { + out.IndexUIDS = []string{} + } + } else { + out.IndexUIDS = (out.IndexUIDS)[:0] + } + for !in.IsDelim(']') { + var v8 string + v8 = string(in.String()) + out.IndexUIDS = append(out.IndexUIDS, v8) + in.WantComma() + } + in.Delim(']') + } + case "Statuses": + if in.IsNull() { + in.Skip() + out.Statuses = nil + } else { + in.Delim('[') + if out.Statuses == nil { + if !in.IsDelim(']') { + out.Statuses = make([]TaskStatus, 0, 
4) + } else { + out.Statuses = []TaskStatus{} + } + } else { + out.Statuses = (out.Statuses)[:0] + } + for !in.IsDelim(']') { + var v9 TaskStatus + v9 = TaskStatus(in.String()) + out.Statuses = append(out.Statuses, v9) + in.WantComma() + } + in.Delim(']') + } + case "Types": + if in.IsNull() { + in.Skip() + out.Types = nil + } else { + in.Delim('[') + if out.Types == nil { + if !in.IsDelim(']') { + out.Types = make([]TaskType, 0, 4) + } else { + out.Types = []TaskType{} + } + } else { + out.Types = (out.Types)[:0] + } + for !in.IsDelim(']') { + var v10 TaskType + v10 = TaskType(in.String()) + out.Types = append(out.Types, v10) + in.WantComma() + } + in.Delim(']') + } + case "CanceledBy": + if in.IsNull() { + in.Skip() + out.CanceledBy = nil + } else { + in.Delim('[') + if out.CanceledBy == nil { + if !in.IsDelim(']') { + out.CanceledBy = make([]int64, 0, 8) + } else { + out.CanceledBy = []int64{} + } + } else { + out.CanceledBy = (out.CanceledBy)[:0] + } + for !in.IsDelim(']') { + var v11 int64 + v11 = int64(in.Int64()) + out.CanceledBy = append(out.CanceledBy, v11) + in.WantComma() + } + in.Delim(']') + } + case "BeforeEnqueuedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.BeforeEnqueuedAt).UnmarshalJSON(data)) + } + case "AfterEnqueuedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.AfterEnqueuedAt).UnmarshalJSON(data)) + } + case "BeforeStartedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.BeforeStartedAt).UnmarshalJSON(data)) + } + case "AfterStartedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.AfterStartedAt).UnmarshalJSON(data)) + } + case "BeforeFinishedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.BeforeFinishedAt).UnmarshalJSON(data)) + } + case "AfterFinishedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.AfterFinishedAt).UnmarshalJSON(data)) + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo5(out *jwriter.Writer, in TasksQuery) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"UIDS\":" + out.RawString(prefix[1:]) + if in.UIDS == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v12, v13 := range in.UIDS { + if v12 > 0 { + out.RawByte(',') + } + out.Int64(int64(v13)) + } + out.RawByte(']') + } + } + { + const prefix string = ",\"Limit\":" + out.RawString(prefix) + out.Int64(int64(in.Limit)) + } + { + const prefix string = ",\"From\":" + out.RawString(prefix) + out.Int64(int64(in.From)) + } + { + const prefix string = ",\"IndexUIDS\":" + out.RawString(prefix) + if in.IndexUIDS == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v14, v15 := range in.IndexUIDS { + if v14 > 0 { + out.RawByte(',') + } + out.String(string(v15)) + } + out.RawByte(']') + } + } + { + const prefix string = ",\"Statuses\":" + out.RawString(prefix) + if in.Statuses == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v16, v17 := range in.Statuses { + if v16 > 0 { + out.RawByte(',') + } + out.String(string(v17)) + } + out.RawByte(']') + } + } + { + const prefix string = ",\"Types\":" + out.RawString(prefix) + if in.Types == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v18, v19 := range in.Types { + if v18 > 0 { + out.RawByte(',') + } + out.String(string(v19)) + } + out.RawByte(']') + } + } + { + const prefix string = ",\"CanceledBy\":" + out.RawString(prefix) + if in.CanceledBy == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v20, v21 := range in.CanceledBy { + if v20 > 0 { + out.RawByte(',') + } + out.Int64(int64(v21)) + } + out.RawByte(']') + } + } + { + const prefix string = 
",\"BeforeEnqueuedAt\":" + out.RawString(prefix) + out.Raw((in.BeforeEnqueuedAt).MarshalJSON()) + } + { + const prefix string = ",\"AfterEnqueuedAt\":" + out.RawString(prefix) + out.Raw((in.AfterEnqueuedAt).MarshalJSON()) + } + { + const prefix string = ",\"BeforeStartedAt\":" + out.RawString(prefix) + out.Raw((in.BeforeStartedAt).MarshalJSON()) + } + { + const prefix string = ",\"AfterStartedAt\":" + out.RawString(prefix) + out.Raw((in.AfterStartedAt).MarshalJSON()) + } + { + const prefix string = ",\"BeforeFinishedAt\":" + out.RawString(prefix) + out.Raw((in.BeforeFinishedAt).MarshalJSON()) + } + { + const prefix string = ",\"AfterFinishedAt\":" + out.RawString(prefix) + out.Raw((in.AfterFinishedAt).MarshalJSON()) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v TasksQuery) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo5(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v TasksQuery) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo5(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *TasksQuery) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo5(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *TasksQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo5(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo6(in *jlexer.Lexer, out *TaskResult) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "results": 
+ if in.IsNull() { + in.Skip() + out.Results = nil + } else { + in.Delim('[') + if out.Results == nil { + if !in.IsDelim(']') { + out.Results = make([]Task, 0, 0) + } else { + out.Results = []Task{} + } + } else { + out.Results = (out.Results)[:0] + } + for !in.IsDelim(']') { + var v22 Task + (v22).UnmarshalEasyJSON(in) + out.Results = append(out.Results, v22) + in.WantComma() + } + in.Delim(']') + } + case "limit": + out.Limit = int64(in.Int64()) + case "from": + out.From = int64(in.Int64()) + case "next": + out.Next = int64(in.Int64()) + case "total": + out.Total = int64(in.Int64()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo6(out *jwriter.Writer, in TaskResult) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"results\":" + out.RawString(prefix[1:]) + if in.Results == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v23, v24 := range in.Results { + if v23 > 0 { + out.RawByte(',') + } + (v24).MarshalEasyJSON(out) + } + out.RawByte(']') + } + } + { + const prefix string = ",\"limit\":" + out.RawString(prefix) + out.Int64(int64(in.Limit)) + } + { + const prefix string = ",\"from\":" + out.RawString(prefix) + out.Int64(int64(in.From)) + } + { + const prefix string = ",\"next\":" + out.RawString(prefix) + out.Int64(int64(in.Next)) + } + { + const prefix string = ",\"total\":" + out.RawString(prefix) + out.Int64(int64(in.Total)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v TaskResult) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo6(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v TaskResult) MarshalEasyJSON(w *jwriter.Writer) { + 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo6(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *TaskResult) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo6(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *TaskResult) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo6(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo7(in *jlexer.Lexer, out *TaskInfo) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "status": + out.Status = TaskStatus(in.String()) + case "taskUid": + out.TaskUID = int64(in.Int64()) + case "indexUid": + out.IndexUID = string(in.String()) + case "type": + out.Type = TaskType(in.String()) + case "enqueuedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.EnqueuedAt).UnmarshalJSON(data)) + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo7(out *jwriter.Writer, in TaskInfo) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"status\":" + out.RawString(prefix[1:]) + out.String(string(in.Status)) + } + { + const prefix string = ",\"taskUid\":" + out.RawString(prefix) + out.Int64(int64(in.TaskUID)) + } + { + const prefix string = ",\"indexUid\":" + out.RawString(prefix) + out.String(string(in.IndexUID)) + } + { + const prefix string = ",\"type\":" + out.RawString(prefix) + out.String(string(in.Type)) + } + { + const prefix string = ",\"enqueuedAt\":" + out.RawString(prefix) + out.Raw((in.EnqueuedAt).MarshalJSON()) + } + 
out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v TaskInfo) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo7(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v TaskInfo) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo7(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *TaskInfo) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo7(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *TaskInfo) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo7(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo8(in *jlexer.Lexer, out *Task) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "status": + out.Status = TaskStatus(in.String()) + case "uid": + out.UID = int64(in.Int64()) + case "taskUid": + out.TaskUID = int64(in.Int64()) + case "indexUid": + out.IndexUID = string(in.String()) + case "type": + out.Type = TaskType(in.String()) + case "error": + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo9(in, &out.Error) + case "duration": + out.Duration = string(in.String()) + case "enqueuedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.EnqueuedAt).UnmarshalJSON(data)) + } + case "startedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.StartedAt).UnmarshalJSON(data)) + } + case "finishedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.FinishedAt).UnmarshalJSON(data)) + } + case "details": + 
(out.Details).UnmarshalEasyJSON(in) + case "canceledBy": + out.CanceledBy = int64(in.Int64()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo8(out *jwriter.Writer, in Task) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"status\":" + out.RawString(prefix[1:]) + out.String(string(in.Status)) + } + if in.UID != 0 { + const prefix string = ",\"uid\":" + out.RawString(prefix) + out.Int64(int64(in.UID)) + } + if in.TaskUID != 0 { + const prefix string = ",\"taskUid\":" + out.RawString(prefix) + out.Int64(int64(in.TaskUID)) + } + { + const prefix string = ",\"indexUid\":" + out.RawString(prefix) + out.String(string(in.IndexUID)) + } + { + const prefix string = ",\"type\":" + out.RawString(prefix) + out.String(string(in.Type)) + } + if true { + const prefix string = ",\"error\":" + out.RawString(prefix) + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo9(out, in.Error) + } + if in.Duration != "" { + const prefix string = ",\"duration\":" + out.RawString(prefix) + out.String(string(in.Duration)) + } + { + const prefix string = ",\"enqueuedAt\":" + out.RawString(prefix) + out.Raw((in.EnqueuedAt).MarshalJSON()) + } + if true { + const prefix string = ",\"startedAt\":" + out.RawString(prefix) + out.Raw((in.StartedAt).MarshalJSON()) + } + if true { + const prefix string = ",\"finishedAt\":" + out.RawString(prefix) + out.Raw((in.FinishedAt).MarshalJSON()) + } + if true { + const prefix string = ",\"details\":" + out.RawString(prefix) + (in.Details).MarshalEasyJSON(out) + } + if in.CanceledBy != 0 { + const prefix string = ",\"canceledBy\":" + out.RawString(prefix) + out.Int64(int64(in.CanceledBy)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v Task) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo8(&w, v) + 
return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v Task) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo8(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *Task) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo8(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *Task) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo8(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo9(in *jlexer.Lexer, out *meilisearchApiError) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "message": + out.Message = string(in.String()) + case "code": + out.Code = string(in.String()) + case "type": + out.Type = string(in.String()) + case "link": + out.Link = string(in.String()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo9(out *jwriter.Writer, in meilisearchApiError) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"message\":" + out.RawString(prefix[1:]) + out.String(string(in.Message)) + } + { + const prefix string = ",\"code\":" + out.RawString(prefix) + out.String(string(in.Code)) + } + { + const prefix string = ",\"type\":" + out.RawString(prefix) + out.String(string(in.Type)) + } + { + const prefix string = ",\"link\":" + out.RawString(prefix) + out.String(string(in.Link)) + } + out.RawByte('}') +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo10(in 
*jlexer.Lexer, out *SwapIndexesParams) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "indexes": + if in.IsNull() { + in.Skip() + out.Indexes = nil + } else { + in.Delim('[') + if out.Indexes == nil { + if !in.IsDelim(']') { + out.Indexes = make([]string, 0, 4) + } else { + out.Indexes = []string{} + } + } else { + out.Indexes = (out.Indexes)[:0] + } + for !in.IsDelim(']') { + var v25 string + v25 = string(in.String()) + out.Indexes = append(out.Indexes, v25) + in.WantComma() + } + in.Delim(']') + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo10(out *jwriter.Writer, in SwapIndexesParams) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"indexes\":" + out.RawString(prefix[1:]) + if in.Indexes == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v26, v27 := range in.Indexes { + if v26 > 0 { + out.RawByte(',') + } + out.String(string(v27)) + } + out.RawByte(']') + } + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v SwapIndexesParams) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo10(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v SwapIndexesParams) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo10(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *SwapIndexesParams) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo10(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *SwapIndexesParams) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo10(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo11(in *jlexer.Lexer, out *StatsIndex) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "numberOfDocuments": + out.NumberOfDocuments = int64(in.Int64()) + case "isIndexing": + out.IsIndexing = bool(in.Bool()) + case "fieldDistribution": + if in.IsNull() { + in.Skip() + } else { + in.Delim('{') + out.FieldDistribution = make(map[string]int64) + for !in.IsDelim('}') { + key := string(in.String()) + in.WantColon() + var v28 int64 + v28 = int64(in.Int64()) + (out.FieldDistribution)[key] = v28 + in.WantComma() + } + in.Delim('}') + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo11(out *jwriter.Writer, in StatsIndex) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"numberOfDocuments\":" + out.RawString(prefix[1:]) + out.Int64(int64(in.NumberOfDocuments)) + } + { + const prefix string = ",\"isIndexing\":" + out.RawString(prefix) + out.Bool(bool(in.IsIndexing)) + } + { + const prefix string = ",\"fieldDistribution\":" + out.RawString(prefix) + if in.FieldDistribution == nil && (out.Flags&jwriter.NilMapAsEmpty) == 0 { + out.RawString(`null`) + } else { + out.RawByte('{') + v29First := true + for v29Name, v29Value := range in.FieldDistribution { + if v29First { + v29First = false + } else { + out.RawByte(',') + } + 
out.String(string(v29Name)) + out.RawByte(':') + out.Int64(int64(v29Value)) + } + out.RawByte('}') + } + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v StatsIndex) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo11(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v StatsIndex) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo11(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *StatsIndex) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo11(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *StatsIndex) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo11(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo12(in *jlexer.Lexer, out *Stats) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "databaseSize": + out.DatabaseSize = int64(in.Int64()) + case "lastUpdate": + if data := in.Raw(); in.Ok() { + in.AddError((out.LastUpdate).UnmarshalJSON(data)) + } + case "indexes": + if in.IsNull() { + in.Skip() + } else { + in.Delim('{') + out.Indexes = make(map[string]StatsIndex) + for !in.IsDelim('}') { + key := string(in.String()) + in.WantColon() + var v30 StatsIndex + (v30).UnmarshalEasyJSON(in) + (out.Indexes)[key] = v30 + in.WantComma() + } + in.Delim('}') + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func 
easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo12(out *jwriter.Writer, in Stats) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"databaseSize\":" + out.RawString(prefix[1:]) + out.Int64(int64(in.DatabaseSize)) + } + { + const prefix string = ",\"lastUpdate\":" + out.RawString(prefix) + out.Raw((in.LastUpdate).MarshalJSON()) + } + { + const prefix string = ",\"indexes\":" + out.RawString(prefix) + if in.Indexes == nil && (out.Flags&jwriter.NilMapAsEmpty) == 0 { + out.RawString(`null`) + } else { + out.RawByte('{') + v31First := true + for v31Name, v31Value := range in.Indexes { + if v31First { + v31First = false + } else { + out.RawByte(',') + } + out.String(string(v31Name)) + out.RawByte(':') + (v31Value).MarshalEasyJSON(out) + } + out.RawByte('}') + } + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v Stats) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo12(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v Stats) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo12(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *Stats) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo12(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *Stats) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo12(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(in *jlexer.Lexer, out *SimilarDocumentResult) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + 
in.Skip() + in.WantComma() + continue + } + switch key { + case "hits": + if in.IsNull() { + in.Skip() + out.Hits = nil + } else { + in.Delim('[') + if out.Hits == nil { + if !in.IsDelim(']') { + out.Hits = make([]interface{}, 0, 4) + } else { + out.Hits = []interface{}{} + } + } else { + out.Hits = (out.Hits)[:0] + } + for !in.IsDelim(']') { + var v32 interface{} + if m, ok := v32.(easyjson.Unmarshaler); ok { + m.UnmarshalEasyJSON(in) + } else if m, ok := v32.(json.Unmarshaler); ok { + _ = m.UnmarshalJSON(in.Raw()) + } else { + v32 = in.Interface() + } + out.Hits = append(out.Hits, v32) + in.WantComma() + } + in.Delim(']') + } + case "id": + out.ID = string(in.String()) + case "processingTimeMs": + out.ProcessingTimeMS = int64(in.Int64()) + case "limit": + out.Limit = int64(in.Int64()) + case "offset": + out.Offset = int64(in.Int64()) + case "estimatedTotalHits": + out.EstimatedTotalHits = int64(in.Int64()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(out *jwriter.Writer, in SimilarDocumentResult) { + out.RawByte('{') + first := true + _ = first + if len(in.Hits) != 0 { + const prefix string = ",\"hits\":" + first = false + out.RawString(prefix[1:]) + { + out.RawByte('[') + for v33, v34 := range in.Hits { + if v33 > 0 { + out.RawByte(',') + } + if m, ok := v34.(easyjson.Marshaler); ok { + m.MarshalEasyJSON(out) + } else if m, ok := v34.(json.Marshaler); ok { + out.Raw(m.MarshalJSON()) + } else { + out.Raw(json.Marshal(v34)) + } + } + out.RawByte(']') + } + } + if in.ID != "" { + const prefix string = ",\"id\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.ID)) + } + if in.ProcessingTimeMS != 0 { + const prefix string = ",\"processingTimeMs\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + 
out.Int64(int64(in.ProcessingTimeMS)) + } + if in.Limit != 0 { + const prefix string = ",\"limit\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.Limit)) + } + if in.Offset != 0 { + const prefix string = ",\"offset\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.Offset)) + } + if in.EstimatedTotalHits != 0 { + const prefix string = ",\"estimatedTotalHits\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.EstimatedTotalHits)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v SimilarDocumentResult) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v SimilarDocumentResult) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo13(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *SimilarDocumentResult) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *SimilarDocumentResult) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo13(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(in *jlexer.Lexer, out *SimilarDocumentQuery) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "id": + if m, ok := 
out.Id.(easyjson.Unmarshaler); ok { + m.UnmarshalEasyJSON(in) + } else if m, ok := out.Id.(json.Unmarshaler); ok { + _ = m.UnmarshalJSON(in.Raw()) + } else { + out.Id = in.Interface() + } + case "embedder": + out.Embedder = string(in.String()) + case "attributesToRetrieve": + if in.IsNull() { + in.Skip() + out.AttributesToRetrieve = nil + } else { + in.Delim('[') + if out.AttributesToRetrieve == nil { + if !in.IsDelim(']') { + out.AttributesToRetrieve = make([]string, 0, 4) + } else { + out.AttributesToRetrieve = []string{} + } + } else { + out.AttributesToRetrieve = (out.AttributesToRetrieve)[:0] + } + for !in.IsDelim(']') { + var v35 string + v35 = string(in.String()) + out.AttributesToRetrieve = append(out.AttributesToRetrieve, v35) + in.WantComma() + } + in.Delim(']') + } + case "offset": + out.Offset = int64(in.Int64()) + case "limit": + out.Limit = int64(in.Int64()) + case "filter": + out.Filter = string(in.String()) + case "showRankingScore": + out.ShowRankingScore = bool(in.Bool()) + case "showRankingScoreDetails": + out.ShowRankingScoreDetails = bool(in.Bool()) + case "rankingScoreThreshold": + out.RankingScoreThreshold = float64(in.Float64()) + case "retrieveVectors": + out.RetrieveVectors = bool(in.Bool()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(out *jwriter.Writer, in SimilarDocumentQuery) { + out.RawByte('{') + first := true + _ = first + if in.Id != nil { + const prefix string = ",\"id\":" + first = false + out.RawString(prefix[1:]) + if m, ok := in.Id.(easyjson.Marshaler); ok { + m.MarshalEasyJSON(out) + } else if m, ok := in.Id.(json.Marshaler); ok { + out.Raw(m.MarshalJSON()) + } else { + out.Raw(json.Marshal(in.Id)) + } + } + if in.Embedder != "" { + const prefix string = ",\"embedder\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + 
out.String(string(in.Embedder)) + } + if len(in.AttributesToRetrieve) != 0 { + const prefix string = ",\"attributesToRetrieve\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v36, v37 := range in.AttributesToRetrieve { + if v36 > 0 { + out.RawByte(',') + } + out.String(string(v37)) + } + out.RawByte(']') + } + } + if in.Offset != 0 { + const prefix string = ",\"offset\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.Offset)) + } + if in.Limit != 0 { + const prefix string = ",\"limit\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.Limit)) + } + if in.Filter != "" { + const prefix string = ",\"filter\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.Filter)) + } + if in.ShowRankingScore { + const prefix string = ",\"showRankingScore\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Bool(bool(in.ShowRankingScore)) + } + if in.ShowRankingScoreDetails { + const prefix string = ",\"showRankingScoreDetails\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Bool(bool(in.ShowRankingScoreDetails)) + } + if in.RankingScoreThreshold != 0 { + const prefix string = ",\"rankingScoreThreshold\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Float64(float64(in.RankingScoreThreshold)) + } + if in.RetrieveVectors { + const prefix string = ",\"retrieveVectors\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Bool(bool(in.RetrieveVectors)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v SimilarDocumentQuery) MarshalJSON() ([]byte, 
error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v SimilarDocumentQuery) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo14(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *SimilarDocumentQuery) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *SimilarDocumentQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo14(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(in *jlexer.Lexer, out *Settings) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "rankingRules": + if in.IsNull() { + in.Skip() + out.RankingRules = nil + } else { + in.Delim('[') + if out.RankingRules == nil { + if !in.IsDelim(']') { + out.RankingRules = make([]string, 0, 4) + } else { + out.RankingRules = []string{} + } + } else { + out.RankingRules = (out.RankingRules)[:0] + } + for !in.IsDelim(']') { + var v38 string + v38 = string(in.String()) + out.RankingRules = append(out.RankingRules, v38) + in.WantComma() + } + in.Delim(']') + } + case "distinctAttribute": + if in.IsNull() { + in.Skip() + out.DistinctAttribute = nil + } else { + if out.DistinctAttribute == nil { + out.DistinctAttribute = new(string) + } + *out.DistinctAttribute = string(in.String()) + } + case "searchableAttributes": + if in.IsNull() { + in.Skip() + out.SearchableAttributes = nil + } else { + in.Delim('[') + if 
out.SearchableAttributes == nil { + if !in.IsDelim(']') { + out.SearchableAttributes = make([]string, 0, 4) + } else { + out.SearchableAttributes = []string{} + } + } else { + out.SearchableAttributes = (out.SearchableAttributes)[:0] + } + for !in.IsDelim(']') { + var v39 string + v39 = string(in.String()) + out.SearchableAttributes = append(out.SearchableAttributes, v39) + in.WantComma() + } + in.Delim(']') + } + case "searchCutoffMs": + out.SearchCutoffMs = int64(in.Int64()) + case "displayedAttributes": + if in.IsNull() { + in.Skip() + out.DisplayedAttributes = nil + } else { + in.Delim('[') + if out.DisplayedAttributes == nil { + if !in.IsDelim(']') { + out.DisplayedAttributes = make([]string, 0, 4) + } else { + out.DisplayedAttributes = []string{} + } + } else { + out.DisplayedAttributes = (out.DisplayedAttributes)[:0] + } + for !in.IsDelim(']') { + var v40 string + v40 = string(in.String()) + out.DisplayedAttributes = append(out.DisplayedAttributes, v40) + in.WantComma() + } + in.Delim(']') + } + case "stopWords": + if in.IsNull() { + in.Skip() + out.StopWords = nil + } else { + in.Delim('[') + if out.StopWords == nil { + if !in.IsDelim(']') { + out.StopWords = make([]string, 0, 4) + } else { + out.StopWords = []string{} + } + } else { + out.StopWords = (out.StopWords)[:0] + } + for !in.IsDelim(']') { + var v41 string + v41 = string(in.String()) + out.StopWords = append(out.StopWords, v41) + in.WantComma() + } + in.Delim(']') + } + case "synonyms": + if in.IsNull() { + in.Skip() + } else { + in.Delim('{') + if !in.IsDelim('}') { + out.Synonyms = make(map[string][]string) + } else { + out.Synonyms = nil + } + for !in.IsDelim('}') { + key := string(in.String()) + in.WantColon() + var v42 []string + if in.IsNull() { + in.Skip() + v42 = nil + } else { + in.Delim('[') + if v42 == nil { + if !in.IsDelim(']') { + v42 = make([]string, 0, 4) + } else { + v42 = []string{} + } + } else { + v42 = (v42)[:0] + } + for !in.IsDelim(']') { + var v43 string + v43 = 
string(in.String()) + v42 = append(v42, v43) + in.WantComma() + } + in.Delim(']') + } + (out.Synonyms)[key] = v42 + in.WantComma() + } + in.Delim('}') + } + case "filterableAttributes": + if in.IsNull() { + in.Skip() + out.FilterableAttributes = nil + } else { + in.Delim('[') + if out.FilterableAttributes == nil { + if !in.IsDelim(']') { + out.FilterableAttributes = make([]string, 0, 4) + } else { + out.FilterableAttributes = []string{} + } + } else { + out.FilterableAttributes = (out.FilterableAttributes)[:0] + } + for !in.IsDelim(']') { + var v44 string + v44 = string(in.String()) + out.FilterableAttributes = append(out.FilterableAttributes, v44) + in.WantComma() + } + in.Delim(']') + } + case "sortableAttributes": + if in.IsNull() { + in.Skip() + out.SortableAttributes = nil + } else { + in.Delim('[') + if out.SortableAttributes == nil { + if !in.IsDelim(']') { + out.SortableAttributes = make([]string, 0, 4) + } else { + out.SortableAttributes = []string{} + } + } else { + out.SortableAttributes = (out.SortableAttributes)[:0] + } + for !in.IsDelim(']') { + var v45 string + v45 = string(in.String()) + out.SortableAttributes = append(out.SortableAttributes, v45) + in.WantComma() + } + in.Delim(']') + } + case "typoTolerance": + if in.IsNull() { + in.Skip() + out.TypoTolerance = nil + } else { + if out.TypoTolerance == nil { + out.TypoTolerance = new(TypoTolerance) + } + (*out.TypoTolerance).UnmarshalEasyJSON(in) + } + case "pagination": + if in.IsNull() { + in.Skip() + out.Pagination = nil + } else { + if out.Pagination == nil { + out.Pagination = new(Pagination) + } + (*out.Pagination).UnmarshalEasyJSON(in) + } + case "faceting": + if in.IsNull() { + in.Skip() + out.Faceting = nil + } else { + if out.Faceting == nil { + out.Faceting = new(Faceting) + } + (*out.Faceting).UnmarshalEasyJSON(in) + } + case "embedders": + if in.IsNull() { + in.Skip() + } else { + in.Delim('{') + if !in.IsDelim('}') { + out.Embedders = make(map[string]Embedder) + } else { + 
out.Embedders = nil + } + for !in.IsDelim('}') { + key := string(in.String()) + in.WantColon() + var v46 Embedder + (v46).UnmarshalEasyJSON(in) + (out.Embedders)[key] = v46 + in.WantComma() + } + in.Delim('}') + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(out *jwriter.Writer, in Settings) { + out.RawByte('{') + first := true + _ = first + if len(in.RankingRules) != 0 { + const prefix string = ",\"rankingRules\":" + first = false + out.RawString(prefix[1:]) + { + out.RawByte('[') + for v47, v48 := range in.RankingRules { + if v47 > 0 { + out.RawByte(',') + } + out.String(string(v48)) + } + out.RawByte(']') + } + } + if in.DistinctAttribute != nil { + const prefix string = ",\"distinctAttribute\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(*in.DistinctAttribute)) + } + if len(in.SearchableAttributes) != 0 { + const prefix string = ",\"searchableAttributes\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v49, v50 := range in.SearchableAttributes { + if v49 > 0 { + out.RawByte(',') + } + out.String(string(v50)) + } + out.RawByte(']') + } + } + if in.SearchCutoffMs != 0 { + const prefix string = ",\"searchCutoffMs\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.SearchCutoffMs)) + } + if len(in.DisplayedAttributes) != 0 { + const prefix string = ",\"displayedAttributes\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v51, v52 := range in.DisplayedAttributes { + if v51 > 0 { + out.RawByte(',') + } + out.String(string(v52)) + } + out.RawByte(']') + } + } + if len(in.StopWords) != 0 { + const prefix string = ",\"stopWords\":" + if first { 
+ first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v53, v54 := range in.StopWords { + if v53 > 0 { + out.RawByte(',') + } + out.String(string(v54)) + } + out.RawByte(']') + } + } + if len(in.Synonyms) != 0 { + const prefix string = ",\"synonyms\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('{') + v55First := true + for v55Name, v55Value := range in.Synonyms { + if v55First { + v55First = false + } else { + out.RawByte(',') + } + out.String(string(v55Name)) + out.RawByte(':') + if v55Value == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v56, v57 := range v55Value { + if v56 > 0 { + out.RawByte(',') + } + out.String(string(v57)) + } + out.RawByte(']') + } + } + out.RawByte('}') + } + } + if len(in.FilterableAttributes) != 0 { + const prefix string = ",\"filterableAttributes\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v58, v59 := range in.FilterableAttributes { + if v58 > 0 { + out.RawByte(',') + } + out.String(string(v59)) + } + out.RawByte(']') + } + } + if len(in.SortableAttributes) != 0 { + const prefix string = ",\"sortableAttributes\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v60, v61 := range in.SortableAttributes { + if v60 > 0 { + out.RawByte(',') + } + out.String(string(v61)) + } + out.RawByte(']') + } + } + if in.TypoTolerance != nil { + const prefix string = ",\"typoTolerance\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + (*in.TypoTolerance).MarshalEasyJSON(out) + } + if in.Pagination != nil { + const prefix string = ",\"pagination\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + 
(*in.Pagination).MarshalEasyJSON(out) + } + if in.Faceting != nil { + const prefix string = ",\"faceting\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + (*in.Faceting).MarshalEasyJSON(out) + } + if len(in.Embedders) != 0 { + const prefix string = ",\"embedders\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('{') + v62First := true + for v62Name, v62Value := range in.Embedders { + if v62First { + v62First = false + } else { + out.RawByte(',') + } + out.String(string(v62Name)) + out.RawByte(':') + (v62Value).MarshalEasyJSON(out) + } + out.RawByte('}') + } + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v Settings) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v Settings) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo15(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *Settings) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *Settings) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo15(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(in *jlexer.Lexer, out *SearchResponse) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "hits": + if in.IsNull() { + in.Skip() + out.Hits = 
nil + } else { + in.Delim('[') + if out.Hits == nil { + if !in.IsDelim(']') { + out.Hits = make([]interface{}, 0, 4) + } else { + out.Hits = []interface{}{} + } + } else { + out.Hits = (out.Hits)[:0] + } + for !in.IsDelim(']') { + var v63 interface{} + if m, ok := v63.(easyjson.Unmarshaler); ok { + m.UnmarshalEasyJSON(in) + } else if m, ok := v63.(json.Unmarshaler); ok { + _ = m.UnmarshalJSON(in.Raw()) + } else { + v63 = in.Interface() + } + out.Hits = append(out.Hits, v63) + in.WantComma() + } + in.Delim(']') + } + case "estimatedTotalHits": + out.EstimatedTotalHits = int64(in.Int64()) + case "offset": + out.Offset = int64(in.Int64()) + case "limit": + out.Limit = int64(in.Int64()) + case "processingTimeMs": + out.ProcessingTimeMs = int64(in.Int64()) + case "query": + out.Query = string(in.String()) + case "facetDistribution": + if m, ok := out.FacetDistribution.(easyjson.Unmarshaler); ok { + m.UnmarshalEasyJSON(in) + } else if m, ok := out.FacetDistribution.(json.Unmarshaler); ok { + _ = m.UnmarshalJSON(in.Raw()) + } else { + out.FacetDistribution = in.Interface() + } + case "totalHits": + out.TotalHits = int64(in.Int64()) + case "hitsPerPage": + out.HitsPerPage = int64(in.Int64()) + case "page": + out.Page = int64(in.Int64()) + case "totalPages": + out.TotalPages = int64(in.Int64()) + case "facetStats": + if m, ok := out.FacetStats.(easyjson.Unmarshaler); ok { + m.UnmarshalEasyJSON(in) + } else if m, ok := out.FacetStats.(json.Unmarshaler); ok { + _ = m.UnmarshalJSON(in.Raw()) + } else { + out.FacetStats = in.Interface() + } + case "indexUid": + out.IndexUID = string(in.String()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(out *jwriter.Writer, in SearchResponse) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"hits\":" + out.RawString(prefix[1:]) + if in.Hits == nil && 
(out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v64, v65 := range in.Hits { + if v64 > 0 { + out.RawByte(',') + } + if m, ok := v65.(easyjson.Marshaler); ok { + m.MarshalEasyJSON(out) + } else if m, ok := v65.(json.Marshaler); ok { + out.Raw(m.MarshalJSON()) + } else { + out.Raw(json.Marshal(v65)) + } + } + out.RawByte(']') + } + } + if in.EstimatedTotalHits != 0 { + const prefix string = ",\"estimatedTotalHits\":" + out.RawString(prefix) + out.Int64(int64(in.EstimatedTotalHits)) + } + if in.Offset != 0 { + const prefix string = ",\"offset\":" + out.RawString(prefix) + out.Int64(int64(in.Offset)) + } + if in.Limit != 0 { + const prefix string = ",\"limit\":" + out.RawString(prefix) + out.Int64(int64(in.Limit)) + } + { + const prefix string = ",\"processingTimeMs\":" + out.RawString(prefix) + out.Int64(int64(in.ProcessingTimeMs)) + } + { + const prefix string = ",\"query\":" + out.RawString(prefix) + out.String(string(in.Query)) + } + if in.FacetDistribution != nil { + const prefix string = ",\"facetDistribution\":" + out.RawString(prefix) + if m, ok := in.FacetDistribution.(easyjson.Marshaler); ok { + m.MarshalEasyJSON(out) + } else if m, ok := in.FacetDistribution.(json.Marshaler); ok { + out.Raw(m.MarshalJSON()) + } else { + out.Raw(json.Marshal(in.FacetDistribution)) + } + } + if in.TotalHits != 0 { + const prefix string = ",\"totalHits\":" + out.RawString(prefix) + out.Int64(int64(in.TotalHits)) + } + if in.HitsPerPage != 0 { + const prefix string = ",\"hitsPerPage\":" + out.RawString(prefix) + out.Int64(int64(in.HitsPerPage)) + } + if in.Page != 0 { + const prefix string = ",\"page\":" + out.RawString(prefix) + out.Int64(int64(in.Page)) + } + if in.TotalPages != 0 { + const prefix string = ",\"totalPages\":" + out.RawString(prefix) + out.Int64(int64(in.TotalPages)) + } + if in.FacetStats != nil { + const prefix string = ",\"facetStats\":" + out.RawString(prefix) + if m, ok := 
in.FacetStats.(easyjson.Marshaler); ok { + m.MarshalEasyJSON(out) + } else if m, ok := in.FacetStats.(json.Marshaler); ok { + out.Raw(m.MarshalJSON()) + } else { + out.Raw(json.Marshal(in.FacetStats)) + } + } + if in.IndexUID != "" { + const prefix string = ",\"indexUid\":" + out.RawString(prefix) + out.String(string(in.IndexUID)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v SearchResponse) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v SearchResponse) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo16(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *SearchResponse) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *SearchResponse) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo16(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(in *jlexer.Lexer, out *SearchRequestHybrid) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "semanticRatio": + out.SemanticRatio = float64(in.Float64()) + case "embedder": + out.Embedder = string(in.String()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(out *jwriter.Writer, in SearchRequestHybrid) { + out.RawByte('{') + first := 
true + _ = first + if in.SemanticRatio != 0 { + const prefix string = ",\"semanticRatio\":" + first = false + out.RawString(prefix[1:]) + out.Float64(float64(in.SemanticRatio)) + } + if in.Embedder != "" { + const prefix string = ",\"embedder\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.Embedder)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v SearchRequestHybrid) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v SearchRequestHybrid) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo17(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *SearchRequestHybrid) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *SearchRequestHybrid) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo17(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(in *jlexer.Lexer, out *SearchRequest) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "offset": + out.Offset = int64(in.Int64()) + case "limit": + out.Limit = int64(in.Int64()) + case "attributesToRetrieve": + if in.IsNull() { + in.Skip() + out.AttributesToRetrieve = nil + } else { + in.Delim('[') + if out.AttributesToRetrieve == nil { + if !in.IsDelim(']') { + out.AttributesToRetrieve = 
make([]string, 0, 4) + } else { + out.AttributesToRetrieve = []string{} + } + } else { + out.AttributesToRetrieve = (out.AttributesToRetrieve)[:0] + } + for !in.IsDelim(']') { + var v66 string + v66 = string(in.String()) + out.AttributesToRetrieve = append(out.AttributesToRetrieve, v66) + in.WantComma() + } + in.Delim(']') + } + case "attributesToSearchOn": + if in.IsNull() { + in.Skip() + out.AttributesToSearchOn = nil + } else { + in.Delim('[') + if out.AttributesToSearchOn == nil { + if !in.IsDelim(']') { + out.AttributesToSearchOn = make([]string, 0, 4) + } else { + out.AttributesToSearchOn = []string{} + } + } else { + out.AttributesToSearchOn = (out.AttributesToSearchOn)[:0] + } + for !in.IsDelim(']') { + var v67 string + v67 = string(in.String()) + out.AttributesToSearchOn = append(out.AttributesToSearchOn, v67) + in.WantComma() + } + in.Delim(']') + } + case "attributesToCrop": + if in.IsNull() { + in.Skip() + out.AttributesToCrop = nil + } else { + in.Delim('[') + if out.AttributesToCrop == nil { + if !in.IsDelim(']') { + out.AttributesToCrop = make([]string, 0, 4) + } else { + out.AttributesToCrop = []string{} + } + } else { + out.AttributesToCrop = (out.AttributesToCrop)[:0] + } + for !in.IsDelim(']') { + var v68 string + v68 = string(in.String()) + out.AttributesToCrop = append(out.AttributesToCrop, v68) + in.WantComma() + } + in.Delim(']') + } + case "cropLength": + out.CropLength = int64(in.Int64()) + case "cropMarker": + out.CropMarker = string(in.String()) + case "attributesToHighlight": + if in.IsNull() { + in.Skip() + out.AttributesToHighlight = nil + } else { + in.Delim('[') + if out.AttributesToHighlight == nil { + if !in.IsDelim(']') { + out.AttributesToHighlight = make([]string, 0, 4) + } else { + out.AttributesToHighlight = []string{} + } + } else { + out.AttributesToHighlight = (out.AttributesToHighlight)[:0] + } + for !in.IsDelim(']') { + var v69 string + v69 = string(in.String()) + out.AttributesToHighlight = 
append(out.AttributesToHighlight, v69) + in.WantComma() + } + in.Delim(']') + } + case "highlightPreTag": + out.HighlightPreTag = string(in.String()) + case "highlightPostTag": + out.HighlightPostTag = string(in.String()) + case "matchingStrategy": + out.MatchingStrategy = string(in.String()) + case "filter": + if m, ok := out.Filter.(easyjson.Unmarshaler); ok { + m.UnmarshalEasyJSON(in) + } else if m, ok := out.Filter.(json.Unmarshaler); ok { + _ = m.UnmarshalJSON(in.Raw()) + } else { + out.Filter = in.Interface() + } + case "showMatchesPosition": + out.ShowMatchesPosition = bool(in.Bool()) + case "showRankingScore": + out.ShowRankingScore = bool(in.Bool()) + case "showRankingScoreDetails": + out.ShowRankingScoreDetails = bool(in.Bool()) + case "facets": + if in.IsNull() { + in.Skip() + out.Facets = nil + } else { + in.Delim('[') + if out.Facets == nil { + if !in.IsDelim(']') { + out.Facets = make([]string, 0, 4) + } else { + out.Facets = []string{} + } + } else { + out.Facets = (out.Facets)[:0] + } + for !in.IsDelim(']') { + var v70 string + v70 = string(in.String()) + out.Facets = append(out.Facets, v70) + in.WantComma() + } + in.Delim(']') + } + case "sort": + if in.IsNull() { + in.Skip() + out.Sort = nil + } else { + in.Delim('[') + if out.Sort == nil { + if !in.IsDelim(']') { + out.Sort = make([]string, 0, 4) + } else { + out.Sort = []string{} + } + } else { + out.Sort = (out.Sort)[:0] + } + for !in.IsDelim(']') { + var v71 string + v71 = string(in.String()) + out.Sort = append(out.Sort, v71) + in.WantComma() + } + in.Delim(']') + } + case "vector": + if in.IsNull() { + in.Skip() + out.Vector = nil + } else { + in.Delim('[') + if out.Vector == nil { + if !in.IsDelim(']') { + out.Vector = make([]float32, 0, 16) + } else { + out.Vector = []float32{} + } + } else { + out.Vector = (out.Vector)[:0] + } + for !in.IsDelim(']') { + var v72 float32 + v72 = float32(in.Float32()) + out.Vector = append(out.Vector, v72) + in.WantComma() + } + in.Delim(']') + } + case 
"hitsPerPage": + out.HitsPerPage = int64(in.Int64()) + case "page": + out.Page = int64(in.Int64()) + case "indexUid": + out.IndexUID = string(in.String()) + case "q": + out.Query = string(in.String()) + case "distinct": + out.Distinct = string(in.String()) + case "hybrid": + if in.IsNull() { + in.Skip() + out.Hybrid = nil + } else { + if out.Hybrid == nil { + out.Hybrid = new(SearchRequestHybrid) + } + (*out.Hybrid).UnmarshalEasyJSON(in) + } + case "retrieveVectors": + out.RetrieveVectors = bool(in.Bool()) + case "rankingScoreThreshold": + out.RankingScoreThreshold = float64(in.Float64()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(out *jwriter.Writer, in SearchRequest) { + out.RawByte('{') + first := true + _ = first + if in.Offset != 0 { + const prefix string = ",\"offset\":" + first = false + out.RawString(prefix[1:]) + out.Int64(int64(in.Offset)) + } + if in.Limit != 0 { + const prefix string = ",\"limit\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.Limit)) + } + if len(in.AttributesToRetrieve) != 0 { + const prefix string = ",\"attributesToRetrieve\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v73, v74 := range in.AttributesToRetrieve { + if v73 > 0 { + out.RawByte(',') + } + out.String(string(v74)) + } + out.RawByte(']') + } + } + if len(in.AttributesToSearchOn) != 0 { + const prefix string = ",\"attributesToSearchOn\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v75, v76 := range in.AttributesToSearchOn { + if v75 > 0 { + out.RawByte(',') + } + out.String(string(v76)) + } + out.RawByte(']') + } + } + if len(in.AttributesToCrop) != 0 { + const prefix string = ",\"attributesToCrop\":" + if 
first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v77, v78 := range in.AttributesToCrop { + if v77 > 0 { + out.RawByte(',') + } + out.String(string(v78)) + } + out.RawByte(']') + } + } + if in.CropLength != 0 { + const prefix string = ",\"cropLength\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.CropLength)) + } + if in.CropMarker != "" { + const prefix string = ",\"cropMarker\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.CropMarker)) + } + if len(in.AttributesToHighlight) != 0 { + const prefix string = ",\"attributesToHighlight\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v79, v80 := range in.AttributesToHighlight { + if v79 > 0 { + out.RawByte(',') + } + out.String(string(v80)) + } + out.RawByte(']') + } + } + if in.HighlightPreTag != "" { + const prefix string = ",\"highlightPreTag\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.HighlightPreTag)) + } + if in.HighlightPostTag != "" { + const prefix string = ",\"highlightPostTag\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.HighlightPostTag)) + } + if in.MatchingStrategy != "" { + const prefix string = ",\"matchingStrategy\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.MatchingStrategy)) + } + if in.Filter != nil { + const prefix string = ",\"filter\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + if m, ok := in.Filter.(easyjson.Marshaler); ok { + m.MarshalEasyJSON(out) + } else if m, ok := in.Filter.(json.Marshaler); ok { + 
out.Raw(m.MarshalJSON()) + } else { + out.Raw(json.Marshal(in.Filter)) + } + } + if in.ShowMatchesPosition { + const prefix string = ",\"showMatchesPosition\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Bool(bool(in.ShowMatchesPosition)) + } + if in.ShowRankingScore { + const prefix string = ",\"showRankingScore\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Bool(bool(in.ShowRankingScore)) + } + if in.ShowRankingScoreDetails { + const prefix string = ",\"showRankingScoreDetails\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Bool(bool(in.ShowRankingScoreDetails)) + } + if len(in.Facets) != 0 { + const prefix string = ",\"facets\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v81, v82 := range in.Facets { + if v81 > 0 { + out.RawByte(',') + } + out.String(string(v82)) + } + out.RawByte(']') + } + } + if len(in.Sort) != 0 { + const prefix string = ",\"sort\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v83, v84 := range in.Sort { + if v83 > 0 { + out.RawByte(',') + } + out.String(string(v84)) + } + out.RawByte(']') + } + } + if len(in.Vector) != 0 { + const prefix string = ",\"vector\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v85, v86 := range in.Vector { + if v85 > 0 { + out.RawByte(',') + } + out.Float32(float32(v86)) + } + out.RawByte(']') + } + } + if in.HitsPerPage != 0 { + const prefix string = ",\"hitsPerPage\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.HitsPerPage)) + } + if in.Page != 0 { + const prefix string = ",\"page\":" + if first { + first = false + 
out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.Page)) + } + if in.IndexUID != "" { + const prefix string = ",\"indexUid\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.IndexUID)) + } + { + const prefix string = ",\"q\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.Query)) + } + if in.Distinct != "" { + const prefix string = ",\"distinct\":" + out.RawString(prefix) + out.String(string(in.Distinct)) + } + if in.Hybrid != nil { + const prefix string = ",\"hybrid\":" + out.RawString(prefix) + (*in.Hybrid).MarshalEasyJSON(out) + } + if in.RetrieveVectors { + const prefix string = ",\"retrieveVectors\":" + out.RawString(prefix) + out.Bool(bool(in.RetrieveVectors)) + } + if in.RankingScoreThreshold != 0 { + const prefix string = ",\"rankingScoreThreshold\":" + out.RawString(prefix) + out.Float64(float64(in.RankingScoreThreshold)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v SearchRequest) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v SearchRequest) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo18(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *SearchRequest) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *SearchRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo18(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(in *jlexer.Lexer, out 
*Pagination) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "maxTotalHits": + out.MaxTotalHits = int64(in.Int64()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(out *jwriter.Writer, in Pagination) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"maxTotalHits\":" + out.RawString(prefix[1:]) + out.Int64(int64(in.MaxTotalHits)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v Pagination) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v Pagination) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo19(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *Pagination) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *Pagination) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo19(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(in *jlexer.Lexer, out *MultiSearchResponse) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch 
key { + case "results": + if in.IsNull() { + in.Skip() + out.Results = nil + } else { + in.Delim('[') + if out.Results == nil { + if !in.IsDelim(']') { + out.Results = make([]SearchResponse, 0, 0) + } else { + out.Results = []SearchResponse{} + } + } else { + out.Results = (out.Results)[:0] + } + for !in.IsDelim(']') { + var v87 SearchResponse + (v87).UnmarshalEasyJSON(in) + out.Results = append(out.Results, v87) + in.WantComma() + } + in.Delim(']') + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(out *jwriter.Writer, in MultiSearchResponse) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"results\":" + out.RawString(prefix[1:]) + if in.Results == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v88, v89 := range in.Results { + if v88 > 0 { + out.RawByte(',') + } + (v89).MarshalEasyJSON(out) + } + out.RawByte(']') + } + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v MultiSearchResponse) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v MultiSearchResponse) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo20(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *MultiSearchResponse) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *MultiSearchResponse) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo20(l, v) +} +func 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(in *jlexer.Lexer, out *MultiSearchRequest) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "queries": + if in.IsNull() { + in.Skip() + out.Queries = nil + } else { + in.Delim('[') + if out.Queries == nil { + if !in.IsDelim(']') { + out.Queries = make([]*SearchRequest, 0, 8) + } else { + out.Queries = []*SearchRequest{} + } + } else { + out.Queries = (out.Queries)[:0] + } + for !in.IsDelim(']') { + var v90 *SearchRequest + if in.IsNull() { + in.Skip() + v90 = nil + } else { + if v90 == nil { + v90 = new(SearchRequest) + } + (*v90).UnmarshalEasyJSON(in) + } + out.Queries = append(out.Queries, v90) + in.WantComma() + } + in.Delim(']') + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(out *jwriter.Writer, in MultiSearchRequest) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"queries\":" + out.RawString(prefix[1:]) + if in.Queries == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v91, v92 := range in.Queries { + if v91 > 0 { + out.RawByte(',') + } + if v92 == nil { + out.RawString("null") + } else { + (*v92).MarshalEasyJSON(out) + } + } + out.RawByte(']') + } + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v MultiSearchRequest) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v MultiSearchRequest) MarshalEasyJSON(w *jwriter.Writer) { 
+ easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo21(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *MultiSearchRequest) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *MultiSearchRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo21(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(in *jlexer.Lexer, out *MinWordSizeForTypos) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "oneTypo": + out.OneTypo = int64(in.Int64()) + case "twoTypos": + out.TwoTypos = int64(in.Int64()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(out *jwriter.Writer, in MinWordSizeForTypos) { + out.RawByte('{') + first := true + _ = first + if in.OneTypo != 0 { + const prefix string = ",\"oneTypo\":" + first = false + out.RawString(prefix[1:]) + out.Int64(int64(in.OneTypo)) + } + if in.TwoTypos != 0 { + const prefix string = ",\"twoTypos\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.TwoTypos)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v MinWordSizeForTypos) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v MinWordSizeForTypos) MarshalEasyJSON(w 
*jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo22(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *MinWordSizeForTypos) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *MinWordSizeForTypos) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo22(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(in *jlexer.Lexer, out *KeysResults) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "results": + if in.IsNull() { + in.Skip() + out.Results = nil + } else { + in.Delim('[') + if out.Results == nil { + if !in.IsDelim(']') { + out.Results = make([]Key, 0, 0) + } else { + out.Results = []Key{} + } + } else { + out.Results = (out.Results)[:0] + } + for !in.IsDelim(']') { + var v93 Key + (v93).UnmarshalEasyJSON(in) + out.Results = append(out.Results, v93) + in.WantComma() + } + in.Delim(']') + } + case "offset": + out.Offset = int64(in.Int64()) + case "limit": + out.Limit = int64(in.Int64()) + case "total": + out.Total = int64(in.Int64()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(out *jwriter.Writer, in KeysResults) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"results\":" + out.RawString(prefix[1:]) + if in.Results == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v94, v95 := range in.Results { + if v94 > 0 { + 
out.RawByte(',') + } + (v95).MarshalEasyJSON(out) + } + out.RawByte(']') + } + } + { + const prefix string = ",\"offset\":" + out.RawString(prefix) + out.Int64(int64(in.Offset)) + } + { + const prefix string = ",\"limit\":" + out.RawString(prefix) + out.Int64(int64(in.Limit)) + } + { + const prefix string = ",\"total\":" + out.RawString(prefix) + out.Int64(int64(in.Total)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v KeysResults) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v KeysResults) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo23(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *KeysResults) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *KeysResults) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo23(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(in *jlexer.Lexer, out *KeysQuery) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "Limit": + out.Limit = int64(in.Int64()) + case "Offset": + out.Offset = int64(in.Int64()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(out *jwriter.Writer, in KeysQuery) { + out.RawByte('{') + first := true + _ = first 
+ { + const prefix string = ",\"Limit\":" + out.RawString(prefix[1:]) + out.Int64(int64(in.Limit)) + } + { + const prefix string = ",\"Offset\":" + out.RawString(prefix) + out.Int64(int64(in.Offset)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v KeysQuery) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v KeysQuery) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo24(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *KeysQuery) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *KeysQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo24(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(in *jlexer.Lexer, out *KeyUpdate) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "name": + out.Name = string(in.String()) + case "description": + out.Description = string(in.String()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(out *jwriter.Writer, in KeyUpdate) { + out.RawByte('{') + first := true + _ = first + if in.Name != "" { + const prefix string = ",\"name\":" + first = false + out.RawString(prefix[1:]) + out.String(string(in.Name)) + } + if in.Description != "" { + const 
prefix string = ",\"description\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.Description)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v KeyUpdate) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v KeyUpdate) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo25(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *KeyUpdate) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *KeyUpdate) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo25(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(in *jlexer.Lexer, out *KeyParsed) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "name": + out.Name = string(in.String()) + case "description": + out.Description = string(in.String()) + case "uid": + out.UID = string(in.String()) + case "actions": + if in.IsNull() { + in.Skip() + out.Actions = nil + } else { + in.Delim('[') + if out.Actions == nil { + if !in.IsDelim(']') { + out.Actions = make([]string, 0, 4) + } else { + out.Actions = []string{} + } + } else { + out.Actions = (out.Actions)[:0] + } + for !in.IsDelim(']') { + var v96 string + v96 = string(in.String()) + out.Actions = append(out.Actions, v96) + in.WantComma() + } + 
in.Delim(']') + } + case "indexes": + if in.IsNull() { + in.Skip() + out.Indexes = nil + } else { + in.Delim('[') + if out.Indexes == nil { + if !in.IsDelim(']') { + out.Indexes = make([]string, 0, 4) + } else { + out.Indexes = []string{} + } + } else { + out.Indexes = (out.Indexes)[:0] + } + for !in.IsDelim(']') { + var v97 string + v97 = string(in.String()) + out.Indexes = append(out.Indexes, v97) + in.WantComma() + } + in.Delim(']') + } + case "expiresAt": + if in.IsNull() { + in.Skip() + out.ExpiresAt = nil + } else { + if out.ExpiresAt == nil { + out.ExpiresAt = new(string) + } + *out.ExpiresAt = string(in.String()) + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(out *jwriter.Writer, in KeyParsed) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"name\":" + out.RawString(prefix[1:]) + out.String(string(in.Name)) + } + { + const prefix string = ",\"description\":" + out.RawString(prefix) + out.String(string(in.Description)) + } + if in.UID != "" { + const prefix string = ",\"uid\":" + out.RawString(prefix) + out.String(string(in.UID)) + } + if len(in.Actions) != 0 { + const prefix string = ",\"actions\":" + out.RawString(prefix) + { + out.RawByte('[') + for v98, v99 := range in.Actions { + if v98 > 0 { + out.RawByte(',') + } + out.String(string(v99)) + } + out.RawByte(']') + } + } + if len(in.Indexes) != 0 { + const prefix string = ",\"indexes\":" + out.RawString(prefix) + { + out.RawByte('[') + for v100, v101 := range in.Indexes { + if v100 > 0 { + out.RawByte(',') + } + out.String(string(v101)) + } + out.RawByte(']') + } + } + { + const prefix string = ",\"expiresAt\":" + out.RawString(prefix) + if in.ExpiresAt == nil { + out.RawString("null") + } else { + out.String(string(*in.ExpiresAt)) + } + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v KeyParsed) 
MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v KeyParsed) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo26(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *KeyParsed) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *KeyParsed) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo26(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(in *jlexer.Lexer, out *Key) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "name": + out.Name = string(in.String()) + case "description": + out.Description = string(in.String()) + case "key": + out.Key = string(in.String()) + case "uid": + out.UID = string(in.String()) + case "actions": + if in.IsNull() { + in.Skip() + out.Actions = nil + } else { + in.Delim('[') + if out.Actions == nil { + if !in.IsDelim(']') { + out.Actions = make([]string, 0, 4) + } else { + out.Actions = []string{} + } + } else { + out.Actions = (out.Actions)[:0] + } + for !in.IsDelim(']') { + var v102 string + v102 = string(in.String()) + out.Actions = append(out.Actions, v102) + in.WantComma() + } + in.Delim(']') + } + case "indexes": + if in.IsNull() { + in.Skip() + out.Indexes = nil + } else { + in.Delim('[') + if out.Indexes == nil { + if !in.IsDelim(']') { + out.Indexes = make([]string, 0, 4) + } else { + out.Indexes = 
[]string{} + } + } else { + out.Indexes = (out.Indexes)[:0] + } + for !in.IsDelim(']') { + var v103 string + v103 = string(in.String()) + out.Indexes = append(out.Indexes, v103) + in.WantComma() + } + in.Delim(']') + } + case "createdAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.CreatedAt).UnmarshalJSON(data)) + } + case "updatedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.UpdatedAt).UnmarshalJSON(data)) + } + case "expiresAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.ExpiresAt).UnmarshalJSON(data)) + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(out *jwriter.Writer, in Key) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"name\":" + out.RawString(prefix[1:]) + out.String(string(in.Name)) + } + { + const prefix string = ",\"description\":" + out.RawString(prefix) + out.String(string(in.Description)) + } + if in.Key != "" { + const prefix string = ",\"key\":" + out.RawString(prefix) + out.String(string(in.Key)) + } + if in.UID != "" { + const prefix string = ",\"uid\":" + out.RawString(prefix) + out.String(string(in.UID)) + } + if len(in.Actions) != 0 { + const prefix string = ",\"actions\":" + out.RawString(prefix) + { + out.RawByte('[') + for v104, v105 := range in.Actions { + if v104 > 0 { + out.RawByte(',') + } + out.String(string(v105)) + } + out.RawByte(']') + } + } + if len(in.Indexes) != 0 { + const prefix string = ",\"indexes\":" + out.RawString(prefix) + { + out.RawByte('[') + for v106, v107 := range in.Indexes { + if v106 > 0 { + out.RawByte(',') + } + out.String(string(v107)) + } + out.RawByte(']') + } + } + if true { + const prefix string = ",\"createdAt\":" + out.RawString(prefix) + out.Raw((in.CreatedAt).MarshalJSON()) + } + if true { + const prefix string = ",\"updatedAt\":" + out.RawString(prefix) + out.Raw((in.UpdatedAt).MarshalJSON()) + } + { + const 
prefix string = ",\"expiresAt\":" + out.RawString(prefix) + out.Raw((in.ExpiresAt).MarshalJSON()) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v Key) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v Key) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo27(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *Key) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *Key) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo27(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(in *jlexer.Lexer, out *IndexesResults) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "results": + if in.IsNull() { + in.Skip() + out.Results = nil + } else { + in.Delim('[') + if out.Results == nil { + if !in.IsDelim(']') { + out.Results = make([]*IndexResult, 0, 8) + } else { + out.Results = []*IndexResult{} + } + } else { + out.Results = (out.Results)[:0] + } + for !in.IsDelim(']') { + var v108 *IndexResult + if in.IsNull() { + in.Skip() + v108 = nil + } else { + if v108 == nil { + v108 = new(IndexResult) + } + (*v108).UnmarshalEasyJSON(in) + } + out.Results = append(out.Results, v108) + in.WantComma() + } + in.Delim(']') + } + case "offset": + out.Offset = int64(in.Int64()) + case "limit": + out.Limit = int64(in.Int64()) + case 
"total": + out.Total = int64(in.Int64()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(out *jwriter.Writer, in IndexesResults) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"results\":" + out.RawString(prefix[1:]) + if in.Results == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v109, v110 := range in.Results { + if v109 > 0 { + out.RawByte(',') + } + if v110 == nil { + out.RawString("null") + } else { + (*v110).MarshalEasyJSON(out) + } + } + out.RawByte(']') + } + } + { + const prefix string = ",\"offset\":" + out.RawString(prefix) + out.Int64(int64(in.Offset)) + } + { + const prefix string = ",\"limit\":" + out.RawString(prefix) + out.Int64(int64(in.Limit)) + } + { + const prefix string = ",\"total\":" + out.RawString(prefix) + out.Int64(int64(in.Total)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v IndexesResults) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v IndexesResults) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo28(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *IndexesResults) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *IndexesResults) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo28(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(in *jlexer.Lexer, out *IndexesQuery) { + 
isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "Limit": + out.Limit = int64(in.Int64()) + case "Offset": + out.Offset = int64(in.Int64()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(out *jwriter.Writer, in IndexesQuery) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"Limit\":" + out.RawString(prefix[1:]) + out.Int64(int64(in.Limit)) + } + { + const prefix string = ",\"Offset\":" + out.RawString(prefix) + out.Int64(int64(in.Offset)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v IndexesQuery) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v IndexesQuery) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo29(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *IndexesQuery) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *IndexesQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo29(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(in *jlexer.Lexer, out *IndexResult) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := 
in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "uid": + out.UID = string(in.String()) + case "createdAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.CreatedAt).UnmarshalJSON(data)) + } + case "updatedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.UpdatedAt).UnmarshalJSON(data)) + } + case "primaryKey": + out.PrimaryKey = string(in.String()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(out *jwriter.Writer, in IndexResult) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"uid\":" + out.RawString(prefix[1:]) + out.String(string(in.UID)) + } + { + const prefix string = ",\"createdAt\":" + out.RawString(prefix) + out.Raw((in.CreatedAt).MarshalJSON()) + } + { + const prefix string = ",\"updatedAt\":" + out.RawString(prefix) + out.Raw((in.UpdatedAt).MarshalJSON()) + } + if in.PrimaryKey != "" { + const prefix string = ",\"primaryKey\":" + out.RawString(prefix) + out.String(string(in.PrimaryKey)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v IndexResult) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v IndexResult) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo30(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *IndexResult) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *IndexResult) UnmarshalEasyJSON(l *jlexer.Lexer) { + 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo30(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(in *jlexer.Lexer, out *IndexConfig) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "Uid": + out.Uid = string(in.String()) + case "PrimaryKey": + out.PrimaryKey = string(in.String()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(out *jwriter.Writer, in IndexConfig) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"Uid\":" + out.RawString(prefix[1:]) + out.String(string(in.Uid)) + } + { + const prefix string = ",\"PrimaryKey\":" + out.RawString(prefix) + out.String(string(in.PrimaryKey)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v IndexConfig) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v IndexConfig) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo31(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *IndexConfig) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *IndexConfig) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo31(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(in *jlexer.Lexer, out 
*Health) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "status": + out.Status = string(in.String()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(out *jwriter.Writer, in Health) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"status\":" + out.RawString(prefix[1:]) + out.String(string(in.Status)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v Health) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v Health) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo32(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *Health) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *Health) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo32(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(in *jlexer.Lexer, out *Faceting) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "maxValuesPerFacet": + 
out.MaxValuesPerFacet = int64(in.Int64()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(out *jwriter.Writer, in Faceting) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"maxValuesPerFacet\":" + out.RawString(prefix[1:]) + out.Int64(int64(in.MaxValuesPerFacet)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v Faceting) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v Faceting) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo33(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *Faceting) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *Faceting) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo33(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(in *jlexer.Lexer, out *FacetSearchResponse) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "facetHits": + if in.IsNull() { + in.Skip() + out.FacetHits = nil + } else { + in.Delim('[') + if out.FacetHits == nil { + if !in.IsDelim(']') { + out.FacetHits = make([]interface{}, 0, 4) + } else { + out.FacetHits = []interface{}{} + } + } else { + out.FacetHits = (out.FacetHits)[:0] + } + for 
!in.IsDelim(']') { + var v111 interface{} + if m, ok := v111.(easyjson.Unmarshaler); ok { + m.UnmarshalEasyJSON(in) + } else if m, ok := v111.(json.Unmarshaler); ok { + _ = m.UnmarshalJSON(in.Raw()) + } else { + v111 = in.Interface() + } + out.FacetHits = append(out.FacetHits, v111) + in.WantComma() + } + in.Delim(']') + } + case "facetQuery": + out.FacetQuery = string(in.String()) + case "processingTimeMs": + out.ProcessingTimeMs = int64(in.Int64()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(out *jwriter.Writer, in FacetSearchResponse) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"facetHits\":" + out.RawString(prefix[1:]) + if in.FacetHits == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v112, v113 := range in.FacetHits { + if v112 > 0 { + out.RawByte(',') + } + if m, ok := v113.(easyjson.Marshaler); ok { + m.MarshalEasyJSON(out) + } else if m, ok := v113.(json.Marshaler); ok { + out.Raw(m.MarshalJSON()) + } else { + out.Raw(json.Marshal(v113)) + } + } + out.RawByte(']') + } + } + { + const prefix string = ",\"facetQuery\":" + out.RawString(prefix) + out.String(string(in.FacetQuery)) + } + { + const prefix string = ",\"processingTimeMs\":" + out.RawString(prefix) + out.Int64(int64(in.ProcessingTimeMs)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v FacetSearchResponse) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v FacetSearchResponse) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo34(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v 
*FacetSearchResponse) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *FacetSearchResponse) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo34(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(in *jlexer.Lexer, out *FacetSearchRequest) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "facetName": + out.FacetName = string(in.String()) + case "facetQuery": + out.FacetQuery = string(in.String()) + case "q": + out.Q = string(in.String()) + case "filter": + out.Filter = string(in.String()) + case "matchingStrategy": + out.MatchingStrategy = string(in.String()) + case "attributesToSearchOn": + if in.IsNull() { + in.Skip() + out.AttributesToSearchOn = nil + } else { + in.Delim('[') + if out.AttributesToSearchOn == nil { + if !in.IsDelim(']') { + out.AttributesToSearchOn = make([]string, 0, 4) + } else { + out.AttributesToSearchOn = []string{} + } + } else { + out.AttributesToSearchOn = (out.AttributesToSearchOn)[:0] + } + for !in.IsDelim(']') { + var v114 string + v114 = string(in.String()) + out.AttributesToSearchOn = append(out.AttributesToSearchOn, v114) + in.WantComma() + } + in.Delim(']') + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(out *jwriter.Writer, in FacetSearchRequest) { + out.RawByte('{') + first := true + _ = first + if in.FacetName != "" { + const prefix string = ",\"facetName\":" + first = false + out.RawString(prefix[1:]) + 
out.String(string(in.FacetName)) + } + if in.FacetQuery != "" { + const prefix string = ",\"facetQuery\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.FacetQuery)) + } + if in.Q != "" { + const prefix string = ",\"q\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.Q)) + } + if in.Filter != "" { + const prefix string = ",\"filter\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.Filter)) + } + if in.MatchingStrategy != "" { + const prefix string = ",\"matchingStrategy\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.MatchingStrategy)) + } + if len(in.AttributesToSearchOn) != 0 { + const prefix string = ",\"attributesToSearchOn\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v115, v116 := range in.AttributesToSearchOn { + if v115 > 0 { + out.RawByte(',') + } + out.String(string(v116)) + } + out.RawByte(']') + } + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v FacetSearchRequest) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v FacetSearchRequest) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo35(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *FacetSearchRequest) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v 
*FacetSearchRequest) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo35(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(in *jlexer.Lexer, out *Embedder) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "source": + out.Source = string(in.String()) + case "apiKey": + out.ApiKey = string(in.String()) + case "model": + out.Model = string(in.String()) + case "dimensions": + out.Dimensions = int(in.Int()) + case "documentTemplate": + out.DocumentTemplate = string(in.String()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(out *jwriter.Writer, in Embedder) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"source\":" + out.RawString(prefix[1:]) + out.String(string(in.Source)) + } + if in.ApiKey != "" { + const prefix string = ",\"apiKey\":" + out.RawString(prefix) + out.String(string(in.ApiKey)) + } + if in.Model != "" { + const prefix string = ",\"model\":" + out.RawString(prefix) + out.String(string(in.Model)) + } + if in.Dimensions != 0 { + const prefix string = ",\"dimensions\":" + out.RawString(prefix) + out.Int(int(in.Dimensions)) + } + if in.DocumentTemplate != "" { + const prefix string = ",\"documentTemplate\":" + out.RawString(prefix) + out.String(string(in.DocumentTemplate)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v Embedder) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface 
+func (v Embedder) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo36(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *Embedder) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *Embedder) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo36(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(in *jlexer.Lexer, out *DocumentsResult) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "results": + if in.IsNull() { + in.Skip() + out.Results = nil + } else { + in.Delim('[') + if out.Results == nil { + if !in.IsDelim(']') { + out.Results = make([]map[string]interface{}, 0, 8) + } else { + out.Results = []map[string]interface{}{} + } + } else { + out.Results = (out.Results)[:0] + } + for !in.IsDelim(']') { + var v117 map[string]interface{} + if in.IsNull() { + in.Skip() + } else { + in.Delim('{') + v117 = make(map[string]interface{}) + for !in.IsDelim('}') { + key := string(in.String()) + in.WantColon() + var v118 interface{} + if m, ok := v118.(easyjson.Unmarshaler); ok { + m.UnmarshalEasyJSON(in) + } else if m, ok := v118.(json.Unmarshaler); ok { + _ = m.UnmarshalJSON(in.Raw()) + } else { + v118 = in.Interface() + } + (v117)[key] = v118 + in.WantComma() + } + in.Delim('}') + } + out.Results = append(out.Results, v117) + in.WantComma() + } + in.Delim(']') + } + case "limit": + out.Limit = int64(in.Int64()) + case "offset": + out.Offset = int64(in.Int64()) + case "total": + out.Total = int64(in.Int64()) + default: + 
in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(out *jwriter.Writer, in DocumentsResult) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"results\":" + out.RawString(prefix[1:]) + if in.Results == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v119, v120 := range in.Results { + if v119 > 0 { + out.RawByte(',') + } + if v120 == nil && (out.Flags&jwriter.NilMapAsEmpty) == 0 { + out.RawString(`null`) + } else { + out.RawByte('{') + v121First := true + for v121Name, v121Value := range v120 { + if v121First { + v121First = false + } else { + out.RawByte(',') + } + out.String(string(v121Name)) + out.RawByte(':') + if m, ok := v121Value.(easyjson.Marshaler); ok { + m.MarshalEasyJSON(out) + } else if m, ok := v121Value.(json.Marshaler); ok { + out.Raw(m.MarshalJSON()) + } else { + out.Raw(json.Marshal(v121Value)) + } + } + out.RawByte('}') + } + } + out.RawByte(']') + } + } + { + const prefix string = ",\"limit\":" + out.RawString(prefix) + out.Int64(int64(in.Limit)) + } + { + const prefix string = ",\"offset\":" + out.RawString(prefix) + out.Int64(int64(in.Offset)) + } + { + const prefix string = ",\"total\":" + out.RawString(prefix) + out.Int64(int64(in.Total)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v DocumentsResult) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v DocumentsResult) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo37(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *DocumentsResult) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *DocumentsResult) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo37(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(in *jlexer.Lexer, out *DocumentsQuery) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "offset": + out.Offset = int64(in.Int64()) + case "limit": + out.Limit = int64(in.Int64()) + case "fields": + if in.IsNull() { + in.Skip() + out.Fields = nil + } else { + in.Delim('[') + if out.Fields == nil { + if !in.IsDelim(']') { + out.Fields = make([]string, 0, 4) + } else { + out.Fields = []string{} + } + } else { + out.Fields = (out.Fields)[:0] + } + for !in.IsDelim(']') { + var v122 string + v122 = string(in.String()) + out.Fields = append(out.Fields, v122) + in.WantComma() + } + in.Delim(']') + } + case "filter": + if m, ok := out.Filter.(easyjson.Unmarshaler); ok { + m.UnmarshalEasyJSON(in) + } else if m, ok := out.Filter.(json.Unmarshaler); ok { + _ = m.UnmarshalJSON(in.Raw()) + } else { + out.Filter = in.Interface() + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(out *jwriter.Writer, in DocumentsQuery) { + out.RawByte('{') + first := true + _ = first + if in.Offset != 0 { + const prefix string = ",\"offset\":" + first = false + out.RawString(prefix[1:]) + out.Int64(int64(in.Offset)) + } + if in.Limit != 0 { + const prefix string = ",\"limit\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + 
out.Int64(int64(in.Limit)) + } + if len(in.Fields) != 0 { + const prefix string = ",\"fields\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v123, v124 := range in.Fields { + if v123 > 0 { + out.RawByte(',') + } + out.String(string(v124)) + } + out.RawByte(']') + } + } + if in.Filter != nil { + const prefix string = ",\"filter\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + if m, ok := in.Filter.(easyjson.Marshaler); ok { + m.MarshalEasyJSON(out) + } else if m, ok := in.Filter.(json.Marshaler); ok { + out.Raw(m.MarshalJSON()) + } else { + out.Raw(json.Marshal(in.Filter)) + } + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v DocumentsQuery) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v DocumentsQuery) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo38(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *DocumentsQuery) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *DocumentsQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo38(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(in *jlexer.Lexer, out *DocumentQuery) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + 
case "fields": + if in.IsNull() { + in.Skip() + out.Fields = nil + } else { + in.Delim('[') + if out.Fields == nil { + if !in.IsDelim(']') { + out.Fields = make([]string, 0, 4) + } else { + out.Fields = []string{} + } + } else { + out.Fields = (out.Fields)[:0] + } + for !in.IsDelim(']') { + var v125 string + v125 = string(in.String()) + out.Fields = append(out.Fields, v125) + in.WantComma() + } + in.Delim(']') + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(out *jwriter.Writer, in DocumentQuery) { + out.RawByte('{') + first := true + _ = first + if len(in.Fields) != 0 { + const prefix string = ",\"fields\":" + first = false + out.RawString(prefix[1:]) + { + out.RawByte('[') + for v126, v127 := range in.Fields { + if v126 > 0 { + out.RawByte(',') + } + out.String(string(v127)) + } + out.RawByte(']') + } + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v DocumentQuery) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v DocumentQuery) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo39(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *DocumentQuery) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *DocumentQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo39(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(in *jlexer.Lexer, out *Details) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel 
{ + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "receivedDocuments": + out.ReceivedDocuments = int64(in.Int64()) + case "indexedDocuments": + out.IndexedDocuments = int64(in.Int64()) + case "deletedDocuments": + out.DeletedDocuments = int64(in.Int64()) + case "primaryKey": + out.PrimaryKey = string(in.String()) + case "providedIds": + out.ProvidedIds = int64(in.Int64()) + case "rankingRules": + if in.IsNull() { + in.Skip() + out.RankingRules = nil + } else { + in.Delim('[') + if out.RankingRules == nil { + if !in.IsDelim(']') { + out.RankingRules = make([]string, 0, 4) + } else { + out.RankingRules = []string{} + } + } else { + out.RankingRules = (out.RankingRules)[:0] + } + for !in.IsDelim(']') { + var v128 string + v128 = string(in.String()) + out.RankingRules = append(out.RankingRules, v128) + in.WantComma() + } + in.Delim(']') + } + case "distinctAttribute": + if in.IsNull() { + in.Skip() + out.DistinctAttribute = nil + } else { + if out.DistinctAttribute == nil { + out.DistinctAttribute = new(string) + } + *out.DistinctAttribute = string(in.String()) + } + case "searchableAttributes": + if in.IsNull() { + in.Skip() + out.SearchableAttributes = nil + } else { + in.Delim('[') + if out.SearchableAttributes == nil { + if !in.IsDelim(']') { + out.SearchableAttributes = make([]string, 0, 4) + } else { + out.SearchableAttributes = []string{} + } + } else { + out.SearchableAttributes = (out.SearchableAttributes)[:0] + } + for !in.IsDelim(']') { + var v129 string + v129 = string(in.String()) + out.SearchableAttributes = append(out.SearchableAttributes, v129) + in.WantComma() + } + in.Delim(']') + } + case "displayedAttributes": + if in.IsNull() { + in.Skip() + out.DisplayedAttributes = nil + } else { + in.Delim('[') + if out.DisplayedAttributes == nil { + if !in.IsDelim(']') { + 
out.DisplayedAttributes = make([]string, 0, 4) + } else { + out.DisplayedAttributes = []string{} + } + } else { + out.DisplayedAttributes = (out.DisplayedAttributes)[:0] + } + for !in.IsDelim(']') { + var v130 string + v130 = string(in.String()) + out.DisplayedAttributes = append(out.DisplayedAttributes, v130) + in.WantComma() + } + in.Delim(']') + } + case "stopWords": + if in.IsNull() { + in.Skip() + out.StopWords = nil + } else { + in.Delim('[') + if out.StopWords == nil { + if !in.IsDelim(']') { + out.StopWords = make([]string, 0, 4) + } else { + out.StopWords = []string{} + } + } else { + out.StopWords = (out.StopWords)[:0] + } + for !in.IsDelim(']') { + var v131 string + v131 = string(in.String()) + out.StopWords = append(out.StopWords, v131) + in.WantComma() + } + in.Delim(']') + } + case "synonyms": + if in.IsNull() { + in.Skip() + } else { + in.Delim('{') + if !in.IsDelim('}') { + out.Synonyms = make(map[string][]string) + } else { + out.Synonyms = nil + } + for !in.IsDelim('}') { + key := string(in.String()) + in.WantColon() + var v132 []string + if in.IsNull() { + in.Skip() + v132 = nil + } else { + in.Delim('[') + if v132 == nil { + if !in.IsDelim(']') { + v132 = make([]string, 0, 4) + } else { + v132 = []string{} + } + } else { + v132 = (v132)[:0] + } + for !in.IsDelim(']') { + var v133 string + v133 = string(in.String()) + v132 = append(v132, v133) + in.WantComma() + } + in.Delim(']') + } + (out.Synonyms)[key] = v132 + in.WantComma() + } + in.Delim('}') + } + case "filterableAttributes": + if in.IsNull() { + in.Skip() + out.FilterableAttributes = nil + } else { + in.Delim('[') + if out.FilterableAttributes == nil { + if !in.IsDelim(']') { + out.FilterableAttributes = make([]string, 0, 4) + } else { + out.FilterableAttributes = []string{} + } + } else { + out.FilterableAttributes = (out.FilterableAttributes)[:0] + } + for !in.IsDelim(']') { + var v134 string + v134 = string(in.String()) + out.FilterableAttributes = append(out.FilterableAttributes, 
v134) + in.WantComma() + } + in.Delim(']') + } + case "sortableAttributes": + if in.IsNull() { + in.Skip() + out.SortableAttributes = nil + } else { + in.Delim('[') + if out.SortableAttributes == nil { + if !in.IsDelim(']') { + out.SortableAttributes = make([]string, 0, 4) + } else { + out.SortableAttributes = []string{} + } + } else { + out.SortableAttributes = (out.SortableAttributes)[:0] + } + for !in.IsDelim(']') { + var v135 string + v135 = string(in.String()) + out.SortableAttributes = append(out.SortableAttributes, v135) + in.WantComma() + } + in.Delim(']') + } + case "typoTolerance": + if in.IsNull() { + in.Skip() + out.TypoTolerance = nil + } else { + if out.TypoTolerance == nil { + out.TypoTolerance = new(TypoTolerance) + } + (*out.TypoTolerance).UnmarshalEasyJSON(in) + } + case "pagination": + if in.IsNull() { + in.Skip() + out.Pagination = nil + } else { + if out.Pagination == nil { + out.Pagination = new(Pagination) + } + (*out.Pagination).UnmarshalEasyJSON(in) + } + case "faceting": + if in.IsNull() { + in.Skip() + out.Faceting = nil + } else { + if out.Faceting == nil { + out.Faceting = new(Faceting) + } + (*out.Faceting).UnmarshalEasyJSON(in) + } + case "matchedTasks": + out.MatchedTasks = int64(in.Int64()) + case "canceledTasks": + out.CanceledTasks = int64(in.Int64()) + case "deletedTasks": + out.DeletedTasks = int64(in.Int64()) + case "originalFilter": + out.OriginalFilter = string(in.String()) + case "swaps": + if in.IsNull() { + in.Skip() + out.Swaps = nil + } else { + in.Delim('[') + if out.Swaps == nil { + if !in.IsDelim(']') { + out.Swaps = make([]SwapIndexesParams, 0, 2) + } else { + out.Swaps = []SwapIndexesParams{} + } + } else { + out.Swaps = (out.Swaps)[:0] + } + for !in.IsDelim(']') { + var v136 SwapIndexesParams + (v136).UnmarshalEasyJSON(in) + out.Swaps = append(out.Swaps, v136) + in.WantComma() + } + in.Delim(']') + } + case "dumpUid": + out.DumpUid = string(in.String()) + default: + in.SkipRecursive() + } + in.WantComma() + } + 
in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(out *jwriter.Writer, in Details) { + out.RawByte('{') + first := true + _ = first + if in.ReceivedDocuments != 0 { + const prefix string = ",\"receivedDocuments\":" + first = false + out.RawString(prefix[1:]) + out.Int64(int64(in.ReceivedDocuments)) + } + if in.IndexedDocuments != 0 { + const prefix string = ",\"indexedDocuments\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.IndexedDocuments)) + } + if in.DeletedDocuments != 0 { + const prefix string = ",\"deletedDocuments\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.DeletedDocuments)) + } + if in.PrimaryKey != "" { + const prefix string = ",\"primaryKey\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.PrimaryKey)) + } + if in.ProvidedIds != 0 { + const prefix string = ",\"providedIds\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.ProvidedIds)) + } + if len(in.RankingRules) != 0 { + const prefix string = ",\"rankingRules\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v137, v138 := range in.RankingRules { + if v137 > 0 { + out.RawByte(',') + } + out.String(string(v138)) + } + out.RawByte(']') + } + } + if in.DistinctAttribute != nil { + const prefix string = ",\"distinctAttribute\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(*in.DistinctAttribute)) + } + if len(in.SearchableAttributes) != 0 { + const prefix string = ",\"searchableAttributes\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + 
out.RawByte('[') + for v139, v140 := range in.SearchableAttributes { + if v139 > 0 { + out.RawByte(',') + } + out.String(string(v140)) + } + out.RawByte(']') + } + } + if len(in.DisplayedAttributes) != 0 { + const prefix string = ",\"displayedAttributes\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v141, v142 := range in.DisplayedAttributes { + if v141 > 0 { + out.RawByte(',') + } + out.String(string(v142)) + } + out.RawByte(']') + } + } + if len(in.StopWords) != 0 { + const prefix string = ",\"stopWords\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v143, v144 := range in.StopWords { + if v143 > 0 { + out.RawByte(',') + } + out.String(string(v144)) + } + out.RawByte(']') + } + } + if len(in.Synonyms) != 0 { + const prefix string = ",\"synonyms\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('{') + v145First := true + for v145Name, v145Value := range in.Synonyms { + if v145First { + v145First = false + } else { + out.RawByte(',') + } + out.String(string(v145Name)) + out.RawByte(':') + if v145Value == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v146, v147 := range v145Value { + if v146 > 0 { + out.RawByte(',') + } + out.String(string(v147)) + } + out.RawByte(']') + } + } + out.RawByte('}') + } + } + if len(in.FilterableAttributes) != 0 { + const prefix string = ",\"filterableAttributes\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v148, v149 := range in.FilterableAttributes { + if v148 > 0 { + out.RawByte(',') + } + out.String(string(v149)) + } + out.RawByte(']') + } + } + if len(in.SortableAttributes) != 0 { + const prefix string = ",\"sortableAttributes\":" + if first { + first = false + 
out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v150, v151 := range in.SortableAttributes { + if v150 > 0 { + out.RawByte(',') + } + out.String(string(v151)) + } + out.RawByte(']') + } + } + if in.TypoTolerance != nil { + const prefix string = ",\"typoTolerance\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + (*in.TypoTolerance).MarshalEasyJSON(out) + } + if in.Pagination != nil { + const prefix string = ",\"pagination\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + (*in.Pagination).MarshalEasyJSON(out) + } + if in.Faceting != nil { + const prefix string = ",\"faceting\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + (*in.Faceting).MarshalEasyJSON(out) + } + if in.MatchedTasks != 0 { + const prefix string = ",\"matchedTasks\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.MatchedTasks)) + } + if in.CanceledTasks != 0 { + const prefix string = ",\"canceledTasks\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.CanceledTasks)) + } + if in.DeletedTasks != 0 { + const prefix string = ",\"deletedTasks\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.Int64(int64(in.DeletedTasks)) + } + if in.OriginalFilter != "" { + const prefix string = ",\"originalFilter\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.OriginalFilter)) + } + if len(in.Swaps) != 0 { + const prefix string = ",\"swaps\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + { + out.RawByte('[') + for v152, v153 := range in.Swaps { + if v152 > 0 { + out.RawByte(',') + } + 
(v153).MarshalEasyJSON(out) + } + out.RawByte(']') + } + } + if in.DumpUid != "" { + const prefix string = ",\"dumpUid\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.DumpUid)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v Details) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v Details) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo40(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *Details) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *Details) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo40(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(in *jlexer.Lexer, out *DeleteTasksQuery) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "UIDS": + if in.IsNull() { + in.Skip() + out.UIDS = nil + } else { + in.Delim('[') + if out.UIDS == nil { + if !in.IsDelim(']') { + out.UIDS = make([]int64, 0, 8) + } else { + out.UIDS = []int64{} + } + } else { + out.UIDS = (out.UIDS)[:0] + } + for !in.IsDelim(']') { + var v154 int64 + v154 = int64(in.Int64()) + out.UIDS = append(out.UIDS, v154) + in.WantComma() + } + in.Delim(']') + } + case "IndexUIDS": + if in.IsNull() { + in.Skip() + out.IndexUIDS = nil + } else { + 
in.Delim('[') + if out.IndexUIDS == nil { + if !in.IsDelim(']') { + out.IndexUIDS = make([]string, 0, 4) + } else { + out.IndexUIDS = []string{} + } + } else { + out.IndexUIDS = (out.IndexUIDS)[:0] + } + for !in.IsDelim(']') { + var v155 string + v155 = string(in.String()) + out.IndexUIDS = append(out.IndexUIDS, v155) + in.WantComma() + } + in.Delim(']') + } + case "Statuses": + if in.IsNull() { + in.Skip() + out.Statuses = nil + } else { + in.Delim('[') + if out.Statuses == nil { + if !in.IsDelim(']') { + out.Statuses = make([]TaskStatus, 0, 4) + } else { + out.Statuses = []TaskStatus{} + } + } else { + out.Statuses = (out.Statuses)[:0] + } + for !in.IsDelim(']') { + var v156 TaskStatus + v156 = TaskStatus(in.String()) + out.Statuses = append(out.Statuses, v156) + in.WantComma() + } + in.Delim(']') + } + case "Types": + if in.IsNull() { + in.Skip() + out.Types = nil + } else { + in.Delim('[') + if out.Types == nil { + if !in.IsDelim(']') { + out.Types = make([]TaskType, 0, 4) + } else { + out.Types = []TaskType{} + } + } else { + out.Types = (out.Types)[:0] + } + for !in.IsDelim(']') { + var v157 TaskType + v157 = TaskType(in.String()) + out.Types = append(out.Types, v157) + in.WantComma() + } + in.Delim(']') + } + case "CanceledBy": + if in.IsNull() { + in.Skip() + out.CanceledBy = nil + } else { + in.Delim('[') + if out.CanceledBy == nil { + if !in.IsDelim(']') { + out.CanceledBy = make([]int64, 0, 8) + } else { + out.CanceledBy = []int64{} + } + } else { + out.CanceledBy = (out.CanceledBy)[:0] + } + for !in.IsDelim(']') { + var v158 int64 + v158 = int64(in.Int64()) + out.CanceledBy = append(out.CanceledBy, v158) + in.WantComma() + } + in.Delim(']') + } + case "BeforeEnqueuedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.BeforeEnqueuedAt).UnmarshalJSON(data)) + } + case "AfterEnqueuedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.AfterEnqueuedAt).UnmarshalJSON(data)) + } + case "BeforeStartedAt": + if data := in.Raw(); in.Ok() { + 
in.AddError((out.BeforeStartedAt).UnmarshalJSON(data)) + } + case "AfterStartedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.AfterStartedAt).UnmarshalJSON(data)) + } + case "BeforeFinishedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.BeforeFinishedAt).UnmarshalJSON(data)) + } + case "AfterFinishedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.AfterFinishedAt).UnmarshalJSON(data)) + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(out *jwriter.Writer, in DeleteTasksQuery) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"UIDS\":" + out.RawString(prefix[1:]) + if in.UIDS == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v159, v160 := range in.UIDS { + if v159 > 0 { + out.RawByte(',') + } + out.Int64(int64(v160)) + } + out.RawByte(']') + } + } + { + const prefix string = ",\"IndexUIDS\":" + out.RawString(prefix) + if in.IndexUIDS == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v161, v162 := range in.IndexUIDS { + if v161 > 0 { + out.RawByte(',') + } + out.String(string(v162)) + } + out.RawByte(']') + } + } + { + const prefix string = ",\"Statuses\":" + out.RawString(prefix) + if in.Statuses == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v163, v164 := range in.Statuses { + if v163 > 0 { + out.RawByte(',') + } + out.String(string(v164)) + } + out.RawByte(']') + } + } + { + const prefix string = ",\"Types\":" + out.RawString(prefix) + if in.Types == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v165, v166 := range in.Types { + if v165 > 0 { + out.RawByte(',') + } + out.String(string(v166)) + } + out.RawByte(']') + } + } + { + 
const prefix string = ",\"CanceledBy\":" + out.RawString(prefix) + if in.CanceledBy == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v167, v168 := range in.CanceledBy { + if v167 > 0 { + out.RawByte(',') + } + out.Int64(int64(v168)) + } + out.RawByte(']') + } + } + { + const prefix string = ",\"BeforeEnqueuedAt\":" + out.RawString(prefix) + out.Raw((in.BeforeEnqueuedAt).MarshalJSON()) + } + { + const prefix string = ",\"AfterEnqueuedAt\":" + out.RawString(prefix) + out.Raw((in.AfterEnqueuedAt).MarshalJSON()) + } + { + const prefix string = ",\"BeforeStartedAt\":" + out.RawString(prefix) + out.Raw((in.BeforeStartedAt).MarshalJSON()) + } + { + const prefix string = ",\"AfterStartedAt\":" + out.RawString(prefix) + out.Raw((in.AfterStartedAt).MarshalJSON()) + } + { + const prefix string = ",\"BeforeFinishedAt\":" + out.RawString(prefix) + out.Raw((in.BeforeFinishedAt).MarshalJSON()) + } + { + const prefix string = ",\"AfterFinishedAt\":" + out.RawString(prefix) + out.Raw((in.AfterFinishedAt).MarshalJSON()) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v DeleteTasksQuery) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v DeleteTasksQuery) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo41(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *DeleteTasksQuery) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *DeleteTasksQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo41(l, v) +} +func 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(in *jlexer.Lexer, out *CsvDocumentsQuery) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "primaryKey": + out.PrimaryKey = string(in.String()) + case "csvDelimiter": + out.CsvDelimiter = string(in.String()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(out *jwriter.Writer, in CsvDocumentsQuery) { + out.RawByte('{') + first := true + _ = first + if in.PrimaryKey != "" { + const prefix string = ",\"primaryKey\":" + first = false + out.RawString(prefix[1:]) + out.String(string(in.PrimaryKey)) + } + if in.CsvDelimiter != "" { + const prefix string = ",\"csvDelimiter\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.CsvDelimiter)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v CsvDocumentsQuery) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v CsvDocumentsQuery) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo42(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *CsvDocumentsQuery) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *CsvDocumentsQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { + 
easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo42(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo43(in *jlexer.Lexer, out *CreateIndexRequest) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "uid": + out.UID = string(in.String()) + case "primaryKey": + out.PrimaryKey = string(in.String()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo43(out *jwriter.Writer, in CreateIndexRequest) { + out.RawByte('{') + first := true + _ = first + if in.UID != "" { + const prefix string = ",\"uid\":" + first = false + out.RawString(prefix[1:]) + out.String(string(in.UID)) + } + if in.PrimaryKey != "" { + const prefix string = ",\"primaryKey\":" + if first { + first = false + out.RawString(prefix[1:]) + } else { + out.RawString(prefix) + } + out.String(string(in.PrimaryKey)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v CreateIndexRequest) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo43(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v CreateIndexRequest) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo43(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *CreateIndexRequest) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo43(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *CreateIndexRequest) UnmarshalEasyJSON(l 
*jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo43(l, v) +} +func easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo44(in *jlexer.Lexer, out *CancelTasksQuery) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "UIDS": + if in.IsNull() { + in.Skip() + out.UIDS = nil + } else { + in.Delim('[') + if out.UIDS == nil { + if !in.IsDelim(']') { + out.UIDS = make([]int64, 0, 8) + } else { + out.UIDS = []int64{} + } + } else { + out.UIDS = (out.UIDS)[:0] + } + for !in.IsDelim(']') { + var v169 int64 + v169 = int64(in.Int64()) + out.UIDS = append(out.UIDS, v169) + in.WantComma() + } + in.Delim(']') + } + case "IndexUIDS": + if in.IsNull() { + in.Skip() + out.IndexUIDS = nil + } else { + in.Delim('[') + if out.IndexUIDS == nil { + if !in.IsDelim(']') { + out.IndexUIDS = make([]string, 0, 4) + } else { + out.IndexUIDS = []string{} + } + } else { + out.IndexUIDS = (out.IndexUIDS)[:0] + } + for !in.IsDelim(']') { + var v170 string + v170 = string(in.String()) + out.IndexUIDS = append(out.IndexUIDS, v170) + in.WantComma() + } + in.Delim(']') + } + case "Statuses": + if in.IsNull() { + in.Skip() + out.Statuses = nil + } else { + in.Delim('[') + if out.Statuses == nil { + if !in.IsDelim(']') { + out.Statuses = make([]TaskStatus, 0, 4) + } else { + out.Statuses = []TaskStatus{} + } + } else { + out.Statuses = (out.Statuses)[:0] + } + for !in.IsDelim(']') { + var v171 TaskStatus + v171 = TaskStatus(in.String()) + out.Statuses = append(out.Statuses, v171) + in.WantComma() + } + in.Delim(']') + } + case "Types": + if in.IsNull() { + in.Skip() + out.Types = nil + } else { + in.Delim('[') + if out.Types == nil { + if !in.IsDelim(']') { + out.Types = make([]TaskType, 0, 4) + } else { + out.Types = []TaskType{} + } + } 
else { + out.Types = (out.Types)[:0] + } + for !in.IsDelim(']') { + var v172 TaskType + v172 = TaskType(in.String()) + out.Types = append(out.Types, v172) + in.WantComma() + } + in.Delim(']') + } + case "BeforeEnqueuedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.BeforeEnqueuedAt).UnmarshalJSON(data)) + } + case "AfterEnqueuedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.AfterEnqueuedAt).UnmarshalJSON(data)) + } + case "BeforeStartedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.BeforeStartedAt).UnmarshalJSON(data)) + } + case "AfterStartedAt": + if data := in.Raw(); in.Ok() { + in.AddError((out.AfterStartedAt).UnmarshalJSON(data)) + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo44(out *jwriter.Writer, in CancelTasksQuery) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"UIDS\":" + out.RawString(prefix[1:]) + if in.UIDS == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v173, v174 := range in.UIDS { + if v173 > 0 { + out.RawByte(',') + } + out.Int64(int64(v174)) + } + out.RawByte(']') + } + } + { + const prefix string = ",\"IndexUIDS\":" + out.RawString(prefix) + if in.IndexUIDS == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v175, v176 := range in.IndexUIDS { + if v175 > 0 { + out.RawByte(',') + } + out.String(string(v176)) + } + out.RawByte(']') + } + } + { + const prefix string = ",\"Statuses\":" + out.RawString(prefix) + if in.Statuses == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v177, v178 := range in.Statuses { + if v177 > 0 { + out.RawByte(',') + } + out.String(string(v178)) + } + out.RawByte(']') + } + } + { + const prefix string = ",\"Types\":" + out.RawString(prefix) + if 
in.Types == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v179, v180 := range in.Types { + if v179 > 0 { + out.RawByte(',') + } + out.String(string(v180)) + } + out.RawByte(']') + } + } + { + const prefix string = ",\"BeforeEnqueuedAt\":" + out.RawString(prefix) + out.Raw((in.BeforeEnqueuedAt).MarshalJSON()) + } + { + const prefix string = ",\"AfterEnqueuedAt\":" + out.RawString(prefix) + out.Raw((in.AfterEnqueuedAt).MarshalJSON()) + } + { + const prefix string = ",\"BeforeStartedAt\":" + out.RawString(prefix) + out.Raw((in.BeforeStartedAt).MarshalJSON()) + } + { + const prefix string = ",\"AfterStartedAt\":" + out.RawString(prefix) + out.Raw((in.AfterStartedAt).MarshalJSON()) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v CancelTasksQuery) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo44(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v CancelTasksQuery) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6601e8cdEncodeGithubComMeilisearchMeilisearchGo44(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *CancelTasksQuery) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo44(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *CancelTasksQuery) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6601e8cdDecodeGithubComMeilisearchMeilisearchGo44(l, v) +} From bb59fa33cfe301260073671895dcef2993a5d8b3 Mon Sep 17 00:00:00 2001 From: Javad Date: Sun, 4 Aug 2024 11:01:45 +0330 Subject: [PATCH 29/43] chore: add example test for function New and Connect --- example_test.go | 47 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 example_test.go diff --git 
a/example_test.go b/example_test.go new file mode 100644 index 00000000..9a11df75 --- /dev/null +++ b/example_test.go @@ -0,0 +1,47 @@ +package meilisearch + +import ( + "fmt" + "os" +) + +func ExampleNew() { + // WithAPIKey is optional + meili := New("http://localhost:7700", WithAPIKey("foobar")) + + // An index is where the documents are stored. + idx := meili.Index("movies") + + // If the index 'movies' does not exist, Meilisearch creates it when you first add the documents. + documents := []map[string]interface{}{ + {"id": 1, "title": "Carol", "genres": []string{"Romance", "Drama"}}, + {"id": 2, "title": "Wonder Woman", "genres": []string{"Action", "Adventure"}}, + {"id": 3, "title": "Life of Pi", "genres": []string{"Adventure", "Drama"}}, + {"id": 4, "title": "Mad Max: Fury Road", "genres": []string{"Adventure", "Science Fiction"}}, + {"id": 5, "title": "Moana", "genres": []string{"Fantasy", "Action"}}, + {"id": 6, "title": "Philadelphia", "genres": []string{"Drama"}}, + } + task, err := idx.AddDocuments(documents) + if err != nil { + fmt.Println(err) + os.Exit(1) + } + + fmt.Println(task.TaskUID) +} + +func ExampleConnect() { + meili, err := Connect("http://localhost:7700", WithAPIKey("foobar")) + if err != nil { + fmt.Println(err) + return + } + + ver, err := meili.Version() + if err != nil { + fmt.Println(err) + return + } + + fmt.Println(ver.PkgVersion) +} From 794b6ad5a6db60a60349d010a474ab355519517e Mon Sep 17 00:00:00 2001 From: Javad Date: Sun, 4 Aug 2024 11:02:03 +0330 Subject: [PATCH 30/43] docs: update readme for new meilisearch --- README.md | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index af4540dc..eaf40069 100644 --- a/README.md +++ b/README.md @@ -86,10 +86,8 @@ import ( ) func main() { - client := meilisearch.NewClient(meilisearch.ClientConfig{ - Host: "http://127.0.0.1:7700", - APIKey: "masterKey", - }) + client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("foobar")) + // 
An index is where the documents are stored. index := client.Index("movies") From b94d7f1a1703505c7cbfabe35514dda6592b62d6 Mon Sep 17 00:00:00 2001 From: Javad Date: Sun, 4 Aug 2024 11:02:21 +0330 Subject: [PATCH 31/43] chore: add helper function with test --- helper.go | 136 +++++++++++++++++++++++++++++++++++++++++++++++++ helper_test.go | 118 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 254 insertions(+) create mode 100644 helper.go create mode 100644 helper_test.go diff --git a/helper.go b/helper.go new file mode 100644 index 00000000..55eaef8d --- /dev/null +++ b/helper.go @@ -0,0 +1,136 @@ +package meilisearch + +import ( + "bytes" + "context" + "encoding/csv" + "encoding/json" + "fmt" + "net/url" + "regexp" + "strconv" + "strings" + "time" +) + +func IsValidUUID(uuid string) bool { + r := regexp.MustCompile("^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-4[a-fA-F0-9]{3}-[8|9|aA|bB][a-fA-F0-9]{3}-[a-fA-F0-9]{12}$") + return r.MatchString(uuid) +} + +// This function allows the user to create a Key with an ExpiresAt in time.Time +// and transform the Key structure into a KeyParsed structure to send the time format +// managed by meilisearch +func convertKeyToParsedKey(key Key) (resp KeyParsed) { + resp = KeyParsed{ + Name: key.Name, + Description: key.Description, + UID: key.UID, + Actions: key.Actions, + Indexes: key.Indexes, + } + + // Convert time.Time to *string to feat the exact ISO-8601 + // format of meilisearch + if !key.ExpiresAt.IsZero() { + resp.ExpiresAt = formatDate(key.ExpiresAt, true) + } + return resp +} + +func encodeTasksQuery(param *TasksQuery, req *internalRequest) { + if param.Limit != 0 { + req.withQueryParams["limit"] = strconv.FormatInt(param.Limit, 10) + } + if param.From != 0 { + req.withQueryParams["from"] = strconv.FormatInt(param.From, 10) + } + if len(param.Statuses) != 0 { + var statuses []string + for _, status := range param.Statuses { + statuses = append(statuses, string(status)) + } + req.withQueryParams["statuses"] = 
strings.Join(statuses, ",") + } + if len(param.Types) != 0 { + var types []string + for _, t := range param.Types { + types = append(types, string(t)) + } + req.withQueryParams["types"] = strings.Join(types, ",") + } + if len(param.IndexUIDS) != 0 { + req.withQueryParams["indexUids"] = strings.Join(param.IndexUIDS, ",") + } + if len(param.UIDS) != 0 { + req.withQueryParams["uids"] = strings.Trim(strings.Join(strings.Fields(fmt.Sprint(param.UIDS)), ","), "[]") + } + if len(param.CanceledBy) != 0 { + req.withQueryParams["canceledBy"] = strings.Trim(strings.Join(strings.Fields(fmt.Sprint(param.CanceledBy)), ","), "[]") + } + if !param.BeforeEnqueuedAt.IsZero() { + req.withQueryParams["beforeEnqueuedAt"] = *formatDate(param.BeforeEnqueuedAt, false) + } + if !param.AfterEnqueuedAt.IsZero() { + req.withQueryParams["afterEnqueuedAt"] = *formatDate(param.AfterEnqueuedAt, false) + } + if !param.BeforeStartedAt.IsZero() { + req.withQueryParams["beforeStartedAt"] = *formatDate(param.BeforeStartedAt, false) + } + if !param.AfterStartedAt.IsZero() { + req.withQueryParams["afterStartedAt"] = *formatDate(param.AfterStartedAt, false) + } + if !param.BeforeFinishedAt.IsZero() { + req.withQueryParams["beforeFinishedAt"] = *formatDate(param.BeforeFinishedAt, false) + } + if !param.AfterFinishedAt.IsZero() { + req.withQueryParams["afterFinishedAt"] = *formatDate(param.AfterFinishedAt, false) + } +} + +func formatDate(date time.Time, _ bool) *string { + const format = "2006-01-02T15:04:05Z" + timeParsedToString := date.Format(format) + return &timeParsedToString +} + +func transformStringVariadicToMap(primaryKey ...string) (options map[string]string) { + if primaryKey != nil { + return map[string]string{ + "primaryKey": primaryKey[0], + } + } + return nil +} + +func transformCsvDocumentsQueryToMap(options *CsvDocumentsQuery) map[string]string { + var optionsMap map[string]string + data, _ := json.Marshal(options) + _ = json.Unmarshal(data, &optionsMap) + return optionsMap +} + +func 
generateQueryForOptions(options map[string]string) (urlQuery string) { + q := url.Values{} + for key, val := range options { + q.Add(key, val) + } + return q.Encode() +} + +func sendCsvRecords(ctx context.Context, documentsCsvFunc func(ctx context.Context, recs []byte, op *CsvDocumentsQuery) (resp *TaskInfo, err error), records [][]string, options *CsvDocumentsQuery) (*TaskInfo, error) { + b := new(bytes.Buffer) + w := csv.NewWriter(b) + w.UseCRLF = true + + err := w.WriteAll(records) + if err != nil { + return nil, fmt.Errorf("could not write CSV records: %w", err) + } + + resp, err := documentsCsvFunc(ctx, b.Bytes(), options) + if err != nil { + return nil, err + } + return resp, nil +} diff --git a/helper_test.go b/helper_test.go new file mode 100644 index 00000000..7412b780 --- /dev/null +++ b/helper_test.go @@ -0,0 +1,118 @@ +package meilisearch + +import ( + "net/url" + "reflect" + "strconv" + "testing" + "time" +) + +func (req *internalRequest) init() { + req.withQueryParams = make(map[string]string) +} + +func formatDateForComparison(date time.Time) string { + const format = "2006-01-02T15:04:05Z" + return date.Format(format) +} + +func TestConvertKeyToParsedKey(t *testing.T) { + key := Key{ + Name: "test", + Description: "test description", + UID: "123", + Actions: []string{"read", "write"}, + Indexes: []string{"index1", "index2"}, + ExpiresAt: time.Now(), + } + + expectedExpiresAt := formatDateForComparison(key.ExpiresAt) + parsedKey := convertKeyToParsedKey(key) + + if parsedKey.Name != key.Name || + parsedKey.Description != key.Description || + parsedKey.UID != key.UID || + !reflect.DeepEqual(parsedKey.Actions, key.Actions) || + !reflect.DeepEqual(parsedKey.Indexes, key.Indexes) || + parsedKey.ExpiresAt == nil || *parsedKey.ExpiresAt != expectedExpiresAt { + t.Errorf("convertKeyToParsedKey(%v) = %v; want %v", key, parsedKey, key) + } +} + +func TestEncodeTasksQuery(t *testing.T) { + param := &TasksQuery{ + Limit: 10, + From: 5, + Statuses: 
[]TaskStatus{"queued", "running"}, + Types: []TaskType{"type1", "type2"}, + IndexUIDS: []string{"uid1", "uid2"}, + UIDS: []int64{1, 2, 3}, + CanceledBy: []int64{4, 5}, + BeforeEnqueuedAt: time.Now().Add(-10 * time.Hour), + AfterEnqueuedAt: time.Now().Add(-20 * time.Hour), + BeforeStartedAt: time.Now().Add(-30 * time.Hour), + AfterStartedAt: time.Now().Add(-40 * time.Hour), + BeforeFinishedAt: time.Now().Add(-50 * time.Hour), + AfterFinishedAt: time.Now().Add(-60 * time.Hour), + } + req := &internalRequest{} + req.init() + + encodeTasksQuery(param, req) + + expectedParams := map[string]string{ + "limit": strconv.FormatInt(param.Limit, 10), + "from": strconv.FormatInt(param.From, 10), + "statuses": "queued,running", + "types": "type1,type2", + "indexUids": "uid1,uid2", + "uids": "1,2,3", + "canceledBy": "4,5", + "beforeEnqueuedAt": formatDateForComparison(param.BeforeEnqueuedAt), + "afterEnqueuedAt": formatDateForComparison(param.AfterEnqueuedAt), + "beforeStartedAt": formatDateForComparison(param.BeforeStartedAt), + "afterStartedAt": formatDateForComparison(param.AfterStartedAt), + "beforeFinishedAt": formatDateForComparison(param.BeforeFinishedAt), + "afterFinishedAt": formatDateForComparison(param.AfterFinishedAt), + } + + for k, v := range expectedParams { + if req.withQueryParams[k] != v { + t.Errorf("encodeTasksQuery() param %v = %v; want %v", k, req.withQueryParams[k], v) + } + } +} + +func TestTransformStringVariadicToMap(t *testing.T) { + tests := []struct { + input []string + expect map[string]string + }{ + {[]string{"primaryKey1"}, map[string]string{"primaryKey": "primaryKey1"}}, + {nil, nil}, + } + + for _, test := range tests { + result := transformStringVariadicToMap(test.input...) 
+ if !reflect.DeepEqual(result, test.expect) { + t.Errorf("transformStringVariadicToMap(%v) = %v; want %v", test.input, result, test.expect) + } + } +} + +func TestGenerateQueryForOptions(t *testing.T) { + options := map[string]string{ + "key1": "value1", + "key2": "value2", + } + + expected := url.Values{} + expected.Add("key1", "value1") + expected.Add("key2", "value2") + + result := generateQueryForOptions(options) + if result != expected.Encode() { + t.Errorf("generateQueryForOptions(%v) = %v; want %v", options, result, expected.Encode()) + } +} From af7e8b8c259f0ae8932a333d0576b8c9fb44e2b7 Mon Sep 17 00:00:00 2001 From: Javad Date: Sun, 4 Aug 2024 11:06:34 +0330 Subject: [PATCH 32/43] feat: add service manager to manage index, key, task and other operation --- meilisearch.go | 813 +++++++++++++++++ meilisearch_test.go | 2111 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 2924 insertions(+) create mode 100644 meilisearch.go create mode 100644 meilisearch_test.go diff --git a/meilisearch.go b/meilisearch.go new file mode 100644 index 00000000..b8876741 --- /dev/null +++ b/meilisearch.go @@ -0,0 +1,813 @@ +package meilisearch + +import ( + "context" + "fmt" + "github.com/golang-jwt/jwt/v4" + "net/http" + "strconv" + "time" +) + +type meilisearch struct { + client *client +} + +type ServiceManager interface { + // Index retrieves an IndexManager for a specific index. + Index(uid string) IndexManager + + // GetIndex fetches the details of a specific index. + GetIndex(indexID string) (*IndexResult, error) + + // GetIndexWithContext fetches the details of a specific index with a context for cancellation. + GetIndexWithContext(ctx context.Context, indexID string) (*IndexResult, error) + + // GetRawIndex fetches the raw JSON representation of a specific index. + GetRawIndex(uid string) (map[string]interface{}, error) + + // GetRawIndexWithContext fetches the raw JSON representation of a specific index with a context for cancellation. 
+ GetRawIndexWithContext(ctx context.Context, uid string) (map[string]interface{}, error) + + // ListIndexes lists all indexes. + ListIndexes(param *IndexesQuery) (*IndexesResults, error) + + // ListIndexesWithContext lists all indexes with a context for cancellation. + ListIndexesWithContext(ctx context.Context, param *IndexesQuery) (*IndexesResults, error) + + // GetRawIndexes fetches the raw JSON representation of all indexes. + GetRawIndexes(param *IndexesQuery) (map[string]interface{}, error) + + // GetRawIndexesWithContext fetches the raw JSON representation of all indexes with a context for cancellation. + GetRawIndexesWithContext(ctx context.Context, param *IndexesQuery) (map[string]interface{}, error) + + // CreateIndex creates a new index. + CreateIndex(config *IndexConfig) (*TaskInfo, error) + + // CreateIndexWithContext creates a new index with a context for cancellation. + CreateIndexWithContext(ctx context.Context, config *IndexConfig) (*TaskInfo, error) + + // DeleteIndex deletes a specific index. + DeleteIndex(uid string) (*TaskInfo, error) + + // DeleteIndexWithContext deletes a specific index with a context for cancellation. + DeleteIndexWithContext(ctx context.Context, uid string) (*TaskInfo, error) + + // MultiSearch performs a multi-index search. + MultiSearch(queries *MultiSearchRequest) (*MultiSearchResponse, error) + + // MultiSearchWithContext performs a multi-index search with a context for cancellation. + MultiSearchWithContext(ctx context.Context, queries *MultiSearchRequest) (*MultiSearchResponse, error) + + // CreateKey creates a new API key. + CreateKey(request *Key) (*Key, error) + + // CreateKeyWithContext creates a new API key with a context for cancellation. + CreateKeyWithContext(ctx context.Context, request *Key) (*Key, error) + + // GetKey fetches the details of a specific API key. + GetKey(identifier string) (*Key, error) + + // GetKeyWithContext fetches the details of a specific API key with a context for cancellation. 
+ GetKeyWithContext(ctx context.Context, identifier string) (*Key, error) + + // GetKeys lists all API keys. + GetKeys(param *KeysQuery) (*KeysResults, error) + + // GetKeysWithContext lists all API keys with a context for cancellation. + GetKeysWithContext(ctx context.Context, param *KeysQuery) (*KeysResults, error) + + // UpdateKey updates a specific API key. + UpdateKey(keyOrUID string, request *Key) (*Key, error) + + // UpdateKeyWithContext updates a specific API key with a context for cancellation. + UpdateKeyWithContext(ctx context.Context, keyOrUID string, request *Key) (*Key, error) + + // DeleteKey deletes a specific API key. + DeleteKey(keyOrUID string) (bool, error) + + // DeleteKeyWithContext deletes a specific API key with a context for cancellation. + DeleteKeyWithContext(ctx context.Context, keyOrUID string) (bool, error) + + // GetTask fetches the details of a specific task. + GetTask(taskUID int64) (*Task, error) + + // GetTaskWithContext fetches the details of a specific task with a context for cancellation. + GetTaskWithContext(ctx context.Context, taskUID int64) (*Task, error) + + // GetTasks lists all tasks. + GetTasks(param *TasksQuery) (*TaskResult, error) + + // GetTasksWithContext lists all tasks with a context for cancellation. + GetTasksWithContext(ctx context.Context, param *TasksQuery) (*TaskResult, error) + + // CancelTasks cancels specific tasks. + CancelTasks(param *CancelTasksQuery) (*TaskInfo, error) + + // CancelTasksWithContext cancels specific tasks with a context for cancellation. + CancelTasksWithContext(ctx context.Context, param *CancelTasksQuery) (*TaskInfo, error) + + // DeleteTasks deletes specific tasks. + DeleteTasks(param *DeleteTasksQuery) (*TaskInfo, error) + + // DeleteTasksWithContext deletes specific tasks with a context for cancellation. + DeleteTasksWithContext(ctx context.Context, param *DeleteTasksQuery) (*TaskInfo, error) + + // WaitForTask waits for a specific task to complete. 
+ WaitForTask(taskUID int64, interval time.Duration) (*Task, error) + + // WaitForTaskWithContext waits for a specific task to complete with a context for cancellation. + WaitForTaskWithContext(ctx context.Context, taskUID int64, interval time.Duration) (*Task, error) + + // SwapIndexes swaps the positions of two indexes. + SwapIndexes(param []*SwapIndexesParams) (*TaskInfo, error) + + // SwapIndexesWithContext swaps the positions of two indexes with a context for cancellation. + SwapIndexesWithContext(ctx context.Context, param []*SwapIndexesParams) (*TaskInfo, error) + + // GenerateTenantToken generates a tenant token for multi-tenancy. + GenerateTenantToken(apiKeyUID string, searchRules map[string]interface{}, options *TenantTokenOptions) (string, error) + + // GetStats fetches global stats. + GetStats() (*Stats, error) + + // GetStatsWithContext fetches global stats with a context for cancellation. + GetStatsWithContext(ctx context.Context) (*Stats, error) + + // CreateDump creates a database dump. + CreateDump() (*TaskInfo, error) + + // CreateDumpWithContext creates a database dump with a context for cancellation. + CreateDumpWithContext(ctx context.Context) (*TaskInfo, error) + + // Version fetches the version of the Meilisearch server. + Version() (*Version, error) + + // VersionWithContext fetches the version of the Meilisearch server with a context for cancellation. + VersionWithContext(ctx context.Context) (*Version, error) + + // Health checks the health of the Meilisearch server. + Health() (*Health, error) + + // HealthWithContext checks the health of the Meilisearch server with a context for cancellation. + HealthWithContext(ctx context.Context) (*Health, error) + + // IsHealthy checks if the Meilisearch server is healthy. + IsHealthy() bool + + // Close closes the connection to the Meilisearch server. 
+ Close() +} + +// New create new service manager for operating on meilisearch +func New(host string, options ...Option) ServiceManager { + defOpt := defaultMeiliOpt + + for _, opt := range options { + opt(defOpt) + } + + return &meilisearch{ + client: newClient( + defOpt.client, + host, + defOpt.apiKey, + ), + } +} + +// Connect create service manager and check connection with meilisearch +func Connect(host string, options ...Option) (ServiceManager, error) { + meili := New(host, options...) + + if !meili.IsHealthy() { + return nil, ErrConnectingFailed + } + + return meili, nil +} + +func (m *meilisearch) Index(uid string) IndexManager { + return newIndex(m.client, uid) +} + +func (m *meilisearch) GetIndex(indexID string) (*IndexResult, error) { + return m.GetIndexWithContext(context.Background(), indexID) +} + +func (m *meilisearch) GetIndexWithContext(ctx context.Context, indexID string) (*IndexResult, error) { + return newIndex(m.client, indexID).FetchInfoWithContext(ctx) +} + +func (m *meilisearch) GetRawIndex(uid string) (map[string]interface{}, error) { + return m.GetRawIndexWithContext(context.Background(), uid) +} + +func (m *meilisearch) GetRawIndexWithContext(ctx context.Context, uid string) (map[string]interface{}, error) { + resp := map[string]interface{}{} + req := &internalRequest{ + endpoint: "/indexes/" + uid, + method: http.MethodGet, + withRequest: nil, + withResponse: &resp, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "GetRawIndex", + } + if err := m.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (m *meilisearch) ListIndexes(param *IndexesQuery) (*IndexesResults, error) { + return m.ListIndexesWithContext(context.Background(), param) +} + +func (m *meilisearch) ListIndexesWithContext(ctx context.Context, param *IndexesQuery) (*IndexesResults, error) { + resp := new(IndexesResults) + req := &internalRequest{ + endpoint: "/indexes", + method: http.MethodGet, + withRequest: nil, + 
withResponse: &resp, + withQueryParams: map[string]string{}, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "GetIndexes", + } + if param != nil && param.Limit != 0 { + req.withQueryParams["limit"] = strconv.FormatInt(param.Limit, 10) + } + if param != nil && param.Offset != 0 { + req.withQueryParams["offset"] = strconv.FormatInt(param.Offset, 10) + } + if err := m.client.executeRequest(ctx, req); err != nil { + return nil, err + } + + for i := range resp.Results { + resp.Results[i].IndexManager = newIndex(m.client, resp.Results[i].UID) + } + + return resp, nil +} + +func (m *meilisearch) GetRawIndexes(param *IndexesQuery) (map[string]interface{}, error) { + return m.GetRawIndexesWithContext(context.Background(), param) +} + +func (m *meilisearch) GetRawIndexesWithContext(ctx context.Context, param *IndexesQuery) (map[string]interface{}, error) { + resp := map[string]interface{}{} + req := &internalRequest{ + endpoint: "/indexes", + method: http.MethodGet, + withRequest: nil, + withResponse: &resp, + withQueryParams: map[string]string{}, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "GetRawIndexes", + } + if param != nil && param.Limit != 0 { + req.withQueryParams["limit"] = strconv.FormatInt(param.Limit, 10) + } + if param != nil && param.Offset != 0 { + req.withQueryParams["offset"] = strconv.FormatInt(param.Offset, 10) + } + if err := m.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (m *meilisearch) CreateIndex(config *IndexConfig) (*TaskInfo, error) { + return m.CreateIndexWithContext(context.Background(), config) +} + +func (m *meilisearch) CreateIndexWithContext(ctx context.Context, config *IndexConfig) (*TaskInfo, error) { + request := &CreateIndexRequest{ + UID: config.Uid, + PrimaryKey: config.PrimaryKey, + } + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes", + method: http.MethodPost, + contentType: contentTypeJSON, + withRequest: request, + 
withResponse: resp, + acceptedStatusCodes: []int{http.StatusAccepted}, + functionName: "CreateIndex", + } + if err := m.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (m *meilisearch) DeleteIndex(uid string) (*TaskInfo, error) { + return m.DeleteIndexWithContext(context.Background(), uid) +} + +func (m *meilisearch) DeleteIndexWithContext(ctx context.Context, uid string) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + uid, + method: http.MethodDelete, + withRequest: nil, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusAccepted}, + functionName: "DeleteIndex", + } + if err := m.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (m *meilisearch) MultiSearch(queries *MultiSearchRequest) (*MultiSearchResponse, error) { + return m.MultiSearchWithContext(context.Background(), queries) +} + +func (m *meilisearch) MultiSearchWithContext(ctx context.Context, queries *MultiSearchRequest) (*MultiSearchResponse, error) { + resp := new(MultiSearchResponse) + + for i := 0; i < len(queries.Queries); i++ { + queries.Queries[i].validate() + } + + req := &internalRequest{ + endpoint: "/multi-search", + method: http.MethodPost, + contentType: contentTypeJSON, + withRequest: queries, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "MultiSearch", + } + + if err := m.client.executeRequest(ctx, req); err != nil { + return nil, err + } + + return resp, nil +} + +func (m *meilisearch) CreateKey(request *Key) (*Key, error) { + return m.CreateKeyWithContext(context.Background(), request) +} + +func (m *meilisearch) CreateKeyWithContext(ctx context.Context, request *Key) (*Key, error) { + parsedRequest := convertKeyToParsedKey(*request) + resp := new(Key) + req := &internalRequest{ + endpoint: "/keys", + method: http.MethodPost, + contentType: contentTypeJSON, + withRequest: &parsedRequest, + 
withResponse: resp, + acceptedStatusCodes: []int{http.StatusCreated}, + functionName: "CreateKey", + } + if err := m.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (m *meilisearch) GetKey(identifier string) (*Key, error) { + return m.GetKeyWithContext(context.Background(), identifier) +} + +func (m *meilisearch) GetKeyWithContext(ctx context.Context, identifier string) (*Key, error) { + resp := new(Key) + req := &internalRequest{ + endpoint: "/keys/" + identifier, + method: http.MethodGet, + withRequest: nil, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "GetKey", + } + if err := m.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (m *meilisearch) GetKeys(param *KeysQuery) (*KeysResults, error) { + return m.GetKeysWithContext(context.Background(), param) +} + +func (m *meilisearch) GetKeysWithContext(ctx context.Context, param *KeysQuery) (*KeysResults, error) { + resp := new(KeysResults) + req := &internalRequest{ + endpoint: "/keys", + method: http.MethodGet, + withRequest: nil, + withResponse: resp, + withQueryParams: map[string]string{}, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "GetKeys", + } + if param != nil && param.Limit != 0 { + req.withQueryParams["limit"] = strconv.FormatInt(param.Limit, 10) + } + if param != nil && param.Offset != 0 { + req.withQueryParams["offset"] = strconv.FormatInt(param.Offset, 10) + } + if err := m.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (m *meilisearch) UpdateKey(keyOrUID string, request *Key) (*Key, error) { + return m.UpdateKeyWithContext(context.Background(), keyOrUID, request) +} + +func (m *meilisearch) UpdateKeyWithContext(ctx context.Context, keyOrUID string, request *Key) (*Key, error) { + parsedRequest := KeyUpdate{Name: request.Name, Description: request.Description} + resp := new(Key) + req := 
&internalRequest{ + endpoint: "/keys/" + keyOrUID, + method: http.MethodPatch, + contentType: contentTypeJSON, + withRequest: &parsedRequest, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "UpdateKey", + } + if err := m.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (m *meilisearch) DeleteKey(keyOrUID string) (bool, error) { + return m.DeleteKeyWithContext(context.Background(), keyOrUID) +} + +func (m *meilisearch) DeleteKeyWithContext(ctx context.Context, keyOrUID string) (bool, error) { + req := &internalRequest{ + endpoint: "/keys/" + keyOrUID, + method: http.MethodDelete, + withRequest: nil, + withResponse: nil, + acceptedStatusCodes: []int{http.StatusNoContent}, + functionName: "DeleteKey", + } + if err := m.client.executeRequest(ctx, req); err != nil { + return false, err + } + return true, nil +} + +func (m *meilisearch) GetTask(taskUID int64) (*Task, error) { + return m.GetTaskWithContext(context.Background(), taskUID) +} + +func (m *meilisearch) GetTaskWithContext(ctx context.Context, taskUID int64) (*Task, error) { + return getTask(ctx, m.client, taskUID) +} + +func (m *meilisearch) GetTasks(param *TasksQuery) (*TaskResult, error) { + return m.GetTasksWithContext(context.Background(), param) +} + +func (m *meilisearch) GetTasksWithContext(ctx context.Context, param *TasksQuery) (*TaskResult, error) { + resp := new(TaskResult) + req := &internalRequest{ + endpoint: "/tasks", + method: http.MethodGet, + withRequest: nil, + withResponse: &resp, + withQueryParams: map[string]string{}, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "GetTasks", + } + if param != nil { + encodeTasksQuery(param, req) + } + if err := m.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (m *meilisearch) CancelTasks(param *CancelTasksQuery) (*TaskInfo, error) { + return m.CancelTasksWithContext(context.Background(), param) +} + +func (m 
*meilisearch) CancelTasksWithContext(ctx context.Context, param *CancelTasksQuery) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/tasks/cancel", + method: http.MethodPost, + withRequest: nil, + withResponse: &resp, + withQueryParams: map[string]string{}, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "CancelTasks", + } + if param != nil { + paramToSend := &TasksQuery{ + UIDS: param.UIDS, + IndexUIDS: param.IndexUIDS, + Statuses: param.Statuses, + Types: param.Types, + BeforeEnqueuedAt: param.BeforeEnqueuedAt, + AfterEnqueuedAt: param.AfterEnqueuedAt, + BeforeStartedAt: param.BeforeStartedAt, + AfterStartedAt: param.AfterStartedAt, + } + encodeTasksQuery(paramToSend, req) + } + if err := m.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (m *meilisearch) DeleteTasks(param *DeleteTasksQuery) (*TaskInfo, error) { + return m.DeleteTasksWithContext(context.Background(), param) +} + +func (m *meilisearch) DeleteTasksWithContext(ctx context.Context, param *DeleteTasksQuery) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/tasks", + method: http.MethodDelete, + withRequest: nil, + withResponse: &resp, + withQueryParams: map[string]string{}, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "DeleteTasks", + } + if param != nil { + paramToSend := &TasksQuery{ + UIDS: param.UIDS, + IndexUIDS: param.IndexUIDS, + Statuses: param.Statuses, + Types: param.Types, + CanceledBy: param.CanceledBy, + BeforeEnqueuedAt: param.BeforeEnqueuedAt, + AfterEnqueuedAt: param.AfterEnqueuedAt, + BeforeStartedAt: param.BeforeStartedAt, + AfterStartedAt: param.AfterStartedAt, + BeforeFinishedAt: param.BeforeFinishedAt, + AfterFinishedAt: param.AfterFinishedAt, + } + encodeTasksQuery(paramToSend, req) + } + if err := m.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (m *meilisearch) SwapIndexes(param 
[]*SwapIndexesParams) (*TaskInfo, error) { + return m.SwapIndexesWithContext(context.Background(), param) +} + +func (m *meilisearch) SwapIndexesWithContext(ctx context.Context, param []*SwapIndexesParams) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/swap-indexes", + method: http.MethodPost, + contentType: contentTypeJSON, + withRequest: param, + withResponse: &resp, + acceptedStatusCodes: []int{http.StatusAccepted}, + functionName: "SwapIndexes", + } + if err := m.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (m *meilisearch) WaitForTask(taskUID int64, interval time.Duration) (*Task, error) { + return waitForTask(context.Background(), m.client, taskUID, interval) +} + +func (m *meilisearch) WaitForTaskWithContext(ctx context.Context, taskUID int64, interval time.Duration) (*Task, error) { + return waitForTask(ctx, m.client, taskUID, interval) +} + +func (m *meilisearch) GenerateTenantToken( + apiKeyUID string, + searchRules map[string]interface{}, + options *TenantTokenOptions, +) (string, error) { + // validate the arguments + if searchRules == nil { + return "", fmt.Errorf("GenerateTenantToken: The search rules added in the token generation " + + "must be of type array or object") + } + if (options == nil || options.APIKey == "") && m.client.apiKey == "" { + return "", fmt.Errorf("GenerateTenantToken: The API key used for the token " + + "generation must exist and be a valid meilisearch key") + } + if apiKeyUID == "" || !IsValidUUID(apiKeyUID) { + return "", fmt.Errorf("GenerateTenantToken: The uid used for the token " + + "generation must exist and comply to uuid4 format") + } + if options != nil && !options.ExpiresAt.IsZero() && options.ExpiresAt.Before(time.Now()) { + return "", fmt.Errorf("GenerateTenantToken: When the expiresAt field in " + + "the token generation has a value, it must be a date set in the future") + } + + var secret string + if options == nil || 
options.APIKey == "" { + secret = m.client.apiKey + } else { + secret = options.APIKey + } + + // For HMAC signing method, the key should be any []byte + hmacSampleSecret := []byte(secret) + + // Create the claims + claims := TenantTokenClaims{} + if options != nil && !options.ExpiresAt.IsZero() { + claims.RegisteredClaims = jwt.RegisteredClaims{ + ExpiresAt: jwt.NewNumericDate(options.ExpiresAt), + } + } + claims.APIKeyUID = apiKeyUID + claims.SearchRules = searchRules + + // Create a new token object, specifying signing method and the claims + token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) + + // Sign and get the complete encoded token as a string using the secret + tokenString, err := token.SignedString(hmacSampleSecret) + + return tokenString, err +} + +func (m *meilisearch) GetStats() (*Stats, error) { + return m.GetStatsWithContext(context.Background()) +} + +func (m *meilisearch) GetStatsWithContext(ctx context.Context) (*Stats, error) { + resp := new(Stats) + req := &internalRequest{ + endpoint: "/stats", + method: http.MethodGet, + withRequest: nil, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "GetStats", + } + if err := m.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (m *meilisearch) CreateDump() (*TaskInfo, error) { + return m.CreateDumpWithContext(context.Background()) +} + +func (m *meilisearch) CreateDumpWithContext(ctx context.Context) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/dumps", + method: http.MethodPost, + contentType: contentTypeJSON, + withRequest: nil, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusAccepted}, + functionName: "CreateDump", + } + if err := m.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (m *meilisearch) Version() (*Version, error) { + return m.VersionWithContext(context.Background()) +} + +func (m *meilisearch) 
VersionWithContext(ctx context.Context) (*Version, error) { + resp := new(Version) + req := &internalRequest{ + endpoint: "/version", + method: http.MethodGet, + withRequest: nil, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "Version", + } + if err := m.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (m *meilisearch) Health() (*Health, error) { + return m.HealthWithContext(context.Background()) +} + +func (m *meilisearch) HealthWithContext(ctx context.Context) (*Health, error) { + resp := new(Health) + req := &internalRequest{ + endpoint: "/health", + method: http.MethodGet, + withRequest: nil, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "Health", + } + if err := m.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (m *meilisearch) IsHealthy() bool { + res, err := m.HealthWithContext(context.Background()) + return err == nil && res.Status == "available" +} + +func (m *meilisearch) Close() { + m.client.client.CloseIdleConnections() +} + +func getTask(ctx context.Context, cli *client, taskUID int64) (*Task, error) { + resp := new(Task) + req := &internalRequest{ + endpoint: "/tasks/" + strconv.FormatInt(taskUID, 10), + method: http.MethodGet, + withRequest: nil, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "GetTask", + } + if err := cli.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func waitForTask(ctx context.Context, cli *client, taskUID int64, interval time.Duration) (*Task, error) { + if interval == 0 { + interval = 50 * time.Millisecond + } + + // extract closure to get the task and check the status first before the ticker + fn := func() (*Task, error) { + getTask, err := getTask(ctx, cli, taskUID) + if err != nil { + return nil, err + } + + if getTask.Status != TaskStatusEnqueued && getTask.Status != 
TaskStatusProcessing { + return getTask, nil + } + return nil, nil + } + + // run first before the ticker, we do not want to wait for the first interval + task, err := fn() + if err != nil { + // Return error if it exists + return nil, err + } + + // Return task if it exists + if task != nil { + return task, nil + } + + // Create a ticker to check the task status, because our initial check was not successful + ticker := time.NewTicker(interval) + + // Defer the stop of the ticker, help GC to cleanup + defer func() { + // we might want to revist this, go.mod now is 1.16 + // however I still encouter the issue on go 1.22.2 + // there are 2 issues regarding tickers + // https://go-review.googlesource.com/c/go/+/512355 + // https://github.com/golang/go/issues/61542 + ticker.Stop() + ticker = nil + }() + + for { + select { + case <-ctx.Done(): + return nil, ctx.Err() + case <-ticker.C: + task, err := fn() + if err != nil { + return nil, err + } + + if task != nil { + return task, nil + } + } + } +} diff --git a/meilisearch_test.go b/meilisearch_test.go new file mode 100644 index 00000000..a5513db1 --- /dev/null +++ b/meilisearch_test.go @@ -0,0 +1,2111 @@ +package meilisearch + +import ( + "context" + "crypto/tls" + "errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "math" + "reflect" + "strings" + "sync" + "testing" + "time" +) + +func Test_Version(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + + tests := []struct { + name string + client ServiceManager + }{ + { + name: "TestVersion", + client: sv, + }, + { + name: "TestVersionWithCustomClient", + client: customSv, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotResp, err := tt.client.Version() + require.NoError(t, err) + require.NotNil(t, gotResp, "Version() should not return nil value") + }) + } +} + +func TestClient_TimeoutError(t *testing.T) { + sv := 
setup(t, "") + + t.Cleanup(cleanup(sv)) + + tests := []struct { + name string + sv ServiceManager + expectedError Error + }{ + { + name: "TestTimeoutError", + sv: sv, + expectedError: Error{ + MeilisearchApiError: meilisearchApiError{}, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second) + defer cancel() + time.Sleep(2 * time.Second) + gotResp, err := tt.sv.VersionWithContext(ctx) + require.Error(t, err) + require.Nil(t, gotResp) + }) + } +} + +func Test_GetStats(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + + tests := []struct { + name string + client ServiceManager + }{ + { + name: "TestGetStats", + client: sv, + }, + { + name: "TestGetStatsWithCustomClient", + client: customSv, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotResp, err := tt.client.GetStats() + require.NoError(t, err) + require.NotNil(t, gotResp, "GetStats() should not return nil value") + }) + } +} + +func Test_GetKey(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + + t.Cleanup(cleanup(sv, customSv)) + + tests := []struct { + name string + client ServiceManager + }{ + { + name: "TestGetKey", + client: sv, + }, + { + name: "TestGetKeyWithCustomClient", + client: customSv, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotResp, err := tt.client.GetKeys(nil) + require.NoError(t, err) + + gotKey, err := tt.client.GetKey(gotResp.Results[0].Key) + require.NoError(t, err) + require.NotNil(t, gotKey.ExpiresAt) + require.NotNil(t, gotKey.CreatedAt) + require.NotNil(t, gotKey.UpdatedAt) + }) + } +} + +func Test_GetKeys(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + + type args 
struct { + client ServiceManager + request *KeysQuery + } + tests := []struct { + name string + args args + }{ + { + name: "TestBasicGetKeys", + args: args{ + client: sv, + request: nil, + }, + }, + { + name: "TestGetKeysWithCustomClient", + args: args{ + client: customSv, + request: nil, + }, + }, + { + name: "TestGetKeysWithEmptyParam", + args: args{ + client: sv, + request: &KeysQuery{}, + }, + }, + { + name: "TestGetKeysWithLimit", + args: args{ + client: sv, + request: &KeysQuery{ + Limit: 1, + }, + }, + }, + { + name: "TestGetKeysWithOffset", + args: args{ + client: sv, + request: &KeysQuery{ + Limit: 2, + Offset: 1, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotResp, err := tt.args.client.GetKeys(tt.args.request) + + require.NoError(t, err) + require.NotNil(t, gotResp, "GetKeys() should not return nil value") + switch { + case tt.args.request != nil && tt.args.request.Limit != 0 && tt.args.request.Offset == 0: + require.Equal(t, tt.args.request.Limit, int64(len(gotResp.Results))) + case tt.args.request != nil && tt.args.request.Limit == 2 && tt.args.request.Offset == 1: + require.GreaterOrEqual(t, len(gotResp.Results), int(tt.args.request.Limit-tt.args.request.Offset)) + default: + require.GreaterOrEqual(t, len(gotResp.Results), 2) + } + }) + } +} + +func Test_CreateKey(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + + tests := []struct { + name string + client ServiceManager + key Key + }{ + { + name: "TestCreateBasicKey", + client: sv, + key: Key{ + Actions: []string{"*"}, + Indexes: []string{"*"}, + }, + }, + { + name: "TestCreateKeyWithCustomClient", + client: customSv, + key: Key{ + Actions: []string{"*"}, + Indexes: []string{"*"}, + }, + }, + { + name: "TestCreateKeyWithExpirationAt", + client: sv, + key: Key{ + Actions: []string{"*"}, + Indexes: []string{"*"}, + ExpiresAt: time.Now().Add(time.Hour * 10), + }, + }, + { + 
name: "TestCreateKeyWithDescription", + client: sv, + key: Key{ + Name: "TestCreateKeyWithDescription", + Description: "TestCreateKeyWithDescription", + Actions: []string{"*"}, + Indexes: []string{"*"}, + }, + }, + { + name: "TestCreateKeyWithActions", + client: sv, + key: Key{ + Name: "TestCreateKeyWithActions", + Description: "TestCreateKeyWithActions", + Actions: []string{"documents.add", "documents.delete"}, + Indexes: []string{"*"}, + }, + }, + { + name: "TestCreateKeyWithIndexes", + client: sv, + key: Key{ + Name: "TestCreateKeyWithIndexes", + Description: "TestCreateKeyWithIndexes", + Actions: []string{"*"}, + Indexes: []string{"movies", "games"}, + }, + }, + { + name: "TestCreateKeyWithWildcardedAction", + client: sv, + key: Key{ + Name: "TestCreateKeyWithWildcardedAction", + Description: "TestCreateKeyWithWildcardedAction", + Actions: []string{"documents.*"}, + Indexes: []string{"movies", "games"}, + }, + }, + { + name: "TestCreateKeyWithUID", + client: sv, + key: Key{ + Name: "TestCreateKeyWithUID", + UID: "9aec34f4-e44c-4917-86c2-9c9403abb3b6", + Actions: []string{"*"}, + Indexes: []string{"*"}, + }, + }, + { + name: "TestCreateKeyWithAllOptions", + client: sv, + key: Key{ + Name: "TestCreateKeyWithAllOptions", + Description: "TestCreateKeyWithAllOptions", + UID: "9aec34f4-e44c-4917-86c2-9c9403abb3b6", + Actions: []string{"documents.add", "documents.delete"}, + Indexes: []string{"movies", "games"}, + ExpiresAt: time.Now().Add(time.Hour * 10), + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + const Format = "2006-01-02T15:04:05" + t.Cleanup(cleanup(tt.client)) + + gotResp, err := tt.client.CreateKey(&tt.key) + require.NoError(t, err) + + gotKey, err := tt.client.GetKey(gotResp.Key) + require.NoError(t, err) + require.Equal(t, tt.key.Name, gotKey.Name) + require.Equal(t, tt.key.Description, gotKey.Description) + if tt.key.UID != "" { + require.Equal(t, tt.key.UID, gotKey.UID) + } + require.Equal(t, tt.key.Actions, 
gotKey.Actions) + require.Equal(t, tt.key.Indexes, gotKey.Indexes) + if !tt.key.ExpiresAt.IsZero() { + require.Equal(t, tt.key.ExpiresAt.Format(Format), gotKey.ExpiresAt.Format(Format)) + } + }) + } +} + +func Test_UpdateKey(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + + tests := []struct { + name string + client ServiceManager + keyToCreate Key + keyToUpdate Key + }{ + { + name: "TestUpdateKeyWithDescription", + client: sv, + keyToCreate: Key{ + Actions: []string{"*"}, + Indexes: []string{"*"}, + }, + keyToUpdate: Key{ + Description: "TestUpdateKeyWithDescription", + }, + }, + { + name: "TestUpdateKeyWithCustomClientWithDescription", + client: customSv, + keyToCreate: Key{ + Actions: []string{"*"}, + Indexes: []string{"TestUpdateKeyWithCustomClientWithDescription"}, + }, + keyToUpdate: Key{ + Description: "TestUpdateKeyWithCustomClientWithDescription", + }, + }, + { + name: "TestUpdateKeyWithName", + client: sv, + keyToCreate: Key{ + Actions: []string{"*"}, + Indexes: []string{"TestUpdateKeyWithName"}, + }, + keyToUpdate: Key{ + Name: "TestUpdateKeyWithName", + }, + }, + { + name: "TestUpdateKeyWithNameAndAction", + client: sv, + keyToCreate: Key{ + Actions: []string{"search"}, + Indexes: []string{"*"}, + }, + keyToUpdate: Key{ + Name: "TestUpdateKeyWithName", + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + const Format = "2006-01-02T15:04:05" + c := tt.client + t.Cleanup(cleanup(c)) + + gotResp, err := c.CreateKey(&tt.keyToCreate) + require.NoError(t, err) + + if tt.keyToCreate.Description != "" { + require.Equal(t, tt.keyToCreate.Description, gotResp.Description) + } + if len(tt.keyToCreate.Actions) != 0 { + require.Equal(t, tt.keyToCreate.Actions, gotResp.Actions) + } + if len(tt.keyToCreate.Indexes) != 0 { + require.Equal(t, tt.keyToCreate.Indexes, gotResp.Indexes) + } + if !tt.keyToCreate.ExpiresAt.IsZero() { + require.Equal(t, 
tt.keyToCreate.ExpiresAt.Format(Format), gotResp.ExpiresAt.Format(Format)) + } + + gotKey, err := c.UpdateKey(gotResp.Key, &tt.keyToUpdate) + require.NoError(t, err) + + if tt.keyToUpdate.Description != "" { + require.Equal(t, tt.keyToUpdate.Description, gotKey.Description) + } + if len(tt.keyToUpdate.Actions) != 0 { + require.Equal(t, tt.keyToUpdate.Actions, gotKey.Actions) + } + if len(tt.keyToUpdate.Indexes) != 0 { + require.Equal(t, tt.keyToUpdate.Indexes, gotKey.Indexes) + } + if tt.keyToUpdate.Description != "" { + require.Equal(t, tt.keyToUpdate.Name, gotKey.Name) + } + }) + } +} + +func Test_DeleteKey(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + + tests := []struct { + name string + client ServiceManager + key Key + }{ + { + name: "TestDeleteBasicKey", + client: sv, + key: Key{ + Actions: []string{"*"}, + Indexes: []string{"*"}, + }, + }, + { + name: "TestDeleteKeyWithCustomClient", + client: customSv, + key: Key{ + Actions: []string{"*"}, + Indexes: []string{"*"}, + }, + }, + { + name: "TestDeleteKeyWithExpirationAt", + client: sv, + key: Key{ + Actions: []string{"*"}, + Indexes: []string{"*"}, + ExpiresAt: time.Now().Add(time.Hour * 10), + }, + }, + { + name: "TestDeleteKeyWithDescription", + client: sv, + key: Key{ + Description: "TestDeleteKeyWithDescription", + Actions: []string{"*"}, + Indexes: []string{"*"}, + }, + }, + { + name: "TestDeleteKeyWithActions", + client: sv, + key: Key{ + Description: "TestDeleteKeyWithActions", + Actions: []string{"documents.add", "documents.delete"}, + Indexes: []string{"*"}, + }, + }, + { + name: "TestDeleteKeyWithIndexes", + client: sv, + key: Key{ + Description: "TestDeleteKeyWithIndexes", + Actions: []string{"*"}, + Indexes: []string{"movies", "games"}, + }, + }, + { + name: "TestDeleteKeyWithAllOptions", + client: sv, + key: Key{ + Description: "TestDeleteKeyWithAllOptions", + Actions: []string{"documents.add", 
"documents.delete"}, + Indexes: []string{"movies", "games"}, + ExpiresAt: time.Now().Add(time.Hour * 10), + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := tt.client + + gotKey, err := c.CreateKey(&tt.key) + require.NoError(t, err) + + gotResp, err := c.DeleteKey(gotKey.Key) + require.NoError(t, err) + require.True(t, gotResp) + + gotResp, err = c.DeleteKey(gotKey.Key) + require.Error(t, err) + require.False(t, gotResp) + }) + } +} + +func Test_Health(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + + badSv := setup(t, "http://wrongurl:1234") + + tests := []struct { + name string + client ServiceManager + wantResp *Health + wantErr bool + }{ + { + name: "TestHealth", + client: sv, + wantResp: &Health{ + Status: "available", + }, + wantErr: false, + }, + { + name: "TestHealthWithCustomClient", + client: customSv, + wantResp: &Health{ + Status: "available", + }, + wantErr: false, + }, + { + name: "TestHealthWithBadUrl", + client: badSv, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotResp, err := tt.client.Health() + if tt.wantErr { + require.Error(t, err) + } else { + require.NoError(t, err) + require.Equal(t, tt.wantResp, gotResp, "Health() got response %v, want %v", gotResp, tt.wantResp) + } + }) + } +} + +func Test_IsHealthy(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + + badSv := setup(t, "http://wrongurl:1234") + + tests := []struct { + name string + client ServiceManager + want bool + }{ + { + name: "TestIsHealthy", + client: sv, + want: true, + }, + { + name: "TestIsHealthyWithCustomClient", + client: customSv, + want: true, + }, + { + name: "TestIsHealthyWIthBadUrl", + client: badSv, + want: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := 
tt.client.IsHealthy() + require.Equal(t, tt.want, got, "IsHealthy() got response %v, want %v", got, tt.want) + }) + } +} + +func Test_CreateDump(t *testing.T) { + sv := setup(t, "") + + tests := []struct { + name string + client ServiceManager + wantResp *Task + }{ + { + name: "TestCreateDump", + client: sv, + wantResp: &Task{ + Status: "enqueued", + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := tt.client + + task, err := c.CreateDump() + require.NoError(t, err) + if assert.NotNil(t, task, "CreateDump() should not return nil value") { + require.Equal(t, tt.wantResp.Status, task.Status, "CreateDump() got response status %v, want: %v", task.Status, tt.wantResp.Status) + } + + taskInfo, err := c.WaitForTask(task.TaskUID, 0) + + require.NoError(t, err) + require.NotNil(t, taskInfo) + require.NotNil(t, taskInfo.Details) + require.Equal(t, TaskStatusSucceeded, taskInfo.Status) + require.NotEmpty(t, taskInfo.Details.DumpUid) + }) + } +} + +func Test_GetTask(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + + type args struct { + UID string + client ServiceManager + taskUID int64 + document []docTest + } + tests := []struct { + name string + args args + }{ + { + name: "TestBasicGetTask", + args: args{ + UID: "TestBasicGetTask", + client: sv, + taskUID: 0, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + }, + }, + { + name: "TestGetTaskWithCustomClient", + args: args{ + UID: "TestGetTaskWithCustomClient", + client: customSv, + taskUID: 1, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + }, + }, + { + name: "TestGetTask", + args: args{ + UID: "TestGetTask", + client: sv, + taskUID: 2, + document: []docTest{ + {ID: "456", Name: "Le Petit Prince"}, + {ID: "1", Name: "Alice In Wonderland"}, + }, + }, + }, + } + + t.Cleanup(cleanup(sv, customSv)) + + for _, tt := range tests { + t.Run(tt.name, func(t 
*testing.T) { + c := tt.args.client + i := c.Index(tt.args.UID) + t.Cleanup(cleanup(c)) + + task, err := i.AddDocuments(tt.args.document) + require.NoError(t, err) + + _, err = c.WaitForTask(task.TaskUID, 0) + require.NoError(t, err) + + gotResp, err := c.GetTask(task.TaskUID) + require.NoError(t, err) + require.NotNil(t, gotResp) + require.NotNil(t, gotResp.Details) + require.GreaterOrEqual(t, gotResp.UID, tt.args.taskUID) + require.Equal(t, tt.args.UID, gotResp.IndexUID) + require.Equal(t, TaskStatusSucceeded, gotResp.Status) + require.Equal(t, int64(len(tt.args.document)), gotResp.Details.ReceivedDocuments) + require.Equal(t, int64(len(tt.args.document)), gotResp.Details.IndexedDocuments) + + // Make sure that timestamps are also retrieved + require.NotZero(t, gotResp.EnqueuedAt) + require.NotZero(t, gotResp.StartedAt) + require.NotZero(t, gotResp.FinishedAt) + }) + } +} + +func Test_GetTasks(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + + type args struct { + UID string + client ServiceManager + document []docTest + query *TasksQuery + } + tests := []struct { + name string + args args + }{ + { + name: "TestBasicGetTasks", + args: args{ + UID: "indexUID", + client: sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: nil, + }, + }, + { + name: "TestGetTasksWithCustomClient", + args: args{ + UID: "indexUID", + client: customSv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: nil, + }, + }, + { + name: "TestGetTasksWithLimit", + args: args{ + UID: "indexUID", + client: sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: &TasksQuery{ + Limit: 1, + }, + }, + }, + { + name: "TestGetTasksWithLimit", + args: args{ + UID: "indexUID", + client: sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: &TasksQuery{ + Limit: 1, + }, + }, + }, + { + name: 
"TestGetTasksWithFrom", + args: args{ + UID: "indexUID", + client: sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: &TasksQuery{ + From: 0, + }, + }, + }, + { + name: "TestGetTasksWithParameters", + args: args{ + UID: "indexUID", + client: sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: &TasksQuery{ + Limit: 1, + From: 0, + IndexUIDS: []string{"indexUID"}, + }, + }, + }, + { + name: "TestGetTasksWithUidFilter", + args: args{ + UID: "indexUID", + client: sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: &TasksQuery{ + Limit: 1, + UIDS: []int64{1}, + }, + }, + }, + { + name: "TestGetTasksWithDateFilter", + args: args{ + UID: "indexUID", + client: sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: &TasksQuery{ + Limit: 1, + BeforeEnqueuedAt: time.Now(), + }, + }, + }, + { + name: "TestGetTasksWithCanceledByFilter", + args: args{ + UID: "indexUID", + client: sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: &TasksQuery{ + Limit: 1, + CanceledBy: []int64{1}, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := tt.args.client + i := c.Index(tt.args.UID) + t.Cleanup(cleanup(c)) + + task, err := i.AddDocuments(tt.args.document) + require.NoError(t, err) + + _, err = c.WaitForTask(task.TaskUID, 0) + require.NoError(t, err) + + gotResp, err := i.GetTasks(tt.args.query) + require.NoError(t, err) + require.NotNil(t, (*gotResp).Results[0].Status) + require.NotZero(t, (*gotResp).Results[0].UID) + require.NotNil(t, (*gotResp).Results[0].Type) + if tt.args.query != nil { + if tt.args.query.Limit != 0 { + require.Equal(t, tt.args.query.Limit, (*gotResp).Limit) + } else { + require.Equal(t, int64(20), (*gotResp).Limit) + } + if tt.args.query.From != 0 { + require.Equal(t, tt.args.query.From, (*gotResp).From) + } + } + }) + } +} + +func Test_GetTasksUsingClient(t 
*testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + + type args struct { + UID string + client ServiceManager + document []docTest + query *TasksQuery + expectedResults int + } + + tests := []struct { + name string + args args + }{ + { + name: "TestBasicGetTasks", + args: args{ + UID: "indexUID", + client: sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: nil, + expectedResults: 1, + }, + }, + { + name: "TestGetTasksWithCustomClient", + args: args{ + UID: "indexUID", + client: customSv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: nil, + expectedResults: 1, + }, + }, + { + name: "TestGetTasksWithLimit", + args: args{ + UID: "indexUID", + client: sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: &TasksQuery{ + Limit: 1, + }, + expectedResults: 1, + }, + }, + { + name: "TestGetTasksWithLimit", + args: args{ + UID: "indexUID", + client: sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: &TasksQuery{ + Limit: 1, + }, + expectedResults: 1, + }, + }, + { + name: "TestGetTasksWithFrom", + args: args{ + UID: "indexUID", + client: sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: &TasksQuery{ + From: 0, + }, + expectedResults: 1, + }, + }, + { + name: "TestGetTasksWithFrom_1", + args: args{ + UID: "indexUID", + client: sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: &TasksQuery{ + From: 1, + }, + expectedResults: 0, + }, + }, + { + name: "TestGetTasksWithParameters", + args: args{ + UID: "indexUID", + client: sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: &TasksQuery{ + Limit: 1, + From: 0, + IndexUIDS: []string{"indexUID"}, + }, + expectedResults: 1, + }, + }, + { + name: "TestGetTasksWithDateFilter", + args: args{ + UID: "indexUID", + client: 
sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: &TasksQuery{ + Limit: 1, + BeforeEnqueuedAt: time.Now(), + }, + expectedResults: 1, + }, + }, + + { + name: "TestGetTasksWithBeforeStartedAt", + args: args{ + UID: "indexUID", + client: sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: &TasksQuery{ + Limit: 1, + BeforeStartedAt: time.Now(), + }, + expectedResults: 1, + }, + }, + { + name: "TestGetTasksWithAfterStartedAt", + args: args{ + UID: "indexUID", + client: sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: &TasksQuery{ + Limit: 1, + AfterStartedAt: time.Now().Add(-time.Hour), + }, + expectedResults: 0, + }, + }, + { + name: "TestGetTasksWithBeforeFinishedAt", + args: args{ + UID: "indexUID", + client: sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: &TasksQuery{ + Limit: 1, + BeforeFinishedAt: time.Now().Add(time.Hour), + }, + expectedResults: 1, + }, + }, + { + name: "TestGetTasksWithAfterFinishedAt", + args: args{ + UID: "indexUID", + client: sv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: &TasksQuery{ + Limit: 1, + AfterFinishedAt: time.Now().Add(-time.Hour), + }, + expectedResults: 0, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := tt.args.client + i := c.Index(tt.args.UID) + + t.Cleanup(cleanup(c)) + + task, err := i.AddDocuments(tt.args.document) + require.NoError(t, err) + + _, err = c.WaitForTask(task.TaskUID, 0) + require.NoError(t, err) + + gotResp, err := c.GetTasks(tt.args.query) + require.NoError(t, err) + require.NotNil(t, gotResp) + // require.Equal(t, tt.args.expectedResults, len((*gotResp).Results)) + + if tt.args.expectedResults > 0 { + require.NotNil(t, (*gotResp).Results[0].Status) + require.NotZero(t, (*gotResp).Results[0].UID) + require.NotNil(t, (*gotResp).Results[0].Type) + } + if tt.args.query != nil { + if 
tt.args.query.Limit != 0 { + require.Equal(t, tt.args.query.Limit, (*gotResp).Limit) + } else { + require.Equal(t, int64(20), (*gotResp).Limit) + } + if tt.args.query.From != 0 && tt.args.expectedResults > 0 { + require.Equal(t, tt.args.query.From, (*gotResp).From) + } + } + }) + } +} + +func Test_GetTasksUsingClientAllFailures(t *testing.T) { + brokenSv := setup(t, "", WithAPIKey("wrong")) + + type args struct { + UID string + client ServiceManager + document []docTest + query *TasksQuery + expectedResults int + } + + tests := []struct { + name string + args args + }{ + { + name: "TestBasicGetTasks", + args: args{ + UID: "indexUID", + client: brokenSv, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + }, + query: nil, + expectedResults: 1, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := tt.args.client + t.Cleanup(cleanup(c)) + i := c.Index("NOT_EXISTS") + + _, err := c.DeleteIndex("NOT_EXISTS") + require.Error(t, err) + + _, err = c.WaitForTask(math.MaxInt32, 0) + require.Error(t, err) + + _, err = i.AddDocuments(tt.args.document) + require.Error(t, err) + + _, err = c.GetTasks(tt.args.query) + require.Error(t, err) + + _, err = c.GetStats() + require.Error(t, err) + + _, err = c.CreateKey(&Key{ + Name: "Wrong", + }) + require.Error(t, err) + + _, err = c.GetKey("Wrong") + require.Error(t, err) + + _, err = c.UpdateKey("Wrong", &Key{ + Name: "Wrong", + }) + require.Error(t, err) + + _, err = c.CreateDump() + require.Error(t, err) + + _, err = c.GetTask(1) + require.Error(t, err) + + _, err = c.DeleteTasks(nil) + require.Error(t, err) + + _, err = c.SwapIndexes([]*SwapIndexesParams{ + {Indexes: []string{"Wrong", "Worse"}}, + }) + require.Error(t, err) + }) + } +} + +func Test_CancelTasks(t *testing.T) { + sv := setup(t, "") + + type args struct { + UID string + client ServiceManager + query *CancelTasksQuery + } + tests := []struct { + name string + args args + want string + }{ + { + name: 
"TestCancelTasksWithNoFilters", + args: args{ + UID: "indexUID", + client: sv, + query: nil, + }, + want: "", + }, + { + name: "TestCancelTasksWithStatutes", + args: args{ + UID: "indexUID", + client: sv, + query: &CancelTasksQuery{ + Statuses: []TaskStatus{TaskStatusSucceeded}, + }, + }, + want: "?statuses=succeeded", + }, + { + name: "TestCancelTasksWithIndexUIDFilter", + args: args{ + UID: "indexUID", + client: sv, + query: &CancelTasksQuery{ + IndexUIDS: []string{"0"}, + }, + }, + want: "?indexUids=0", + }, + { + name: "TestCancelTasksWithMultipleIndexUIDsFilter", + args: args{ + UID: "indexUID", + client: sv, + query: &CancelTasksQuery{ + IndexUIDS: []string{"0", "1"}, + }, + }, + want: "?indexUids=0%2C1", + }, + { + name: "TestCancelTasksWithUidFilter", + args: args{ + UID: "indexUID", + client: sv, + query: &CancelTasksQuery{ + UIDS: []int64{0}, + }, + }, + want: "?uids=0", + }, + { + name: "TestCancelTasksWithMultipleUidsFilter", + args: args{ + UID: "indexUID", + client: sv, + query: &CancelTasksQuery{ + UIDS: []int64{0, 1}, + }, + }, + want: "?uids=0%2C1", + }, + { + name: "TestCancelTasksWithDateFilter", + args: args{ + UID: "indexUID", + client: sv, + query: &CancelTasksQuery{ + BeforeEnqueuedAt: time.Now(), + }, + }, + want: strings.NewReplacer(":", "%3A").Replace("?beforeEnqueuedAt=" + time.Now().Format("2006-01-02T15:04:05Z")), + }, + { + name: "TestCancelTasksWithParameters", + args: args{ + UID: "indexUID", + client: sv, + query: &CancelTasksQuery{ + Statuses: []TaskStatus{TaskStatusEnqueued}, + Types: []TaskType{TaskTypeDocumentAdditionOrUpdate}, + IndexUIDS: []string{"indexUID"}, + UIDS: []int64{1}, + AfterEnqueuedAt: time.Now(), + }, + }, + want: "?afterEnqueuedAt=" + strings.NewReplacer(":", "%3A").Replace(time.Now().Format("2006-01-02T15:04:05Z")) + "&indexUids=indexUID&statuses=enqueued&types=documentAdditionOrUpdate&uids=1", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := tt.args.client + 
t.Cleanup(cleanup(c)) + + gotResp, err := c.CancelTasks(tt.args.query) + if tt.args.query == nil { + require.Error(t, err) + require.Equal(t, "missing_task_filters", + err.(*Error).MeilisearchApiError.Code) + } else { + require.NoError(t, err) + + _, err = c.WaitForTask(gotResp.TaskUID, 0) + require.NoError(t, err) + + gotTask, err := c.GetTask(gotResp.TaskUID) + require.NoError(t, err) + + require.NotNil(t, gotResp.Status) + require.NotNil(t, gotResp.Type) + require.NotNil(t, gotResp.TaskUID) + require.NotNil(t, gotResp.EnqueuedAt) + require.Equal(t, "", gotResp.IndexUID) + require.Equal(t, TaskTypeTaskCancelation, gotResp.Type) + require.Equal(t, tt.want, gotTask.Details.OriginalFilter) + } + }) + } +} + +func Test_DeleteTasks(t *testing.T) { + sv := setup(t, "") + + type args struct { + UID string + client ServiceManager + query *DeleteTasksQuery + } + tests := []struct { + name string + args args + want string + }{ + { + name: "TestBasicDeleteTasks", + args: args{ + UID: "indexUID", + client: sv, + query: &DeleteTasksQuery{ + Statuses: []TaskStatus{TaskStatusEnqueued}, + }, + }, + want: "?statuses=enqueued", + }, + { + name: "TestDeleteTasksWithUidFilter", + args: args{ + UID: "indexUID", + client: sv, + query: &DeleteTasksQuery{ + UIDS: []int64{1}, + }, + }, + want: "?uids=1", + }, + { + name: "TestDeleteTasksWithMultipleUidsFilter", + args: args{ + UID: "indexUID", + client: sv, + query: &DeleteTasksQuery{ + UIDS: []int64{0, 1}, + }, + }, + want: "?uids=0%2C1", + }, + { + name: "TestDeleteTasksWithIndexUIDFilter", + args: args{ + UID: "indexUID", + client: sv, + query: &DeleteTasksQuery{ + IndexUIDS: []string{"0"}, + }, + }, + want: "?indexUids=0", + }, + { + name: "TestDeleteTasksWithMultipleIndexUIDsFilter", + args: args{ + UID: "indexUID", + client: sv, + query: &DeleteTasksQuery{ + IndexUIDS: []string{"0", "1"}, + }, + }, + want: "?indexUids=0%2C1", + }, + { + name: "TestDeleteTasksWithDateFilter", + args: args{ + UID: "indexUID", + client: sv, + query: 
&DeleteTasksQuery{ + BeforeEnqueuedAt: time.Now(), + }, + }, + want: strings.NewReplacer(":", "%3A").Replace("?beforeEnqueuedAt=" + time.Now().Format("2006-01-02T15:04:05Z")), + }, + { + name: "TestDeleteTasksWithCanceledByFilter", + args: args{ + UID: "indexUID", + client: sv, + query: &DeleteTasksQuery{ + CanceledBy: []int64{1}, + }, + }, + want: "?canceledBy=1", + }, + { + name: "TestDeleteTasksWithParameters", + args: args{ + UID: "indexUID", + client: sv, + query: &DeleteTasksQuery{ + Statuses: []TaskStatus{TaskStatusEnqueued}, + IndexUIDS: []string{"indexUID"}, + UIDS: []int64{1}, + AfterEnqueuedAt: time.Now(), + }, + }, + want: "?afterEnqueuedAt=" + strings.NewReplacer(":", "%3A").Replace(time.Now().Format("2006-01-02T15:04:05Z")) + "&indexUids=indexUID&statuses=enqueued&uids=1", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := tt.args.client + t.Cleanup(cleanup(c)) + + gotResp, err := c.DeleteTasks(tt.args.query) + require.NoError(t, err) + + _, err = c.WaitForTask(gotResp.TaskUID, 0) + require.NoError(t, err) + + gotTask, err := c.GetTask(gotResp.TaskUID) + require.NoError(t, err) + + require.NotNil(t, gotResp.Status) + require.NotNil(t, gotResp.Type) + require.NotNil(t, gotResp.TaskUID) + require.NotNil(t, gotResp.EnqueuedAt) + require.Equal(t, "", gotResp.IndexUID) + require.Equal(t, TaskTypeTaskDeletion, gotResp.Type) + require.NotNil(t, gotTask.Details.OriginalFilter) + require.Equal(t, tt.want, gotTask.Details.OriginalFilter) + }) + } +} + +func Test_SwapIndexes(t *testing.T) { + sv := setup(t, "") + + type args struct { + UID string + client ServiceManager + query []*SwapIndexesParams + } + tests := []struct { + name string + args args + }{ + { + name: "TestBasicSwapIndexes", + args: args{ + UID: "indexUID", + client: sv, + query: []*SwapIndexesParams{ + {Indexes: []string{"IndexA", "IndexB"}}, + }, + }, + }, + { + name: "TestSwapIndexesWithMultipleIndexes", + args: args{ + UID: "indexUID", + client: sv, + query: 
[]*SwapIndexesParams{ + {Indexes: []string{"IndexA", "IndexB"}}, + {Indexes: []string{"Index1", "Index2"}}, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := tt.args.client + t.Cleanup(cleanup(c)) + + for _, params := range tt.args.query { + for _, idx := range params.Indexes { + task, err := c.CreateIndex(&IndexConfig{ + Uid: idx, + }) + require.NoError(t, err) + _, err = c.WaitForTask(task.TaskUID, 0) + require.NoError(t, err) + } + } + + gotResp, err := c.SwapIndexes(tt.args.query) + require.NoError(t, err) + + _, err = c.WaitForTask(gotResp.TaskUID, 0) + require.NoError(t, err) + + gotTask, err := c.GetTask(gotResp.TaskUID) + require.NoError(t, err) + + require.NotNil(t, gotResp.Status) + require.NotNil(t, gotResp.Type) + require.NotNil(t, gotResp.TaskUID) + require.NotNil(t, gotResp.EnqueuedAt) + require.Equal(t, gotTask.Status, TaskStatusSucceeded) + }) + } +} + +func Test_DefaultWaitForTask(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + + type args struct { + UID string + client ServiceManager + taskUID *Task + document []docTest + } + tests := []struct { + name string + args args + want TaskStatus + }{ + { + name: "TestDefaultWaitForTask", + args: args{ + UID: "TestDefaultWaitForTask", + client: sv, + taskUID: &Task{ + UID: 0, + }, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + {ID: "456", Name: "Le Petit Prince"}, + {ID: "1", Name: "Alice In Wonderland"}, + }, + }, + want: "succeeded", + }, + { + name: "TestDefaultWaitForTaskWithCustomClient", + args: args{ + UID: "TestDefaultWaitForTaskWithCustomClient", + client: customSv, + taskUID: &Task{ + UID: 0, + }, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + {ID: "456", Name: "Le Petit Prince"}, + {ID: "1", Name: "Alice In Wonderland"}, + }, + }, + want: "succeeded", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t 
*testing.T) { + c := tt.args.client + t.Cleanup(cleanup(c)) + + task, err := c.Index(tt.args.UID).AddDocuments(tt.args.document) + require.NoError(t, err) + + gotTask, err := c.WaitForTask(task.TaskUID, 0) + require.NoError(t, err) + require.Equal(t, tt.want, gotTask.Status) + }) + } +} + +func Test_WaitForTaskWithContext(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + + type args struct { + UID string + client ServiceManager + interval time.Duration + timeout time.Duration + taskUID *Task + document []docTest + } + tests := []struct { + name string + args args + want TaskStatus + }{ + { + name: "TestWaitForTask50", + args: args{ + UID: "TestWaitForTask50", + client: sv, + interval: time.Millisecond * 50, + timeout: time.Second * 5, + taskUID: &Task{ + UID: 0, + }, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + {ID: "456", Name: "Le Petit Prince"}, + {ID: "1", Name: "Alice In Wonderland"}, + }, + }, + want: "succeeded", + }, + { + name: "TestWaitForTask50WithCustomClient", + args: args{ + UID: "TestWaitForTask50WithCustomClient", + client: customSv, + interval: time.Millisecond * 50, + timeout: time.Second * 5, + taskUID: &Task{ + UID: 0, + }, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + {ID: "456", Name: "Le Petit Prince"}, + {ID: "1", Name: "Alice In Wonderland"}, + }, + }, + want: "succeeded", + }, + { + name: "TestWaitForTask10", + args: args{ + UID: "TestWaitForTask10", + client: sv, + interval: time.Millisecond * 10, + timeout: time.Second * 5, + taskUID: &Task{ + UID: 1, + }, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + {ID: "456", Name: "Le Petit Prince"}, + {ID: "1", Name: "Alice In Wonderland"}, + }, + }, + want: "succeeded", + }, + { + name: "TestWaitForTaskWithTimeout", + args: args{ + UID: "TestWaitForTaskWithTimeout", + client: sv, + interval: time.Millisecond * 50, + timeout: time.Millisecond * 
10, + taskUID: &Task{ + UID: 1, + }, + document: []docTest{ + {ID: "123", Name: "Pride and Prejudice"}, + {ID: "456", Name: "Le Petit Prince"}, + {ID: "1", Name: "Alice In Wonderland"}, + }, + }, + want: "succeeded", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := tt.args.client + t.Cleanup(cleanup(c)) + + task, err := c.Index(tt.args.UID).AddDocuments(tt.args.document) + require.NoError(t, err) + + ctx, cancelFunc := context.WithTimeout(context.Background(), tt.args.timeout) + defer cancelFunc() + + gotTask, err := c.WaitForTaskWithContext(ctx, task.TaskUID, 0) + if tt.args.timeout < tt.args.interval { + require.Error(t, err) + } else { + require.NoError(t, err) + require.Equal(t, tt.want, gotTask.Status) + } + }) + } +} + +func Test_ConnectionCloseByServer(t *testing.T) { + sv := setup(t, "") + + // Simulate 10 clients sending requests. + g := sync.WaitGroup{} + for i := 0; i < 10; i++ { + g.Add(1) + go func() { + defer g.Done() + + _, _ = sv.Index("foo").Search("bar", &SearchRequest{}) + time.Sleep(5 * time.Second) + _, err := sv.Index("foo").Search("bar", &SearchRequest{}) + var e *Error + if errors.As(err, &e) && e.ErrCode == MeilisearchCommunicationError { + require.NoErrorf(t, e, "unexpected error") + } + }() + } + g.Wait() +} + +func Test_GenerateTenantToken(t *testing.T) { + sv := setup(t, "") + privateSv := setup(t, "", WithAPIKey(getPrivateKey(sv))) + + type args struct { + IndexUIDS string + client ServiceManager + APIKeyUID string + searchRules map[string]interface{} + options *TenantTokenOptions + filter []string + } + tests := []struct { + name string + args args + wantErr bool + wantFilter bool + }{ + { + name: "TestDefaultGenerateTenantToken", + args: args{ + IndexUIDS: "TestDefaultGenerateTenantToken", + client: privateSv, + APIKeyUID: getPrivateUIDKey(sv), + searchRules: map[string]interface{}{ + "*": map[string]string{}, + }, + options: nil, + filter: nil, + }, + wantErr: false, + wantFilter: false, + }, + { + 
name: "TestGenerateTenantTokenWithApiKey", + args: args{ + IndexUIDS: "TestGenerateTenantTokenWithApiKey", + client: sv, + APIKeyUID: getPrivateUIDKey(sv), + searchRules: map[string]interface{}{ + "*": map[string]string{}, + }, + options: &TenantTokenOptions{ + APIKey: getPrivateKey(sv), + }, + filter: nil, + }, + wantErr: false, + wantFilter: false, + }, + { + name: "TestGenerateTenantTokenWithOnlyExpiresAt", + args: args{ + IndexUIDS: "TestGenerateTenantTokenWithOnlyExpiresAt", + client: privateSv, + APIKeyUID: getPrivateUIDKey(sv), + searchRules: map[string]interface{}{ + "*": map[string]string{}, + }, + options: &TenantTokenOptions{ + ExpiresAt: time.Now().Add(time.Hour * 10), + }, + filter: nil, + }, + wantErr: false, + wantFilter: false, + }, + { + name: "TestGenerateTenantTokenWithApiKeyAndExpiresAt", + args: args{ + IndexUIDS: "TestGenerateTenantTokenWithApiKeyAndExpiresAt", + client: sv, + APIKeyUID: getPrivateUIDKey(sv), + searchRules: map[string]interface{}{ + "*": map[string]string{}, + }, + options: &TenantTokenOptions{ + APIKey: getPrivateKey(sv), + ExpiresAt: time.Now().Add(time.Hour * 10), + }, + filter: nil, + }, + wantErr: false, + wantFilter: false, + }, + { + name: "TestGenerateTenantTokenWithFilters", + args: args{ + IndexUIDS: "indexUID", + client: privateSv, + APIKeyUID: getPrivateUIDKey(sv), + searchRules: map[string]interface{}{ + "*": map[string]string{ + "filter": "book_id > 1000", + }, + }, + options: nil, + filter: []string{ + "book_id", + }, + }, + wantErr: false, + wantFilter: true, + }, + { + name: "TestGenerateTenantTokenWithFilterOnOneINdex", + args: args{ + IndexUIDS: "indexUID", + client: privateSv, + APIKeyUID: getPrivateUIDKey(sv), + searchRules: map[string]interface{}{ + "indexUID": map[string]string{ + "filter": "year > 2000", + }, + }, + options: nil, + filter: []string{ + "year", + }, + }, + wantErr: false, + wantFilter: true, + }, + { + name: "TestGenerateTenantTokenWithoutSearchRules", + args: args{ + IndexUIDS: 
"TestGenerateTenantTokenWithoutSearchRules", + client: privateSv, + APIKeyUID: getPrivateUIDKey(sv), + searchRules: nil, + options: nil, + filter: nil, + }, + wantErr: true, + wantFilter: false, + }, + { + name: "TestGenerateTenantTokenWithoutApiKey", + args: args{ + IndexUIDS: "TestGenerateTenantTokenWithoutApiKey", + client: setup(t, "", WithAPIKey("")), + APIKeyUID: getPrivateUIDKey(sv), + searchRules: map[string]interface{}{ + "*": map[string]string{}, + }, + options: nil, + filter: nil, + }, + wantErr: true, + wantFilter: false, + }, + { + name: "TestGenerateTenantTokenWithBadExpiresAt", + args: args{ + IndexUIDS: "TestGenerateTenantTokenWithBadExpiresAt", + client: sv, + APIKeyUID: getPrivateUIDKey(sv), + searchRules: map[string]interface{}{ + "*": map[string]string{}, + }, + options: &TenantTokenOptions{ + ExpiresAt: time.Now().Add(-time.Hour * 10), + }, + filter: nil, + }, + wantErr: true, + wantFilter: false, + }, + { + name: "TestGenerateTenantTokenWithBadAPIKeyUID", + args: args{ + IndexUIDS: "TestGenerateTenantTokenWithBadAPIKeyUID", + client: sv, + APIKeyUID: getPrivateUIDKey(sv) + "1234", + searchRules: map[string]interface{}{ + "*": map[string]string{}, + }, + options: nil, + filter: nil, + }, + wantErr: true, + wantFilter: false, + }, + { + name: "TestGenerateTenantTokenWithEmptyAPIKeyUID", + args: args{ + IndexUIDS: "TestGenerateTenantTokenWithEmptyAPIKeyUID", + client: sv, + APIKeyUID: "", + searchRules: map[string]interface{}{ + "*": map[string]string{}, + }, + options: nil, + filter: nil, + }, + wantErr: true, + wantFilter: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := tt.args.client + t.Cleanup(cleanup(c)) + + token, err := c.GenerateTenantToken(tt.args.APIKeyUID, tt.args.searchRules, tt.args.options) + + if tt.wantErr { + require.Error(t, err) + } else { + require.NoError(t, err) + + if tt.wantFilter { + gotTask, err := c.Index(tt.args.IndexUIDS).UpdateFilterableAttributes(&tt.args.filter) + 
require.NoError(t, err, "UpdateFilterableAttributes() in TestGenerateTenantToken error should be nil") + testWaitForTask(t, c.Index(tt.args.IndexUIDS), gotTask) + } else { + _, err := setUpEmptyIndex(sv, &IndexConfig{Uid: tt.args.IndexUIDS}) + require.NoError(t, err, "CreateIndex() in TestGenerateTenantToken error should be nil") + } + + client := setup(t, "", WithAPIKey(token)) + + _, err = client.Index(tt.args.IndexUIDS).Search("", &SearchRequest{}) + + require.NoError(t, err) + } + }) + } +} + +func TestClient_MultiSearch(t *testing.T) { + sv := setup(t, "") + + type args struct { + client ServiceManager + queries *MultiSearchRequest + UIDS []string + } + tests := []struct { + name string + args args + want *MultiSearchResponse + wantErr bool + }{ + { + name: "TestClientMultiSearchOneIndex", + args: args{ + client: sv, + queries: &MultiSearchRequest{ + []*SearchRequest{ + { + IndexUID: "TestClientMultiSearchOneIndex", + Query: "wonder", + }, + }, + }, + UIDS: []string{"TestClientMultiSearchOneIndex"}, + }, + want: &MultiSearchResponse{ + Results: []SearchResponse{ + { + Hits: []interface{}{ + map[string]interface{}{ + "book_id": float64(1), + "title": "Alice In Wonderland", + }, + }, + EstimatedTotalHits: 1, + Offset: 0, + Limit: 20, + Query: "wonder", + IndexUID: "TestClientMultiSearchOneIndex", + }, + }, + }, + }, + { + name: "TestClientMultiSearchOnTwoIndexes", + args: args{ + client: sv, + queries: &MultiSearchRequest{ + []*SearchRequest{ + { + IndexUID: "TestClientMultiSearchOnTwoIndexes1", + Query: "wonder", + }, + { + IndexUID: "TestClientMultiSearchOnTwoIndexes2", + Query: "prince", + }, + }, + }, + UIDS: []string{"TestClientMultiSearchOnTwoIndexes1", "TestClientMultiSearchOnTwoIndexes2"}, + }, + want: &MultiSearchResponse{ + Results: []SearchResponse{ + { + Hits: []interface{}{ + map[string]interface{}{ + "book_id": float64(1), + "title": "Alice In Wonderland", + }, + }, + EstimatedTotalHits: 1, + Offset: 0, + Limit: 20, + Query: "wonder", + IndexUID: 
"TestClientMultiSearchOnTwoIndexes1", + }, + { + Hits: []interface{}{ + map[string]interface{}{ + "book_id": float64(456), + "title": "Le Petit Prince", + }, + map[string]interface{}{ + "book_id": float64(4), + "title": "Harry Potter and the Half-Blood Prince", + }, + }, + EstimatedTotalHits: 2, + Offset: 0, + Limit: 20, + Query: "prince", + IndexUID: "TestClientMultiSearchOnTwoIndexes2", + }, + }, + }, + }, + { + name: "TestClientMultiSearchNoIndex", + args: args{ + client: sv, + queries: &MultiSearchRequest{ + []*SearchRequest{ + { + Query: "", + }, + }, + }, + UIDS: []string{"TestClientMultiSearchNoIndex"}, + }, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + for _, UID := range tt.args.UIDS { + setUpBasicIndex(sv, UID) + } + c := tt.args.client + t.Cleanup(cleanup(c)) + + got, err := c.MultiSearch(tt.args.queries) + + if tt.wantErr { + require.Error(t, err) + } else { + require.NotNil(t, got) + for i := 0; i < len(tt.want.Results); i++ { + if !reflect.DeepEqual(got.Results[i].Hits, tt.want.Results[i].Hits) { + t.Errorf("Client.MultiSearch() = %v, want %v", got.Results[i].Hits, tt.want.Results[i].Hits) + } + require.Equal(t, tt.want.Results[i].EstimatedTotalHits, got.Results[i].EstimatedTotalHits) + require.Equal(t, tt.want.Results[i].Offset, got.Results[i].Offset) + require.Equal(t, tt.want.Results[i].Limit, got.Results[i].Limit) + require.Equal(t, tt.want.Results[i].Query, got.Results[i].Query) + require.Equal(t, tt.want.Results[i].IndexUID, got.Results[i].IndexUID) + } + } + }) + } +} From ded6cecd6143aa3a33a30b1ca2e83bacda4c10b0 Mon Sep 17 00:00:00 2001 From: Javad Date: Sun, 4 Aug 2024 11:07:10 +0330 Subject: [PATCH 33/43] feat: add index manager to manage index and search --- client_index.go | 118 -- client_index_test.go | 768 -------- error.go | 2 +- error_test.go | 2 +- go.mod | 4 +- go.sum | 29 +- index.go | 658 +++++-- index_document.go | 594 ++++++ ...ocuments_test.go => index_document_test.go | 1655 
++--------------- index_documents.go | 568 ------ index_facet_search.go | 60 - index_facet_search_test.go | 213 --- index_search.go | 81 +- index_search_test.go | 450 +++-- index_settings.go | 592 +++--- index_settings_test.go | 528 ++++-- index_test.go | 186 +- main_test.go | 481 +++-- options.go | 5 +- types_test.go | 43 - 20 files changed, 2829 insertions(+), 4208 deletions(-) delete mode 100644 client_index.go delete mode 100644 client_index_test.go create mode 100644 index_document.go rename index_documents_test.go => index_document_test.go (52%) delete mode 100644 index_documents.go delete mode 100644 index_facet_search.go delete mode 100644 index_facet_search_test.go delete mode 100644 types_test.go diff --git a/client_index.go b/client_index.go deleted file mode 100644 index c0524265..00000000 --- a/client_index.go +++ /dev/null @@ -1,118 +0,0 @@ -package meilisearch - -import ( - "net/http" - "strconv" -) - -func (c *Client) Index(uid string) *Index { - return newIndex(c, uid) -} - -func (c *Client) GetIndex(uid string) (resp *Index, err error) { - return newIndex(c, uid).FetchInfo() -} - -func (c *Client) GetRawIndex(uid string) (resp map[string]interface{}, err error) { - resp = map[string]interface{}{} - req := internalRequest{ - endpoint: "/indexes/" + uid, - method: http.MethodGet, - withRequest: nil, - withResponse: &resp, - acceptedStatusCodes: []int{http.StatusOK}, - functionName: "GetRawIndex", - } - if err := c.executeRequest(req); err != nil { - return nil, err - } - return resp, nil -} - -func (c *Client) CreateIndex(config *IndexConfig) (resp *TaskInfo, err error) { - request := &CreateIndexRequest{ - UID: config.Uid, - PrimaryKey: config.PrimaryKey, - } - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes", - method: http.MethodPost, - contentType: contentTypeJSON, - withRequest: request, - withResponse: resp, - acceptedStatusCodes: []int{http.StatusAccepted}, - functionName: "CreateIndex", - } - if err := 
c.executeRequest(req); err != nil { - return nil, err - } - return resp, nil -} - -func (c *Client) GetIndexes(param *IndexesQuery) (resp *IndexesResults, err error) { - resp = &IndexesResults{} - req := internalRequest{ - endpoint: "/indexes", - method: http.MethodGet, - withRequest: nil, - withResponse: &resp, - withQueryParams: map[string]string{}, - acceptedStatusCodes: []int{http.StatusOK}, - functionName: "GetIndexes", - } - if param != nil && param.Limit != 0 { - req.withQueryParams["limit"] = strconv.FormatInt(param.Limit, 10) - } - if param != nil && param.Offset != 0 { - req.withQueryParams["offset"] = strconv.FormatInt(param.Offset, 10) - } - if err := c.executeRequest(req); err != nil { - return nil, err - } - - for i := range resp.Results { - resp.Results[i].client = c - } - - return resp, nil -} - -func (c *Client) GetRawIndexes(param *IndexesQuery) (resp map[string]interface{}, err error) { - resp = map[string]interface{}{} - req := internalRequest{ - endpoint: "/indexes", - method: http.MethodGet, - withRequest: nil, - withResponse: &resp, - withQueryParams: map[string]string{}, - acceptedStatusCodes: []int{http.StatusOK}, - functionName: "GetRawIndexes", - } - if param != nil && param.Limit != 0 { - req.withQueryParams["limit"] = strconv.FormatInt(param.Limit, 10) - } - if param != nil && param.Offset != 0 { - req.withQueryParams["offset"] = strconv.FormatInt(param.Offset, 10) - } - if err := c.executeRequest(req); err != nil { - return nil, err - } - return resp, nil -} - -func (c *Client) DeleteIndex(uid string) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + uid, - method: http.MethodDelete, - withRequest: nil, - withResponse: resp, - acceptedStatusCodes: []int{http.StatusAccepted}, - functionName: "DeleteIndex", - } - if err := c.executeRequest(req); err != nil { - return nil, err - } - return resp, nil -} diff --git a/client_index_test.go b/client_index_test.go deleted file mode 100644 
index 9b88d282..00000000 --- a/client_index_test.go +++ /dev/null @@ -1,768 +0,0 @@ -package meilisearch - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestClient_CreateIndex(t *testing.T) { - type args struct { - config IndexConfig - } - tests := []struct { - name string - client *Client - args args - wantResp *Index - wantErr bool - expectedError Error - }{ - { - name: "TestBasicCreateIndex", - client: defaultClient, - args: args{ - config: IndexConfig{ - Uid: "TestBasicCreateIndex", - }, - }, - wantResp: &Index{ - UID: "TestBasicCreateIndex", - }, - wantErr: false, - }, - { - name: "TestCreateIndexWithCustomClient", - client: customClient, - args: args{ - config: IndexConfig{ - Uid: "TestCreateIndexWithCustomClient", - }, - }, - wantResp: &Index{ - UID: "TestCreateIndexWithCustomClient", - }, - wantErr: false, - }, - { - name: "TestCreateIndexWithPrimaryKey", - client: defaultClient, - args: args{ - config: IndexConfig{ - Uid: "TestCreateIndexWithPrimaryKey", - PrimaryKey: "PrimaryKey", - }, - }, - wantResp: &Index{ - UID: "TestCreateIndexWithPrimaryKey", - PrimaryKey: "PrimaryKey", - }, - wantErr: false, - }, - { - name: "TestCreateIndexInvalidUid", - client: defaultClient, - args: args{ - config: IndexConfig{ - Uid: "TestCreateIndexInvalidUid*", - }, - }, - wantErr: true, - expectedError: Error{ - MeilisearchApiError: meilisearchApiError{ - Code: "invalid_index_uid", - }, - }, - }, - { - name: "TestCreateIndexTimeout", - client: timeoutClient, - args: args{ - config: IndexConfig{ - Uid: "TestCreateIndexTimeout", - }, - }, - wantErr: true, - expectedError: Error{ - MeilisearchApiError: meilisearchApiError{}, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.client - t.Cleanup(cleanup(c)) - - gotResp, err := c.CreateIndex(&tt.args.config) - - if tt.wantErr { - require.Error(t, err) - require.Equal(t, tt.expectedError.MeilisearchApiError.Code, - 
err.(*Error).MeilisearchApiError.Code) - } else { - require.NoError(t, err) - require.Equal(t, gotResp.Type, TaskTypeIndexCreation) - require.Equal(t, gotResp.Status, TaskStatusEnqueued) - // Make sure that timestamps are also retrieved - require.NotZero(t, gotResp.EnqueuedAt) - - _, err := c.WaitForTask(gotResp.TaskUID) - require.NoError(t, err) - - index, err := c.GetIndex(tt.args.config.Uid) - - require.NoError(t, err) - if assert.NotNil(t, index) { - require.Equal(t, tt.wantResp.UID, gotResp.IndexUID) - require.Equal(t, tt.wantResp.UID, index.UID) - require.Equal(t, tt.wantResp.PrimaryKey, index.PrimaryKey) - } - } - }) - } -} - -func TestClient_DeleteIndex(t *testing.T) { - type args struct { - createUid []string - deleteUid []string - } - tests := []struct { - name string - client *Client - args args - wantErr bool - expectedError []Error - }{ - { - name: "TestBasicDeleteIndex", - client: defaultClient, - args: args{ - createUid: []string{"TestBasicDeleteIndex"}, - deleteUid: []string{"TestBasicDeleteIndex"}, - }, - wantErr: false, - }, - { - name: "TestDeleteIndexWithCustomClient", - client: customClient, - args: args{ - createUid: []string{"TestDeleteIndexWithCustomClient"}, - deleteUid: []string{"TestDeleteIndexWithCustomClient"}, - }, - wantErr: false, - }, - { - name: "TestMultipleDeleteIndex", - client: defaultClient, - args: args{ - createUid: []string{ - "TestMultipleDeleteIndex_2", - "TestMultipleDeleteIndex_3", - "TestMultipleDeleteIndex_4", - "TestMultipleDeleteIndex_5", - }, - deleteUid: []string{ - "TestMultipleDeleteIndex_2", - "TestMultipleDeleteIndex_3", - "TestMultipleDeleteIndex_4", - "TestMultipleDeleteIndex_5", - }, - }, - wantErr: false, - }, - { - name: "TestNotExistingDeleteIndex", - client: defaultClient, - args: args{ - deleteUid: []string{"TestNotExistingDeleteIndex"}, - }, - wantErr: false, - }, - { - name: "TestMultipleNotExistingDeleteIndex", - client: defaultClient, - args: args{ - deleteUid: []string{ - 
"TestMultipleNotExistingDeleteIndex_2", - "TestMultipleNotExistingDeleteIndex_3", - "TestMultipleNotExistingDeleteIndex_4", - "TestMultipleNotExistingDeleteIndex_5", - }, - }, - wantErr: false, - }, - { - name: "TestDeleteIndexTimeout", - client: timeoutClient, - args: args{ - deleteUid: []string{"TestDeleteIndexTimeout"}, - }, - wantErr: true, - expectedError: []Error{ - { - MeilisearchApiError: meilisearchApiError{}, - }, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.client - t.Cleanup(cleanup(c)) - - for _, uid := range tt.args.createUid { - _, err := SetUpEmptyIndex(&IndexConfig{Uid: uid}) - require.NoError(t, err, "CreateIndex() in TestDeleteIndex error should be nil") - } - for k := range tt.args.deleteUid { - gotResp, err := c.DeleteIndex(tt.args.deleteUid[k]) - if tt.wantErr { - require.Error(t, err) - require.Equal(t, tt.expectedError[k].MeilisearchApiError.Code, - err.(*Error).MeilisearchApiError.Code) - } else { - require.NoError(t, err) - require.Equal(t, gotResp.Type, TaskTypeIndexDeletion) - // Make sure that timestamps are also retrieved - require.NotZero(t, gotResp.EnqueuedAt) - } - } - }) - } -} - -func TestClient_GetIndexes(t *testing.T) { - type args struct { - uid []string - request *IndexesQuery - } - tests := []struct { - name string - client *Client - args args - wantResp *IndexesResults - }{ - { - name: "TestGetIndexesOnNoIndexes", - client: defaultClient, - args: args{ - uid: []string{}, - request: nil, - }, - wantResp: &IndexesResults{ - Offset: 0, - Limit: 20, - Total: 0, - }, - }, - { - name: "TestBasicGetIndexes", - client: defaultClient, - args: args{ - uid: []string{"TestBasicGetIndexes"}, - request: nil, - }, - wantResp: &IndexesResults{ - Results: []Index{ - { - UID: "TestBasicGetIndexes", - }, - }, - Offset: 0, - Limit: 20, - Total: 1, - }, - }, - { - name: "TestGetIndexesWithCustomClient", - client: customClient, - args: args{ - uid: []string{"TestGetIndexesWithCustomClient"}, - 
request: nil, - }, - wantResp: &IndexesResults{ - Results: []Index{ - { - UID: "TestGetIndexesWithCustomClient", - }, - }, - Offset: 0, - Limit: 20, - Total: 1, - }, - }, - { - name: "TestGetIndexesOnMultipleIndex", - client: defaultClient, - args: args{ - uid: []string{ - "TestGetIndexesOnMultipleIndex_1", - "TestGetIndexesOnMultipleIndex_2", - "TestGetIndexesOnMultipleIndex_3", - }, - request: nil, - }, - wantResp: &IndexesResults{ - Results: []Index{ - { - UID: "TestGetIndexesOnMultipleIndex_1", - }, - { - UID: "TestGetIndexesOnMultipleIndex_2", - }, - { - UID: "TestGetIndexesOnMultipleIndex_3", - }, - }, - Offset: 0, - Limit: 20, - Total: 3, - }, - }, - { - name: "TestGetIndexesOnMultipleIndexWithPrimaryKey", - client: defaultClient, - args: args{ - uid: []string{ - "TestGetIndexesOnMultipleIndexWithPrimaryKey_1", - "TestGetIndexesOnMultipleIndexWithPrimaryKey_2", - "TestGetIndexesOnMultipleIndexWithPrimaryKey_3", - }, - request: nil, - }, - wantResp: &IndexesResults{ - Results: []Index{ - { - UID: "TestGetIndexesOnMultipleIndexWithPrimaryKey_1", - PrimaryKey: "PrimaryKey1", - }, - { - UID: "TestGetIndexesOnMultipleIndexWithPrimaryKey_2", - PrimaryKey: "PrimaryKey2", - }, - { - UID: "TestGetIndexesOnMultipleIndexWithPrimaryKey_3", - PrimaryKey: "PrimaryKey3", - }, - }, - Offset: 0, - Limit: 20, - Total: 3, - }, - }, - { - name: "TestGetIndexesWithLimit", - client: defaultClient, - args: args{ - uid: []string{ - "TestGetIndexesWithLimit_1", - "TestGetIndexesWithLimit_2", - "TestGetIndexesWithLimit_3", - }, - request: &IndexesQuery{ - Limit: 1, - }, - }, - wantResp: &IndexesResults{ - Results: []Index{ - { - UID: "TestGetIndexesWithLimit_1", - }, - }, - Offset: 0, - Limit: 1, - Total: 3, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.client - t.Cleanup(cleanup(c)) - - for _, uid := range tt.args.uid { - _, err := SetUpEmptyIndex(&IndexConfig{Uid: uid}) - require.NoError(t, err, "CreateIndex() in TestGetIndexes error 
should be nil") - } - gotResp, err := c.GetIndexes(tt.args.request) - require.NoError(t, err) - require.Equal(t, len(tt.wantResp.Results), len(gotResp.Results)) - for i := range gotResp.Results { - require.Equal(t, tt.wantResp.Results[i].UID, gotResp.Results[i].UID) - } - require.Equal(t, tt.wantResp.Limit, gotResp.Limit) - require.Equal(t, tt.wantResp.Offset, gotResp.Offset) - require.Equal(t, tt.wantResp.Total, gotResp.Total) - }) - } -} - -func TestClient_GetRawIndexes(t *testing.T) { - type args struct { - uid []string - request *IndexesQuery - } - tests := []struct { - name string - client *Client - args args - wantResp map[string]interface{} - }{ - { - name: "TestGetRawIndexesOnNoIndexes", - client: defaultClient, - args: args{ - uid: []string{}, - request: nil, - }, - wantResp: map[string]interface{}{ - "results": []map[string]string{}, - "offset": float64(0), - "limit": float64(20), - "total": float64(0), - }, - }, - { - name: "TestBasicGetRawIndexes", - client: defaultClient, - args: args{ - uid: []string{"TestBasicGetRawIndexes"}, - request: nil, - }, - wantResp: map[string]interface{}{ - "results": []map[string]string{ - { - "uid": "TestBasicGetRawIndexes", - }, - }, - "offset": float64(0), - "limit": float64(20), - "total": float64(1), - }, - }, - { - name: "TestGetRawIndexesWithCustomClient", - client: customClient, - args: args{ - uid: []string{"TestGetRawIndexesWithCustomClient"}, - request: nil, - }, - wantResp: map[string]interface{}{ - "results": []map[string]string{ - { - "uid": "TestGetRawIndexesWithCustomClient", - }, - }, - "offset": float64(0), - "limit": float64(20), - "total": float64(1), - }, - }, - { - name: "TestGetRawIndexesOnMultipleIndex", - client: defaultClient, - args: args{ - uid: []string{ - "TestGetRawIndexesOnMultipleIndex_1", - "TestGetRawIndexesOnMultipleIndex_2", - "TestGetRawIndexesOnMultipleIndex_3", - }, - request: nil, - }, - wantResp: map[string]interface{}{ - "results": []map[string]string{ - { - "uid": 
"TestGetRawIndexesOnMultipleIndex_1", - }, - { - "uid": "TestGetRawIndexesOnMultipleIndex_2", - }, - { - "uid": "TestGetRawIndexesOnMultipleIndex_3", - }, - }, - "offset": float64(0), - "limit": float64(20), - "total": float64(3), - }, - }, - { - name: "TestGetRawIndexesOnMultipleIndexWithPrimaryKey", - client: defaultClient, - args: args{ - uid: []string{ - "TestGetRawIndexesOnMultipleIndexWithPrimaryKey_1", - "TestGetRawIndexesOnMultipleIndexWithPrimaryKey_2", - "TestGetRawIndexesOnMultipleIndexWithPrimaryKey_3", - }, - request: nil, - }, - wantResp: map[string]interface{}{ - "results": []map[string]string{ - { - "uid": "TestGetRawIndexesOnMultipleIndex_1", - "primaryKey": "PrimaryKey1", - }, - { - "uid": "TestGetRawIndexesOnMultipleIndex_2", - "primaryKey": "PrimaryKey2", - }, - { - "uid": "TestGetRawIndexesOnMultipleIndex_3", - "primaryKey": "PrimaryKey3", - }, - }, - "offset": float64(0), - "limit": float64(20), - "total": float64(3), - }, - }, - { - name: "TestGetRawIndexesWithLimit", - client: defaultClient, - args: args{ - uid: []string{ - "TestGetRawIndexesWithLimit_1", - "TestGetRawIndexesWithLimit_2", - "TestGetRawIndexesWithLimit_3", - }, - request: &IndexesQuery{ - Limit: 1, - }, - }, - wantResp: map[string]interface{}{ - "results": []map[string]interface{}{ - { - "uid": "TestGetIndexesWithLimit_1", - }, - }, - "lenResults": 1, - "offset": float64(0), - "limit": float64(1), - "total": float64(3), - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.client - t.Cleanup(cleanup(c)) - - for _, uid := range tt.args.uid { - _, err := SetUpEmptyIndex(&IndexConfig{Uid: uid}) - require.NoError(t, err, "CreateIndex() in TestGetRawIndexes error should be nil") - } - gotResp, err := c.GetRawIndexes(tt.args.request) - - require.NoError(t, err) - require.Equal(t, tt.wantResp["limit"], gotResp["limit"]) - require.Equal(t, tt.wantResp["offset"], gotResp["offset"]) - require.Equal(t, tt.wantResp["total"], gotResp["total"]) - }) - } 
-} - -func TestClient_GetIndex(t *testing.T) { - type args struct { - config IndexConfig - uid string - } - tests := []struct { - name string - client *Client - args args - wantResp *Index - wantCmp bool - }{ - { - name: "TestBasicGetIndex", - client: defaultClient, - args: args{ - config: IndexConfig{ - Uid: "TestBasicGetIndex", - }, - uid: "TestBasicGetIndex", - }, - wantResp: &Index{ - UID: "TestBasicGetIndex", - }, - wantCmp: false, - }, - { - name: "TestGetIndexWithCustomClient", - client: customClient, - args: args{ - config: IndexConfig{ - Uid: "TestGetIndexWithCustomClient", - }, - uid: "TestGetIndexWithCustomClient", - }, - wantResp: &Index{ - UID: "TestGetIndexWithCustomClient", - }, - wantCmp: false, - }, - { - name: "TestGetIndexWithPrimaryKey", - client: defaultClient, - args: args{ - config: IndexConfig{ - Uid: "TestGetIndexWithPrimaryKey", - PrimaryKey: "PrimaryKey", - }, - uid: "TestGetIndexWithPrimaryKey", - }, - wantResp: &Index{ - UID: "TestGetIndexWithPrimaryKey", - PrimaryKey: "PrimaryKey", - }, - wantCmp: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.client - t.Cleanup(cleanup(c)) - - gotCreatedResp, err := SetUpEmptyIndex(&tt.args.config) - if tt.args.config.Uid != "" { - require.NoError(t, err) - } else { - require.Error(t, err) - } - - gotResp, err := c.GetIndex(tt.args.uid) - if err != nil { - t.Errorf("GetIndex() error = %v", err) - return - } else { - require.NoError(t, err) - require.Equal(t, gotCreatedResp.UID, gotResp.UID) - require.Equal(t, tt.wantResp.UID, gotResp.UID) - require.Equal(t, tt.args.config.Uid, gotResp.UID) - require.Equal(t, tt.wantResp.PrimaryKey, gotResp.PrimaryKey) - // Make sure that timestamps are also retrieved - require.NotZero(t, gotResp.CreatedAt) - require.NotZero(t, gotResp.UpdatedAt) - } - }) - } -} - -func TestClient_GetRawIndex(t *testing.T) { - type args struct { - config IndexConfig - uid string - } - tests := []struct { - name string - client *Client - 
args args - wantResp map[string]interface{} - }{ - { - name: "TestBasicGetRawIndex", - client: defaultClient, - args: args{ - config: IndexConfig{ - Uid: "TestBasicGetRawIndex", - }, - uid: "TestBasicGetRawIndex", - }, - wantResp: map[string]interface{}{ - "uid": string("TestBasicGetRawIndex"), - }, - }, - { - name: "TestGetRawIndexWithCustomClient", - client: customClient, - args: args{ - config: IndexConfig{ - Uid: "TestGetRawIndexWithCustomClient", - }, - uid: "TestGetRawIndexWithCustomClient", - }, - wantResp: map[string]interface{}{ - "uid": string("TestGetRawIndexWithCustomClient"), - }, - }, - { - name: "TestGetRawIndexWithPrimaryKey", - client: defaultClient, - args: args{ - config: IndexConfig{ - Uid: "TestGetRawIndexWithPrimaryKey", - PrimaryKey: "PrimaryKey", - }, - uid: "TestGetRawIndexWithPrimaryKey", - }, - wantResp: map[string]interface{}{ - "uid": string("TestGetRawIndexWithPrimaryKey"), - "primaryKey": "PrimaryKey", - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.client - t.Cleanup(cleanup(c)) - - _, err := SetUpEmptyIndex(&tt.args.config) - require.NoError(t, err) - - gotResp, err := c.GetRawIndex(tt.args.uid) - if err != nil { - t.Errorf("GetRawIndex() error = %v", err) - return - } - require.NoError(t, err) - require.Equal(t, tt.wantResp["uid"], gotResp["uid"]) - require.Equal(t, tt.wantResp["primaryKey"], gotResp["primaryKey"]) - }) - } -} - -func TestClient_Index(t *testing.T) { - type args struct { - uid string - } - tests := []struct { - name string - client *Client - args args - want Index - }{ - { - name: "TestBasicIndex", - client: defaultClient, - args: args{ - uid: "TestBasicIndex", - }, - want: Index{ - UID: "TestBasicIndex", - }, - }, - { - name: "TestIndexWithCustomClient", - client: customClient, - args: args{ - uid: "TestIndexWithCustomClient", - }, - want: Index{ - UID: "TestIndexWithCustomClient", - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got := 
tt.client.Index(tt.args.uid) - require.NotNil(t, got) - require.Equal(t, tt.want.UID, got.UID) - // Timestamps should be empty unless fetched - require.Zero(t, got.CreatedAt) - require.Zero(t, got.UpdatedAt) - }) - } -} diff --git a/error.go b/error.go index 4f2e7e06..c70cc042 100644 --- a/error.go +++ b/error.go @@ -150,7 +150,7 @@ func (e *Error) ErrorBody(body []byte) { // VersionErrorHintMessage a hint to the error message if it may come from a version incompatibility with meilisearch func VersionErrorHintMessage(err error, req *internalRequest) error { return fmt.Errorf("%w. Hint: It might not be working because you're not up to date with the "+ - "meilisearch version that %s call requires", err, req.functionName) + "Meilisearch version that %s call requires", err, req.functionName) } func namedSprintf(format string, params map[string]interface{}) string { diff --git a/error_test.go b/error_test.go index bc510c28..fb186cc0 100644 --- a/error_test.go +++ b/error_test.go @@ -43,7 +43,7 @@ func TestError_VersionErrorHintMessage(t *testing.T) { err := VersionErrorHintMessage(tt.args.mockedError, tt.args.request) require.Error(t, err) fmt.Println(err) - require.Equal(t, tt.args.mockedError.Error()+". Hint: It might not be working because you're not up to date with the Meilisearch version that "+tt.args.request.functionName+" call requires.", err.Error()) + require.Equal(t, tt.args.mockedError.Error()+". 
Hint: It might not be working because you're not up to date with the Meilisearch version that "+tt.args.request.functionName+" call requires", err.Error()) }) } } diff --git a/go.mod b/go.mod index cc11f548..048c77cb 100644 --- a/go.mod +++ b/go.mod @@ -4,8 +4,6 @@ go 1.16 require ( github.com/golang-jwt/jwt/v4 v4.5.0 - github.com/klauspost/compress v1.15.6 // indirect github.com/mailru/easyjson v0.7.7 - github.com/stretchr/testify v1.8.2 - github.com/valyala/fasthttp v1.37.1-0.20220607072126-8a320890c08d + github.com/stretchr/testify v1.9.0 ) diff --git a/go.sum b/go.sum index f3005fe6..a5f193f9 100644 --- a/go.sum +++ b/go.sum @@ -1,5 +1,3 @@ -github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY= -github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -7,9 +5,6 @@ github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOW github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= -github.com/klauspost/compress v1.15.0/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= -github.com/klauspost/compress v1.15.6 h1:6D9PcO8QWu0JyaQ2zUMmu16T1T+zjjEpP91guRsvDfY= -github.com/klauspost/compress v1.15.6/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= @@ -17,28 
+12,12 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= -github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= -github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/valyala/fasthttp v1.37.1-0.20220607072126-8a320890c08d h1:xS9QTPgKl9ewGsAOPc+xW7DeStJDqYPfisDmeSCcbco= -github.com/valyala/fasthttp v1.37.1-0.20220607072126-8a320890c08d/go.mod h1:t/G+3rLek+CyY9bnIE+YlMRddxVAAGjhxndDB4i4C0I= -github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc= -golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys 
v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/index.go b/index.go index 6f653ca9..62b1b7f4 100644 --- a/index.go +++ b/index.go @@ -1,132 +1,537 @@ package meilisearch import ( + "context" "encoding/json" + "io" "net/http" "strconv" "strings" + "time" ) -// IndexConfig configure the Index -type IndexConfig struct { - - // Uid is the unique identifier of a given index. 
- Uid string - - // PrimaryKey is optional - PrimaryKey string - - client *Client //nolint:golint,unused,structcheck -} - -type IndexInterface interface { - FetchInfo() (resp *Index, err error) - FetchPrimaryKey() (resp *string, err error) - UpdateIndex(primaryKey string) (resp *TaskInfo, err error) - Delete(uid string) (ok bool, err error) - GetStats() (resp *StatsIndex, err error) - - AddDocuments(documentsPtr interface{}, primaryKey ...string) (resp *TaskInfo, err error) - AddDocumentsInBatches(documentsPtr interface{}, batchSize int, primaryKey ...string) (resp []TaskInfo, err error) - AddDocumentsCsv(documents []byte, options *CsvDocumentsQuery) (resp *TaskInfo, err error) - AddDocumentsCsvInBatches(documents []byte, batchSize int, options *CsvDocumentsQuery) (resp []TaskInfo, err error) - AddDocumentsNdjson(documents []byte, primaryKey ...string) (resp *TaskInfo, err error) - AddDocumentsNdjsonInBatches(documents []byte, batchSize int, primaryKey ...string) (resp []TaskInfo, err error) - UpdateDocuments(documentsPtr interface{}, primaryKey ...string) (resp *TaskInfo, err error) - UpdateDocumentsInBatches(documentsPtr interface{}, batchSize int, primaryKey ...string) (resp []TaskInfo, err error) - UpdateDocumentsCsv(documents []byte, options *CsvDocumentsQuery) (resp *TaskInfo, err error) - UpdateDocumentsCsvInBatches(documents []byte, batchsize int, options *CsvDocumentsQuery) (resp []TaskInfo, err error) - UpdateDocumentsNdjson(documents []byte, primaryKey ...string) (resp *TaskInfo, err error) - UpdateDocumentsNdjsonInBatches(documents []byte, batchsize int, primaryKey ...string) (resp []TaskInfo, err error) - GetDocument(uid string, request *DocumentQuery, documentPtr interface{}) error +// index is the type that represent an index in meilisearch +type index struct { + uid string + primaryKey string + client *client +} + +type IndexManager interface { + // FetchInfo retrieves information about the index. 
+ FetchInfo() (*IndexResult, error) + + // FetchInfoWithContext retrieves information about the index using the provided context for cancellation. + FetchInfoWithContext(ctx context.Context) (*IndexResult, error) + + // FetchPrimaryKey retrieves the primary key of the index. + FetchPrimaryKey() (*string, error) + + // FetchPrimaryKeyWithContext retrieves the primary key of the index using the provided context for cancellation. + FetchPrimaryKeyWithContext(ctx context.Context) (*string, error) + + // UpdateIndex updates the primary key of the index. + UpdateIndex(primaryKey string) (*TaskInfo, error) + + // UpdateIndexWithContext updates the primary key of the index using the provided context for cancellation. + UpdateIndexWithContext(ctx context.Context, primaryKey string) (*TaskInfo, error) + + // Delete removes the index identified by the given UID. + Delete(uid string) (bool, error) + + // DeleteWithContext removes the index identified by the given UID using the provided context for cancellation. + DeleteWithContext(ctx context.Context, uid string) (bool, error) + + // GetStats retrieves statistical information about the index. + GetStats() (*StatsIndex, error) + + // GetStatsWithContext retrieves statistical information about the index using the provided context for cancellation. + GetStatsWithContext(ctx context.Context) (*StatsIndex, error) + + // AddDocuments adds multiple documents to the index. + AddDocuments(documentsPtr interface{}, primaryKey ...string) (*TaskInfo, error) + + // AddDocumentsWithContext adds multiple documents to the index using the provided context for cancellation. + AddDocumentsWithContext(ctx context.Context, documentsPtr interface{}, primaryKey ...string) (*TaskInfo, error) + + // AddDocumentsInBatches adds documents to the index in batches of specified size. 
+ AddDocumentsInBatches(documentsPtr interface{}, batchSize int, primaryKey ...string) ([]TaskInfo, error) + + // AddDocumentsInBatchesWithContext adds documents to the index in batches of specified size using the provided context for cancellation. + AddDocumentsInBatchesWithContext(ctx context.Context, documentsPtr interface{}, batchSize int, primaryKey ...string) ([]TaskInfo, error) + + // AddDocumentsCsv adds documents from a CSV byte array to the index. + AddDocumentsCsv(documents []byte, options *CsvDocumentsQuery) (*TaskInfo, error) + + // AddDocumentsCsvWithContext adds documents from a CSV byte array to the index using the provided context for cancellation. + AddDocumentsCsvWithContext(ctx context.Context, documents []byte, options *CsvDocumentsQuery) (*TaskInfo, error) + + // AddDocumentsCsvInBatches adds documents from a CSV byte array to the index in batches of specified size. + AddDocumentsCsvInBatches(documents []byte, batchSize int, options *CsvDocumentsQuery) ([]TaskInfo, error) + + // AddDocumentsCsvInBatchesWithContext adds documents from a CSV byte array to the index in batches of specified size using the provided context for cancellation. + AddDocumentsCsvInBatchesWithContext(ctx context.Context, documents []byte, batchSize int, options *CsvDocumentsQuery) ([]TaskInfo, error) + + // AddDocumentsCsvFromReaderInBatches adds documents from a CSV reader to the index in batches of specified size. + AddDocumentsCsvFromReaderInBatches(documents io.Reader, batchSize int, options *CsvDocumentsQuery) ([]TaskInfo, error) + + // AddDocumentsCsvFromReaderInBatchesWithContext adds documents from a CSV reader to the index in batches of specified size using the provided context for cancellation. + AddDocumentsCsvFromReaderInBatchesWithContext(ctx context.Context, documents io.Reader, batchSize int, options *CsvDocumentsQuery) ([]TaskInfo, error) + + // AddDocumentsCsvFromReader adds documents from a CSV reader to the index. 
+ AddDocumentsCsvFromReader(documents io.Reader, options *CsvDocumentsQuery) (*TaskInfo, error) + + // AddDocumentsCsvFromReaderWithContext adds documents from a CSV reader to the index using the provided context for cancellation. + AddDocumentsCsvFromReaderWithContext(ctx context.Context, documents io.Reader, options *CsvDocumentsQuery) (*TaskInfo, error) + + // AddDocumentsNdjson adds documents from a NDJSON byte array to the index. + AddDocumentsNdjson(documents []byte, primaryKey ...string) (*TaskInfo, error) + + // AddDocumentsNdjsonWithContext adds documents from a NDJSON byte array to the index using the provided context for cancellation. + AddDocumentsNdjsonWithContext(ctx context.Context, documents []byte, primaryKey ...string) (*TaskInfo, error) + + // AddDocumentsNdjsonInBatches adds documents from a NDJSON byte array to the index in batches of specified size. + AddDocumentsNdjsonInBatches(documents []byte, batchSize int, primaryKey ...string) ([]TaskInfo, error) + + // AddDocumentsNdjsonInBatchesWithContext adds documents from a NDJSON byte array to the index in batches of specified size using the provided context for cancellation. + AddDocumentsNdjsonInBatchesWithContext(ctx context.Context, documents []byte, batchSize int, primaryKey ...string) ([]TaskInfo, error) + + // AddDocumentsNdjsonFromReader adds documents from a NDJSON reader to the index. + AddDocumentsNdjsonFromReader(documents io.Reader, primaryKey ...string) (*TaskInfo, error) + + // AddDocumentsNdjsonFromReaderWithContext adds documents from a NDJSON reader to the index using the provided context for cancellation. + AddDocumentsNdjsonFromReaderWithContext(ctx context.Context, documents io.Reader, primaryKey ...string) (*TaskInfo, error) + + // AddDocumentsNdjsonFromReaderInBatches adds documents from a NDJSON reader to the index in batches of specified size. 
+ AddDocumentsNdjsonFromReaderInBatches(documents io.Reader, batchSize int, primaryKey ...string) ([]TaskInfo, error) + + // AddDocumentsNdjsonFromReaderInBatchesWithContext adds documents from a NDJSON reader to the index in batches of specified size using the provided context for cancellation. + AddDocumentsNdjsonFromReaderInBatchesWithContext(ctx context.Context, documents io.Reader, batchSize int, primaryKey ...string) ([]TaskInfo, error) + + // UpdateDocuments updates multiple documents in the index. + UpdateDocuments(documentsPtr interface{}, primaryKey ...string) (*TaskInfo, error) + + // UpdateDocumentsWithContext updates multiple documents in the index using the provided context for cancellation. + UpdateDocumentsWithContext(ctx context.Context, documentsPtr interface{}, primaryKey ...string) (*TaskInfo, error) + + // UpdateDocumentsInBatches updates documents in the index in batches of specified size. + UpdateDocumentsInBatches(documentsPtr interface{}, batchSize int, primaryKey ...string) ([]TaskInfo, error) + + // UpdateDocumentsInBatchesWithContext updates documents in the index in batches of specified size using the provided context for cancellation. + UpdateDocumentsInBatchesWithContext(ctx context.Context, documentsPtr interface{}, batchSize int, primaryKey ...string) ([]TaskInfo, error) + + // UpdateDocumentsCsv updates documents in the index from a CSV byte array. + UpdateDocumentsCsv(documents []byte, options *CsvDocumentsQuery) (*TaskInfo, error) + + // UpdateDocumentsCsvWithContext updates documents in the index from a CSV byte array using the provided context for cancellation. + UpdateDocumentsCsvWithContext(ctx context.Context, documents []byte, options *CsvDocumentsQuery) (*TaskInfo, error) + + // UpdateDocumentsCsvInBatches updates documents in the index from a CSV byte array in batches of specified size. 
+ UpdateDocumentsCsvInBatches(documents []byte, batchsize int, options *CsvDocumentsQuery) ([]TaskInfo, error) + + // UpdateDocumentsCsvInBatchesWithContext updates documents in the index from a CSV byte array in batches of specified size using the provided context for cancellation. + UpdateDocumentsCsvInBatchesWithContext(ctx context.Context, documents []byte, batchsize int, options *CsvDocumentsQuery) ([]TaskInfo, error) + + // UpdateDocumentsNdjson updates documents in the index from a NDJSON byte array. + UpdateDocumentsNdjson(documents []byte, primaryKey ...string) (*TaskInfo, error) + + // UpdateDocumentsNdjsonWithContext updates documents in the index from a NDJSON byte array using the provided context for cancellation. + UpdateDocumentsNdjsonWithContext(ctx context.Context, documents []byte, primaryKey ...string) (*TaskInfo, error) + + // UpdateDocumentsNdjsonInBatches updates documents in the index from a NDJSON byte array in batches of specified size. + UpdateDocumentsNdjsonInBatches(documents []byte, batchsize int, primaryKey ...string) ([]TaskInfo, error) + + // UpdateDocumentsNdjsonInBatchesWithContext updates documents in the index from a NDJSON byte array in batches of specified size using the provided context for cancellation. + UpdateDocumentsNdjsonInBatchesWithContext(ctx context.Context, documents []byte, batchsize int, primaryKey ...string) ([]TaskInfo, error) + + // GetDocument retrieves a single document from the index by identifier. + GetDocument(identifier string, request *DocumentQuery, documentPtr interface{}) error + + // GetDocumentWithContext retrieves a single document from the index by identifier using the provided context for cancellation. + GetDocumentWithContext(ctx context.Context, identifier string, request *DocumentQuery, documentPtr interface{}) error + + // GetDocuments retrieves multiple documents from the index. 
GetDocuments(param *DocumentsQuery, resp *DocumentsResult) error - DeleteDocument(uid string) (resp *TaskInfo, err error) - DeleteDocuments(uid []string) (resp *TaskInfo, err error) - DeleteDocumentsByFilter(filter interface{}) (resp *TaskInfo, err error) - DeleteAllDocuments() (resp *TaskInfo, err error) + + // GetDocumentsWithContext retrieves multiple documents from the index using the provided context for cancellation. + GetDocumentsWithContext(ctx context.Context, param *DocumentsQuery, resp *DocumentsResult) error + + // DeleteDocument deletes a single document from the index by identifier. + DeleteDocument(identifier string) (*TaskInfo, error) + + // DeleteDocumentWithContext deletes a single document from the index by identifier using the provided context for cancellation. + DeleteDocumentWithContext(ctx context.Context, identifier string) (*TaskInfo, error) + + // DeleteDocuments deletes multiple documents from the index by identifiers. + DeleteDocuments(identifiers []string) (*TaskInfo, error) + + // DeleteDocumentsWithContext deletes multiple documents from the index by identifiers using the provided context for cancellation. + DeleteDocumentsWithContext(ctx context.Context, identifiers []string) (*TaskInfo, error) + + // DeleteDocumentsByFilter deletes documents from the index by filter. + DeleteDocumentsByFilter(filter interface{}) (*TaskInfo, error) + + // DeleteDocumentsByFilterWithContext deletes documents from the index by filter using the provided context for cancellation. + DeleteDocumentsByFilterWithContext(ctx context.Context, filter interface{}) (*TaskInfo, error) + + // DeleteAllDocuments deletes all documents from the index. + DeleteAllDocuments() (*TaskInfo, error) + + // DeleteAllDocumentsWithContext deletes all documents from the index using the provided context for cancellation. + DeleteAllDocumentsWithContext(ctx context.Context) (*TaskInfo, error) + + // Search performs a search query on the index. 
Search(query string, request *SearchRequest) (*SearchResponse, error) + + // SearchWithContext performs a search query on the index using the provided context for cancellation. + SearchWithContext(ctx context.Context, query string, request *SearchRequest) (*SearchResponse, error) + + // SearchRaw performs a raw search query on the index, returning a JSON response. SearchRaw(query string, request *SearchRequest) (*json.RawMessage, error) + + // SearchRawWithContext performs a raw search query on the index using the provided context for cancellation, returning a JSON response. + SearchRawWithContext(ctx context.Context, query string, request *SearchRequest) (*json.RawMessage, error) + + // FacetSearch performs a facet search query on the index. + FacetSearch(request *FacetSearchRequest) (*json.RawMessage, error) + + // FacetSearchWithContext performs a facet search query on the index using the provided context for cancellation. + FacetSearchWithContext(ctx context.Context, request *FacetSearchRequest) (*json.RawMessage, error) + + // SearchSimilarDocuments performs a search for similar documents. 
SearchSimilarDocuments(param *SimilarDocumentQuery, resp *SimilarDocumentResult) error - GetTask(taskUID int64) (resp *Task, err error) - GetTasks(param *TasksQuery) (resp *TaskResult, err error) - - GetSettings() (resp *Settings, err error) - UpdateSettings(request *Settings) (resp *TaskInfo, err error) - ResetSettings() (resp *TaskInfo, err error) - GetRankingRules() (resp *[]string, err error) - UpdateRankingRules(request *[]string) (resp *TaskInfo, err error) - ResetRankingRules() (resp *TaskInfo, err error) - GetDistinctAttribute() (resp *string, err error) - UpdateDistinctAttribute(request string) (resp *TaskInfo, err error) - ResetDistinctAttribute() (resp *TaskInfo, err error) - GetSearchableAttributes() (resp *[]string, err error) - UpdateSearchableAttributes(request *[]string) (resp *TaskInfo, err error) - ResetSearchableAttributes() (resp *TaskInfo, err error) - GetDisplayedAttributes() (resp *[]string, err error) - UpdateDisplayedAttributes(request *[]string) (resp *TaskInfo, err error) - ResetDisplayedAttributes() (resp *TaskInfo, err error) - GetStopWords() (resp *[]string, err error) - UpdateStopWords(request *[]string) (resp *TaskInfo, err error) - ResetStopWords() (resp *TaskInfo, err error) - GetSynonyms() (resp *map[string][]string, err error) - UpdateSynonyms(request *map[string][]string) (resp *TaskInfo, err error) - ResetSynonyms() (resp *TaskInfo, err error) - GetFilterableAttributes() (resp *[]string, err error) - UpdateFilterableAttributes(request *[]string) (resp *TaskInfo, err error) - ResetFilterableAttributes() (resp *TaskInfo, err error) - GetSearchCutoffMs() (resp int64, err error) - UpdateSearchCutoffMs(request int64) (resp *TaskInfo, err error) - ResetSearchCutoffMs() (resp *TaskInfo, err error) - - WaitForTask(taskUID int64, options ...WaitParams) (*Task, error) -} - -var _ IndexInterface = &Index{} - -func newIndex(client *Client, uid string) *Index { - return &Index{ - UID: uid, - client: client, + // 
SearchSimilarDocumentsWithContext performs a search for similar documents using the provided context for cancellation. + SearchSimilarDocumentsWithContext(ctx context.Context, param *SimilarDocumentQuery, resp *SimilarDocumentResult) error + + // GetTask retrieves a task by its UID. + GetTask(taskUID int64) (*Task, error) + + // GetTaskWithContext retrieves a task by its UID using the provided context for cancellation. + GetTaskWithContext(ctx context.Context, taskUID int64) (*Task, error) + + // GetTasks retrieves multiple tasks based on query parameters. + GetTasks(param *TasksQuery) (*TaskResult, error) + + // GetTasksWithContext retrieves multiple tasks based on query parameters using the provided context for cancellation. + GetTasksWithContext(ctx context.Context, param *TasksQuery) (*TaskResult, error) + + // GetSettings retrieves the settings of the index. + GetSettings() (*Settings, error) + + // GetSettingsWithContext retrieves the settings of the index using the provided context for cancellation. + GetSettingsWithContext(ctx context.Context) (*Settings, error) + + // UpdateSettings updates the settings of the index. + UpdateSettings(request *Settings) (*TaskInfo, error) + + // UpdateSettingsWithContext updates the settings of the index using the provided context for cancellation. + UpdateSettingsWithContext(ctx context.Context, request *Settings) (*TaskInfo, error) + + // ResetSettings resets the settings of the index to default values. + ResetSettings() (*TaskInfo, error) + + // ResetSettingsWithContext resets the settings of the index to default values using the provided context for cancellation. + ResetSettingsWithContext(ctx context.Context) (*TaskInfo, error) + + // GetRankingRules retrieves the ranking rules of the index. + GetRankingRules() (*[]string, error) + + // GetRankingRulesWithContext retrieves the ranking rules of the index using the provided context for cancellation. 
+ GetRankingRulesWithContext(ctx context.Context) (*[]string, error) + + // UpdateRankingRules updates the ranking rules of the index. + UpdateRankingRules(request *[]string) (*TaskInfo, error) + + // UpdateRankingRulesWithContext updates the ranking rules of the index using the provided context for cancellation. + UpdateRankingRulesWithContext(ctx context.Context, request *[]string) (*TaskInfo, error) + + // ResetRankingRules resets the ranking rules of the index to default values. + ResetRankingRules() (*TaskInfo, error) + + // ResetRankingRulesWithContext resets the ranking rules of the index to default values using the provided context for cancellation. + ResetRankingRulesWithContext(ctx context.Context) (*TaskInfo, error) + + // GetDistinctAttribute retrieves the distinct attribute of the index. + GetDistinctAttribute() (*string, error) + + // GetDistinctAttributeWithContext retrieves the distinct attribute of the index using the provided context for cancellation. + GetDistinctAttributeWithContext(ctx context.Context) (*string, error) + + // UpdateDistinctAttribute updates the distinct attribute of the index. + UpdateDistinctAttribute(request string) (*TaskInfo, error) + + // UpdateDistinctAttributeWithContext updates the distinct attribute of the index using the provided context for cancellation. + UpdateDistinctAttributeWithContext(ctx context.Context, request string) (*TaskInfo, error) + + // ResetDistinctAttribute resets the distinct attribute of the index to default value. + ResetDistinctAttribute() (*TaskInfo, error) + + // ResetDistinctAttributeWithContext resets the distinct attribute of the index to default value using the provided context for cancellation. + ResetDistinctAttributeWithContext(ctx context.Context) (*TaskInfo, error) + + // GetSearchableAttributes retrieves the searchable attributes of the index. 
+ GetSearchableAttributes() (*[]string, error) + + // GetSearchableAttributesWithContext retrieves the searchable attributes of the index using the provided context for cancellation. + GetSearchableAttributesWithContext(ctx context.Context) (*[]string, error) + + // UpdateSearchableAttributes updates the searchable attributes of the index. + UpdateSearchableAttributes(request *[]string) (*TaskInfo, error) + + // UpdateSearchableAttributesWithContext updates the searchable attributes of the index using the provided context for cancellation. + UpdateSearchableAttributesWithContext(ctx context.Context, request *[]string) (*TaskInfo, error) + + // ResetSearchableAttributes resets the searchable attributes of the index to default values. + ResetSearchableAttributes() (*TaskInfo, error) + + // ResetSearchableAttributesWithContext resets the searchable attributes of the index to default values using the provided context for cancellation. + ResetSearchableAttributesWithContext(ctx context.Context) (*TaskInfo, error) + + // GetDisplayedAttributes retrieves the displayed attributes of the index. + GetDisplayedAttributes() (*[]string, error) + + // GetDisplayedAttributesWithContext retrieves the displayed attributes of the index using the provided context for cancellation. + GetDisplayedAttributesWithContext(ctx context.Context) (*[]string, error) + + // UpdateDisplayedAttributes updates the displayed attributes of the index. + UpdateDisplayedAttributes(request *[]string) (*TaskInfo, error) + + // UpdateDisplayedAttributesWithContext updates the displayed attributes of the index using the provided context for cancellation. + UpdateDisplayedAttributesWithContext(ctx context.Context, request *[]string) (*TaskInfo, error) + + // ResetDisplayedAttributes resets the displayed attributes of the index to default values. 
+ ResetDisplayedAttributes() (*TaskInfo, error) + + // ResetDisplayedAttributesWithContext resets the displayed attributes of the index to default values using the provided context for cancellation. + ResetDisplayedAttributesWithContext(ctx context.Context) (*TaskInfo, error) + + // GetStopWords retrieves the stop words of the index. + GetStopWords() (*[]string, error) + + // GetStopWordsWithContext retrieves the stop words of the index using the provided context for cancellation. + GetStopWordsWithContext(ctx context.Context) (*[]string, error) + + // UpdateStopWords updates the stop words of the index. + UpdateStopWords(request *[]string) (*TaskInfo, error) + + // UpdateStopWordsWithContext updates the stop words of the index using the provided context for cancellation. + UpdateStopWordsWithContext(ctx context.Context, request *[]string) (*TaskInfo, error) + + // ResetStopWords resets the stop words of the index to default values. + ResetStopWords() (*TaskInfo, error) + + // ResetStopWordsWithContext resets the stop words of the index to default values using the provided context for cancellation. + ResetStopWordsWithContext(ctx context.Context) (*TaskInfo, error) + + // GetSynonyms retrieves the synonyms of the index. + GetSynonyms() (*map[string][]string, error) + + // GetSynonymsWithContext retrieves the synonyms of the index using the provided context for cancellation. + GetSynonymsWithContext(ctx context.Context) (*map[string][]string, error) + + // UpdateSynonyms updates the synonyms of the index. + UpdateSynonyms(request *map[string][]string) (*TaskInfo, error) + + // UpdateSynonymsWithContext updates the synonyms of the index using the provided context for cancellation. + UpdateSynonymsWithContext(ctx context.Context, request *map[string][]string) (*TaskInfo, error) + + // ResetSynonyms resets the synonyms of the index to default values. 
+ ResetSynonyms() (*TaskInfo, error) + + // ResetSynonymsWithContext resets the synonyms of the index to default values using the provided context for cancellation. + ResetSynonymsWithContext(ctx context.Context) (*TaskInfo, error) + + // GetFilterableAttributes retrieves the filterable attributes of the index. + GetFilterableAttributes() (*[]string, error) + + // GetFilterableAttributesWithContext retrieves the filterable attributes of the index using the provided context for cancellation. + GetFilterableAttributesWithContext(ctx context.Context) (*[]string, error) + + // UpdateFilterableAttributes updates the filterable attributes of the index. + UpdateFilterableAttributes(request *[]string) (*TaskInfo, error) + + // UpdateFilterableAttributesWithContext updates the filterable attributes of the index using the provided context for cancellation. + UpdateFilterableAttributesWithContext(ctx context.Context, request *[]string) (*TaskInfo, error) + + // ResetFilterableAttributes resets the filterable attributes of the index to default values. + ResetFilterableAttributes() (*TaskInfo, error) + + // ResetFilterableAttributesWithContext resets the filterable attributes of the index to default values using the provided context for cancellation. + ResetFilterableAttributesWithContext(ctx context.Context) (*TaskInfo, error) + + // GetSortableAttributes retrieves the sortable attributes of the index. + GetSortableAttributes() (*[]string, error) + + // GetSortableAttributesWithContext retrieves the sortable attributes of the index using the provided context for cancellation. + GetSortableAttributesWithContext(ctx context.Context) (*[]string, error) + + // UpdateSortableAttributes updates the sortable attributes of the index. + UpdateSortableAttributes(request *[]string) (*TaskInfo, error) + + // UpdateSortableAttributesWithContext updates the sortable attributes of the index using the provided context for cancellation. 
+ UpdateSortableAttributesWithContext(ctx context.Context, request *[]string) (*TaskInfo, error) + + // ResetSortableAttributes resets the sortable attributes of the index to default values. + ResetSortableAttributes() (*TaskInfo, error) + + // ResetSortableAttributesWithContext resets the sortable attributes of the index to default values using the provided context for cancellation. + ResetSortableAttributesWithContext(ctx context.Context) (*TaskInfo, error) + + // GetTypoTolerance retrieves the typo tolerance settings of the index. + GetTypoTolerance() (*TypoTolerance, error) + + // GetTypoToleranceWithContext retrieves the typo tolerance settings of the index using the provided context for cancellation. + GetTypoToleranceWithContext(ctx context.Context) (*TypoTolerance, error) + + // UpdateTypoTolerance updates the typo tolerance settings of the index. + UpdateTypoTolerance(request *TypoTolerance) (*TaskInfo, error) + + // UpdateTypoToleranceWithContext updates the typo tolerance settings of the index using the provided context for cancellation. + UpdateTypoToleranceWithContext(ctx context.Context, request *TypoTolerance) (*TaskInfo, error) + + // ResetTypoTolerance resets the typo tolerance settings of the index to default values. + ResetTypoTolerance() (*TaskInfo, error) + + // ResetTypoToleranceWithContext resets the typo tolerance settings of the index to default values using the provided context for cancellation. + ResetTypoToleranceWithContext(ctx context.Context) (*TaskInfo, error) + + // GetPagination retrieves the pagination settings of the index. + GetPagination() (*Pagination, error) + + // GetPaginationWithContext retrieves the pagination settings of the index using the provided context for cancellation. + GetPaginationWithContext(ctx context.Context) (*Pagination, error) + + // UpdatePagination updates the pagination settings of the index. 
+ UpdatePagination(request *Pagination) (*TaskInfo, error) + + // UpdatePaginationWithContext updates the pagination settings of the index using the provided context for cancellation. + UpdatePaginationWithContext(ctx context.Context, request *Pagination) (*TaskInfo, error) + + // ResetPagination resets the pagination settings of the index to default values. + ResetPagination() (*TaskInfo, error) + + // ResetPaginationWithContext resets the pagination settings of the index to default values using the provided context for cancellation. + ResetPaginationWithContext(ctx context.Context) (*TaskInfo, error) + + // GetFaceting retrieves the faceting settings of the index. + GetFaceting() (*Faceting, error) + + // GetFacetingWithContext retrieves the faceting settings of the index using the provided context for cancellation. + GetFacetingWithContext(ctx context.Context) (*Faceting, error) + + // UpdateFaceting updates the faceting settings of the index. + UpdateFaceting(request *Faceting) (*TaskInfo, error) + + // UpdateFacetingWithContext updates the faceting settings of the index using the provided context for cancellation. + UpdateFacetingWithContext(ctx context.Context, request *Faceting) (*TaskInfo, error) + + // ResetFaceting resets the faceting settings of the index to default values. + ResetFaceting() (*TaskInfo, error) + + // ResetFacetingWithContext resets the faceting settings of the index to default values using the provided context for cancellation. + ResetFacetingWithContext(ctx context.Context) (*TaskInfo, error) + + // GetEmbedders retrieves the embedders of the index. + GetEmbedders() (map[string]Embedder, error) + + // GetEmbeddersWithContext retrieves the embedders of the index using the provided context for cancellation. + GetEmbeddersWithContext(ctx context.Context) (map[string]Embedder, error) + + // UpdateEmbedders updates the embedders of the index. 
+ UpdateEmbedders(request map[string]Embedder) (*TaskInfo, error) + + // UpdateEmbeddersWithContext updates the embedders of the index using the provided context for cancellation. + UpdateEmbeddersWithContext(ctx context.Context, request map[string]Embedder) (*TaskInfo, error) + + // ResetEmbedders resets the embedders of the index to default values. + ResetEmbedders() (*TaskInfo, error) + + // ResetEmbeddersWithContext resets the embedders of the index to default values using the provided context for cancellation. + ResetEmbeddersWithContext(ctx context.Context) (*TaskInfo, error) + + // GetSearchCutoffMs retrieves the search cutoff time in milliseconds. + GetSearchCutoffMs() (int64, error) + + // GetSearchCutoffMsWithContext retrieves the search cutoff time in milliseconds using the provided context for cancellation. + GetSearchCutoffMsWithContext(ctx context.Context) (int64, error) + + // UpdateSearchCutoffMs updates the search cutoff time in milliseconds. + UpdateSearchCutoffMs(request int64) (*TaskInfo, error) + + // UpdateSearchCutoffMsWithContext updates the search cutoff time in milliseconds using the provided context for cancellation. + UpdateSearchCutoffMsWithContext(ctx context.Context, request int64) (*TaskInfo, error) + + // ResetSearchCutoffMs resets the search cutoff time in milliseconds to default value. + ResetSearchCutoffMs() (*TaskInfo, error) + + // ResetSearchCutoffMsWithContext resets the search cutoff time in milliseconds to default value using the provided context for cancellation. + ResetSearchCutoffMsWithContext(ctx context.Context) (*TaskInfo, error) + + // WaitForTask waits for a task to complete by its UID with the given interval. + WaitForTask(taskUID int64, interval time.Duration) (*Task, error) + + // WaitForTaskWithContext waits for a task to complete by its UID with the given interval using the provided context for cancellation. 
+ WaitForTaskWithContext(ctx context.Context, taskUID int64, interval time.Duration) (*Task, error) +} + +func newIndex(cli *client, uid string) IndexManager { + return &index{ + client: cli, + uid: uid, } } -func (i *Index) FetchInfo() (resp *Index, err error) { - resp = newIndex(i.client, i.UID) - req := internalRequest{ - endpoint: "/indexes/" + i.UID, +func (i *index) FetchInfo() (*IndexResult, error) { + return i.FetchInfoWithContext(context.Background()) +} + +func (i *index) FetchInfoWithContext(ctx context.Context) (*IndexResult, error) { + resp := new(IndexResult) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid, method: http.MethodGet, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: "FetchInfo", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } - i.PrimaryKey = resp.PrimaryKey - i.CreatedAt = resp.CreatedAt - i.UpdatedAt = resp.UpdatedAt + if resp.PrimaryKey != "" { + i.primaryKey = resp.PrimaryKey + } + resp.IndexManager = i return resp, nil } -func (i Index) FetchPrimaryKey() (resp *string, err error) { - index, err := i.FetchInfo() +func (i *index) FetchPrimaryKey() (*string, error) { + return i.FetchPrimaryKeyWithContext(context.Background()) +} + +func (i *index) FetchPrimaryKeyWithContext(ctx context.Context) (*string, error) { + idx, err := i.FetchInfoWithContext(ctx) if err != nil { return nil, err } - return &index.PrimaryKey, nil + i.primaryKey = idx.PrimaryKey + return &idx.PrimaryKey, nil } -func (i *Index) UpdateIndex(primaryKey string) (resp *TaskInfo, err error) { +func (i *index) UpdateIndex(primaryKey string) (*TaskInfo, error) { + return i.UpdateIndexWithContext(context.Background(), primaryKey) +} + +func (i *index) UpdateIndexWithContext(ctx context.Context, primaryKey string) (*TaskInfo, error) { request := &UpdateIndexRequest{ PrimaryKey: primaryKey, } - i.PrimaryKey = primaryKey - resp = 
&TaskInfo{} + i.primaryKey = primaryKey + resp := new(TaskInfo) - req := internalRequest{ - endpoint: "/indexes/" + i.UID, + req := &internalRequest{ + endpoint: "/indexes/" + i.uid, method: http.MethodPatch, contentType: contentTypeJSON, withRequest: request, @@ -134,15 +539,20 @@ func (i *Index) UpdateIndex(primaryKey string) (resp *TaskInfo, err error) { acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "UpdateIndex", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } + i.primaryKey = primaryKey return resp, nil } -func (i Index) Delete(uid string) (ok bool, err error) { - resp := &TaskInfo{} - req := internalRequest{ +func (i *index) Delete(uid string) (bool, error) { + return i.DeleteWithContext(context.Background(), uid) +} + +func (i *index) DeleteWithContext(ctx context.Context, uid string) (bool, error) { + resp := new(TaskInfo) + req := &internalRequest{ endpoint: "/indexes/" + uid, method: http.MethodDelete, withRequest: nil, @@ -151,39 +561,52 @@ func (i Index) Delete(uid string) (ok bool, err error) { functionName: "Delete", } // err is not nil if status code is not 204 StatusNoContent - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return false, err } + i.primaryKey = "" return true, nil } -func (i Index) GetStats() (resp *StatsIndex, err error) { - resp = &StatsIndex{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/stats", +func (i *index) GetStats() (*StatsIndex, error) { + return i.GetStatsWithContext(context.Background()) +} + +func (i *index) GetStatsWithContext(ctx context.Context) (*StatsIndex, error) { + resp := new(StatsIndex) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/stats", method: http.MethodGet, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: "GetStats", } - if err := 
i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) GetTask(taskUID int64) (resp *Task, err error) { - return i.client.GetTask(taskUID) +func (i *index) GetTask(taskUID int64) (*Task, error) { + return i.GetTaskWithContext(context.Background(), taskUID) +} + +func (i *index) GetTaskWithContext(ctx context.Context, taskUID int64) (*Task, error) { + return getTask(ctx, i.client, taskUID) } -func (i Index) GetTasks(param *TasksQuery) (resp *TaskResult, err error) { - resp = &TaskResult{} - req := internalRequest{ +func (i *index) GetTasks(param *TasksQuery) (*TaskResult, error) { + return i.GetTasksWithContext(context.Background(), param) +} + +func (i *index) GetTasksWithContext(ctx context.Context, param *TasksQuery) (*TaskResult, error) { + resp := new(TaskResult) + req := &internalRequest{ endpoint: "/tasks", method: http.MethodGet, withRequest: nil, - withResponse: &resp, + withResponse: resp, withQueryParams: map[string]string{}, acceptedStatusCodes: []int{http.StatusOK}, functionName: "GetTasks", @@ -211,23 +634,22 @@ func (i Index) GetTasks(param *TasksQuery) (resp *TaskResult, err error) { req.withQueryParams["types"] = strings.Join(types, ",") } if len(param.IndexUIDS) != 0 { - param.IndexUIDS = append(param.IndexUIDS, i.UID) + param.IndexUIDS = append(param.IndexUIDS, i.uid) req.withQueryParams["indexUids"] = strings.Join(param.IndexUIDS, ",") } else { - req.withQueryParams["indexUids"] = i.UID + req.withQueryParams["indexUids"] = i.uid } } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -// WaitForTask waits for a task to be processed. -// The function will check by regular interval provided in parameter interval -// the TaskStatus. -// If no ctx and interval are provided WaitForTask will check each 50ms the -// status of a task. 
-func (i Index) WaitForTask(taskUID int64, options ...WaitParams) (*Task, error) { - return i.client.WaitForTask(taskUID, options...) +func (i *index) WaitForTask(taskUID int64, interval time.Duration) (*Task, error) { + return waitForTask(context.Background(), i.client, taskUID, interval) +} + +func (i *index) WaitForTaskWithContext(ctx context.Context, taskUID int64, interval time.Duration) (*Task, error) { + return waitForTask(ctx, i.client, taskUID, interval) } diff --git a/index_document.go b/index_document.go new file mode 100644 index 00000000..cccf66f1 --- /dev/null +++ b/index_document.go @@ -0,0 +1,594 @@ +package meilisearch + +import ( + "bufio" + "bytes" + "context" + "encoding/csv" + "fmt" + "io" + "math" + "net/http" + "reflect" + "strconv" + "strings" +) + +func (i *index) AddDocuments(documentsPtr interface{}, primaryKey ...string) (*TaskInfo, error) { + return i.AddDocumentsWithContext(context.Background(), documentsPtr, primaryKey...) +} + +func (i *index) AddDocumentsWithContext(ctx context.Context, documentsPtr interface{}, primaryKey ...string) (*TaskInfo, error) { + return i.addDocuments(ctx, documentsPtr, contentTypeJSON, transformStringVariadicToMap(primaryKey...)) +} + +func (i *index) AddDocumentsInBatches(documentsPtr interface{}, batchSize int, primaryKey ...string) ([]TaskInfo, error) { + return i.AddDocumentsInBatchesWithContext(context.Background(), documentsPtr, batchSize, primaryKey...) +} + +func (i *index) AddDocumentsInBatchesWithContext(ctx context.Context, documentsPtr interface{}, batchSize int, primaryKey ...string) ([]TaskInfo, error) { + return i.saveDocumentsInBatches(ctx, documentsPtr, batchSize, i.AddDocumentsWithContext, primaryKey...) 
+} + +func (i *index) AddDocumentsCsv(documents []byte, options *CsvDocumentsQuery) (*TaskInfo, error) { + return i.AddDocumentsCsvWithContext(context.Background(), documents, options) +} + +func (i *index) AddDocumentsCsvWithContext(ctx context.Context, documents []byte, options *CsvDocumentsQuery) (*TaskInfo, error) { + // []byte avoids JSON conversion in Client.sendRequest() + return i.addDocuments(ctx, documents, contentTypeCSV, transformCsvDocumentsQueryToMap(options)) +} + +func (i *index) AddDocumentsCsvInBatches(documents []byte, batchSize int, options *CsvDocumentsQuery) ([]TaskInfo, error) { + return i.AddDocumentsCsvInBatchesWithContext(context.Background(), documents, batchSize, options) +} + +func (i *index) AddDocumentsCsvInBatchesWithContext(ctx context.Context, documents []byte, batchSize int, options *CsvDocumentsQuery) ([]TaskInfo, error) { + // Reuse io.Reader implementation + return i.AddDocumentsCsvFromReaderInBatchesWithContext(ctx, bytes.NewReader(documents), batchSize, options) +} + +func (i *index) AddDocumentsCsvFromReaderInBatches(documents io.Reader, batchSize int, options *CsvDocumentsQuery) (resp []TaskInfo, err error) { + return i.AddDocumentsCsvFromReaderInBatchesWithContext(context.Background(), documents, batchSize, options) +} + +func (i *index) AddDocumentsCsvFromReaderInBatchesWithContext(ctx context.Context, documents io.Reader, batchSize int, options *CsvDocumentsQuery) (resp []TaskInfo, err error) { + return i.saveDocumentsFromReaderInBatches(ctx, documents, batchSize, i.AddDocumentsCsvWithContext, options) +} + +func (i *index) AddDocumentsCsvFromReader(documents io.Reader, options *CsvDocumentsQuery) (resp *TaskInfo, err error) { + return i.AddDocumentsCsvFromReaderWithContext(context.Background(), documents, options) +} + +func (i *index) AddDocumentsCsvFromReaderWithContext(ctx context.Context, documents io.Reader, options *CsvDocumentsQuery) (resp *TaskInfo, err error) { + // Using io.Reader would avoid JSON conversion 
in Client.sendRequest(), but + // read content to memory anyway because of problems with streamed bodies + data, err := io.ReadAll(documents) + if err != nil { + return nil, fmt.Errorf("could not read documents: %w", err) + } + return i.addDocuments(ctx, data, contentTypeCSV, transformCsvDocumentsQueryToMap(options)) +} + +func (i *index) AddDocumentsNdjson(documents []byte, primaryKey ...string) (*TaskInfo, error) { + return i.AddDocumentsNdjsonWithContext(context.Background(), documents, primaryKey...) +} + +func (i *index) AddDocumentsNdjsonWithContext(ctx context.Context, documents []byte, primaryKey ...string) (*TaskInfo, error) { + // []byte avoids JSON conversion in Client.sendRequest() + return i.addDocuments(ctx, documents, contentTypeNDJSON, transformStringVariadicToMap(primaryKey...)) +} + +func (i *index) AddDocumentsNdjsonInBatches(documents []byte, batchSize int, primaryKey ...string) ([]TaskInfo, error) { + return i.AddDocumentsNdjsonInBatchesWithContext(context.Background(), documents, batchSize, primaryKey...) +} + +func (i *index) AddDocumentsNdjsonInBatchesWithContext(ctx context.Context, documents []byte, batchSize int, primaryKey ...string) ([]TaskInfo, error) { + // Reuse io.Reader implementation + return i.AddDocumentsNdjsonFromReaderInBatchesWithContext(ctx, bytes.NewReader(documents), batchSize, primaryKey...) +} + +func (i *index) AddDocumentsNdjsonFromReaderInBatches(documents io.Reader, batchSize int, primaryKey ...string) (resp []TaskInfo, err error) { + return i.AddDocumentsNdjsonFromReaderInBatchesWithContext(context.Background(), documents, batchSize, primaryKey...) +} + +func (i *index) AddDocumentsNdjsonFromReaderInBatchesWithContext(ctx context.Context, documents io.Reader, batchSize int, primaryKey ...string) (resp []TaskInfo, err error) { + // NDJSON files supposed to contain a valid JSON document in each line, so + // it's safe to split by lines. 
+ // Lines are read and sent continuously to avoid reading all content into + // memory. However, this means that only part of the documents might be + // added successfully. + + sendNdjsonLines := func(lines []string) (*TaskInfo, error) { + b := new(bytes.Buffer) + for _, line := range lines { + _, err := b.WriteString(line) + if err != nil { + return nil, fmt.Errorf("could not write NDJSON line: %w", err) + } + err = b.WriteByte('\n') + if err != nil { + return nil, fmt.Errorf("could not write NDJSON line: %w", err) + } + } + + resp, err := i.AddDocumentsNdjsonWithContext(ctx, b.Bytes(), primaryKey...) + if err != nil { + return nil, err + } + return resp, nil + } + + var ( + responses []TaskInfo + lines []string + ) + + scanner := bufio.NewScanner(documents) + for scanner.Scan() { + line := strings.TrimSpace(scanner.Text()) + + // Skip empty lines (NDJSON might not allow this, but just to be sure) + if line == "" { + continue + } + + lines = append(lines, line) + // After reaching batchSize send NDJSON lines + if len(lines) == batchSize { + resp, err := sendNdjsonLines(lines) + if err != nil { + return nil, err + } + responses = append(responses, *resp) + lines = nil + } + } + if err := scanner.Err(); err != nil { + return nil, fmt.Errorf("could not read NDJSON: %w", err) + } + + // Send remaining records as the last batch if there is any + if len(lines) > 0 { + resp, err := sendNdjsonLines(lines) + if err != nil { + return nil, err + } + responses = append(responses, *resp) + } + + return responses, nil +} + +func (i *index) AddDocumentsNdjsonFromReader(documents io.Reader, primaryKey ...string) (resp *TaskInfo, err error) { + return i.AddDocumentsNdjsonFromReaderWithContext(context.Background(), documents, primaryKey...) 
+} + +func (i *index) AddDocumentsNdjsonFromReaderWithContext(ctx context.Context, documents io.Reader, primaryKey ...string) (resp *TaskInfo, err error) { + // Using io.Reader would avoid JSON conversion in Client.sendRequest(), but + // read content to memory anyway because of problems with streamed bodies + data, err := io.ReadAll(documents) + if err != nil { + return nil, fmt.Errorf("could not read documents: %w", err) + } + return i.addDocuments(ctx, data, contentTypeNDJSON, transformStringVariadicToMap(primaryKey...)) +} + +func (i *index) UpdateDocuments(documentsPtr interface{}, primaryKey ...string) (*TaskInfo, error) { + return i.UpdateDocumentsWithContext(context.Background(), documentsPtr, primaryKey...) +} + +func (i *index) UpdateDocumentsWithContext(ctx context.Context, documentsPtr interface{}, primaryKey ...string) (*TaskInfo, error) { + return i.updateDocuments(ctx, documentsPtr, contentTypeJSON, transformStringVariadicToMap(primaryKey...)) +} + +func (i *index) UpdateDocumentsInBatches(documentsPtr interface{}, batchSize int, primaryKey ...string) ([]TaskInfo, error) { + return i.UpdateDocumentsInBatchesWithContext(context.Background(), documentsPtr, batchSize, primaryKey...) +} + +func (i *index) UpdateDocumentsInBatchesWithContext(ctx context.Context, documentsPtr interface{}, batchSize int, primaryKey ...string) ([]TaskInfo, error) { + return i.saveDocumentsInBatches(ctx, documentsPtr, batchSize, i.UpdateDocumentsWithContext, primaryKey...) 
+} + +func (i *index) UpdateDocumentsCsv(documents []byte, options *CsvDocumentsQuery) (*TaskInfo, error) { + return i.UpdateDocumentsCsvWithContext(context.Background(), documents, options) +} + +func (i *index) UpdateDocumentsCsvWithContext(ctx context.Context, documents []byte, options *CsvDocumentsQuery) (*TaskInfo, error) { + return i.updateDocuments(ctx, documents, contentTypeCSV, transformCsvDocumentsQueryToMap(options)) +} + +func (i *index) UpdateDocumentsCsvInBatches(documents []byte, batchSize int, options *CsvDocumentsQuery) ([]TaskInfo, error) { + return i.UpdateDocumentsCsvInBatchesWithContext(context.Background(), documents, batchSize, options) +} + +func (i *index) UpdateDocumentsCsvInBatchesWithContext(ctx context.Context, documents []byte, batchSize int, options *CsvDocumentsQuery) ([]TaskInfo, error) { + // Reuse io.Reader implementation + return i.updateDocumentsCsvFromReaderInBatches(ctx, bytes.NewReader(documents), batchSize, options) +} + +func (i *index) UpdateDocumentsNdjson(documents []byte, primaryKey ...string) (*TaskInfo, error) { + return i.UpdateDocumentsNdjsonWithContext(context.Background(), documents, primaryKey...) +} + +func (i *index) UpdateDocumentsNdjsonWithContext(ctx context.Context, documents []byte, primaryKey ...string) (*TaskInfo, error) { + return i.updateDocuments(ctx, documents, contentTypeNDJSON, transformStringVariadicToMap(primaryKey...)) +} + +func (i *index) UpdateDocumentsNdjsonInBatches(documents []byte, batchSize int, primaryKey ...string) ([]TaskInfo, error) { + return i.UpdateDocumentsNdjsonInBatchesWithContext(context.Background(), documents, batchSize, primaryKey...) +} + +func (i *index) UpdateDocumentsNdjsonInBatchesWithContext(ctx context.Context, documents []byte, batchSize int, primaryKey ...string) ([]TaskInfo, error) { + return i.updateDocumentsNdjsonFromReaderInBatches(ctx, bytes.NewReader(documents), batchSize, primaryKey...) 
+} + +func (i *index) GetDocument(identifier string, request *DocumentQuery, documentPtr interface{}) error { + return i.GetDocumentWithContext(context.Background(), identifier, request, documentPtr) +} + +func (i *index) GetDocumentWithContext(ctx context.Context, identifier string, request *DocumentQuery, documentPtr interface{}) error { + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/documents/" + identifier, + method: http.MethodGet, + withRequest: nil, + withResponse: documentPtr, + withQueryParams: map[string]string{}, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "GetDocument", + } + if request != nil { + if len(request.Fields) != 0 { + req.withQueryParams["fields"] = strings.Join(request.Fields, ",") + } + } + if err := i.client.executeRequest(ctx, req); err != nil { + return err + } + return nil +} + +func (i *index) GetDocuments(param *DocumentsQuery, resp *DocumentsResult) error { + return i.GetDocumentsWithContext(context.Background(), param, resp) +} + +func (i *index) GetDocumentsWithContext(ctx context.Context, param *DocumentsQuery, resp *DocumentsResult) error { + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/documents", + method: http.MethodGet, + contentType: contentTypeJSON, + withRequest: nil, + withResponse: resp, + withQueryParams: nil, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "GetDocuments", + } + if param != nil && param.Filter == nil { + req.withQueryParams = map[string]string{} + if param.Limit != 0 { + req.withQueryParams["limit"] = strconv.FormatInt(param.Limit, 10) + } + if param.Offset != 0 { + req.withQueryParams["offset"] = strconv.FormatInt(param.Offset, 10) + } + if len(param.Fields) != 0 { + req.withQueryParams["fields"] = strings.Join(param.Fields, ",") + } + } else if param != nil && param.Filter != nil { + req.withRequest = param + req.method = http.MethodPost + req.endpoint = req.endpoint + "/fetch" + } + if err := i.client.executeRequest(ctx, req); err != nil { 
+ return VersionErrorHintMessage(err, req) + } + return nil +} + +func (i *index) DeleteDocument(identifier string) (*TaskInfo, error) { + return i.DeleteDocumentWithContext(context.Background(), identifier) +} + +func (i *index) DeleteDocumentWithContext(ctx context.Context, identifier string) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/documents/" + identifier, + method: http.MethodDelete, + withRequest: nil, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusAccepted}, + functionName: "DeleteDocument", + } + if err := i.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (i *index) DeleteDocuments(identifiers []string) (*TaskInfo, error) { + return i.DeleteDocumentsWithContext(context.Background(), identifiers) +} + +func (i *index) DeleteDocumentsWithContext(ctx context.Context, identifiers []string) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/documents/delete-batch", + method: http.MethodPost, + contentType: contentTypeJSON, + withRequest: identifiers, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusAccepted}, + functionName: "DeleteDocuments", + } + if err := i.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (i *index) DeleteDocumentsByFilter(filter interface{}) (*TaskInfo, error) { + return i.DeleteDocumentsByFilterWithContext(context.Background(), filter) +} + +func (i *index) DeleteDocumentsByFilterWithContext(ctx context.Context, filter interface{}) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/documents/delete", + method: http.MethodPost, + contentType: contentTypeJSON, + withRequest: map[string]interface{}{ + "filter": filter, + }, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusAccepted}, + functionName: "DeleteDocumentsByFilter", + 
} + if err := i.client.executeRequest(ctx, req); err != nil { + return nil, VersionErrorHintMessage(err, req) + } + return resp, nil +} + +func (i *index) DeleteAllDocuments() (*TaskInfo, error) { + return i.DeleteAllDocumentsWithContext(context.Background()) +} + +func (i *index) DeleteAllDocumentsWithContext(ctx context.Context) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/documents", + method: http.MethodDelete, + withRequest: nil, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusAccepted}, + functionName: "DeleteAllDocuments", + } + if err := i.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (i *index) addDocuments(ctx context.Context, documentsPtr interface{}, contentType string, options map[string]string) (resp *TaskInfo, err error) { + resp = new(TaskInfo) + endpoint := "" + if options == nil { + endpoint = "/indexes/" + i.uid + "/documents" + } else { + for key, val := range options { + if key == "primaryKey" { + i.primaryKey = val + } + } + endpoint = "/indexes/" + i.uid + "/documents?" + generateQueryForOptions(options) + } + req := &internalRequest{ + endpoint: endpoint, + method: http.MethodPost, + contentType: contentType, + withRequest: documentsPtr, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusAccepted}, + functionName: "AddDocuments", + } + if err = i.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (i *index) saveDocumentsFromReaderInBatches(ctx context.Context, documents io.Reader, batchSize int, documentsCsvFunc func(ctx context.Context, recs []byte, op *CsvDocumentsQuery) (resp *TaskInfo, err error), options *CsvDocumentsQuery) (resp []TaskInfo, err error) { + // Because of the possibility of multiline fields it's not safe to split + // into batches by lines, we'll have to parse the file and reassemble it + // into smaller parts. 
RFC 4180 compliant input with a header row is + // expected. + // Records are read and sent continuously to avoid reading all content + // into memory. However, this means that only part of the documents might + // be added successfully. + + var ( + responses []TaskInfo + header []string + records [][]string + ) + + r := csv.NewReader(documents) + for { + // Read CSV record (empty lines and comments are already skipped by csv.Reader) + record, err := r.Read() + if err == io.EOF { + break + } + if err != nil { + return nil, fmt.Errorf("could not read CSV record: %w", err) + } + + // Store first record as header + if header == nil { + header = record + continue + } + + // Add header record to every batch + if len(records) == 0 { + records = append(records, header) + } + + records = append(records, record) + + // After reaching batchSize (not counting the header record) assemble a CSV file and send records + if len(records) == batchSize+1 { + resp, err := sendCsvRecords(ctx, documentsCsvFunc, records, options) + if err != nil { + return nil, err + } + responses = append(responses, *resp) + records = nil + } + } + + // Send remaining records as the last batch if there is any + if len(records) > 0 { + resp, err := sendCsvRecords(ctx, documentsCsvFunc, records, options) + if err != nil { + return nil, err + } + responses = append(responses, *resp) + } + + return responses, nil +} + +func (i *index) saveDocumentsInBatches(ctx context.Context, documentsPtr interface{}, batchSize int, documentFunc func(ctx context.Context, documentsPtr interface{}, primaryKey ...string) (resp *TaskInfo, err error), primaryKey ...string) (resp []TaskInfo, err error) { + arr := reflect.ValueOf(documentsPtr) + lenDocs := arr.Len() + numBatches := int(math.Ceil(float64(lenDocs) / float64(batchSize))) + resp = make([]TaskInfo, numBatches) + + for j := 0; j < numBatches; j++ { + end := (j + 1) * batchSize + if end > lenDocs { + end = lenDocs + } + + batch := arr.Slice(j*batchSize, 
end).Interface() + + if len(primaryKey) != 0 { + respID, err := documentFunc(ctx, batch, primaryKey[0]) + if err != nil { + return nil, err + } + + resp[j] = *respID + } else { + respID, err := documentFunc(ctx, batch) + if err != nil { + return nil, err + } + + resp[j] = *respID + } + } + + return resp, nil +} + +func (i *index) updateDocuments(ctx context.Context, documentsPtr interface{}, contentType string, options map[string]string) (resp *TaskInfo, err error) { + resp = &TaskInfo{} + endpoint := "" + if options == nil { + endpoint = "/indexes/" + i.uid + "/documents" + } else { + for key, val := range options { + if key == "primaryKey" { + i.primaryKey = val + } + } + endpoint = "/indexes/" + i.uid + "/documents?" + generateQueryForOptions(options) + } + req := &internalRequest{ + endpoint: endpoint, + method: http.MethodPut, + contentType: contentType, + withRequest: documentsPtr, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusAccepted}, + functionName: "UpdateDocuments", + } + if err = i.client.executeRequest(ctx, req); err != nil { + return nil, err + } + return resp, nil +} + +func (i *index) updateDocumentsCsvFromReaderInBatches(ctx context.Context, documents io.Reader, batchSize int, options *CsvDocumentsQuery) (resp []TaskInfo, err error) { + return i.saveDocumentsFromReaderInBatches(ctx, documents, batchSize, i.UpdateDocumentsCsvWithContext, options) +} + +func (i *index) updateDocumentsNdjsonFromReaderInBatches(ctx context.Context, documents io.Reader, batchSize int, primaryKey ...string) (resp []TaskInfo, err error) { + // NDJSON files supposed to contain a valid JSON document in each line, so + // it's safe to split by lines. + // Lines are read and sent continuously to avoid reading all content into + // memory. However, this means that only part of the documents might be + // added successfully. 
+
+	sendNdjsonLines := func(lines []string) (*TaskInfo, error) {
+		b := new(bytes.Buffer)
+		for _, line := range lines {
+			_, err := b.WriteString(line)
+			if err != nil {
+				return nil, fmt.Errorf("could not write NDJSON line: %w", err)
+			}
+			err = b.WriteByte('\n')
+			if err != nil {
+				return nil, fmt.Errorf("could not write NDJSON line: %w", err)
+			}
+		}
+
+		resp, err := i.UpdateDocumentsNdjsonWithContext(ctx, b.Bytes(), primaryKey...)
+		if err != nil {
+			return nil, err
+		}
+		return resp, nil
+	}
+
+	var (
+		responses []TaskInfo
+		lines     []string
+	)
+
+	scanner := bufio.NewScanner(documents)
+	for scanner.Scan() {
+		line := strings.TrimSpace(scanner.Text())
+
+		// Skip empty lines (NDJSON might not allow this, but just to be sure)
+		if line == "" {
+			continue
+		}
+
+		lines = append(lines, line)
+		// After reaching batchSize send NDJSON lines
+		if len(lines) == batchSize {
+			resp, err := sendNdjsonLines(lines)
+			if err != nil {
+				return nil, err
+			}
+			responses = append(responses, *resp)
+			lines = nil
+		}
+	}
+	if err := scanner.Err(); err != nil {
+		return nil, fmt.Errorf("could not read NDJSON: %w", err)
+	}
+
+	// Send remaining records as the last batch if there is any
+	if len(lines) > 0 {
+		resp, err := sendNdjsonLines(lines)
+		if err != nil {
+			return nil, err
+		}
+		responses = append(responses, *resp)
+	}
+
+	return responses, nil
+}
diff --git a/index_documents_test.go b/index_document_test.go
similarity index 52%
rename from index_documents_test.go
rename to index_document_test.go
index 58163fc9..da3dd1d8 100644
--- a/index_documents_test.go
+++ b/index_document_test.go
@@ -1,22 +1,21 @@
 package meilisearch
 
 import (
-	"bufio"
 	"bytes"
-	"encoding/csv"
-	"encoding/json"
-	"io"
-	"strconv"
-	"strings"
-	"testing"
-
+	"crypto/tls"
 	"github.com/stretchr/testify/require"
+	"testing"
 )
 
 func TestIndex_AddDocuments(t *testing.T) {
+	sv := setup(t, "")
+	customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{
+		InsecureSkipVerify: true,
+	}))
+
	type args 
struct { UID string - client *Client + client ServiceManager documentsPtr interface{} } type resp struct { @@ -33,7 +32,7 @@ func TestIndex_AddDocuments(t *testing.T) { name: "TestIndexBasicAddDocuments", args: args{ UID: "TestIndexBasicAddDocuments", - client: defaultClient, + client: sv, documentsPtr: []map[string]interface{}{ {"ID": "123", "Name": "Pride and Prejudice"}, }, @@ -58,7 +57,7 @@ func TestIndex_AddDocuments(t *testing.T) { name: "TestIndexAddDocumentsWithCustomClient", args: args{ UID: "TestIndexAddDocumentsWithCustomClient", - client: customClient, + client: customSv, documentsPtr: []map[string]interface{}{ {"ID": "123", "Name": "Pride and Prejudice"}, }, @@ -83,7 +82,7 @@ func TestIndex_AddDocuments(t *testing.T) { name: "TestIndexMultipleAddDocuments", args: args{ UID: "TestIndexMultipleAddDocuments", - client: defaultClient, + client: sv, documentsPtr: []map[string]interface{}{ {"ID": "1", "Name": "Alice In Wonderland"}, {"ID": "123", "Name": "Pride and Prejudice"}, @@ -112,7 +111,7 @@ func TestIndex_AddDocuments(t *testing.T) { name: "TestIndexBasicAddDocumentsWithIntID", args: args{ UID: "TestIndexBasicAddDocumentsWithIntID", - client: defaultClient, + client: sv, documentsPtr: []map[string]interface{}{ {"BookID": float64(123), "Title": "Pride and Prejudice"}, }, @@ -137,7 +136,7 @@ func TestIndex_AddDocuments(t *testing.T) { name: "TestIndexAddDocumentsWithIntIDWithCustomClient", args: args{ UID: "TestIndexAddDocumentsWithIntIDWithCustomClient", - client: customClient, + client: customSv, documentsPtr: []map[string]interface{}{ {"BookID": float64(123), "Title": "Pride and Prejudice"}, }, @@ -162,7 +161,7 @@ func TestIndex_AddDocuments(t *testing.T) { name: "TestIndexMultipleAddDocumentsWithIntID", args: args{ UID: "TestIndexMultipleAddDocumentsWithIntID", - client: defaultClient, + client: sv, documentsPtr: []map[string]interface{}{ {"BookID": float64(1), "Title": "Alice In Wonderland"}, {"BookID": float64(123), "Title": "Pride and 
Prejudice"}, @@ -195,6 +194,7 @@ func TestIndex_AddDocuments(t *testing.T) { t.Cleanup(cleanup(c)) gotResp, err := i.AddDocuments(tt.args.documentsPtr) + require.NoError(t, err) require.GreaterOrEqual(t, gotResp.TaskUID, tt.resp.wantResp.TaskUID) require.Equal(t, gotResp.Status, tt.resp.wantResp.Status) require.Equal(t, gotResp.Type, tt.resp.wantResp.Type) @@ -214,9 +214,14 @@ func TestIndex_AddDocuments(t *testing.T) { } func TestIndex_AddDocumentsWithPrimaryKey(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager documentsPtr interface{} primaryKey string } @@ -234,7 +239,7 @@ func TestIndex_AddDocumentsWithPrimaryKey(t *testing.T) { name: "TestIndexBasicAddDocumentsWithPrimaryKey", args: args{ UID: "TestIndexBasicAddDocumentsWithPrimaryKey", - client: defaultClient, + client: sv, documentsPtr: []map[string]interface{}{ {"key": "123", "Name": "Pride and Prejudice"}, }, @@ -260,7 +265,7 @@ func TestIndex_AddDocumentsWithPrimaryKey(t *testing.T) { name: "TestIndexAddDocumentsWithPrimaryKeyWithCustomClient", args: args{ UID: "TestIndexAddDocumentsWithPrimaryKeyWithCustomClient", - client: customClient, + client: customSv, documentsPtr: []map[string]interface{}{ {"key": "123", "Name": "Pride and Prejudice"}, }, @@ -286,7 +291,7 @@ func TestIndex_AddDocumentsWithPrimaryKey(t *testing.T) { name: "TestIndexMultipleAddDocumentsWithPrimaryKey", args: args{ UID: "TestIndexMultipleAddDocumentsWithPrimaryKey", - client: defaultClient, + client: sv, documentsPtr: []map[string]interface{}{ {"key": "1", "Name": "Alice In Wonderland"}, {"key": "123", "Name": "Pride and Prejudice"}, @@ -316,7 +321,7 @@ func TestIndex_AddDocumentsWithPrimaryKey(t *testing.T) { name: "TestIndexAddDocumentsWithPrimaryKeyWithIntID", args: args{ UID: "TestIndexAddDocumentsWithPrimaryKeyWithIntID", - client: defaultClient, + client: sv, documentsPtr: 
[]map[string]interface{}{ {"key": float64(123), "Name": "Pride and Prejudice"}, }, @@ -342,7 +347,7 @@ func TestIndex_AddDocumentsWithPrimaryKey(t *testing.T) { name: "TestIndexMultipleAddDocumentsWithPrimaryKeyWithIntID", args: args{ UID: "TestIndexMultipleAddDocumentsWithPrimaryKeyWithIntID", - client: defaultClient, + client: sv, documentsPtr: []map[string]interface{}{ {"key": float64(1), "Name": "Alice In Wonderland"}, {"key": float64(123), "Name": "Pride and Prejudice"}, @@ -376,6 +381,7 @@ func TestIndex_AddDocumentsWithPrimaryKey(t *testing.T) { t.Cleanup(cleanup(c)) gotResp, err := i.AddDocuments(tt.args.documentsPtr, tt.args.primaryKey) + require.NoError(t, err) require.GreaterOrEqual(t, gotResp.TaskUID, tt.resp.wantResp.TaskUID) require.Equal(t, tt.resp.wantResp.Status, gotResp.Status) require.Equal(t, tt.resp.wantResp.Type, gotResp.Type) @@ -394,16 +400,18 @@ func TestIndex_AddDocumentsWithPrimaryKey(t *testing.T) { } func TestIndex_AddDocumentsInBatches(t *testing.T) { + sv := setup(t, "") + type argsNoKey struct { UID string - client *Client + client ServiceManager documentsPtr interface{} batchSize int } type argsWithKey struct { UID string - client *Client + client ServiceManager documentsPtr interface{} batchSize int primaryKey string @@ -419,7 +427,7 @@ func TestIndex_AddDocumentsInBatches(t *testing.T) { name: "TestIndexBasicAddDocumentsInBatches", args: argsNoKey{ UID: "TestIndexBasicAddDocumentsInBatches", - client: defaultClient, + client: sv, documentsPtr: []map[string]interface{}{ {"ID": "122", "Name": "Pride and Prejudice"}, {"ID": "123", "Name": "Pride and Prejudica"}, @@ -453,7 +461,7 @@ func TestIndex_AddDocumentsInBatches(t *testing.T) { name: "TestIndexBasicAddDocumentsInBatchesWithKey", args: argsWithKey{ UID: "TestIndexBasicAddDocumentsInBatchesWithKey", - client: defaultClient, + client: sv, documentsPtr: []map[string]interface{}{ {"ID": "122", "Name": "Pride and Prejudice"}, {"ID": "123", "Name": "Pride and Prejudica"}, @@ -537,43 
+545,12 @@ func TestIndex_AddDocumentsInBatches(t *testing.T) { } } -func testParseCsvDocuments(t *testing.T, documents io.Reader) []map[string]interface{} { - var ( - docs []map[string]interface{} - header []string - ) - r := csv.NewReader(documents) - for { - record, err := r.Read() - if err == io.EOF { - break - } - require.NoError(t, err) - if header == nil { - header = record - continue - } - doc := make(map[string]interface{}) - for i, key := range header { - doc[key] = record[i] - } - docs = append(docs, doc) - } - return docs -} - -var testCsvDocuments = []byte(`id,name -1,Alice In Wonderland -2,Pride and Prejudice -3,Le Petit Prince -4,The Great Gatsby -5,Don Quixote -`) +func TestIndex_AddDocumentsNdjson(t *testing.T) { + sv := setup(t, "") -func TestIndex_AddDocumentsCsv(t *testing.T) { type args struct { UID string - client *Client + client ServiceManager documents []byte } type testData struct { @@ -586,9 +563,9 @@ func TestIndex_AddDocumentsCsv(t *testing.T) { { name: "TestIndexBasic", args: args{ - UID: "csv", - client: defaultClient, - documents: testCsvDocuments, + UID: "ndjson", + client: sv, + documents: testNdjsonDocuments, }, wantResp: &TaskInfo{ TaskUID: 0, @@ -598,8 +575,8 @@ func TestIndex_AddDocumentsCsv(t *testing.T) { }, } - testAddDocumentsCsv := func(t *testing.T, tt testData, testReader bool) { - name := tt.name + "AddDocumentsCsv" + testAddDocumentsNdjson := func(t *testing.T, tt testData, testReader bool) { + name := tt.name + "AddDocumentsNdjson" if testReader { name += "FromReader" } @@ -616,7 +593,7 @@ func TestIndex_AddDocumentsCsv(t *testing.T) { i := c.Index(uid) t.Cleanup(cleanup(c)) - wantDocs := testParseCsvDocuments(t, bytes.NewReader(tt.args.documents)) + wantDocs := testParseNdjsonDocuments(t, bytes.NewReader(tt.args.documents)) var ( gotResp *TaskInfo @@ -624,9 +601,9 @@ func TestIndex_AddDocumentsCsv(t *testing.T) { ) if testReader { - gotResp, err = i.AddDocumentsCsvFromReader(bytes.NewReader(tt.args.documents), nil) + 
gotResp, err = i.AddDocumentsNdjsonFromReader(bytes.NewReader(tt.args.documents)) } else { - gotResp, err = i.AddDocumentsCsv(tt.args.documents, nil) + gotResp, err = i.AddDocumentsNdjson(tt.args.documents) } require.NoError(t, err) @@ -646,81 +623,61 @@ func TestIndex_AddDocumentsCsv(t *testing.T) { for _, tt := range tests { // Test both the string and io.Reader receiving versions - testAddDocumentsCsv(t, tt, false) - testAddDocumentsCsv(t, tt, true) + testAddDocumentsNdjson(t, tt, false) + testAddDocumentsNdjson(t, tt, true) } } -func TestIndex_AddDocumentsCsvWithOptions(t *testing.T) { +func TestIndex_AddDocumentsCsvInBatches(t *testing.T) { + sv := setup(t, "") + type args struct { UID string - client *Client + client ServiceManager + batchSize int documents []byte - options *CsvDocumentsQuery } type testData struct { name string args args - wantResp *TaskInfo + wantResp []TaskInfo } tests := []testData{ { - name: "TestIndexBasicAddDocumentsCsvWithOptions", + name: "TestIndexBasic", args: args{ - UID: "csv", - client: defaultClient, + UID: "csvbatch", + client: sv, + batchSize: 2, documents: testCsvDocuments, - options: &CsvDocumentsQuery{ - PrimaryKey: "id", - CsvDelimiter: ",", - }, - }, - wantResp: &TaskInfo{ - TaskUID: 0, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, }, - }, - { - name: "TestIndexBasicAddDocumentsCsvWithPrimaryKey", - args: args{ - UID: "csv", - client: defaultClient, - documents: testCsvDocuments, - options: &CsvDocumentsQuery{ - PrimaryKey: "id", + wantResp: []TaskInfo{ + { + TaskUID: 0, + Status: "enqueued", + Type: TaskTypeDocumentAdditionOrUpdate, }, - }, - wantResp: &TaskInfo{ - TaskUID: 0, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - { - name: "TestIndexBasicAddDocumentsCsvWithCsvDelimiter", - args: args{ - UID: "csv", - client: defaultClient, - documents: testCsvDocuments, - options: &CsvDocumentsQuery{ - CsvDelimiter: ",", + { + TaskUID: 1, + Status: "enqueued", + Type: 
TaskTypeDocumentAdditionOrUpdate, + }, + { + TaskUID: 2, + Status: "enqueued", + Type: TaskTypeDocumentAdditionOrUpdate, }, - }, - wantResp: &TaskInfo{ - TaskUID: 0, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, }, }, } - testAddDocumentsCsv := func(t *testing.T, tt testData, testReader bool) { + testAddDocumentsCsvInBatches := func(t *testing.T, tt testData, testReader bool) { name := tt.name + "AddDocumentsCsv" if testReader { name += "FromReader" } + name += "InBatches" uid := tt.args.UID if testReader { @@ -737,23 +694,25 @@ func TestIndex_AddDocumentsCsvWithOptions(t *testing.T) { wantDocs := testParseCsvDocuments(t, bytes.NewReader(tt.args.documents)) var ( - gotResp *TaskInfo + gotResp []TaskInfo err error ) if testReader { - gotResp, err = i.AddDocumentsCsvFromReader(bytes.NewReader(tt.args.documents), tt.args.options) + gotResp, err = i.AddDocumentsCsvFromReaderInBatches(bytes.NewReader(tt.args.documents), tt.args.batchSize, nil) } else { - gotResp, err = i.AddDocumentsCsv(tt.args.documents, tt.args.options) + gotResp, err = i.AddDocumentsCsvInBatches(tt.args.documents, tt.args.batchSize, nil) } require.NoError(t, err) - require.GreaterOrEqual(t, gotResp.TaskUID, tt.wantResp.TaskUID) - require.Equal(t, tt.wantResp.Status, gotResp.Status) - require.Equal(t, tt.wantResp.Type, gotResp.Type) - require.NotZero(t, gotResp.EnqueuedAt) + for i := 0; i < 2; i++ { + require.GreaterOrEqual(t, gotResp[i].TaskUID, tt.wantResp[i].TaskUID) + require.Equal(t, gotResp[i].Status, tt.wantResp[i].Status) + require.Equal(t, gotResp[i].Type, tt.wantResp[i].Type) + require.NotZero(t, gotResp[i].EnqueuedAt) + } - testWaitForTask(t, i, gotResp) + testWaitForBatchTask(t, i, gotResp) var documents DocumentsResult err = i.GetDocuments(&DocumentsQuery{}, &documents) @@ -764,59 +723,46 @@ func TestIndex_AddDocumentsCsvWithOptions(t *testing.T) { for _, tt := range tests { // Test both the string and io.Reader receiving versions - testAddDocumentsCsv(t, tt, false) - 
testAddDocumentsCsv(t, tt, true) + testAddDocumentsCsvInBatches(t, tt, false) + testAddDocumentsCsvInBatches(t, tt, true) } } -func TestIndex_AddDocumentsCsvInBatches(t *testing.T) { +func TestIndex_AddDocumentsCsv(t *testing.T) { + sv := setup(t, "") + type args struct { UID string - client *Client - batchSize int + client ServiceManager documents []byte } type testData struct { name string args args - wantResp []TaskInfo + wantResp *TaskInfo } tests := []testData{ { name: "TestIndexBasic", args: args{ - UID: "csvbatch", - client: defaultClient, - batchSize: 2, + UID: "csv", + client: sv, documents: testCsvDocuments, }, - wantResp: []TaskInfo{ - { - TaskUID: 0, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - { - TaskUID: 1, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - { - TaskUID: 2, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, + wantResp: &TaskInfo{ + TaskUID: 0, + Status: "enqueued", + Type: TaskTypeDocumentAdditionOrUpdate, }, }, } - testAddDocumentsCsvInBatches := func(t *testing.T, tt testData, testReader bool) { + testAddDocumentsCsv := func(t *testing.T, tt testData, testReader bool) { name := tt.name + "AddDocumentsCsv" if testReader { name += "FromReader" } - name += "InBatches" uid := tt.args.UID if testReader { @@ -833,25 +779,23 @@ func TestIndex_AddDocumentsCsvInBatches(t *testing.T) { wantDocs := testParseCsvDocuments(t, bytes.NewReader(tt.args.documents)) var ( - gotResp []TaskInfo + gotResp *TaskInfo err error ) if testReader { - gotResp, err = i.AddDocumentsCsvFromReaderInBatches(bytes.NewReader(tt.args.documents), tt.args.batchSize, nil) + gotResp, err = i.AddDocumentsCsvFromReader(bytes.NewReader(tt.args.documents), nil) } else { - gotResp, err = i.AddDocumentsCsvInBatches(tt.args.documents, tt.args.batchSize, nil) + gotResp, err = i.AddDocumentsCsv(tt.args.documents, nil) } require.NoError(t, err) - for i := 0; i < 2; i++ { - require.GreaterOrEqual(t, gotResp[i].TaskUID, 
tt.wantResp[i].TaskUID) - require.Equal(t, gotResp[i].Status, tt.wantResp[i].Status) - require.Equal(t, gotResp[i].Type, tt.wantResp[i].Type) - require.NotZero(t, gotResp[i].EnqueuedAt) - } + require.GreaterOrEqual(t, gotResp.TaskUID, tt.wantResp.TaskUID) + require.Equal(t, tt.wantResp.Status, gotResp.Status) + require.Equal(t, tt.wantResp.Type, gotResp.Type) + require.NotZero(t, gotResp.EnqueuedAt) - testWaitForBatchTask(t, i, gotResp) + testWaitForTask(t, i, gotResp) var documents DocumentsResult err = i.GetDocuments(&DocumentsQuery{}, &documents) @@ -862,40 +806,19 @@ func TestIndex_AddDocumentsCsvInBatches(t *testing.T) { for _, tt := range tests { // Test both the string and io.Reader receiving versions - testAddDocumentsCsvInBatches(t, tt, false) - testAddDocumentsCsvInBatches(t, tt, true) - } -} - -func testParseNdjsonDocuments(t *testing.T, documents io.Reader) []map[string]interface{} { - var docs []map[string]interface{} - scanner := bufio.NewScanner(documents) - for scanner.Scan() { - line := strings.TrimSpace(scanner.Text()) - if line == "" { - continue - } - doc := make(map[string]interface{}) - err := json.Unmarshal([]byte(line), &doc) - require.NoError(t, err) - docs = append(docs, doc) + testAddDocumentsCsv(t, tt, false) + testAddDocumentsCsv(t, tt, true) } - require.NoError(t, scanner.Err()) - return docs } -var testNdjsonDocuments = []byte(`{"id": 1, "name": "Alice In Wonderland"} -{"id": 2, "name": "Pride and Prejudice"} -{"id": 3, "name": "Le Petit Prince"} -{"id": 4, "name": "The Great Gatsby"} -{"id": 5, "name": "Don Quixote"} -`) +func TestIndex_AddDocumentsCsvWithOptions(t *testing.T) { + sv := setup(t, "") -func TestIndex_AddDocumentsNdjson(t *testing.T) { type args struct { UID string - client *Client + client ServiceManager documents []byte + options *CsvDocumentsQuery } type testData struct { name string @@ -905,11 +828,47 @@ func TestIndex_AddDocumentsNdjson(t *testing.T) { tests := []testData{ { - name: "TestIndexBasic", + name: 
"TestIndexBasicAddDocumentsCsvWithOptions", args: args{ - UID: "ndjson", - client: defaultClient, - documents: testNdjsonDocuments, + UID: "csv", + client: sv, + documents: testCsvDocuments, + options: &CsvDocumentsQuery{ + PrimaryKey: "id", + CsvDelimiter: ",", + }, + }, + wantResp: &TaskInfo{ + TaskUID: 0, + Status: "enqueued", + Type: TaskTypeDocumentAdditionOrUpdate, + }, + }, + { + name: "TestIndexBasicAddDocumentsCsvWithPrimaryKey", + args: args{ + UID: "csv", + client: sv, + documents: testCsvDocuments, + options: &CsvDocumentsQuery{ + PrimaryKey: "id", + }, + }, + wantResp: &TaskInfo{ + TaskUID: 0, + Status: "enqueued", + Type: TaskTypeDocumentAdditionOrUpdate, + }, + }, + { + name: "TestIndexBasicAddDocumentsCsvWithCsvDelimiter", + args: args{ + UID: "csv", + client: sv, + documents: testCsvDocuments, + options: &CsvDocumentsQuery{ + CsvDelimiter: ",", + }, }, wantResp: &TaskInfo{ TaskUID: 0, @@ -919,8 +878,8 @@ func TestIndex_AddDocumentsNdjson(t *testing.T) { }, } - testAddDocumentsNdjson := func(t *testing.T, tt testData, testReader bool) { - name := tt.name + "AddDocumentsNdjson" + testAddDocumentsCsv := func(t *testing.T, tt testData, testReader bool) { + name := tt.name + "AddDocumentsCsv" if testReader { name += "FromReader" } @@ -937,7 +896,7 @@ func TestIndex_AddDocumentsNdjson(t *testing.T) { i := c.Index(uid) t.Cleanup(cleanup(c)) - wantDocs := testParseNdjsonDocuments(t, bytes.NewReader(tt.args.documents)) + wantDocs := testParseCsvDocuments(t, bytes.NewReader(tt.args.documents)) var ( gotResp *TaskInfo @@ -945,9 +904,9 @@ func TestIndex_AddDocumentsNdjson(t *testing.T) { ) if testReader { - gotResp, err = i.AddDocumentsNdjsonFromReader(bytes.NewReader(tt.args.documents)) + gotResp, err = i.AddDocumentsCsvFromReader(bytes.NewReader(tt.args.documents), tt.args.options) } else { - gotResp, err = i.AddDocumentsNdjson(tt.args.documents) + gotResp, err = i.AddDocumentsCsv(tt.args.documents, tt.args.options) } require.NoError(t, err) @@ -967,15 
+926,17 @@ func TestIndex_AddDocumentsNdjson(t *testing.T) { for _, tt := range tests { // Test both the string and io.Reader receiving versions - testAddDocumentsNdjson(t, tt, false) - testAddDocumentsNdjson(t, tt, true) + testAddDocumentsCsv(t, tt, false) + testAddDocumentsCsv(t, tt, true) } } func TestIndex_AddDocumentsNdjsonInBatches(t *testing.T) { + sv := setup(t, "") + type args struct { UID string - client *Client + client ServiceManager batchSize int documents []byte } @@ -990,7 +951,7 @@ func TestIndex_AddDocumentsNdjsonInBatches(t *testing.T) { name: "TestIndexBasic", args: args{ UID: "ndjsonbatch", - client: defaultClient, + client: sv, batchSize: 2, documents: testNdjsonDocuments, }, @@ -1071,9 +1032,14 @@ func TestIndex_AddDocumentsNdjsonInBatches(t *testing.T) { } func TestIndex_DeleteAllDocuments(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -1084,7 +1050,7 @@ func TestIndex_DeleteAllDocuments(t *testing.T) { name: "TestIndexBasicDeleteAllDocuments", args: args{ UID: "TestIndexBasicDeleteAllDocuments", - client: defaultClient, + client: sv, }, wantResp: &TaskInfo{ TaskUID: 1, @@ -1096,7 +1062,7 @@ func TestIndex_DeleteAllDocuments(t *testing.T) { name: "TestIndexDeleteAllDocumentsWithCustomClient", args: args{ UID: "TestIndexDeleteAllDocumentsWithCustomClient", - client: customClient, + client: customSv, }, wantResp: &TaskInfo{ TaskUID: 2, @@ -1111,7 +1077,7 @@ func TestIndex_DeleteAllDocuments(t *testing.T) { i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) - SetUpBasicIndex(tt.args.UID) + setUpBasicIndex(tt.args.client, tt.args.UID) gotResp, err := i.DeleteAllDocuments() require.NoError(t, err) require.GreaterOrEqual(t, gotResp.TaskUID, tt.wantResp.TaskUID) @@ -1130,10 +1096,15 @@ func TestIndex_DeleteAllDocuments(t *testing.T) { } func 
TestIndex_DeleteOneDocument(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string PrimaryKey string - client *Client + client ServiceManager identifier string documentsPtr interface{} } @@ -1146,7 +1117,7 @@ func TestIndex_DeleteOneDocument(t *testing.T) { name: "TestIndexBasicDeleteOneDocument", args: args{ UID: "1", - client: defaultClient, + client: sv, identifier: "123", documentsPtr: []map[string]interface{}{ {"ID": "123", "Name": "Pride and Prejudice"}, @@ -1162,7 +1133,7 @@ func TestIndex_DeleteOneDocument(t *testing.T) { name: "TestIndexDeleteOneDocumentWithCustomClient", args: args{ UID: "2", - client: customClient, + client: customSv, identifier: "123", documentsPtr: []map[string]interface{}{ {"ID": "123", "Name": "Pride and Prejudice"}, @@ -1178,7 +1149,7 @@ func TestIndex_DeleteOneDocument(t *testing.T) { name: "TestIndexDeleteOneDocumentinMultiple", args: args{ UID: "3", - client: defaultClient, + client: sv, identifier: "456", documentsPtr: []map[string]interface{}{ {"ID": "123", "Name": "Pride and Prejudice"}, @@ -1196,7 +1167,7 @@ func TestIndex_DeleteOneDocument(t *testing.T) { name: "TestIndexBasicDeleteOneDocumentWithIntID", args: args{ UID: "4", - client: defaultClient, + client: sv, identifier: "123", documentsPtr: []map[string]interface{}{ {"BookID": 123, "Title": "Pride and Prejudice"}, @@ -1212,7 +1183,7 @@ func TestIndex_DeleteOneDocument(t *testing.T) { name: "TestIndexDeleteOneDocumentWithIntIDWithCustomClient", args: args{ UID: "5", - client: customClient, + client: customSv, identifier: "123", documentsPtr: []map[string]interface{}{ {"BookID": 123, "Title": "Pride and Prejudice"}, @@ -1228,7 +1199,7 @@ func TestIndex_DeleteOneDocument(t *testing.T) { name: "TestIndexDeleteOneDocumentWithIntIDinMultiple", args: args{ UID: "6", - client: defaultClient, + client: sv, identifier: "456", documentsPtr: []map[string]interface{}{ 
{"BookID": 123, "Title": "Pride and Prejudice"}, @@ -1273,9 +1244,14 @@ func TestIndex_DeleteOneDocument(t *testing.T) { } func TestIndex_DeleteDocuments(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager identifier []string documentsPtr []docTest } @@ -1288,7 +1264,7 @@ func TestIndex_DeleteDocuments(t *testing.T) { name: "TestIndexBasicDeleteDocuments", args: args{ UID: "1", - client: defaultClient, + client: sv, identifier: []string{"123"}, documentsPtr: []docTest{ {ID: "123", Name: "Pride and Prejudice"}, @@ -1304,7 +1280,7 @@ func TestIndex_DeleteDocuments(t *testing.T) { name: "TestIndexDeleteDocumentsWithCustomClient", args: args{ UID: "2", - client: customClient, + client: customSv, identifier: []string{"123"}, documentsPtr: []docTest{ {ID: "123", Name: "Pride and Prejudice"}, @@ -1320,7 +1296,7 @@ func TestIndex_DeleteDocuments(t *testing.T) { name: "TestIndexDeleteOneDocumentOnMultiple", args: args{ UID: "3", - client: defaultClient, + client: sv, identifier: []string{"123"}, documentsPtr: []docTest{ {ID: "123", Name: "Pride and Prejudice"}, @@ -1338,7 +1314,7 @@ func TestIndex_DeleteDocuments(t *testing.T) { name: "TestIndexDeleteMultipleDocuments", args: args{ UID: "4", - client: defaultClient, + client: sv, identifier: []string{"123", "456", "1"}, documentsPtr: []docTest{ {ID: "123", Name: "Pride and Prejudice"}, @@ -1384,9 +1360,14 @@ func TestIndex_DeleteDocuments(t *testing.T) { } func TestIndex_DeleteDocumentsByFilter(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager filterToDelete interface{} filterToApply []string documentsPtr []docTestBooks @@ -1400,7 +1381,7 @@ func TestIndex_DeleteDocumentsByFilter(t *testing.T) { name: 
"TestIndexDeleteDocumentsByFilterString", args: args{ UID: "1", - client: defaultClient, + client: sv, filterToApply: []string{"book_id"}, filterToDelete: "book_id = 123", documentsPtr: []docTestBooks{ @@ -1417,7 +1398,7 @@ func TestIndex_DeleteDocumentsByFilter(t *testing.T) { name: "TestIndexDeleteMultipleDocumentsByFilterArrayOfString", args: args{ UID: "1", - client: customClient, + client: customSv, filterToApply: []string{"tag"}, filterToDelete: []string{"tag = 'Epic fantasy'"}, documentsPtr: []docTestBooks{ @@ -1436,7 +1417,7 @@ func TestIndex_DeleteDocumentsByFilter(t *testing.T) { name: "TestIndexDeleteMultipleDocumentsAndMultipleFiltersWithArrayOfString", args: args{ UID: "1", - client: customClient, + client: customSv, filterToApply: []string{"tag", "year"}, filterToDelete: []string{"tag = 'Epic fantasy'", "year > 1936"}, documentsPtr: []docTestBooks{ @@ -1455,7 +1436,7 @@ func TestIndex_DeleteDocumentsByFilter(t *testing.T) { name: "TestIndexDeleteMultipleDocumentsAndMultipleFiltersWithInterface", args: args{ UID: "1", - client: customClient, + client: customSv, filterToApply: []string{"book_id", "tag"}, filterToDelete: []interface{}{[]string{"tag = 'Epic fantasy'", "book_id = 123"}}, documentsPtr: []docTestBooks{ @@ -1505,1235 +1486,3 @@ func TestIndex_DeleteDocumentsByFilter(t *testing.T) { }) } } - -func TestIndex_GetDocument(t *testing.T) { - type args struct { - UID string - client *Client - identifier string - request *DocumentQuery - documentPtr *docTestBooks - } - tests := []struct { - name string - args args - wantErr bool - }{ - { - name: "TestIndexBasicGetDocument", - args: args{ - UID: "TestIndexBasicGetDocument", - client: defaultClient, - identifier: "123", - request: nil, - documentPtr: &docTestBooks{}, - }, - wantErr: false, - }, - { - name: "TestIndexGetDocumentWithCustomClient", - args: args{ - UID: "TestIndexGetDocumentWithCustomClient", - client: customClient, - identifier: "123", - request: nil, - documentPtr: &docTestBooks{}, - }, 
- wantErr: false, - }, - { - name: "TestIndexGetDocumentWithNoExistingDocument", - args: args{ - UID: "TestIndexGetDocumentWithNoExistingDocument", - client: defaultClient, - identifier: "125", - request: nil, - documentPtr: &docTestBooks{}, - }, - wantErr: true, - }, - { - name: "TestIndexGetDocumentWithEmptyParameters", - args: args{ - UID: "TestIndexGetDocumentWithEmptyParameters", - client: defaultClient, - identifier: "125", - request: &DocumentQuery{}, - documentPtr: &docTestBooks{}, - }, - wantErr: true, - }, - { - name: "TestIndexGetDocumentWithParametersFields", - args: args{ - UID: "TestIndexGetDocumentWithParametersFields", - client: defaultClient, - identifier: "125", - request: &DocumentQuery{ - Fields: []string{"book_id", "title"}, - }, - documentPtr: &docTestBooks{}, - }, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.args.client - i := c.Index(tt.args.UID) - t.Cleanup(cleanup(c)) - SetUpBasicIndex(tt.args.UID) - - require.Empty(t, tt.args.documentPtr) - err := i.GetDocument(tt.args.identifier, tt.args.request, tt.args.documentPtr) - if tt.wantErr { - require.Error(t, err) - require.Empty(t, tt.args.documentPtr) - } else { - require.NoError(t, err) - require.NotEmpty(t, tt.args.documentPtr) - require.Equal(t, strconv.Itoa(tt.args.documentPtr.BookID), tt.args.identifier) - } - }) - } -} - -func TestIndex_GetDocuments(t *testing.T) { - type args struct { - UID string - client *Client - request *DocumentsQuery - resp *DocumentsResult - filter []string - } - tests := []struct { - name string - args args - result int64 - }{ - { - name: "TestIndexBasicGetDocuments", - args: args{ - client: defaultClient, - request: nil, - resp: &DocumentsResult{}, - }, - result: 20, - }, - { - name: "TestIndexGetDocumentsWithCustomClient", - args: args{ - client: customClient, - request: nil, - resp: &DocumentsResult{}, - }, - result: 20, - }, - { - name: "TestIndexGetDocumentsWithEmptyStruct", - args: args{ - client: 
defaultClient, - request: &DocumentsQuery{}, - resp: &DocumentsResult{}, - }, - result: 20, - }, - { - name: "TestIndexGetDocumentsWithLimit", - args: args{ - client: defaultClient, - request: &DocumentsQuery{ - Limit: 3, - }, - resp: &DocumentsResult{}, - }, - result: 3, - }, - { - name: "TestIndexGetDocumentsWithFields", - args: args{ - client: defaultClient, - request: &DocumentsQuery{ - Fields: []string{"title"}, - }, - resp: &DocumentsResult{}, - }, - result: 20, - }, - { - name: "TestIndexGetDocumentsWithFilterAsString", - args: args{ - client: defaultClient, - request: &DocumentsQuery{ - Filter: "book_id = 123", - }, - resp: &DocumentsResult{}, - filter: []string{ - "book_id", - }, - }, - result: 1, - }, - { - name: "TestIndexGetDocumentsWithFilterAsArray", - args: args{ - client: defaultClient, - request: &DocumentsQuery{ - Filter: []string{"tag = Tragedy"}, - }, - resp: &DocumentsResult{}, - filter: []string{ - "tag", - }, - }, - result: 3, - }, - { - name: "TestIndexGetDocumentsWithMultipleFilterWithArrayOfString", - args: args{ - client: defaultClient, - request: &DocumentsQuery{ - Filter: []string{"tag = Tragedy", "book_id = 742"}, - }, - resp: &DocumentsResult{}, - filter: []string{ - "tag", - "book_id", - }, - }, - result: 1, - }, - { - name: "TestIndexGetDocumentsWithMultipleFilterWithInterface", - args: args{ - client: defaultClient, - request: &DocumentsQuery{ - Filter: []interface{}{[]string{"tag = Tragedy", "book_id = 123"}}, - }, - resp: &DocumentsResult{}, - filter: []string{ - "tag", - "book_id", - }, - }, - result: 4, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.args.client - i := c.Index("indexUID") - t.Cleanup(cleanup(c)) - SetUpIndexForFaceting() - - if tt.args.request != nil && tt.args.request.Filter != nil { - gotTask, err := i.UpdateFilterableAttributes(&tt.args.filter) - require.NoError(t, err) - testWaitForTask(t, i, gotTask) - } - - err := i.GetDocuments(tt.args.request, tt.args.resp) - 
require.NoError(t, err) - if tt.args.request != nil && tt.args.request.Limit != 0 { - require.Equal(t, tt.args.request.Limit, int64(len(tt.args.resp.Results))) - } - require.Equal(t, tt.result, int64(len(tt.args.resp.Results))) - }) - } -} - -func TestIndex_UpdateDocuments(t *testing.T) { - type args struct { - UID string - client *Client - documentsPtr []docTestBooks - } - tests := []struct { - name string - args args - want *Task - }{ - { - name: "TestIndexBasicUpdateDocument", - args: args{ - UID: "TestIndexBasicUpdateDocument", - client: defaultClient, - documentsPtr: []docTestBooks{ - {BookID: 123, Title: "One Hundred Years of Solitude"}, - }, - }, - want: &Task{ - TaskUID: 1, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - { - name: "TestIndexUpdateDocumentWithCustomClient", - args: args{ - UID: "TestIndexUpdateDocumentWithCustomClient", - client: defaultClient, - documentsPtr: []docTestBooks{ - {BookID: 123, Title: "One Hundred Years of Solitude"}, - }, - }, - want: &Task{ - TaskUID: 1, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - { - name: "TestIndexUpdateDocumentOnMultipleDocuments", - args: args{ - UID: "TestIndexUpdateDocumentOnMultipleDocuments", - client: defaultClient, - documentsPtr: []docTestBooks{ - {BookID: 123, Title: "One Hundred Years of Solitude"}, - {BookID: 1344, Title: "Harry Potter and the Half-Blood Prince"}, - {BookID: 4, Title: "The Hobbit"}, - {BookID: 42, Title: "The Great Gatsby"}, - }, - }, - want: &Task{ - TaskUID: 1, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - { - name: "TestIndexUpdateDocumentWithNoExistingDocument", - args: args{ - UID: "TestIndexUpdateDocumentWithNoExistingDocument", - client: defaultClient, - documentsPtr: []docTestBooks{ - {BookID: 237, Title: "One Hundred Years of Solitude"}, - }, - }, - want: &Task{ - TaskUID: 1, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - { - name: 
"TestIndexUpdateDocumentWithNoExistingMultipleDocuments", - args: args{ - UID: "TestIndexUpdateDocumentWithNoExistingMultipleDocuments", - client: defaultClient, - documentsPtr: []docTestBooks{ - {BookID: 246, Title: "One Hundred Years of Solitude"}, - {BookID: 834, Title: "To Kill a Mockingbird"}, - {BookID: 44, Title: "Don Quixote"}, - {BookID: 594, Title: "The Great Gatsby"}, - }, - }, - want: &Task{ - TaskUID: 1, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.args.client - i := c.Index(tt.args.UID) - t.Cleanup(cleanup(c)) - SetUpBasicIndex(tt.args.UID) - - got, err := i.UpdateDocuments(tt.args.documentsPtr) - require.NoError(t, err) - require.GreaterOrEqual(t, got.TaskUID, tt.want.TaskUID) - require.Equal(t, got.Status, tt.want.Status) - require.Equal(t, got.Type, tt.want.Type) - require.NotZero(t, got.EnqueuedAt) - - testWaitForTask(t, i, got) - - var document docTestBooks - for _, identifier := range tt.args.documentsPtr { - err = i.GetDocument(strconv.Itoa(identifier.BookID), nil, &document) - require.NoError(t, err) - require.Equal(t, identifier.BookID, document.BookID) - require.Equal(t, identifier.Title, document.Title) - } - }) - } -} - -func TestIndex_UpdateDocumentsWithPrimaryKey(t *testing.T) { - type args struct { - UID string - client *Client - documentsPtr []docTestBooks - primaryKey string - } - tests := []struct { - name string - args args - want *Task - }{ - { - name: "TestIndexBasicUpdateDocumentsWithPrimaryKey", - args: args{ - UID: "TestIndexBasicUpdateDocumentsWithPrimaryKey", - client: defaultClient, - documentsPtr: []docTestBooks{ - {BookID: 123, Title: "One Hundred Years of Solitude"}, - }, - primaryKey: "book_id", - }, - want: &Task{ - TaskUID: 1, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - { - name: "TestIndexUpdateDocumentsWithPrimaryKeyWithCustomClient", - args: args{ - UID: 
"TestIndexUpdateDocumentsWithPrimaryKeyWithCustomClient", - client: defaultClient, - documentsPtr: []docTestBooks{ - {BookID: 123, Title: "One Hundred Years of Solitude"}, - }, - primaryKey: "book_id", - }, - want: &Task{ - TaskUID: 1, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - { - name: "TestIndexUpdateDocumentsWithPrimaryKeyOnMultipleDocuments", - args: args{ - UID: "TestIndexUpdateDocumentsWithPrimaryKeyOnMultipleDocuments", - client: defaultClient, - documentsPtr: []docTestBooks{ - {BookID: 123, Title: "One Hundred Years of Solitude"}, - {BookID: 1344, Title: "Harry Potter and the Half-Blood Prince"}, - {BookID: 4, Title: "The Hobbit"}, - {BookID: 42, Title: "The Great Gatsby"}, - }, - primaryKey: "book_id", - }, - want: &Task{ - TaskUID: 1, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - { - name: "TestIndexUpdateDocumentsWithPrimaryKeyWithNoExistingDocument", - args: args{ - UID: "TestIndexUpdateDocumentsWithPrimaryKeyWithNoExistingDocument", - client: defaultClient, - documentsPtr: []docTestBooks{ - {BookID: 237, Title: "One Hundred Years of Solitude"}, - }, - primaryKey: "book_id", - }, - want: &Task{ - TaskUID: 1, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - { - name: "TestIndexUpdateDocumentsWithPrimaryKeyWithNoExistingMultipleDocuments", - args: args{ - UID: "TestIndexUpdateDocumentsWithPrimaryKeyWithNoExistingMultipleDocuments", - client: defaultClient, - documentsPtr: []docTestBooks{ - {BookID: 246, Title: "One Hundred Years of Solitude"}, - {BookID: 834, Title: "To Kill a Mockingbird"}, - {BookID: 44, Title: "Don Quixote"}, - {BookID: 594, Title: "The Great Gatsby"}, - }, - primaryKey: "book_id", - }, - want: &Task{ - TaskUID: 1, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := tt.args.client - i := c.Index(tt.args.UID) - t.Cleanup(cleanup(c)) - 
SetUpBasicIndex(tt.args.UID) - - got, err := i.UpdateDocuments(tt.args.documentsPtr, tt.args.primaryKey) - require.NoError(t, err) - require.GreaterOrEqual(t, got.TaskUID, tt.want.TaskUID) - require.Equal(t, got.Status, tt.want.Status) - require.Equal(t, got.Type, tt.want.Type) - require.NotZero(t, got.EnqueuedAt) - - testWaitForTask(t, i, got) - - var document docTestBooks - for _, identifier := range tt.args.documentsPtr { - err = i.GetDocument(strconv.Itoa(identifier.BookID), nil, &document) - require.NoError(t, err) - require.Equal(t, identifier.BookID, document.BookID) - require.Equal(t, identifier.Title, document.Title) - } - }) - } -} - -func TestIndex_UpdateDocumentsInBatches(t *testing.T) { - type argsNoKey struct { - UID string - client *Client - documentsPtr []docTestBooks - batchSize int - } - - type argsWithKey struct { - UID string - client *Client - documentsPtr []docTestBooks - batchSize int - primaryKey string - } - - testsNoKey := []struct { - name string - args argsNoKey - want []TaskInfo - }{ - { - name: "TestIndexBatchUpdateDocuments", - args: argsNoKey{ - UID: "TestIndexBatchUpdateDocuments", - client: defaultClient, - documentsPtr: []docTestBooks{ - {BookID: 123, Title: "One Hundred Years of Solitude"}, - {BookID: 124, Title: "One Hundred Years of Solitude 2"}, - }, - batchSize: 1, - }, - want: []TaskInfo{ - { - TaskUID: 1, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - { - TaskUID: 2, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - }, - } - - testsWithKey := []struct { - name string - args argsWithKey - want []TaskInfo - }{ - { - name: "TestIndexBatchUpdateDocuments", - args: argsWithKey{ - UID: "TestIndexBatchUpdateDocuments", - client: defaultClient, - documentsPtr: []docTestBooks{ - {BookID: 123, Title: "One Hundred Years of Solitude"}, - {BookID: 124, Title: "One Hundred Years of Solitude 2"}, - }, - batchSize: 1, - primaryKey: "book_id", - }, - want: []TaskInfo{ - { - TaskUID: 1, - 
Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - { - TaskUID: 2, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - }, - } - - for _, tt := range testsNoKey { - t.Run(tt.name, func(t *testing.T) { - c := tt.args.client - i := c.Index(tt.args.UID) - t.Cleanup(cleanup(c)) - SetUpBasicIndex(tt.args.UID) - - got, err := i.UpdateDocumentsInBatches(tt.args.documentsPtr, tt.args.batchSize) - require.NoError(t, err) - for i := 0; i < 2; i++ { - require.GreaterOrEqual(t, got[i].TaskUID, tt.want[i].TaskUID) - require.Equal(t, got[i].Status, tt.want[i].Status) - require.Equal(t, got[i].Type, tt.want[i].Type) - require.NotZero(t, got[i].EnqueuedAt) - } - - testWaitForBatchTask(t, i, got) - - var document docTestBooks - for _, identifier := range tt.args.documentsPtr { - err = i.GetDocument(strconv.Itoa(identifier.BookID), nil, &document) - require.NoError(t, err) - require.Equal(t, identifier.BookID, document.BookID) - require.Equal(t, identifier.Title, document.Title) - } - }) - } - - for _, tt := range testsWithKey { - t.Run(tt.name, func(t *testing.T) { - c := tt.args.client - i := c.Index(tt.args.UID) - t.Cleanup(cleanup(c)) - SetUpBasicIndex(tt.args.UID) - - got, err := i.UpdateDocumentsInBatches(tt.args.documentsPtr, tt.args.batchSize, tt.args.primaryKey) - require.NoError(t, err) - for i := 0; i < 2; i++ { - require.GreaterOrEqual(t, got[i].TaskUID, tt.want[i].TaskUID) - require.Equal(t, got[i].Status, tt.want[i].Status) - require.Equal(t, got[i].Type, tt.want[i].Type) - require.NotZero(t, got[i].EnqueuedAt) - } - - testWaitForBatchTask(t, i, got) - - var document docTestBooks - for _, identifier := range tt.args.documentsPtr { - err = i.GetDocument(strconv.Itoa(identifier.BookID), nil, &document) - require.NoError(t, err) - require.Equal(t, identifier.BookID, document.BookID) - require.Equal(t, identifier.Title, document.Title) - } - }) - } -} - -func TestIndex_UpdateDocumentsCsv(t *testing.T) { - type args struct { - UID 
string - client *Client - documents []byte - } - type testData struct { - name string - args args - wantResp *TaskInfo - } - - tests := []testData{ - { - name: "TestIndexBasic", - args: args{ - UID: "csv", - client: defaultClient, - documents: testCsvDocuments, - }, - wantResp: &TaskInfo{ - TaskUID: 0, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - } - - testUpdateDocumentsCsv := func(t *testing.T, tt testData, testReader bool) { - name := tt.name + "UpdateDocumentsCsv" - if testReader { - name += "FromReader" - } - - uid := tt.args.UID - if testReader { - uid += "-reader" - } else { - uid += "-string" - } - - t.Run(name, func(t *testing.T) { - c := tt.args.client - i := c.Index(uid) - t.Cleanup(cleanup(c)) - - wantDocs := testParseCsvDocuments(t, bytes.NewReader(tt.args.documents)) - - var ( - gotResp *TaskInfo - err error - ) - - if testReader { - gotResp, err = i.UpdateDocumentsCsvFromReader(bytes.NewReader(tt.args.documents), nil) - } else { - gotResp, err = i.UpdateDocumentsCsv(tt.args.documents, nil) - } - - require.NoError(t, err) - require.GreaterOrEqual(t, gotResp.TaskUID, tt.wantResp.TaskUID) - require.Equal(t, tt.wantResp.Status, gotResp.Status) - require.Equal(t, tt.wantResp.Type, gotResp.Type) - require.NotZero(t, gotResp.EnqueuedAt) - - testWaitForTask(t, i, gotResp) - - var documents DocumentsResult - err = i.GetDocuments(&DocumentsQuery{}, &documents) - require.NoError(t, err) - require.Equal(t, wantDocs, documents.Results) - }) - } - - for _, tt := range tests { - // Test both the string and io.Reader receiving versions - testUpdateDocumentsCsv(t, tt, false) - testUpdateDocumentsCsv(t, tt, true) - } -} - -func TestIndex_UpdateDocumentsCsvWithOptions(t *testing.T) { - type args struct { - UID string - client *Client - documents []byte - options *CsvDocumentsQuery - } - type testData struct { - name string - args args - wantResp *TaskInfo - } - - tests := []testData{ - { - name: 
"TestIndexBasicUpdateDocumentsCsvWithOptions", - args: args{ - UID: "csv", - client: defaultClient, - documents: testCsvDocuments, - options: &CsvDocumentsQuery{ - PrimaryKey: "id", - CsvDelimiter: ",", - }, - }, - wantResp: &TaskInfo{ - TaskUID: 0, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - { - name: "TestIndexBasicUpdateDocumentsCsvWithPrimaryKey", - args: args{ - UID: "csv", - client: defaultClient, - documents: testCsvDocuments, - options: &CsvDocumentsQuery{ - PrimaryKey: "id", - }, - }, - wantResp: &TaskInfo{ - TaskUID: 0, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - { - name: "TestIndexBasicUpdateDocumentsCsvWithCsvDelimiter", - args: args{ - UID: "csv", - client: defaultClient, - documents: testCsvDocuments, - options: &CsvDocumentsQuery{ - CsvDelimiter: ",", - }, - }, - wantResp: &TaskInfo{ - TaskUID: 0, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - } - - testUpdateDocumentsCsv := func(t *testing.T, tt testData, testReader bool) { - name := tt.name + "UpdateDocumentsCsv" - if testReader { - name += "FromReader" - } - - uid := tt.args.UID - if testReader { - uid += "-reader" - } else { - uid += "-string" - } - - t.Run(name, func(t *testing.T) { - c := tt.args.client - i := c.Index(uid) - t.Cleanup(cleanup(c)) - - wantDocs := testParseCsvDocuments(t, bytes.NewReader(tt.args.documents)) - - var ( - gotResp *TaskInfo - err error - ) - - if testReader { - gotResp, err = i.UpdateDocumentsCsvFromReader(bytes.NewReader(tt.args.documents), tt.args.options) - } else { - gotResp, err = i.UpdateDocumentsCsv(tt.args.documents, tt.args.options) - } - - require.NoError(t, err) - require.GreaterOrEqual(t, gotResp.TaskUID, tt.wantResp.TaskUID) - require.Equal(t, tt.wantResp.Status, gotResp.Status) - require.Equal(t, tt.wantResp.Type, gotResp.Type) - require.NotZero(t, gotResp.EnqueuedAt) - - testWaitForTask(t, i, gotResp) - - var documents DocumentsResult - err = 
i.GetDocuments(&DocumentsQuery{}, &documents) - require.NoError(t, err) - require.Equal(t, wantDocs, documents.Results) - }) - } - - for _, tt := range tests { - // Test both the string and io.Reader receiving versions - testUpdateDocumentsCsv(t, tt, false) - testUpdateDocumentsCsv(t, tt, true) - } -} - -func TestIndex_UpdateDocumentsCsvInBatches(t *testing.T) { - type args struct { - UID string - client *Client - batchSize int - documents []byte - } - type testData struct { - name string - args args - wantResp []TaskInfo - } - - tests := []testData{ - { - name: "TestIndexBasic", - args: args{ - UID: "csvbatch", - client: defaultClient, - batchSize: 2, - documents: testCsvDocuments, - }, - wantResp: []TaskInfo{ - { - TaskUID: 0, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - { - TaskUID: 1, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - { - TaskUID: 2, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - }, - } - - testUpdateDocumentsCsvInBatches := func(t *testing.T, tt testData, testReader bool) { - name := tt.name + "UpdateDocumentsCsv" - if testReader { - name += "FromReader" - } - name += "InBatches" - - uid := tt.args.UID - if testReader { - uid += "-reader" - } else { - uid += "-string" - } - - t.Run(name, func(t *testing.T) { - c := tt.args.client - i := c.Index(uid) - t.Cleanup(cleanup(c)) - - wantDocs := testParseCsvDocuments(t, bytes.NewReader(tt.args.documents)) - - var ( - gotResp []TaskInfo - err error - ) - - if testReader { - gotResp, err = i.UpdateDocumentsCsvFromReaderInBatches(bytes.NewReader(tt.args.documents), tt.args.batchSize, nil) - } else { - gotResp, err = i.UpdateDocumentsCsvInBatches(tt.args.documents, tt.args.batchSize, nil) - } - - require.NoError(t, err) - for i := 0; i < 2; i++ { - require.GreaterOrEqual(t, gotResp[i].TaskUID, tt.wantResp[i].TaskUID) - require.Equal(t, gotResp[i].Status, tt.wantResp[i].Status) - require.Equal(t, gotResp[i].Type, 
tt.wantResp[i].Type) - require.NotZero(t, gotResp[i].EnqueuedAt) - } - - testWaitForBatchTask(t, i, gotResp) - - var documents DocumentsResult - err = i.GetDocuments(&DocumentsQuery{}, &documents) - require.NoError(t, err) - require.Equal(t, wantDocs, documents.Results) - }) - } - - for _, tt := range tests { - // Test both the string and io.Reader receiving versions - testUpdateDocumentsCsvInBatches(t, tt, false) - testUpdateDocumentsCsvInBatches(t, tt, true) - } -} - -func TestIndex_UpdateDocumentsNdjson(t *testing.T) { - type args struct { - UID string - client *Client - documents []byte - } - type testData struct { - name string - args args - wantResp *TaskInfo - } - - tests := []testData{ - { - name: "TestIndexBasic", - args: args{ - UID: "ndjson", - client: defaultClient, - documents: testNdjsonDocuments, - }, - wantResp: &TaskInfo{ - TaskUID: 0, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - } - - testUpdateDocumentsNdjson := func(t *testing.T, tt testData, testReader bool) { - name := tt.name + "UpdateDocumentsNdjson" - if testReader { - name += "FromReader" - } - - uid := tt.args.UID - if testReader { - uid += "-reader" - } else { - uid += "-string" - } - - t.Run(name, func(t *testing.T) { - c := tt.args.client - i := c.Index(uid) - t.Cleanup(cleanup(c)) - - wantDocs := testParseNdjsonDocuments(t, bytes.NewReader(tt.args.documents)) - - var ( - gotResp *TaskInfo - err error - ) - - if testReader { - gotResp, err = i.UpdateDocumentsNdjsonFromReader(bytes.NewReader(tt.args.documents)) - } else { - gotResp, err = i.UpdateDocumentsNdjson(tt.args.documents) - } - - require.NoError(t, err) - require.GreaterOrEqual(t, gotResp.TaskUID, tt.wantResp.TaskUID) - require.Equal(t, tt.wantResp.Status, gotResp.Status) - require.Equal(t, tt.wantResp.Type, gotResp.Type) - require.NotZero(t, gotResp.EnqueuedAt) - - testWaitForTask(t, i, gotResp) - - var documents DocumentsResult - err = i.GetDocuments(&DocumentsQuery{}, &documents) - 
require.NoError(t, err) - require.Equal(t, wantDocs, documents.Results) - }) - } - - for _, tt := range tests { - // Test both the string and io.Reader receiving versions - testUpdateDocumentsNdjson(t, tt, false) - testUpdateDocumentsNdjson(t, tt, true) - } -} - -func TestIndex_UpdateDocumentsNdjsonInBatches(t *testing.T) { - type args struct { - UID string - client *Client - batchSize int - documents []byte - } - type testData struct { - name string - args args - wantResp []TaskInfo - } - - tests := []testData{ - { - name: "TestIndexBasic", - args: args{ - UID: "ndjsonbatch", - client: defaultClient, - batchSize: 2, - documents: testNdjsonDocuments, - }, - wantResp: []TaskInfo{ - { - TaskUID: 0, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - { - TaskUID: 1, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - { - TaskUID: 2, - Status: "enqueued", - Type: TaskTypeDocumentAdditionOrUpdate, - }, - }, - }, - } - - testUpdateDocumentsNdjsonInBatches := func(t *testing.T, tt testData, testReader bool) { - name := tt.name + "UpdateDocumentsNdjson" - if testReader { - name += "FromReader" - } - name += "InBatches" - - uid := tt.args.UID - if testReader { - uid += "-reader" - } else { - uid += "-string" - } - - t.Run(name, func(t *testing.T) { - c := tt.args.client - i := c.Index(uid) - t.Cleanup(cleanup(c)) - - wantDocs := testParseNdjsonDocuments(t, bytes.NewReader(tt.args.documents)) - - var ( - gotResp []TaskInfo - err error - ) - - if testReader { - gotResp, err = i.updateDocumentsNdjsonFromReaderInBatches(bytes.NewReader(tt.args.documents), tt.args.batchSize) - } else { - gotResp, err = i.UpdateDocumentsNdjsonInBatches(tt.args.documents, tt.args.batchSize) - } - - require.NoError(t, err) - for i := 0; i < 2; i++ { - require.GreaterOrEqual(t, gotResp[i].TaskUID, tt.wantResp[i].TaskUID) - require.Equal(t, gotResp[i].Status, tt.wantResp[i].Status) - require.Equal(t, gotResp[i].Type, tt.wantResp[i].Type) - require.NotZero(t, 
gotResp[i].EnqueuedAt) - } - - testWaitForBatchTask(t, i, gotResp) - - var documents DocumentsResult - err = i.GetDocuments(&DocumentsQuery{}, &documents) - require.NoError(t, err) - require.Equal(t, wantDocs, documents.Results) - }) - } - - for _, tt := range tests { - // Test both the string and io.Reader receiving versions - testUpdateDocumentsNdjsonInBatches(t, tt, false) - testUpdateDocumentsNdjsonInBatches(t, tt, true) - } -} - -func Test_transformStringVariadicToMap(t *testing.T) { - type args struct { - primaryKey []string - } - tests := []struct { - name string - args args - wantOptions map[string]string - }{ - { - name: "TestCreateOptionsInterface", - args: args{ - []string{ - "id", - }, - }, - wantOptions: map[string]string{ - "primaryKey": "id", - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - gotOptions := transformStringVariadicToMap(tt.args.primaryKey...) - require.Equal(t, tt.wantOptions, gotOptions) - }) - } -} - -func Test_generateQueryForOptions(t *testing.T) { - type args struct { - options map[string]string - } - tests := []struct { - name string - args args - wantUrlQuery string - }{ - { - name: "TestGenerateQueryForOptions", - args: args{ - options: map[string]string{ - "primaryKey": "id", - "csvDelimiter": ",", - }, - }, - wantUrlQuery: "csvDelimiter=%2C&primaryKey=id", - }, - { - name: "TestGenerateQueryForPrimaryKey", - args: args{ - options: map[string]string{ - "primaryKey": "id", - }, - }, - wantUrlQuery: "primaryKey=id", - }, - { - name: "TestGenerateQueryForCsvDelimiter", - args: args{ - options: map[string]string{ - "csvDelimiter": ",", - }, - }, - wantUrlQuery: "csvDelimiter=%2C", - }, - { - name: "TestGenerateQueryWithNull", - args: args{ - options: nil, - }, - wantUrlQuery: "", - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - gotUrlQuery := generateQueryForOptions(tt.args.options) - require.Equal(t, tt.wantUrlQuery, gotUrlQuery) - }) - } -} - -func 
Test_transformCsvDocumentsQueryToMap(t *testing.T) { - type args struct { - options *CsvDocumentsQuery - } - tests := []struct { - name string - args args - want map[string]string - }{ - { - name: "TestTransformCsvDocumentsQueryToMap", - args: args{ - options: &CsvDocumentsQuery{ - PrimaryKey: "id", - CsvDelimiter: ",", - }, - }, - want: map[string]string{ - "primaryKey": "id", - "csvDelimiter": ",", - }, - }, - { - name: "TestTransformCsvDocumentsQueryToMapWithPrimaryKey", - args: args{ - options: &CsvDocumentsQuery{ - PrimaryKey: "id", - }, - }, - want: map[string]string{ - "primaryKey": "id", - }, - }, - { - name: "TestTransformCsvDocumentsQueryToMapEmpty", - args: args{ - options: &CsvDocumentsQuery{}, - }, - want: map[string]string{}, - }, - { - name: "TestTransformCsvDocumentsQueryToMapNull", - args: args{ - options: nil, - }, - want: nil, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got := transformCsvDocumentsQueryToMap(tt.args.options) - require.Equal(t, tt.want, got) - }) - } -} diff --git a/index_documents.go b/index_documents.go deleted file mode 100644 index 7bb01182..00000000 --- a/index_documents.go +++ /dev/null @@ -1,568 +0,0 @@ -package meilisearch - -import ( - "bufio" - "bytes" - "encoding/csv" - "encoding/json" - "fmt" - "io" - "math" - "net/http" - "net/url" - "reflect" - "strconv" - "strings" -) - -func transformStringVariadicToMap(primaryKey ...string) (options map[string]string) { - if primaryKey != nil { - return map[string]string{ - "primaryKey": primaryKey[0], - } - } - return nil -} - -func transformCsvDocumentsQueryToMap(options *CsvDocumentsQuery) map[string]string { - var optionsMap map[string]string - data, _ := json.Marshal(options) - _ = json.Unmarshal(data, &optionsMap) - return optionsMap -} - -func generateQueryForOptions(options map[string]string) (urlQuery string) { - q := url.Values{} - for key, val := range options { - q.Add(key, val) - } - return q.Encode() -} - -func 
sendCsvRecords(documentsCsvFunc func(recs []byte, op *CsvDocumentsQuery) (resp *TaskInfo, err error), records [][]string, options *CsvDocumentsQuery) (*TaskInfo, error) { - b := new(bytes.Buffer) - w := csv.NewWriter(b) - w.UseCRLF = true - - err := w.WriteAll(records) - if err != nil { - return nil, fmt.Errorf("could not write CSV records: %w", err) - } - - resp, err := documentsCsvFunc(b.Bytes(), options) - if err != nil { - return nil, err - } - return resp, nil -} - -func (i Index) saveDocumentsFromReaderInBatches(documents io.Reader, batchSize int, documentsCsvFunc func(recs []byte, op *CsvDocumentsQuery) (resp *TaskInfo, err error), options *CsvDocumentsQuery) (resp []TaskInfo, err error) { - // Because of the possibility of multiline fields it's not safe to split - // into batches by lines, we'll have to parse the file and reassemble it - // into smaller parts. RFC 4180 compliant input with a header row is - // expected. - // Records are read and sent continuously to avoid reading all content - // into memory. However, this means that only part of the documents might - // be added successfully. 
- - var ( - responses []TaskInfo - header []string - records [][]string - ) - - r := csv.NewReader(documents) - for { - // Read CSV record (empty lines and comments are already skipped by csv.Reader) - record, err := r.Read() - if err == io.EOF { - break - } - if err != nil { - return nil, fmt.Errorf("could not read CSV record: %w", err) - } - - // Store first record as header - if header == nil { - header = record - continue - } - - // Add header record to every batch - if len(records) == 0 { - records = append(records, header) - } - - records = append(records, record) - - // After reaching batchSize (not counting the header record) assemble a CSV file and send records - if len(records) == batchSize+1 { - resp, err := sendCsvRecords(documentsCsvFunc, records, options) - if err != nil { - return nil, err - } - responses = append(responses, *resp) - records = nil - } - } - - // Send remaining records as the last batch if there is any - if len(records) > 0 { - resp, err := sendCsvRecords(documentsCsvFunc, records, options) - if err != nil { - return nil, err - } - responses = append(responses, *resp) - } - - return responses, nil -} - -func (i Index) saveDocumentsInBatches(documentsPtr interface{}, batchSize int, documentFunc func(documentsPtr interface{}, primaryKey ...string) (resp *TaskInfo, err error), primaryKey ...string) (resp []TaskInfo, err error) { - arr := reflect.ValueOf(documentsPtr) - lenDocs := arr.Len() - numBatches := int(math.Ceil(float64(lenDocs) / float64(batchSize))) - resp = make([]TaskInfo, numBatches) - - for j := 0; j < numBatches; j++ { - end := (j + 1) * batchSize - if end > lenDocs { - end = lenDocs - } - - batch := arr.Slice(j*batchSize, end).Interface() - - if len(primaryKey) != 0 { - respID, err := documentFunc(batch, primaryKey[0]) - if err != nil { - return nil, err - } - - resp[j] = *respID - } else { - respID, err := documentFunc(batch) - if err != nil { - return nil, err - } - - resp[j] = *respID - } - } - - return resp, nil -} - 
-func (i Index) GetDocument(identifier string, request *DocumentQuery, documentPtr interface{}) error { - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/documents/" + identifier, - method: http.MethodGet, - withRequest: nil, - withResponse: documentPtr, - withQueryParams: map[string]string{}, - acceptedStatusCodes: []int{http.StatusOK}, - functionName: "GetDocument", - } - if request != nil { - if len(request.Fields) != 0 { - req.withQueryParams["fields"] = strings.Join(request.Fields, ",") - } - } - if err := i.client.executeRequest(req); err != nil { - return err - } - return nil -} - -func (i Index) GetDocuments(request *DocumentsQuery, resp *DocumentsResult) error { - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/documents", - method: http.MethodGet, - contentType: contentTypeJSON, - withRequest: nil, - withResponse: resp, - withQueryParams: nil, - acceptedStatusCodes: []int{http.StatusOK}, - functionName: "GetDocuments", - } - if request != nil && request.Filter == nil { - req.withQueryParams = map[string]string{} - if request.Limit != 0 { - req.withQueryParams["limit"] = strconv.FormatInt(request.Limit, 10) - } - if request.Offset != 0 { - req.withQueryParams["offset"] = strconv.FormatInt(request.Offset, 10) - } - if len(request.Fields) != 0 { - req.withQueryParams["fields"] = strings.Join(request.Fields, ",") - } - } else if request != nil && request.Filter != nil { - req.withRequest = request - req.method = http.MethodPost - req.endpoint = req.endpoint + "/fetch" - } - if err := i.client.executeRequest(req); err != nil { - return VersionErrorHintMessage(err, &req) - } - return nil -} - -func (i *Index) addDocuments(documentsPtr interface{}, contentType string, options map[string]string) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - endpoint := "" - if options == nil { - endpoint = "/indexes/" + i.UID + "/documents" - } else { - for key, val := range options { - if key == "primaryKey" { - i.PrimaryKey = val - } - } - 
endpoint = "/indexes/" + i.UID + "/documents?" + generateQueryForOptions(options) - } - req := internalRequest{ - endpoint: endpoint, - method: http.MethodPost, - contentType: contentType, - withRequest: documentsPtr, - withResponse: resp, - acceptedStatusCodes: []int{http.StatusAccepted}, - functionName: "AddDocuments", - } - if err = i.client.executeRequest(req); err != nil { - return nil, err - } - return resp, nil -} - -func (i Index) AddDocuments(documentsPtr interface{}, primaryKey ...string) (resp *TaskInfo, err error) { - return i.addDocuments(documentsPtr, contentTypeJSON, transformStringVariadicToMap(primaryKey...)) -} - -func (i Index) AddDocumentsInBatches(documentsPtr interface{}, batchSize int, primaryKey ...string) (resp []TaskInfo, err error) { - return i.saveDocumentsInBatches(documentsPtr, batchSize, i.AddDocuments, primaryKey...) -} - -func (i Index) AddDocumentsCsv(documents []byte, options *CsvDocumentsQuery) (resp *TaskInfo, err error) { - // []byte avoids JSON conversion in Client.sendRequest() - return i.addDocuments(documents, contentTypeCSV, transformCsvDocumentsQueryToMap(options)) -} - -func (i Index) AddDocumentsCsvFromReader(documents io.Reader, options *CsvDocumentsQuery) (resp *TaskInfo, err error) { - // Using io.Reader would avoid JSON conversion in Client.sendRequest(), but - // read content to memory anyway because of problems with streamed bodies - data, err := io.ReadAll(documents) - if err != nil { - return nil, fmt.Errorf("could not read documents: %w", err) - } - return i.addDocuments(data, contentTypeCSV, transformCsvDocumentsQueryToMap(options)) -} - -func (i Index) AddDocumentsCsvInBatches(documents []byte, batchSize int, options *CsvDocumentsQuery) (resp []TaskInfo, err error) { - // Reuse io.Reader implementation - return i.AddDocumentsCsvFromReaderInBatches(bytes.NewReader(documents), batchSize, options) -} - -func (i Index) AddDocumentsCsvFromReaderInBatches(documents io.Reader, batchSize int, options 
*CsvDocumentsQuery) (resp []TaskInfo, err error) { - return i.saveDocumentsFromReaderInBatches(documents, batchSize, i.AddDocumentsCsv, options) -} - -func (i Index) AddDocumentsNdjson(documents []byte, primaryKey ...string) (resp *TaskInfo, err error) { - // []byte avoids JSON conversion in Client.sendRequest() - return i.addDocuments([]byte(documents), contentTypeNDJSON, transformStringVariadicToMap(primaryKey...)) -} - -func (i Index) AddDocumentsNdjsonFromReader(documents io.Reader, primaryKey ...string) (resp *TaskInfo, err error) { - // Using io.Reader would avoid JSON conversion in Client.sendRequest(), but - // read content to memory anyway because of problems with streamed bodies - data, err := io.ReadAll(documents) - if err != nil { - return nil, fmt.Errorf("could not read documents: %w", err) - } - return i.addDocuments(data, contentTypeNDJSON, transformStringVariadicToMap(primaryKey...)) -} - -func (i Index) AddDocumentsNdjsonInBatches(documents []byte, batchSize int, primaryKey ...string) (resp []TaskInfo, err error) { - // Reuse io.Reader implementation - return i.AddDocumentsNdjsonFromReaderInBatches(bytes.NewReader(documents), batchSize, primaryKey...) -} - -func (i Index) AddDocumentsNdjsonFromReaderInBatches(documents io.Reader, batchSize int, primaryKey ...string) (resp []TaskInfo, err error) { - // NDJSON files supposed to contain a valid JSON document in each line, so - // it's safe to split by lines. - // Lines are read and sent continuously to avoid reading all content into - // memory. However, this means that only part of the documents might be - // added successfully. 
- - sendNdjsonLines := func(lines []string) (*TaskInfo, error) { - b := new(bytes.Buffer) - for _, line := range lines { - _, err := b.WriteString(line) - if err != nil { - return nil, fmt.Errorf("could not write NDJSON line: %w", err) - } - err = b.WriteByte('\n') - if err != nil { - return nil, fmt.Errorf("could not write NDJSON line: %w", err) - } - } - - resp, err := i.AddDocumentsNdjson(b.Bytes(), primaryKey...) - if err != nil { - return nil, err - } - return resp, nil - } - - var ( - responses []TaskInfo - lines []string - ) - - scanner := bufio.NewScanner(documents) - for scanner.Scan() { - line := strings.TrimSpace(scanner.Text()) - - // Skip empty lines (NDJSON might not allow this, but just to be sure) - if line == "" { - continue - } - - lines = append(lines, line) - // After reaching batchSize send NDJSON lines - if len(lines) == batchSize { - resp, err := sendNdjsonLines(lines) - if err != nil { - return nil, err - } - responses = append(responses, *resp) - lines = nil - } - } - if err := scanner.Err(); err != nil { - return nil, fmt.Errorf("could not read NDJSON: %w", err) - } - - // Send remaining records as the last batch if there is any - if len(lines) > 0 { - resp, err := sendNdjsonLines(lines) - if err != nil { - return nil, err - } - responses = append(responses, *resp) - } - - return responses, nil -} - -func (i *Index) updateDocuments(documentsPtr interface{}, contentType string, options map[string]string) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - endpoint := "" - if options == nil { - endpoint = "/indexes/" + i.UID + "/documents" - } else { - for key, val := range options { - if key == "primaryKey" { - i.PrimaryKey = val - } - } - endpoint = "/indexes/" + i.UID + "/documents?" 
+ generateQueryForOptions(options) - } - req := internalRequest{ - endpoint: endpoint, - method: http.MethodPut, - contentType: contentType, - withRequest: documentsPtr, - withResponse: resp, - acceptedStatusCodes: []int{http.StatusAccepted}, - functionName: "UpdateDocuments", - } - if err = i.client.executeRequest(req); err != nil { - return nil, err - } - return resp, nil -} - -func (i Index) UpdateDocuments(documentsPtr interface{}, primaryKey ...string) (resp *TaskInfo, err error) { - return i.updateDocuments(documentsPtr, contentTypeJSON, transformStringVariadicToMap(primaryKey...)) -} - -func (i Index) UpdateDocumentsInBatches(documentsPtr interface{}, batchSize int, primaryKey ...string) (resp []TaskInfo, err error) { - return i.saveDocumentsInBatches(documentsPtr, batchSize, i.UpdateDocuments, primaryKey...) -} - -func (i Index) UpdateDocumentsCsv(documents []byte, options *CsvDocumentsQuery) (resp *TaskInfo, err error) { - return i.updateDocuments(documents, contentTypeCSV, transformCsvDocumentsQueryToMap(options)) -} - -func (i Index) UpdateDocumentsCsvFromReader(documents io.Reader, options *CsvDocumentsQuery) (resp *TaskInfo, err error) { - // Using io.Reader would avoid JSON conversion in Client.sendRequest(), but - // read content to memory anyway because of problems with streamed bodies - data, err := io.ReadAll(documents) - if err != nil { - return nil, fmt.Errorf("could not read documents: %w", err) - } - return i.updateDocuments(data, contentTypeCSV, transformCsvDocumentsQueryToMap(options)) -} - -func (i Index) UpdateDocumentsCsvInBatches(documents []byte, batchSize int, options *CsvDocumentsQuery) (resp []TaskInfo, err error) { - // Reuse io.Reader implementation - return i.UpdateDocumentsCsvFromReaderInBatches(bytes.NewReader(documents), batchSize, options) -} - -func (i Index) UpdateDocumentsCsvFromReaderInBatches(documents io.Reader, batchSize int, options *CsvDocumentsQuery) (resp []TaskInfo, err error) { - return 
i.saveDocumentsFromReaderInBatches(documents, batchSize, i.UpdateDocumentsCsv, options) -} - -func (i Index) UpdateDocumentsNdjson(documents []byte, primaryKey ...string) (resp *TaskInfo, err error) { - return i.updateDocuments(documents, contentTypeNDJSON, transformStringVariadicToMap(primaryKey...)) -} - -func (i Index) UpdateDocumentsNdjsonFromReader(documents io.Reader, primaryKey ...string) (resp *TaskInfo, err error) { - // Using io.Reader would avoid JSON conversion in Client.sendRequest(), but - // read content to memory anyway because of problems with streamed bodies - data, err := io.ReadAll(documents) - if err != nil { - return nil, fmt.Errorf("could not read documents: %w", err) - } - return i.updateDocuments(data, contentTypeNDJSON, transformStringVariadicToMap(primaryKey...)) -} - -func (i Index) UpdateDocumentsNdjsonInBatches(documents []byte, batchsize int, primaryKey ...string) (resp []TaskInfo, err error) { - return i.updateDocumentsNdjsonFromReaderInBatches(bytes.NewReader(documents), batchsize, primaryKey...) -} - -func (i Index) updateDocumentsNdjsonFromReaderInBatches(documents io.Reader, batchSize int, primaryKey ...string) (resp []TaskInfo, err error) { - // NDJSON files supposed to contain a valid JSON document in each line, so - // it's safe to split by lines. - // Lines are read and sent continuously to avoid reading all content into - // memory. However, this means that only part of the documents might be - // added successfully. - - sendNdjsonLines := func(lines []string) (*TaskInfo, error) { - b := new(bytes.Buffer) - for _, line := range lines { - _, err := b.WriteString(line) - if err != nil { - return nil, fmt.Errorf("Could not write NDJSON line: %w", err) - } - err = b.WriteByte('\n') - if err != nil { - return nil, fmt.Errorf("Could not write NDJSON line: %w", err) - } - } - - resp, err := i.UpdateDocumentsNdjson(b.Bytes(), primaryKey...) 
- if err != nil { - return nil, err - } - return resp, nil - } - - var ( - responses []TaskInfo - lines []string - ) - - scanner := bufio.NewScanner(documents) - for scanner.Scan() { - line := strings.TrimSpace(scanner.Text()) - - // Skip empty lines (NDJSON might not allow this, but just to be sure) - if line == "" { - continue - } - - lines = append(lines, line) - // After reaching batchSize send NDJSON lines - if len(lines) == batchSize { - resp, err := sendNdjsonLines(lines) - if err != nil { - return nil, err - } - responses = append(responses, *resp) - lines = nil - } - } - if err := scanner.Err(); err != nil { - return nil, fmt.Errorf("Could not read NDJSON: %w", err) - } - - // Send remaining records as the last batch if there is any - if len(lines) > 0 { - resp, err := sendNdjsonLines(lines) - if err != nil { - return nil, err - } - responses = append(responses, *resp) - } - - return responses, nil -} - -func (i Index) DeleteDocument(identifier string) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/documents/" + identifier, - method: http.MethodDelete, - withRequest: nil, - withResponse: resp, - acceptedStatusCodes: []int{http.StatusAccepted}, - functionName: "DeleteDocument", - } - if err := i.client.executeRequest(req); err != nil { - return nil, err - } - return resp, nil -} - -func (i Index) DeleteDocuments(identifier []string) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/documents/delete-batch", - method: http.MethodPost, - contentType: contentTypeJSON, - withRequest: identifier, - withResponse: resp, - acceptedStatusCodes: []int{http.StatusAccepted}, - functionName: "DeleteDocuments", - } - if err := i.client.executeRequest(req); err != nil { - return nil, err - } - return resp, nil -} - -func (i Index) DeleteDocumentsByFilter(filter interface{}) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := 
internalRequest{ - endpoint: "/indexes/" + i.UID + "/documents/delete", - method: http.MethodPost, - contentType: contentTypeJSON, - withRequest: map[string]interface{}{ - "filter": filter, - }, - withResponse: resp, - acceptedStatusCodes: []int{http.StatusAccepted}, - functionName: "DeleteDocumentsByFilter", - } - if err := i.client.executeRequest(req); err != nil { - return nil, VersionErrorHintMessage(err, &req) - } - return resp, nil -} - -func (i Index) DeleteAllDocuments() (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/documents", - method: http.MethodDelete, - withRequest: nil, - withResponse: resp, - acceptedStatusCodes: []int{http.StatusAccepted}, - functionName: "DeleteAllDocuments", - } - if err = i.client.executeRequest(req); err != nil { - return nil, err - } - return resp, nil -} diff --git a/index_facet_search.go b/index_facet_search.go deleted file mode 100644 index 2a14c6b4..00000000 --- a/index_facet_search.go +++ /dev/null @@ -1,60 +0,0 @@ -package meilisearch - -import ( - "encoding/json" - "errors" - "net/http" -) - -var ErrNoFacetSearchRequest = errors.New("no search facet request provided") - -func (i Index) FacetSearch(request *FacetSearchRequest) (*json.RawMessage, error) { - if request == nil { - return nil, ErrNoFacetSearchRequest - } - - searchPostRequestParams := FacetSearchPostRequestParams(request) - - resp := &json.RawMessage{} - - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/facet-search", - method: http.MethodPost, - contentType: contentTypeJSON, - withRequest: searchPostRequestParams, - withResponse: resp, - acceptedStatusCodes: []int{http.StatusOK}, - functionName: "FacetSearch", - } - - if err := i.client.executeRequest(req); err != nil { - return nil, err - } - - return resp, nil -} - -func FacetSearchPostRequestParams(request *FacetSearchRequest) map[string]interface{} { - params := make(map[string]interface{}, 22) - - if request.Q != "" { - 
params["q"] = request.Q - } - if request.FacetName != "" { - params["facetName"] = request.FacetName - } - if request.FacetQuery != "" { - params["facetQuery"] = request.FacetQuery - } - if request.Filter != "" { - params["filter"] = request.Filter - } - if request.MatchingStrategy != "" { - params["matchingStrategy"] = request.MatchingStrategy - } - if len(request.AttributesToSearchOn) != 0 { - params["attributesToSearchOn"] = request.AttributesToSearchOn - } - - return params -} diff --git a/index_facet_search_test.go b/index_facet_search_test.go deleted file mode 100644 index 87dbf639..00000000 --- a/index_facet_search_test.go +++ /dev/null @@ -1,213 +0,0 @@ -package meilisearch - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/require" -) - -func TestIndex_FacetSearch(t *testing.T) { - type args struct { - UID string - PrimaryKey string - client *Client - request *FacetSearchRequest - filterableAttributes []string - } - - tests := []struct { - name string - args args - want *FacetSearchResponse - wantErr bool - }{ - { - name: "TestIndexBasicFacetSearch", - args: args{ - UID: "indexUID", - client: defaultClient, - request: &FacetSearchRequest{ - FacetName: "tag", - FacetQuery: "Novel", - }, - filterableAttributes: []string{"tag"}, - }, - want: &FacetSearchResponse{ - FacetHits: []interface{}{ - map[string]interface{}{ - "value": "Novel", "count": float64(5), - }, - }, - FacetQuery: "Novel", - }, - wantErr: false, - }, - { - name: "TestIndexFacetSearchWithFilter", - args: args{ - UID: "indexUID", - client: defaultClient, - request: &FacetSearchRequest{ - FacetName: "tag", - FacetQuery: "Novel", - Filter: "tag = 'Novel'", - }, - filterableAttributes: []string{"tag"}, - }, - want: &FacetSearchResponse{ - FacetHits: []interface{}{ - map[string]interface{}{ - "value": "Novel", "count": float64(5), - }, - }, - FacetQuery: "Novel", - }, - wantErr: false, - }, - { - name: "TestIndexFacetSearchWithMatchingStrategy", - args: args{ - UID: 
"indexUID", - client: defaultClient, - request: &FacetSearchRequest{ - FacetName: "tag", - FacetQuery: "Novel", - MatchingStrategy: "frequency", - }, - filterableAttributes: []string{"tag"}, - }, - want: &FacetSearchResponse{ - FacetHits: []interface{}{ - map[string]interface{}{ - "value": "Novel", "count": float64(5), - }, - }, - FacetQuery: "Novel", - }, - wantErr: false, - }, - { - name: "TestIndexFacetSearchWithAttributesToSearchOn", - args: args{ - UID: "indexUID", - client: defaultClient, - request: &FacetSearchRequest{ - FacetName: "tag", - FacetQuery: "Novel", - AttributesToSearchOn: []string{"tag"}, - }, - filterableAttributes: []string{"tag"}, - }, - want: &FacetSearchResponse{ - FacetHits: []interface{}{ - map[string]interface{}{ - "value": "Novel", "count": float64(5), - }, - }, - FacetQuery: "Novel", - }, - wantErr: false, - }, - { - name: "TestIndexFacetSearchWithNoFacetSearchRequest", - args: args{ - UID: "indexUID", - client: defaultClient, - request: nil, - }, - want: nil, - wantErr: true, - }, - { - name: "TestIndexFacetSearchWithNoFacetName", - args: args{ - UID: "indexUID", - client: defaultClient, - request: &FacetSearchRequest{ - FacetQuery: "Novel", - }, - }, - want: nil, - wantErr: true, - }, - { - name: "TestIndexFacetSearchWithNoFacetQuery", - args: args{ - UID: "indexUID", - client: defaultClient, - request: &FacetSearchRequest{ - FacetName: "tag", - }, - }, - want: nil, - wantErr: true, - }, - { - name: "TestIndexFacetSearchWithNoFilterableAttributes", - args: args{ - UID: "indexUID", - client: defaultClient, - request: &FacetSearchRequest{ - FacetName: "tag", - FacetQuery: "Novel", - }, - }, - want: nil, - wantErr: true, - }, - { - name: "TestIndexFacetSearchWithQ", - args: args{ - UID: "indexUID", - client: defaultClient, - request: &FacetSearchRequest{ - Q: "query", - FacetName: "tag", - }, - filterableAttributes: []string{"tag"}, - }, - want: &FacetSearchResponse{ - FacetHits: []interface{}{}, - FacetQuery: "", - }, - wantErr: false, 
- }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() - c := tt.args.client - i := c.Index(tt.args.UID) - t.Cleanup(cleanup(c)) - - if len(tt.args.filterableAttributes) > 0 { - updateFilter, err := i.UpdateFilterableAttributes(&tt.args.filterableAttributes) - require.NoError(t, err) - testWaitForTask(t, i, updateFilter) - } - - gotRaw, err := i.FacetSearch(tt.args.request) - - if tt.wantErr { - require.Error(t, err) - require.Nil(t, gotRaw) - return - } - - require.NoError(t, err) - // Unmarshall the raw response from FacetSearch into a FacetSearchResponse - var got FacetSearchResponse - err = json.Unmarshal(*gotRaw, &got) - require.NoError(t, err, "error unmarshalling raw got FacetSearchResponse") - - require.Equal(t, len(tt.want.FacetHits), len(got.FacetHits)) - for len := range got.FacetHits { - require.Equal(t, tt.want.FacetHits[len].(map[string]interface{})["value"], got.FacetHits[len].(map[string]interface{})["value"]) - require.Equal(t, tt.want.FacetHits[len].(map[string]interface{})["count"], got.FacetHits[len].(map[string]interface{})["count"]) - } - require.Equal(t, tt.want.FacetQuery, got.FacetQuery) - }) - } -} diff --git a/index_search.go b/index_search.go index 091d97d3..2228edc4 100644 --- a/index_search.go +++ b/index_search.go @@ -1,21 +1,16 @@ package meilisearch import ( + "context" "encoding/json" - "errors" "net/http" ) -// This constant contains the default values assigned by Meilisearch to the limit in search parameters -// -// Documentation: https://www.meilisearch.com/docs/reference/api/search#search-parameters -const ( - DefaultLimit int64 = 20 -) - -var ErrNoSearchRequest = errors.New("no search request provided") +func (i *index) Search(query string, request *SearchRequest) (*SearchResponse, error) { + return i.SearchWithContext(context.Background(), query, request) +} -func (i Index) SearchRaw(query string, request *SearchRequest) (*json.RawMessage, error) { +func (i *index) 
SearchWithContext(ctx context.Context, query string, request *SearchRequest) (*SearchResponse, error) { if request == nil { return nil, ErrNoSearchRequest } @@ -30,26 +25,30 @@ func (i Index) SearchRaw(query string, request *SearchRequest) (*json.RawMessage request.validate() - resp := &json.RawMessage{} + resp := new(SearchResponse) - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/search", + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/search", method: http.MethodPost, contentType: contentTypeJSON, withRequest: request, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, - functionName: "SearchRaw", + functionName: "Search", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) Search(query string, request *SearchRequest) (*SearchResponse, error) { +func (i *index) SearchRaw(query string, request *SearchRequest) (*json.RawMessage, error) { + return i.SearchRawWithContext(context.Background(), query, request) +} + +func (i *index) SearchRawWithContext(ctx context.Context, query string, request *SearchRequest) (*json.RawMessage, error) { if request == nil { return nil, ErrNoSearchRequest } @@ -64,28 +63,60 @@ func (i Index) Search(query string, request *SearchRequest) (*SearchResponse, er request.validate() - resp := &SearchResponse{} + resp := new(json.RawMessage) - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/search", + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/search", method: http.MethodPost, contentType: contentTypeJSON, withRequest: request, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, - functionName: "Search", + functionName: "SearchRaw", + } + + if err := i.client.executeRequest(ctx, req); err != nil { + return nil, err + } + + return resp, nil +} + +func (i *index) FacetSearch(request *FacetSearchRequest) (*json.RawMessage, error) { + return 
i.FacetSearchWithContext(context.Background(), request) +} + +func (i *index) FacetSearchWithContext(ctx context.Context, request *FacetSearchRequest) (*json.RawMessage, error) { + if request == nil { + return nil, ErrNoFacetSearchRequest } - if err := i.client.executeRequest(req); err != nil { + resp := new(json.RawMessage) + + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/facet-search", + method: http.MethodPost, + contentType: contentTypeJSON, + withRequest: request, + withResponse: resp, + acceptedStatusCodes: []int{http.StatusOK}, + functionName: "FacetSearch", + } + + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) SearchSimilarDocuments(param *SimilarDocumentQuery, resp *SimilarDocumentResult) error { - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/similar", +func (i *index) SearchSimilarDocuments(param *SimilarDocumentQuery, resp *SimilarDocumentResult) error { + return i.SearchSimilarDocumentsWithContext(context.Background(), param, resp) +} + +func (i *index) SearchSimilarDocumentsWithContext(ctx context.Context, param *SimilarDocumentQuery, resp *SimilarDocumentResult) error { + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/similar", method: http.MethodPost, withRequest: param, withResponse: resp, @@ -94,7 +125,7 @@ func (i Index) SearchSimilarDocuments(param *SimilarDocumentQuery, resp *Similar contentType: contentTypeJSON, } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return err } return nil diff --git a/index_search_test.go b/index_search_test.go index c2fa058a..6fcf7c67 100644 --- a/index_search_test.go +++ b/index_search_test.go @@ -1,17 +1,19 @@ package meilisearch import ( + "crypto/tls" "encoding/json" - "testing" - "github.com/stretchr/testify/require" + "testing" ) func TestIndex_SearchRaw(t *testing.T) { + sv := setup(t, "") + type args struct { UID string PrimaryKey 
string - client *Client + client ServiceManager query string request *SearchRequest } @@ -26,7 +28,7 @@ func TestIndex_SearchRaw(t *testing.T) { name: "TestIndexBasicSearch", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "prince", request: &SearchRequest{ IndexUID: "foobar", @@ -51,7 +53,7 @@ func TestIndex_SearchRaw(t *testing.T) { name: "TestNullRequestInSearchRow", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "prince", request: nil, }, @@ -61,7 +63,7 @@ func TestIndex_SearchRaw(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -97,10 +99,15 @@ func TestIndex_SearchRaw(t *testing.T) { } func TestIndex_Search(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string PrimaryKey string - client *Client + client ServiceManager query string request *SearchRequest } @@ -114,7 +121,7 @@ func TestIndex_Search(t *testing.T) { name: "TestIndexSearchWithEmptyRequest", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "prince", request: nil, }, @@ -125,7 +132,7 @@ func TestIndex_Search(t *testing.T) { name: "TestIndexBasicSearch", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "prince", request: &SearchRequest{}, }, @@ -148,7 +155,7 @@ func TestIndex_Search(t *testing.T) { name: "TestIndexBasicSearchWithIndexUIDInRequest", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "prince", request: &SearchRequest{ IndexUID: "foobar", @@ -173,7 +180,7 @@ func TestIndex_Search(t *testing.T) { name: "TestIndexSearchWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customSv, query: "prince", request: &SearchRequest{}, }, @@ -196,7 +203,7 @@ func 
TestIndex_Search(t *testing.T) { name: "TestIndexSearchWithLimit", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "prince", request: &SearchRequest{ Limit: 1, @@ -218,7 +225,7 @@ func TestIndex_Search(t *testing.T) { name: "TestIndexSearchWithPlaceholderSearch", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, request: &SearchRequest{ Limit: 1, }, @@ -239,7 +246,7 @@ func TestIndex_Search(t *testing.T) { name: "TestIndexSearchWithOffset", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "prince", request: &SearchRequest{ Offset: 1, @@ -261,7 +268,7 @@ func TestIndex_Search(t *testing.T) { name: "TestIndexSearchWithAttributeToRetrieve", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "prince", request: &SearchRequest{ AttributesToRetrieve: []string{"book_id", "title"}, @@ -286,7 +293,7 @@ func TestIndex_Search(t *testing.T) { name: "TestIndexSearchWithAttributeToSearchOn", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "prince", request: &SearchRequest{ AttributesToSearchOn: []string{"title"}, @@ -311,7 +318,7 @@ func TestIndex_Search(t *testing.T) { name: "TestIndexSearchWithAttributesToCrop", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "to", request: &SearchRequest{ AttributesToCrop: []string{"title"}, @@ -337,7 +344,7 @@ func TestIndex_Search(t *testing.T) { name: "TestIndexSearchWithAttributesToCropAndCustomCropMarker", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "to", request: &SearchRequest{ AttributesToCrop: []string{"title"}, @@ -364,7 +371,7 @@ func TestIndex_Search(t *testing.T) { name: "TestIndexSearchWithAttributeToHighlight", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "prince", request: &SearchRequest{ Limit: 1, @@ -390,7 +397,7 @@ func TestIndex_Search(t *testing.T) { name: "TestIndexSearchWithCustomPreAndPostHighlightTags", args: 
args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "prince", request: &SearchRequest{ Limit: 1, @@ -418,7 +425,7 @@ func TestIndex_Search(t *testing.T) { name: "TestIndexSearchWithShowMatchesPosition", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "and", request: &SearchRequest{ ShowMatchesPosition: true, @@ -449,7 +456,7 @@ func TestIndex_Search(t *testing.T) { name: "TestIndexSearchWithQuoteInQUery", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "and \"harry\"", request: &SearchRequest{}, }, @@ -469,7 +476,7 @@ func TestIndex_Search(t *testing.T) { name: "TestIndexSearchWithCustomMatchingStrategyAll", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "le prince", request: &SearchRequest{ Limit: 10, @@ -493,7 +500,7 @@ func TestIndex_Search(t *testing.T) { name: "TestIndexSearchWithCustomMatchingStrategyLast", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "prince", request: &SearchRequest{ Limit: 10, @@ -520,7 +527,7 @@ func TestIndex_Search(t *testing.T) { name: "TestIndexSearchWithRankingScoreThreshold", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "pri", request: &SearchRequest{ Limit: 10, @@ -549,7 +556,7 @@ func TestIndex_Search(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -580,10 +587,15 @@ func TestIndex_Search(t *testing.T) { } func TestIndex_SearchFacets(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string PrimaryKey string - client *Client + client ServiceManager query string request *SearchRequest filterableAttributes []string @@ -598,7 +610,7 @@ func TestIndex_SearchFacets(t *testing.T) { name: 
"TestIndexSearchWithEmptyRequest", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "prince", request: nil, }, @@ -609,7 +621,7 @@ func TestIndex_SearchFacets(t *testing.T) { name: "TestIndexSearchWithFacets", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "prince", request: &SearchRequest{ Facets: []string{"*"}, @@ -628,13 +640,12 @@ func TestIndex_SearchFacets(t *testing.T) { EstimatedTotalHits: 2, Offset: 0, Limit: 20, - FacetDistribution: map[string]interface{}( - map[string]interface{}{ - "tag": map[string]interface{}{ - "Epic fantasy": float64(1), - "Tale": float64(1), - }, - }), + FacetDistribution: map[string]interface{}{ + "tag": map[string]interface{}{ + "Epic fantasy": float64(1), + "Tale": float64(1), + }, + }, }, wantErr: false, }, @@ -642,7 +653,7 @@ func TestIndex_SearchFacets(t *testing.T) { name: "TestIndexSearchWithFacetsWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customSv, query: "prince", request: &SearchRequest{ Facets: []string{"*"}, @@ -661,13 +672,12 @@ func TestIndex_SearchFacets(t *testing.T) { EstimatedTotalHits: 2, Offset: 0, Limit: 20, - FacetDistribution: map[string]interface{}( - map[string]interface{}{ - "tag": map[string]interface{}{ - "Epic fantasy": float64(1), - "Tale": float64(1), - }, - }), + FacetDistribution: map[string]interface{}{ + "tag": map[string]interface{}{ + "Epic fantasy": float64(1), + "Tale": float64(1), + }, + }, }, wantErr: false, }, @@ -675,7 +685,7 @@ func TestIndex_SearchFacets(t *testing.T) { name: "TestIndexSearchWithFacetsAndFacetsStats", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "prince", request: &SearchRequest{ Facets: []string{"book_id"}, @@ -694,27 +704,25 @@ func TestIndex_SearchFacets(t *testing.T) { EstimatedTotalHits: 2, Offset: 0, Limit: 20, - FacetDistribution: map[string]interface{}( - map[string]interface{}{ - "book_id": map[string]interface{}{ - "4": float64(1), - 
"456": float64(1), - }, - }), - FacetStats: map[string]interface{}( - map[string]interface{}{ - "book_id": map[string]interface{}{ - "max": float64(456), - "min": float64(4), - }, - }), + FacetDistribution: map[string]interface{}{ + "book_id": map[string]interface{}{ + "4": float64(1), + "456": float64(1), + }, + }, + FacetStats: map[string]interface{}{ + "book_id": map[string]interface{}{ + "max": float64(456), + "min": float64(4), + }, + }, }, wantErr: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -749,10 +757,12 @@ func TestIndex_SearchFacets(t *testing.T) { } func TestIndex_SearchWithFilters(t *testing.T) { + sv := setup(t, "") + type args struct { UID string PrimaryKey string - client *Client + client ServiceManager query string filterableAttributes []string request *SearchRequest @@ -767,7 +777,7 @@ func TestIndex_SearchWithFilters(t *testing.T) { name: "TestIndexBasicSearchWithFilter", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "and", filterableAttributes: []string{ "tag", @@ -792,7 +802,7 @@ func TestIndex_SearchWithFilters(t *testing.T) { name: "TestIndexSearchWithFilterInInt", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "and", filterableAttributes: []string{ "year", @@ -817,7 +827,7 @@ func TestIndex_SearchWithFilters(t *testing.T) { name: "TestIndexSearchWithFilterArray", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "and", filterableAttributes: []string{ "year", @@ -844,7 +854,7 @@ func TestIndex_SearchWithFilters(t *testing.T) { name: "TestIndexSearchWithFilterMultipleArray", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "and", filterableAttributes: []string{ "year", @@ -873,7 +883,7 @@ func TestIndex_SearchWithFilters(t *testing.T) { name: 
"TestIndexSearchWithMultipleFilter", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "prince", filterableAttributes: []string{ "tag", @@ -902,7 +912,7 @@ func TestIndex_SearchWithFilters(t *testing.T) { name: "TestIndexSearchWithOneFilterAnd", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "", filterableAttributes: []string{ "year", @@ -933,7 +943,7 @@ func TestIndex_SearchWithFilters(t *testing.T) { name: "TestIndexSearchWithMultipleFilterAnd", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "", filterableAttributes: []string{ "tag", @@ -959,7 +969,7 @@ func TestIndex_SearchWithFilters(t *testing.T) { name: "TestIndexSearchWithFilterOr", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "", filterableAttributes: []string{ "year", @@ -991,7 +1001,7 @@ func TestIndex_SearchWithFilters(t *testing.T) { name: "TestIndexSearchWithAttributeToHighlight", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "prince", filterableAttributes: []string{ "book_id", @@ -1017,7 +1027,7 @@ func TestIndex_SearchWithFilters(t *testing.T) { name: "TestIndexSearchWithFilterContainingSpaces", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "and", filterableAttributes: []string{ "tag", @@ -1041,7 +1051,7 @@ func TestIndex_SearchWithFilters(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -1074,10 +1084,12 @@ func TestIndex_SearchWithFilters(t *testing.T) { } func TestIndex_SearchWithSort(t *testing.T) { + sv := setup(t, "") + type args struct { UID string PrimaryKey string - client *Client + client ServiceManager query string sortableAttributes []string request *SearchRequest @@ -1092,7 +1104,7 @@ func TestIndex_SearchWithSort(t *testing.T) { name: 
"TestIndexBasicSearchWithSortIntParameter", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "and", sortableAttributes: []string{ "year", @@ -1128,7 +1140,7 @@ func TestIndex_SearchWithSort(t *testing.T) { name: "TestIndexBasicSearchWithSortStringParameter", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "and", sortableAttributes: []string{ "title", @@ -1164,7 +1176,7 @@ func TestIndex_SearchWithSort(t *testing.T) { name: "TestIndexBasicSearchWithSortMultipleParameter", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "and", sortableAttributes: []string{ "title", @@ -1202,7 +1214,7 @@ func TestIndex_SearchWithSort(t *testing.T) { name: "TestIndexBasicSearchWithSortMultipleParameterReverse", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "and", sortableAttributes: []string{ "title", @@ -1240,7 +1252,7 @@ func TestIndex_SearchWithSort(t *testing.T) { name: "TestIndexBasicSearchWithSortMultipleParameterPlaceHolder", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "", sortableAttributes: []string{ "title", @@ -1278,7 +1290,7 @@ func TestIndex_SearchWithSort(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -1310,10 +1322,15 @@ func TestIndex_SearchWithSort(t *testing.T) { } func TestIndex_SearchOnNestedFileds(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string PrimaryKey string - client *Client + client ServiceManager query string request *SearchRequest searchableAttribute []string @@ -1329,7 +1346,7 @@ func TestIndex_SearchOnNestedFileds(t *testing.T) { name: "TestIndexBasicSearchOnNestedFields", args: args{ UID: "TestIndexBasicSearchOnNestedFields", - 
client: defaultClient, + client: sv, query: "An awesome", request: &SearchRequest{}, }, @@ -1353,7 +1370,7 @@ func TestIndex_SearchOnNestedFileds(t *testing.T) { name: "TestIndexBasicSearchOnNestedFieldsWithCustomClient", args: args{ UID: "TestIndexBasicSearchOnNestedFieldsWithCustomClient", - client: customClient, + client: customSv, query: "An awesome", request: &SearchRequest{}, }, @@ -1377,7 +1394,7 @@ func TestIndex_SearchOnNestedFileds(t *testing.T) { name: "TestIndexSearchOnMultipleNestedFields", args: args{ UID: "TestIndexSearchOnMultipleNestedFields", - client: defaultClient, + client: sv, query: "french", request: &SearchRequest{}, }, @@ -1407,7 +1424,7 @@ func TestIndex_SearchOnNestedFileds(t *testing.T) { name: "TestIndexSearchOnNestedFieldsWithSearchableAttribute", args: args{ UID: "TestIndexSearchOnNestedFieldsWithSearchableAttribute", - client: defaultClient, + client: sv, query: "An awesome", request: &SearchRequest{}, searchableAttribute: []string{ @@ -1434,7 +1451,7 @@ func TestIndex_SearchOnNestedFileds(t *testing.T) { name: "TestIndexSearchOnNestedFieldsWithSortableAttribute", args: args{ UID: "TestIndexSearchOnNestedFieldsWithSortableAttribute", - client: defaultClient, + client: sv, query: "An awesome", request: &SearchRequest{ Sort: []string{ @@ -1467,7 +1484,7 @@ func TestIndex_SearchOnNestedFileds(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexWithNestedFields(tt.args.UID) + setUpIndexWithNestedFields(tt.args.client, tt.args.UID) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -1505,10 +1522,12 @@ func TestIndex_SearchOnNestedFileds(t *testing.T) { } func TestIndex_SearchWithPagination(t *testing.T) { + sv := setup(t, "") + type args struct { UID string PrimaryKey string - client *Client + client ServiceManager query string request *SearchRequest } @@ -1522,7 +1541,7 @@ func TestIndex_SearchWithPagination(t *testing.T) { name: "TestIndexBasicSearchWithHitsPerPage", args: 
args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "and", request: &SearchRequest{ HitsPerPage: 10, @@ -1554,7 +1573,7 @@ func TestIndex_SearchWithPagination(t *testing.T) { name: "TestIndexBasicSearchWithPage", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "and", request: &SearchRequest{ Page: 1, @@ -1586,7 +1605,7 @@ func TestIndex_SearchWithPagination(t *testing.T) { name: "TestIndexBasicSearchWithPageAndHitsPerPage", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "and", request: &SearchRequest{ HitsPerPage: 10, @@ -1618,7 +1637,7 @@ func TestIndex_SearchWithPagination(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -1641,22 +1660,24 @@ func TestIndex_SearchWithPagination(t *testing.T) { } func TestIndex_SearchWithShowRankingScore(t *testing.T) { + sv := setup(t, "") + type args struct { UID string PrimaryKey string - client *Client + client ServiceManager query string request SearchRequest } testArg := args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "and", request: SearchRequest{ ShowRankingScore: true, }, } - SetUpIndexForFaceting() + setUpIndexForFaceting(testArg.client) c := testArg.client i := c.Index(testArg.UID) t.Cleanup(cleanup(c)) @@ -1667,22 +1688,24 @@ func TestIndex_SearchWithShowRankingScore(t *testing.T) { } func TestIndex_SearchWithShowRankingScoreDetails(t *testing.T) { + sv := setup(t, "") + type args struct { UID string PrimaryKey string - client *Client + client ServiceManager query string request SearchRequest } testArg := args{ UID: "indexUID", - client: defaultClient, + client: sv, query: "and", request: SearchRequest{ ShowRankingScoreDetails: true, }, } - SetUpIndexForFaceting() + setUpIndexForFaceting(testArg.client) c := testArg.client i := c.Index(testArg.UID) 
t.Cleanup(cleanup(c)) @@ -1693,18 +1716,20 @@ func TestIndex_SearchWithShowRankingScoreDetails(t *testing.T) { } func TestIndex_SearchWithVectorStore(t *testing.T) { + sv := setup(t, "") + tests := []struct { name string UID string PrimaryKey string - client *Client + client ServiceManager query string request SearchRequest }{ { name: "basic hybrid test", UID: "indexUID", - client: defaultClient, + client: sv, query: "Pride and Prejudice", request: SearchRequest{ Hybrid: &SearchRequestHybrid{ @@ -1717,7 +1742,7 @@ func TestIndex_SearchWithVectorStore(t *testing.T) { { name: "empty Embedder", UID: "indexUID", - client: defaultClient, + client: sv, query: "Pride and Prejudice", request: SearchRequest{ Hybrid: &SearchRequestHybrid{ @@ -1731,7 +1756,7 @@ func TestIndex_SearchWithVectorStore(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - i, err := SetUpIndexWithVector(tt.UID) + i, err := setUpIndexWithVector(tt.client.(*meilisearch), tt.UID) if err != nil { t.Fatal(err) } @@ -1751,16 +1776,18 @@ func TestIndex_SearchWithVectorStore(t *testing.T) { } func TestIndex_SearchWithDistinct(t *testing.T) { + sv := setup(t, "") + tests := []struct { UID string PrimaryKey string - client *Client + client ServiceManager query string request SearchRequest }{ { UID: "indexUID", - client: defaultClient, + client: sv, query: "white shirt", request: SearchRequest{ Distinct: "sku", @@ -1770,7 +1797,7 @@ func TestIndex_SearchWithDistinct(t *testing.T) { for _, tt := range tests { t.Run(tt.UID, func(t *testing.T) { - SetUpDistinctIndex(tt.UID) + setUpDistinctIndex(tt.client, tt.UID) c := tt.client t.Cleanup(cleanup(c)) i := c.Index(tt.UID) @@ -1783,16 +1810,18 @@ func TestIndex_SearchWithDistinct(t *testing.T) { } func TestIndex_SearchSimilarDocuments(t *testing.T) { + sv := setup(t, "") + tests := []struct { UID string PrimaryKey string - client *Client + client ServiceManager request *SimilarDocumentQuery resp *SimilarDocumentResult }{ { UID: 
"indexUID", - client: defaultClient, + client: sv, request: &SimilarDocumentQuery{ Id: "123", }, @@ -1802,7 +1831,7 @@ func TestIndex_SearchSimilarDocuments(t *testing.T) { for _, tt := range tests { t.Run(tt.UID, func(t *testing.T) { - i, err := SetUpIndexWithVector(tt.UID) + i, err := setUpIndexWithVector(tt.client.(*meilisearch), tt.UID) require.NoError(t, err) c := tt.client t.Cleanup(cleanup(c)) @@ -1813,3 +1842,210 @@ func TestIndex_SearchSimilarDocuments(t *testing.T) { }) } } + +func TestIndex_FacetSearch(t *testing.T) { + sv := setup(t, "") + + type args struct { + UID string + PrimaryKey string + client ServiceManager + request *FacetSearchRequest + filterableAttributes []string + } + + tests := []struct { + name string + args args + want *FacetSearchResponse + wantErr bool + }{ + { + name: "TestIndexBasicFacetSearch", + args: args{ + UID: "indexUID", + client: sv, + request: &FacetSearchRequest{ + FacetName: "tag", + FacetQuery: "Novel", + }, + filterableAttributes: []string{"tag"}, + }, + want: &FacetSearchResponse{ + FacetHits: []interface{}{ + map[string]interface{}{ + "value": "Novel", "count": float64(5), + }, + }, + FacetQuery: "Novel", + }, + wantErr: false, + }, + { + name: "TestIndexFacetSearchWithFilter", + args: args{ + UID: "indexUID", + client: sv, + request: &FacetSearchRequest{ + FacetName: "tag", + FacetQuery: "Novel", + Filter: "tag = 'Novel'", + }, + filterableAttributes: []string{"tag"}, + }, + want: &FacetSearchResponse{ + FacetHits: []interface{}{ + map[string]interface{}{ + "value": "Novel", "count": float64(5), + }, + }, + FacetQuery: "Novel", + }, + wantErr: false, + }, + { + name: "TestIndexFacetSearchWithMatchingStrategy", + args: args{ + UID: "indexUID", + client: sv, + request: &FacetSearchRequest{ + FacetName: "tag", + FacetQuery: "Novel", + MatchingStrategy: "frequency", + }, + filterableAttributes: []string{"tag"}, + }, + want: &FacetSearchResponse{ + FacetHits: []interface{}{ + map[string]interface{}{ + "value": "Novel", 
"count": float64(5), + }, + }, + FacetQuery: "Novel", + }, + wantErr: false, + }, + { + name: "TestIndexFacetSearchWithAttributesToSearchOn", + args: args{ + UID: "indexUID", + client: sv, + request: &FacetSearchRequest{ + FacetName: "tag", + FacetQuery: "Novel", + AttributesToSearchOn: []string{"tag"}, + }, + filterableAttributes: []string{"tag"}, + }, + want: &FacetSearchResponse{ + FacetHits: []interface{}{ + map[string]interface{}{ + "value": "Novel", "count": float64(5), + }, + }, + FacetQuery: "Novel", + }, + wantErr: false, + }, + { + name: "TestIndexFacetSearchWithNoFacetSearchRequest", + args: args{ + UID: "indexUID", + client: sv, + request: nil, + }, + want: nil, + wantErr: true, + }, + { + name: "TestIndexFacetSearchWithNoFacetName", + args: args{ + UID: "indexUID", + client: sv, + request: &FacetSearchRequest{ + FacetQuery: "Novel", + }, + }, + want: nil, + wantErr: true, + }, + { + name: "TestIndexFacetSearchWithNoFacetQuery", + args: args{ + UID: "indexUID", + client: sv, + request: &FacetSearchRequest{ + FacetName: "tag", + }, + }, + want: nil, + wantErr: true, + }, + { + name: "TestIndexFacetSearchWithNoFilterableAttributes", + args: args{ + UID: "indexUID", + client: sv, + request: &FacetSearchRequest{ + FacetName: "tag", + FacetQuery: "Novel", + }, + }, + want: nil, + wantErr: true, + }, + { + name: "TestIndexFacetSearchWithQ", + args: args{ + UID: "indexUID", + client: sv, + request: &FacetSearchRequest{ + Q: "query", + FacetName: "tag", + }, + filterableAttributes: []string{"tag"}, + }, + want: &FacetSearchResponse{ + FacetHits: []interface{}{}, + FacetQuery: "", + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + setUpIndexForFaceting(tt.args.client) + c := tt.args.client + i := c.Index(tt.args.UID) + t.Cleanup(cleanup(c)) + + if len(tt.args.filterableAttributes) > 0 { + updateFilter, err := i.UpdateFilterableAttributes(&tt.args.filterableAttributes) + require.NoError(t, err) + 
testWaitForTask(t, i, updateFilter) + } + + gotRaw, err := i.FacetSearch(tt.args.request) + + if tt.wantErr { + require.Error(t, err) + require.Nil(t, gotRaw) + return + } + + require.NoError(t, err) + // Unmarshall the raw response from FacetSearch into a FacetSearchResponse + var got FacetSearchResponse + err = json.Unmarshal(*gotRaw, &got) + require.NoError(t, err, "error unmarshalling raw got FacetSearchResponse") + + require.Equal(t, len(tt.want.FacetHits), len(got.FacetHits)) + for len := range got.FacetHits { + require.Equal(t, tt.want.FacetHits[len].(map[string]interface{})["value"], got.FacetHits[len].(map[string]interface{})["value"]) + require.Equal(t, tt.want.FacetHits[len].(map[string]interface{})["count"], got.FacetHits[len].(map[string]interface{})["count"]) + } + require.Equal(t, tt.want.FacetQuery, got.FacetQuery) + }) + } +} diff --git a/index_settings.go b/index_settings.go index 552786f5..b4d4d3e2 100644 --- a/index_settings.go +++ b/index_settings.go @@ -1,29 +1,38 @@ package meilisearch import ( + "context" "net/http" ) -func (i Index) GetSettings() (resp *Settings, err error) { - resp = &Settings{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings", +func (i *index) GetSettings() (*Settings, error) { + return i.GetSettingsWithContext(context.Background()) +} + +func (i *index) GetSettingsWithContext(ctx context.Context) (*Settings, error) { + resp := new(Settings) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings", method: http.MethodGet, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: "GetSettings", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) UpdateSettings(request *Settings) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings", +func (i *index) 
UpdateSettings(request *Settings) (*TaskInfo, error) { + return i.UpdateSettingsWithContext(context.Background(), request) +} + +func (i *index) UpdateSettingsWithContext(ctx context.Context, request *Settings) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings", method: http.MethodPatch, contentType: contentTypeJSON, withRequest: &request, @@ -31,48 +40,60 @@ func (i Index) UpdateSettings(request *Settings) (resp *TaskInfo, err error) { acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "UpdateSettings", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) ResetSettings() (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings", +func (i *index) ResetSettings() (*TaskInfo, error) { + return i.ResetSettingsWithContext(context.Background()) +} + +func (i *index) ResetSettingsWithContext(ctx context.Context) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings", method: http.MethodDelete, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "ResetSettings", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) GetRankingRules() (resp *[]string, err error) { - resp = &[]string{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/ranking-rules", +func (i *index) GetRankingRules() (*[]string, error) { + return i.GetRankingRulesWithContext(context.Background()) +} + +func (i *index) GetRankingRulesWithContext(ctx context.Context) (*[]string, error) { + resp := &[]string{} + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/ranking-rules", method: 
http.MethodGet, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: "GetRankingRules", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) UpdateRankingRules(request *[]string) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/ranking-rules", +func (i *index) UpdateRankingRules(request *[]string) (*TaskInfo, error) { + return i.UpdateRankingRulesWithContext(context.Background(), request) +} + +func (i *index) UpdateRankingRulesWithContext(ctx context.Context, request *[]string) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/ranking-rules", method: http.MethodPut, contentType: contentTypeJSON, withRequest: &request, @@ -80,49 +101,60 @@ func (i Index) UpdateRankingRules(request *[]string) (resp *TaskInfo, err error) acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "UpdateRankingRules", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) ResetRankingRules() (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/ranking-rules", +func (i *index) ResetRankingRules() (*TaskInfo, error) { + return i.ResetRankingRulesWithContext(context.Background()) +} + +func (i *index) ResetRankingRulesWithContext(ctx context.Context) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/ranking-rules", method: http.MethodDelete, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "ResetRankingRules", } - if err := i.client.executeRequest(req); err != nil { + if err := 
i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) GetDistinctAttribute() (resp *string, err error) { - empty := "" - resp = &empty - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/distinct-attribute", +func (i *index) GetDistinctAttribute() (*string, error) { + return i.GetDistinctAttributeWithContext(context.Background()) +} + +func (i *index) GetDistinctAttributeWithContext(ctx context.Context) (*string, error) { + resp := new(string) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/distinct-attribute", method: http.MethodGet, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: "GetDistinctAttribute", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) UpdateDistinctAttribute(request string) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/distinct-attribute", +func (i *index) UpdateDistinctAttribute(request string) (*TaskInfo, error) { + return i.UpdateDistinctAttributeWithContext(context.Background(), request) +} + +func (i *index) UpdateDistinctAttributeWithContext(ctx context.Context, request string) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/distinct-attribute", method: http.MethodPut, contentType: contentTypeJSON, withRequest: &request, @@ -130,48 +162,60 @@ func (i Index) UpdateDistinctAttribute(request string) (resp *TaskInfo, err erro acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "UpdateDistinctAttribute", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) ResetDistinctAttribute() (resp *TaskInfo, err error) { - resp = 
&TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/distinct-attribute", +func (i *index) ResetDistinctAttribute() (*TaskInfo, error) { + return i.ResetDistinctAttributeWithContext(context.Background()) +} + +func (i *index) ResetDistinctAttributeWithContext(ctx context.Context) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/distinct-attribute", method: http.MethodDelete, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "ResetDistinctAttribute", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) GetSearchableAttributes() (resp *[]string, err error) { - resp = &[]string{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/searchable-attributes", +func (i *index) GetSearchableAttributes() (*[]string, error) { + return i.GetSearchableAttributesWithContext(context.Background()) +} + +func (i *index) GetSearchableAttributesWithContext(ctx context.Context) (*[]string, error) { + resp := &[]string{} + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/searchable-attributes", method: http.MethodGet, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: "GetSearchableAttributes", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) UpdateSearchableAttributes(request *[]string) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/searchable-attributes", +func (i *index) UpdateSearchableAttributes(request *[]string) (*TaskInfo, error) { + return i.UpdateSearchableAttributesWithContext(context.Background(), request) +} + +func (i *index) 
UpdateSearchableAttributesWithContext(ctx context.Context, request *[]string) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/searchable-attributes", method: http.MethodPut, contentType: contentTypeJSON, withRequest: &request, @@ -179,48 +223,60 @@ func (i Index) UpdateSearchableAttributes(request *[]string) (resp *TaskInfo, er acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "UpdateSearchableAttributes", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) ResetSearchableAttributes() (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/searchable-attributes", +func (i *index) ResetSearchableAttributes() (*TaskInfo, error) { + return i.ResetSearchableAttributesWithContext(context.Background()) +} + +func (i *index) ResetSearchableAttributesWithContext(ctx context.Context) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/searchable-attributes", method: http.MethodDelete, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "ResetSearchableAttributes", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) GetDisplayedAttributes() (resp *[]string, err error) { - resp = &[]string{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/displayed-attributes", +func (i *index) GetDisplayedAttributes() (*[]string, error) { + return i.GetDisplayedAttributesWithContext(context.Background()) +} + +func (i *index) GetDisplayedAttributesWithContext(ctx context.Context) (*[]string, error) { + resp := &[]string{} + req := &internalRequest{ + endpoint: "/indexes/" + i.uid 
+ "/settings/displayed-attributes", method: http.MethodGet, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: "GetDisplayedAttributes", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) UpdateDisplayedAttributes(request *[]string) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/displayed-attributes", +func (i *index) UpdateDisplayedAttributes(request *[]string) (*TaskInfo, error) { + return i.UpdateDisplayedAttributesWithContext(context.Background(), request) +} + +func (i *index) UpdateDisplayedAttributesWithContext(ctx context.Context, request *[]string) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/displayed-attributes", method: http.MethodPut, contentType: contentTypeJSON, withRequest: &request, @@ -228,48 +284,60 @@ func (i Index) UpdateDisplayedAttributes(request *[]string) (resp *TaskInfo, err acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "UpdateDisplayedAttributes", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) ResetDisplayedAttributes() (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/displayed-attributes", +func (i *index) ResetDisplayedAttributes() (*TaskInfo, error) { + return i.ResetDisplayedAttributesWithContext(context.Background()) +} + +func (i *index) ResetDisplayedAttributesWithContext(ctx context.Context) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/displayed-attributes", method: http.MethodDelete, withRequest: nil, withResponse: resp, acceptedStatusCodes: 
[]int{http.StatusAccepted}, functionName: "ResetDisplayedAttributes", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) GetStopWords() (resp *[]string, err error) { - resp = &[]string{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/stop-words", +func (i *index) GetStopWords() (*[]string, error) { + return i.GetStopWordsWithContext(context.Background()) +} + +func (i *index) GetStopWordsWithContext(ctx context.Context) (*[]string, error) { + resp := &[]string{} + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/stop-words", method: http.MethodGet, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: "GetStopWords", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) UpdateStopWords(request *[]string) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/stop-words", +func (i *index) UpdateStopWords(request *[]string) (*TaskInfo, error) { + return i.UpdateStopWordsWithContext(context.Background(), request) +} + +func (i *index) UpdateStopWordsWithContext(ctx context.Context, request *[]string) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/stop-words", method: http.MethodPut, contentType: contentTypeJSON, withRequest: &request, @@ -277,48 +345,60 @@ func (i Index) UpdateStopWords(request *[]string) (resp *TaskInfo, err error) { acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "UpdateStopWords", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) ResetStopWords() (resp *TaskInfo, err 
error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/stop-words", +func (i *index) ResetStopWords() (*TaskInfo, error) { + return i.ResetStopWordsWithContext(context.Background()) +} + +func (i *index) ResetStopWordsWithContext(ctx context.Context) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/stop-words", method: http.MethodDelete, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "ResetStopWords", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) GetSynonyms() (resp *map[string][]string, err error) { - resp = &map[string][]string{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/synonyms", +func (i *index) GetSynonyms() (*map[string][]string, error) { + return i.GetSynonymsWithContext(context.Background()) +} + +func (i *index) GetSynonymsWithContext(ctx context.Context) (*map[string][]string, error) { + resp := &map[string][]string{} + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/synonyms", method: http.MethodGet, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: "GetSynonyms", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) UpdateSynonyms(request *map[string][]string) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/synonyms", +func (i *index) UpdateSynonyms(request *map[string][]string) (*TaskInfo, error) { + return i.UpdateSynonymsWithContext(context.Background(), request) +} + +func (i *index) UpdateSynonymsWithContext(ctx context.Context, request *map[string][]string) (*TaskInfo, error) { + 
resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/synonyms", method: http.MethodPut, contentType: contentTypeJSON, withRequest: &request, @@ -326,48 +406,60 @@ func (i Index) UpdateSynonyms(request *map[string][]string) (resp *TaskInfo, err acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "UpdateSynonyms", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) ResetSynonyms() (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/synonyms", +func (i *index) ResetSynonyms() (*TaskInfo, error) { + return i.ResetSynonymsWithContext(context.Background()) +} + +func (i *index) ResetSynonymsWithContext(ctx context.Context) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/synonyms", method: http.MethodDelete, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "ResetSynonyms", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) GetFilterableAttributes() (resp *[]string, err error) { - resp = &[]string{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/filterable-attributes", +func (i *index) GetFilterableAttributes() (*[]string, error) { + return i.GetFilterableAttributesWithContext(context.Background()) +} + +func (i *index) GetFilterableAttributesWithContext(ctx context.Context) (*[]string, error) { + resp := &[]string{} + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/filterable-attributes", method: http.MethodGet, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: "GetFilterableAttributes", } - if err := 
i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) UpdateFilterableAttributes(request *[]string) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/filterable-attributes", +func (i *index) UpdateFilterableAttributes(request *[]string) (*TaskInfo, error) { + return i.UpdateFilterableAttributesWithContext(context.Background(), request) +} + +func (i *index) UpdateFilterableAttributesWithContext(ctx context.Context, request *[]string) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/filterable-attributes", method: http.MethodPut, contentType: contentTypeJSON, withRequest: &request, @@ -375,48 +467,60 @@ func (i Index) UpdateFilterableAttributes(request *[]string) (resp *TaskInfo, er acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "UpdateFilterableAttributes", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) ResetFilterableAttributes() (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/filterable-attributes", +func (i *index) ResetFilterableAttributes() (*TaskInfo, error) { + return i.ResetFilterableAttributesWithContext(context.Background()) +} + +func (i *index) ResetFilterableAttributesWithContext(ctx context.Context) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/filterable-attributes", method: http.MethodDelete, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "ResetFilterableAttributes", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil 
{ return nil, err } return resp, nil } -func (i Index) GetSortableAttributes() (resp *[]string, err error) { - resp = &[]string{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/sortable-attributes", +func (i *index) GetSortableAttributes() (*[]string, error) { + return i.GetSortableAttributesWithContext(context.Background()) +} + +func (i *index) GetSortableAttributesWithContext(ctx context.Context) (*[]string, error) { + resp := &[]string{} + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/sortable-attributes", method: http.MethodGet, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: "GetSortableAttributes", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) UpdateSortableAttributes(request *[]string) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/sortable-attributes", +func (i *index) UpdateSortableAttributes(request *[]string) (*TaskInfo, error) { + return i.UpdateSortableAttributesWithContext(context.Background(), request) +} + +func (i *index) UpdateSortableAttributesWithContext(ctx context.Context, request *[]string) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/sortable-attributes", method: http.MethodPut, contentType: contentTypeJSON, withRequest: &request, @@ -424,48 +528,60 @@ func (i Index) UpdateSortableAttributes(request *[]string) (resp *TaskInfo, err acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "UpdateSortableAttributes", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) ResetSortableAttributes() (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := 
internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/sortable-attributes", +func (i *index) ResetSortableAttributes() (*TaskInfo, error) { + return i.ResetSortableAttributesWithContext(context.Background()) +} + +func (i *index) ResetSortableAttributesWithContext(ctx context.Context) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/sortable-attributes", method: http.MethodDelete, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "ResetSortableAttributes", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) GetTypoTolerance() (resp *TypoTolerance, err error) { - resp = &TypoTolerance{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/typo-tolerance", +func (i *index) GetTypoTolerance() (*TypoTolerance, error) { + return i.GetTypoToleranceWithContext(context.Background()) +} + +func (i *index) GetTypoToleranceWithContext(ctx context.Context) (*TypoTolerance, error) { + resp := new(TypoTolerance) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/typo-tolerance", method: http.MethodGet, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: "GetTypoTolerance", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) UpdateTypoTolerance(request *TypoTolerance) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/typo-tolerance", +func (i *index) UpdateTypoTolerance(request *TypoTolerance) (*TaskInfo, error) { + return i.UpdateTypoToleranceWithContext(context.Background(), request) +} + +func (i *index) UpdateTypoToleranceWithContext(ctx context.Context, request 
*TypoTolerance) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/typo-tolerance", method: http.MethodPatch, contentType: contentTypeJSON, withRequest: &request, @@ -473,48 +589,60 @@ func (i Index) UpdateTypoTolerance(request *TypoTolerance) (resp *TaskInfo, err acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "UpdateTypoTolerance", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) ResetTypoTolerance() (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/typo-tolerance", +func (i *index) ResetTypoTolerance() (*TaskInfo, error) { + return i.ResetTypoToleranceWithContext(context.Background()) +} + +func (i *index) ResetTypoToleranceWithContext(ctx context.Context) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/typo-tolerance", method: http.MethodDelete, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "ResetTypoTolerance", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) GetPagination() (resp *Pagination, err error) { - resp = &Pagination{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/pagination", +func (i *index) GetPagination() (*Pagination, error) { + return i.GetPaginationWithContext(context.Background()) +} + +func (i *index) GetPaginationWithContext(ctx context.Context) (*Pagination, error) { + resp := new(Pagination) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/pagination", method: http.MethodGet, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: 
"GetPagination", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) UpdatePagination(request *Pagination) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/pagination", +func (i *index) UpdatePagination(request *Pagination) (*TaskInfo, error) { + return i.UpdatePaginationWithContext(context.Background(), request) +} + +func (i *index) UpdatePaginationWithContext(ctx context.Context, request *Pagination) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/pagination", method: http.MethodPatch, contentType: contentTypeJSON, withRequest: &request, @@ -522,48 +650,60 @@ func (i Index) UpdatePagination(request *Pagination) (resp *TaskInfo, err error) acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "UpdatePagination", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) ResetPagination() (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/pagination", +func (i *index) ResetPagination() (*TaskInfo, error) { + return i.ResetPaginationWithContext(context.Background()) +} + +func (i *index) ResetPaginationWithContext(ctx context.Context) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/pagination", method: http.MethodDelete, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "ResetPagination", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) GetFaceting() (resp *Faceting, err error) { - resp 
= &Faceting{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/faceting", +func (i *index) GetFaceting() (*Faceting, error) { + return i.GetFacetingWithContext(context.Background()) +} + +func (i *index) GetFacetingWithContext(ctx context.Context) (*Faceting, error) { + resp := new(Faceting) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/faceting", method: http.MethodGet, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: "GetFaceting", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) UpdateFaceting(request *Faceting) (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/faceting", +func (i *index) UpdateFaceting(request *Faceting) (*TaskInfo, error) { + return i.UpdateFacetingWithContext(context.Background(), request) +} + +func (i *index) UpdateFacetingWithContext(ctx context.Context, request *Faceting) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/faceting", method: http.MethodPatch, contentType: contentTypeJSON, withRequest: &request, @@ -571,115 +711,149 @@ func (i Index) UpdateFaceting(request *Faceting) (resp *TaskInfo, err error) { acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "UpdateFaceting", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) ResetFaceting() (resp *TaskInfo, err error) { - resp = &TaskInfo{} - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/faceting", +func (i *index) ResetFaceting() (*TaskInfo, error) { + return i.ResetFacetingWithContext(context.Background()) +} + +func (i *index) ResetFacetingWithContext(ctx context.Context) (*TaskInfo, 
error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/faceting", method: http.MethodDelete, withRequest: nil, withResponse: resp, acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "ResetFaceting", } - if err := i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) GetEmbedders() (resp map[string]Embedder, err error) { - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/embedders", +func (i *index) GetEmbedders() (map[string]Embedder, error) { + return i.GetEmbeddersWithContext(context.Background()) +} + +func (i *index) GetEmbeddersWithContext(ctx context.Context) (map[string]Embedder, error) { + resp := make(map[string]Embedder) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/embedders", method: http.MethodGet, withRequest: nil, withResponse: &resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: "GetEmbedders", } - if err = i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) UpdateEmbedders(request map[string]Embedder) (resp *TaskInfo, err error) { - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/embedders", +func (i *index) UpdateEmbedders(request map[string]Embedder) (*TaskInfo, error) { + return i.UpdateEmbeddersWithContext(context.Background(), request) +} + +func (i *index) UpdateEmbeddersWithContext(ctx context.Context, request map[string]Embedder) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/embedders", method: http.MethodPatch, contentType: contentTypeJSON, withRequest: &request, - withResponse: &resp, + withResponse: resp, acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "UpdateEmbedders", } - if err = i.client.executeRequest(req); 
err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) ResetEmbedders() (resp *TaskInfo, err error) { - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/embedders", +func (i *index) ResetEmbedders() (*TaskInfo, error) { + return i.ResetEmbeddersWithContext(context.Background()) +} + +func (i *index) ResetEmbeddersWithContext(ctx context.Context) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/embedders", method: http.MethodDelete, withRequest: nil, - withResponse: &resp, + withResponse: resp, acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "ResetEmbedders", } - if err = i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) GetSearchCutoffMs() (resp int64, err error) { - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/search-cutoff-ms", +func (i *index) GetSearchCutoffMs() (int64, error) { + return i.GetSearchCutoffMsWithContext(context.Background()) +} + +func (i *index) GetSearchCutoffMsWithContext(ctx context.Context) (int64, error) { + var resp int64 + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/search-cutoff-ms", method: http.MethodGet, withRequest: nil, withResponse: &resp, acceptedStatusCodes: []int{http.StatusOK}, functionName: "GetSearchCutoffMs", } - if err = i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return 0, err } return resp, nil } -func (i Index) UpdateSearchCutoffMs(request int64) (resp *TaskInfo, err error) { - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/search-cutoff-ms", +func (i *index) UpdateSearchCutoffMs(request int64) (*TaskInfo, error) { + return i.UpdateSearchCutoffMsWithContext(context.Background(), request) +} + +func (i *index) 
UpdateSearchCutoffMsWithContext(ctx context.Context, request int64) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/search-cutoff-ms", method: http.MethodPut, contentType: contentTypeJSON, withRequest: &request, - withResponse: &resp, + withResponse: resp, acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "UpdateSearchCutoffMs", } - if err = i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil } -func (i Index) ResetSearchCutoffMs() (resp *TaskInfo, err error) { - req := internalRequest{ - endpoint: "/indexes/" + i.UID + "/settings/search-cutoff-ms", +func (i *index) ResetSearchCutoffMs() (*TaskInfo, error) { + return i.ResetSearchCutoffMsWithContext(context.Background()) +} + +func (i *index) ResetSearchCutoffMsWithContext(ctx context.Context) (*TaskInfo, error) { + resp := new(TaskInfo) + req := &internalRequest{ + endpoint: "/indexes/" + i.uid + "/settings/search-cutoff-ms", method: http.MethodDelete, withRequest: nil, - withResponse: &resp, + withResponse: resp, acceptedStatusCodes: []int{http.StatusAccepted}, functionName: "ResetSearchCutoffMs", } - if err = i.client.executeRequest(req); err != nil { + if err := i.client.executeRequest(ctx, req); err != nil { return nil, err } return resp, nil diff --git a/index_settings_test.go b/index_settings_test.go index 47918570..43e71525 100644 --- a/index_settings_test.go +++ b/index_settings_test.go @@ -1,15 +1,20 @@ package meilisearch import ( - "testing" - + "crypto/tls" "github.com/stretchr/testify/require" + "testing" ) func TestIndex_GetFilterableAttributes(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -19,20 +24,20 @@ func 
TestIndex_GetFilterableAttributes(t *testing.T) { name: "TestIndexBasicGetFilterableAttributes", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, }, { name: "TestIndexGetFilterableAttributesWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -45,9 +50,14 @@ func TestIndex_GetFilterableAttributes(t *testing.T) { } func TestIndex_GetDisplayedAttributes(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -58,7 +68,7 @@ func TestIndex_GetDisplayedAttributes(t *testing.T) { name: "TestIndexBasicGetDisplayedAttributes", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantResp: &[]string{"*"}, }, @@ -66,14 +76,14 @@ func TestIndex_GetDisplayedAttributes(t *testing.T) { name: "TestIndexGetDisplayedAttributesWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, wantResp: &[]string{"*"}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -86,9 +96,11 @@ func TestIndex_GetDisplayedAttributes(t *testing.T) { } func TestIndex_GetDistinctAttribute(t *testing.T) { + meili := setup(t, "") + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -98,20 +110,20 @@ func TestIndex_GetDistinctAttribute(t *testing.T) { name: "TestIndexBasicGetDistinctAttribute", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, }, { name: 
"TestIndexBasicGetDistinctAttribute", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -124,9 +136,11 @@ func TestIndex_GetDistinctAttribute(t *testing.T) { } func TestIndex_GetRankingRules(t *testing.T) { + meili := setup(t, "") + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -137,7 +151,7 @@ func TestIndex_GetRankingRules(t *testing.T) { name: "TestIndexBasicGetRankingRules", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantResp: &defaultRankingRules, }, @@ -145,14 +159,14 @@ func TestIndex_GetRankingRules(t *testing.T) { name: "TestIndexGetRankingRulesWithCustomClient", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantResp: &defaultRankingRules, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -165,9 +179,11 @@ func TestIndex_GetRankingRules(t *testing.T) { } func TestIndex_GetSearchableAttributes(t *testing.T) { + meili := setup(t, "") + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -178,7 +194,7 @@ func TestIndex_GetSearchableAttributes(t *testing.T) { name: "TestIndexBasicGetSearchableAttributes", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantResp: &[]string{"*"}, }, @@ -186,14 +202,14 @@ func TestIndex_GetSearchableAttributes(t *testing.T) { name: "TestIndexGetSearchableAttributesCustomClient", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantResp: &[]string{"*"}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - 
SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -206,9 +222,14 @@ func TestIndex_GetSearchableAttributes(t *testing.T) { } func TestIndex_GetSettings(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -219,7 +240,7 @@ func TestIndex_GetSettings(t *testing.T) { name: "TestIndexBasicGetSettings", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantResp: &Settings{ RankingRules: defaultRankingRules, @@ -240,7 +261,7 @@ func TestIndex_GetSettings(t *testing.T) { name: "TestIndexGetSettingsWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, wantResp: &Settings{ RankingRules: defaultRankingRules, @@ -260,7 +281,7 @@ func TestIndex_GetSettings(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -273,9 +294,14 @@ func TestIndex_GetSettings(t *testing.T) { } func TestIndex_GetStopWords(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -285,20 +311,20 @@ func TestIndex_GetStopWords(t *testing.T) { name: "TestIndexBasicGetStopWords", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, }, { name: "TestIndexGetStopWordsCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c 
:= tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -311,9 +337,14 @@ func TestIndex_GetStopWords(t *testing.T) { } func TestIndex_GetSynonyms(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -323,20 +354,20 @@ func TestIndex_GetSynonyms(t *testing.T) { name: "TestIndexBasicGetSynonyms", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, }, { name: "TestIndexGetSynonymsWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -349,9 +380,14 @@ func TestIndex_GetSynonyms(t *testing.T) { } func TestIndex_GetSortableAttributes(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -361,20 +397,20 @@ func TestIndex_GetSortableAttributes(t *testing.T) { name: "TestIndexBasicGetSortableAttributes", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, }, { name: "TestIndexGetSortableAttributesWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -387,9 +423,14 @@ func TestIndex_GetSortableAttributes(t *testing.T) { } func TestIndex_GetTypoTolerance(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", 
WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -400,7 +441,7 @@ func TestIndex_GetTypoTolerance(t *testing.T) { name: "TestIndexBasicGetTypoTolerance", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantResp: &defaultTypoTolerance, }, @@ -408,14 +449,14 @@ func TestIndex_GetTypoTolerance(t *testing.T) { name: "TestIndexGetTypoToleranceWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, wantResp: &defaultTypoTolerance, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -428,9 +469,14 @@ func TestIndex_GetTypoTolerance(t *testing.T) { } func TestIndex_GetPagination(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -441,7 +487,7 @@ func TestIndex_GetPagination(t *testing.T) { name: "TestIndexBasicGetPagination", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantResp: &defaultPagination, }, @@ -449,14 +495,14 @@ func TestIndex_GetPagination(t *testing.T) { name: "TestIndexGetPaginationWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, wantResp: &defaultPagination, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -469,9 +515,14 @@ func TestIndex_GetPagination(t *testing.T) { } func TestIndex_GetFaceting(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", 
WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -482,7 +533,7 @@ func TestIndex_GetFaceting(t *testing.T) { name: "TestIndexBasicGetFaceting", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantResp: &defaultFaceting, }, @@ -490,14 +541,14 @@ func TestIndex_GetFaceting(t *testing.T) { name: "TestIndexGetFacetingWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, wantResp: &defaultFaceting, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -510,9 +561,14 @@ func TestIndex_GetFaceting(t *testing.T) { } func TestIndex_ResetFilterableAttributes(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -523,7 +579,7 @@ func TestIndex_ResetFilterableAttributes(t *testing.T) { name: "TestIndexBasicResetFilterableAttributes", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -533,7 +589,7 @@ func TestIndex_ResetFilterableAttributes(t *testing.T) { name: "TestIndexResetFilterableAttributesCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -542,7 +598,7 @@ func TestIndex_ResetFilterableAttributes(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -560,9 +616,14 @@ func TestIndex_ResetFilterableAttributes(t *testing.T) { } func 
TestIndex_ResetDisplayedAttributes(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -574,7 +635,7 @@ func TestIndex_ResetDisplayedAttributes(t *testing.T) { name: "TestIndexBasicResetDisplayedAttributes", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -585,7 +646,7 @@ func TestIndex_ResetDisplayedAttributes(t *testing.T) { name: "TestIndexResetDisplayedAttributesCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -595,7 +656,7 @@ func TestIndex_ResetDisplayedAttributes(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -613,9 +674,14 @@ func TestIndex_ResetDisplayedAttributes(t *testing.T) { } func TestIndex_ResetDistinctAttribute(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -626,7 +692,7 @@ func TestIndex_ResetDistinctAttribute(t *testing.T) { name: "TestIndexBasicResetDistinctAttribute", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -636,7 +702,7 @@ func TestIndex_ResetDistinctAttribute(t *testing.T) { name: "TestIndexResetDistinctAttributeWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -645,7 +711,7 @@ func TestIndex_ResetDistinctAttribute(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t 
*testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -663,9 +729,14 @@ func TestIndex_ResetDistinctAttribute(t *testing.T) { } func TestIndex_ResetRankingRules(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -677,7 +748,7 @@ func TestIndex_ResetRankingRules(t *testing.T) { name: "TestIndexBasicResetRankingRules", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -688,7 +759,7 @@ func TestIndex_ResetRankingRules(t *testing.T) { name: "TestIndexResetRankingRulesWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -698,7 +769,7 @@ func TestIndex_ResetRankingRules(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -716,9 +787,14 @@ func TestIndex_ResetRankingRules(t *testing.T) { } func TestIndex_ResetSearchableAttributes(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -730,7 +806,7 @@ func TestIndex_ResetSearchableAttributes(t *testing.T) { name: "TestIndexBasicResetSearchableAttributes", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -741,7 +817,7 @@ func TestIndex_ResetSearchableAttributes(t *testing.T) { name: "TestIndexResetSearchableAttributesWithCustomClient", args: args{ UID: "indexUID", - client: 
customClient, + client: customMeili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -751,7 +827,7 @@ func TestIndex_ResetSearchableAttributes(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -769,9 +845,14 @@ func TestIndex_ResetSearchableAttributes(t *testing.T) { } func TestIndex_ResetSettings(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -783,7 +864,7 @@ func TestIndex_ResetSettings(t *testing.T) { name: "TestIndexBasicResetSettings", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -806,7 +887,7 @@ func TestIndex_ResetSettings(t *testing.T) { name: "TestIndexResetSettingsWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -828,7 +909,7 @@ func TestIndex_ResetSettings(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -846,9 +927,14 @@ func TestIndex_ResetSettings(t *testing.T) { } func TestIndex_ResetStopWords(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -859,7 +945,7 @@ func TestIndex_ResetStopWords(t *testing.T) { name: "TestIndexBasicResetStopWords", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -869,7 +955,7 @@ func 
TestIndex_ResetStopWords(t *testing.T) { name: "TestIndexResetStopWordsWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -878,7 +964,7 @@ func TestIndex_ResetStopWords(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -896,9 +982,14 @@ func TestIndex_ResetStopWords(t *testing.T) { } func TestIndex_ResetSynonyms(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -909,7 +1000,7 @@ func TestIndex_ResetSynonyms(t *testing.T) { name: "TestIndexBasicResetSynonyms", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -919,7 +1010,7 @@ func TestIndex_ResetSynonyms(t *testing.T) { name: "TestIndexResetSynonymsWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -928,7 +1019,7 @@ func TestIndex_ResetSynonyms(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -946,9 +1037,14 @@ func TestIndex_ResetSynonyms(t *testing.T) { } func TestIndex_ResetSortableAttributes(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -959,7 +1055,7 @@ func TestIndex_ResetSortableAttributes(t *testing.T) { name: "TestIndexBasicResetSortableAttributes", args: 
args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -969,7 +1065,7 @@ func TestIndex_ResetSortableAttributes(t *testing.T) { name: "TestIndexResetSortableAttributesCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -978,7 +1074,7 @@ func TestIndex_ResetSortableAttributes(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -996,9 +1092,14 @@ func TestIndex_ResetSortableAttributes(t *testing.T) { } func TestIndex_ResetTypoTolerance(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -1010,7 +1111,7 @@ func TestIndex_ResetTypoTolerance(t *testing.T) { name: "TestIndexBasicResetTypoTolerance", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -1021,7 +1122,7 @@ func TestIndex_ResetTypoTolerance(t *testing.T) { name: "TestIndexResetTypoToleranceWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -1031,7 +1132,7 @@ func TestIndex_ResetTypoTolerance(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -1049,9 +1150,14 @@ func TestIndex_ResetTypoTolerance(t *testing.T) { } func TestIndex_ResetPagination(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client 
*Client + client ServiceManager } tests := []struct { name string @@ -1063,7 +1169,7 @@ func TestIndex_ResetPagination(t *testing.T) { name: "TestIndexBasicResetPagination", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -1074,7 +1180,7 @@ func TestIndex_ResetPagination(t *testing.T) { name: "TestIndexResetPaginationWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -1084,7 +1190,7 @@ func TestIndex_ResetPagination(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -1102,9 +1208,14 @@ func TestIndex_ResetPagination(t *testing.T) { } func TestIndex_ResetFaceting(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -1116,7 +1227,7 @@ func TestIndex_ResetFaceting(t *testing.T) { name: "TestIndexBasicResetFaceting", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -1127,7 +1238,7 @@ func TestIndex_ResetFaceting(t *testing.T) { name: "TestIndexResetFacetingWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, }, wantTask: &TaskInfo{ TaskUID: 1, @@ -1137,7 +1248,7 @@ func TestIndex_ResetFaceting(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -1155,9 +1266,14 @@ func TestIndex_ResetFaceting(t *testing.T) { } func TestIndex_UpdateFilterableAttributes(t *testing.T) { + meili := setup(t, "") + customMeili 
:= setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager request []string } tests := []struct { @@ -1169,7 +1285,7 @@ func TestIndex_UpdateFilterableAttributes(t *testing.T) { name: "TestIndexBasicUpdateFilterableAttributes", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, request: []string{ "title", }, @@ -1182,7 +1298,7 @@ func TestIndex_UpdateFilterableAttributes(t *testing.T) { name: "TestIndexUpdateFilterableAttributesWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, request: []string{ "title", }, @@ -1194,7 +1310,7 @@ func TestIndex_UpdateFilterableAttributes(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -1216,9 +1332,14 @@ func TestIndex_UpdateFilterableAttributes(t *testing.T) { } func TestIndex_UpdateDisplayedAttributes(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager request []string } tests := []struct { @@ -1231,7 +1352,7 @@ func TestIndex_UpdateDisplayedAttributes(t *testing.T) { name: "TestIndexBasicUpdateDisplayedAttributes", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, request: []string{ "book_id", "tag", "title", }, @@ -1245,7 +1366,7 @@ func TestIndex_UpdateDisplayedAttributes(t *testing.T) { name: "TestIndexUpdateDisplayedAttributesWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, request: []string{ "book_id", "tag", "title", }, @@ -1258,7 +1379,7 @@ func TestIndex_UpdateDisplayedAttributes(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - 
SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -1280,9 +1401,14 @@ func TestIndex_UpdateDisplayedAttributes(t *testing.T) { } func TestIndex_UpdateDistinctAttribute(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager request string } tests := []struct { @@ -1294,7 +1420,7 @@ func TestIndex_UpdateDistinctAttribute(t *testing.T) { name: "TestIndexBasicUpdateDistinctAttribute", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, request: "movie_id", }, wantTask: &TaskInfo{ @@ -1305,7 +1431,7 @@ func TestIndex_UpdateDistinctAttribute(t *testing.T) { name: "TestIndexUpdateDistinctAttributeWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, request: "movie_id", }, wantTask: &TaskInfo{ @@ -1315,7 +1441,7 @@ func TestIndex_UpdateDistinctAttribute(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -1337,9 +1463,14 @@ func TestIndex_UpdateDistinctAttribute(t *testing.T) { } func TestIndex_UpdateRankingRules(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager request []string } tests := []struct { @@ -1352,7 +1483,7 @@ func TestIndex_UpdateRankingRules(t *testing.T) { name: "TestIndexBasicUpdateRankingRules", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, request: []string{ "typo", "words", }, @@ -1366,7 +1497,7 @@ func TestIndex_UpdateRankingRules(t *testing.T) { name: "TestIndexUpdateRankingRulesWithCustomClient", args: 
args{ UID: "indexUID", - client: customClient, + client: customMeili, request: []string{ "typo", "words", }, @@ -1380,7 +1511,7 @@ func TestIndex_UpdateRankingRules(t *testing.T) { name: "TestIndexUpdateRankingRulesAscending", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, request: []string{ "BookID:asc", }, @@ -1393,7 +1524,7 @@ func TestIndex_UpdateRankingRules(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -1415,9 +1546,14 @@ func TestIndex_UpdateRankingRules(t *testing.T) { } func TestIndex_UpdateSearchableAttributes(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager request []string } tests := []struct { @@ -1430,7 +1566,7 @@ func TestIndex_UpdateSearchableAttributes(t *testing.T) { name: "TestIndexBasicUpdateSearchableAttributes", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, request: []string{ "title", "tag", }, @@ -1444,7 +1580,7 @@ func TestIndex_UpdateSearchableAttributes(t *testing.T) { name: "TestIndexUpdateSearchableAttributesWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, request: []string{ "title", "tag", }, @@ -1457,7 +1593,7 @@ func TestIndex_UpdateSearchableAttributes(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -1479,9 +1615,14 @@ func TestIndex_UpdateSearchableAttributes(t *testing.T) { } func TestIndex_UpdateSettings(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + 
})) + type args struct { UID string - client *Client + client ServiceManager request Settings } tests := []struct { @@ -1494,7 +1635,7 @@ func TestIndex_UpdateSettings(t *testing.T) { name: "TestIndexBasicUpdateSettings", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, request: Settings{ RankingRules: []string{ "typo", "words", @@ -1558,7 +1699,7 @@ func TestIndex_UpdateSettings(t *testing.T) { name: "TestIndexUpdateSettingsWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, request: Settings{ RankingRules: []string{ "typo", "words", @@ -1621,7 +1762,7 @@ func TestIndex_UpdateSettings(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -1639,9 +1780,14 @@ func TestIndex_UpdateSettings(t *testing.T) { } func TestIndex_UpdateSettingsOneByOne(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager firstRequest Settings firstResponse Settings secondRequest Settings @@ -1657,7 +1803,7 @@ func TestIndex_UpdateSettingsOneByOne(t *testing.T) { name: "TestIndexUpdateJustSynonyms", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, firstRequest: Settings{ RankingRules: []string{ "typo", "words", @@ -1727,7 +1873,7 @@ func TestIndex_UpdateSettingsOneByOne(t *testing.T) { name: "TestIndexUpdateJustSynonymsWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, firstRequest: Settings{ RankingRules: []string{ "typo", "words", @@ -1797,7 +1943,7 @@ func TestIndex_UpdateSettingsOneByOne(t *testing.T) { name: "TestIndexUpdateJustSearchableAttributes", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, firstRequest: 
Settings{ RankingRules: []string{ "typo", "words", @@ -1867,7 +2013,7 @@ func TestIndex_UpdateSettingsOneByOne(t *testing.T) { name: "TestIndexUpdateJustDisplayedAttributes", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, firstRequest: Settings{ RankingRules: []string{ "typo", "words", @@ -1937,7 +2083,7 @@ func TestIndex_UpdateSettingsOneByOne(t *testing.T) { name: "TestIndexUpdateJustStopWords", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, firstRequest: Settings{ RankingRules: []string{ "typo", "words", @@ -2007,7 +2153,7 @@ func TestIndex_UpdateSettingsOneByOne(t *testing.T) { name: "TestIndexUpdateJustFilterableAttributes", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, firstRequest: Settings{ RankingRules: []string{ "typo", "words", @@ -2077,7 +2223,7 @@ func TestIndex_UpdateSettingsOneByOne(t *testing.T) { name: "TestIndexUpdateJustSortableAttributes", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, firstRequest: Settings{ RankingRules: []string{ "typo", "words", @@ -2147,7 +2293,7 @@ func TestIndex_UpdateSettingsOneByOne(t *testing.T) { name: "TestIndexUpdateJustTypoTolerance", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, firstRequest: Settings{ RankingRules: []string{ "typo", "words", @@ -2249,7 +2395,7 @@ func TestIndex_UpdateSettingsOneByOne(t *testing.T) { name: "TestIndexUpdateJustPagination", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, firstRequest: Settings{ RankingRules: []string{ "typo", "words", @@ -2319,7 +2465,7 @@ func TestIndex_UpdateSettingsOneByOne(t *testing.T) { name: "TestIndexUpdateJustFaceting", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, firstRequest: Settings{ RankingRules: []string{ "typo", "words", @@ -2388,7 +2534,7 @@ func TestIndex_UpdateSettingsOneByOne(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - 
SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -2420,9 +2566,14 @@ func TestIndex_UpdateSettingsOneByOne(t *testing.T) { } func TestIndex_UpdateStopWords(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager request []string } tests := []struct { @@ -2434,7 +2585,7 @@ func TestIndex_UpdateStopWords(t *testing.T) { name: "TestIndexBasicUpdateStopWords", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, request: []string{ "of", "the", "to", }, @@ -2447,7 +2598,7 @@ func TestIndex_UpdateStopWords(t *testing.T) { name: "TestIndexUpdateStopWordsWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, request: []string{ "of", "the", "to", }, @@ -2459,7 +2610,7 @@ func TestIndex_UpdateStopWords(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -2481,9 +2632,14 @@ func TestIndex_UpdateStopWords(t *testing.T) { } func TestIndex_UpdateSynonyms(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager request map[string][]string } tests := []struct { @@ -2495,7 +2651,7 @@ func TestIndex_UpdateSynonyms(t *testing.T) { name: "TestIndexBasicUpdateSynonyms", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, request: map[string][]string{ "wolverine": {"logan", "xmen"}, }, @@ -2508,7 +2664,7 @@ func TestIndex_UpdateSynonyms(t *testing.T) { name: "TestIndexUpdateSynonymsWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + 
client: customMeili, request: map[string][]string{ "wolverine": {"logan", "xmen"}, }, @@ -2520,7 +2676,7 @@ func TestIndex_UpdateSynonyms(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -2542,9 +2698,14 @@ func TestIndex_UpdateSynonyms(t *testing.T) { } func TestIndex_UpdateSortableAttributes(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager request []string } tests := []struct { @@ -2556,7 +2717,7 @@ func TestIndex_UpdateSortableAttributes(t *testing.T) { name: "TestIndexBasicUpdateSortableAttributes", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, request: []string{ "title", }, @@ -2569,7 +2730,7 @@ func TestIndex_UpdateSortableAttributes(t *testing.T) { name: "TestIndexUpdateSortableAttributesWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, request: []string{ "title", }, @@ -2581,7 +2742,7 @@ func TestIndex_UpdateSortableAttributes(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -2603,9 +2764,14 @@ func TestIndex_UpdateSortableAttributes(t *testing.T) { } func TestIndex_UpdateTypoTolerance(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager request TypoTolerance } tests := []struct { @@ -2618,7 +2784,7 @@ func TestIndex_UpdateTypoTolerance(t *testing.T) { name: "TestIndexBasicUpdateTypoTolerance", args: args{ UID: "indexUID", - client: 
defaultClient, + client: meili, request: TypoTolerance{ Enabled: true, MinWordSizeForTypos: MinWordSizeForTypos{ @@ -2638,7 +2804,7 @@ func TestIndex_UpdateTypoTolerance(t *testing.T) { name: "TestIndexUpdateTypoToleranceWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, request: TypoTolerance{ Enabled: true, MinWordSizeForTypos: MinWordSizeForTypos{ @@ -2658,7 +2824,7 @@ func TestIndex_UpdateTypoTolerance(t *testing.T) { name: "TestIndexUpdateTypoToleranceWithDisableOnWords", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, request: TypoTolerance{ Enabled: true, MinWordSizeForTypos: MinWordSizeForTypos{ @@ -2680,7 +2846,7 @@ func TestIndex_UpdateTypoTolerance(t *testing.T) { name: "TestIndexUpdateTypoToleranceWithDisableOnAttributes", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, request: TypoTolerance{ Enabled: true, MinWordSizeForTypos: MinWordSizeForTypos{ @@ -2702,7 +2868,7 @@ func TestIndex_UpdateTypoTolerance(t *testing.T) { name: "TestIndexDisableTypoTolerance", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, request: TypoTolerance{ Enabled: false, MinWordSizeForTypos: MinWordSizeForTypos{ @@ -2723,7 +2889,7 @@ func TestIndex_UpdateTypoTolerance(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -2741,9 +2907,14 @@ func TestIndex_UpdateTypoTolerance(t *testing.T) { } func TestIndex_UpdatePagination(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager request Pagination } tests := []struct { @@ -2756,7 +2927,7 @@ func TestIndex_UpdatePagination(t *testing.T) { name: "TestIndexBasicUpdatePagination", args: args{ UID: 
"indexUID", - client: defaultClient, + client: meili, request: Pagination{ MaxTotalHits: 1200, }, @@ -2770,7 +2941,7 @@ func TestIndex_UpdatePagination(t *testing.T) { name: "TestIndexUpdatePaginationWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, request: Pagination{ MaxTotalHits: 1200, }, @@ -2783,7 +2954,7 @@ func TestIndex_UpdatePagination(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -2805,9 +2976,14 @@ func TestIndex_UpdatePagination(t *testing.T) { } func TestIndex_UpdateFaceting(t *testing.T) { + meili := setup(t, "") + customMeili := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager request Faceting } tests := []struct { @@ -2820,7 +2996,7 @@ func TestIndex_UpdateFaceting(t *testing.T) { name: "TestIndexBasicUpdateFaceting", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, request: Faceting{ MaxValuesPerFacet: 200, }, @@ -2834,7 +3010,7 @@ func TestIndex_UpdateFaceting(t *testing.T) { name: "TestIndexUpdateFacetingWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: customMeili, request: Faceting{ MaxValuesPerFacet: 200, }, @@ -2847,7 +3023,7 @@ func TestIndex_UpdateFaceting(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -2869,9 +3045,11 @@ func TestIndex_UpdateFaceting(t *testing.T) { } func TestIndex_UpdateSettingsEmbedders(t *testing.T) { + meili := setup(t, "") + type args struct { UID string - client *Client + client ServiceManager request Settings newIndex bool } @@ -2886,7 +3064,7 @@ func 
TestIndex_UpdateSettingsEmbedders(t *testing.T) { name: "TestIndexUpdateSettingsEmbeddersErr", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, request: Settings{ Embedders: map[string]Embedder{ "default": { @@ -2905,7 +3083,7 @@ func TestIndex_UpdateSettingsEmbedders(t *testing.T) { name: "TestIndexUpdateSettingsEmbeddersErr", args: args{ UID: "indexUID", - client: defaultClient, + client: meili, request: Settings{ Embedders: map[string]Embedder{ "default": { @@ -2927,7 +3105,7 @@ func TestIndex_UpdateSettingsEmbedders(t *testing.T) { args: args{ newIndex: true, UID: "newIndexUID", - client: defaultClient, + client: meili, request: Settings{ Embedders: map[string]Embedder{ "default": { @@ -2946,11 +3124,11 @@ func TestIndex_UpdateSettingsEmbedders(t *testing.T) { t.Run(tt.name, func(t *testing.T) { c := tt.args.client if !tt.args.newIndex { - SetUpIndexForFaceting() + setUpIndexForFaceting(tt.args.client) } else { task, err := c.CreateIndex(&IndexConfig{Uid: tt.args.UID}) require.NoError(t, err) - _, err = c.Index(tt.args.UID).WaitForTask(task.TaskUID) + _, err = c.Index(tt.args.UID).WaitForTask(task.TaskUID, 0) require.NoError(t, err) } i := c.Index(tt.args.UID) @@ -2964,7 +3142,7 @@ func TestIndex_UpdateSettingsEmbedders(t *testing.T) { require.NoError(t, err) require.GreaterOrEqual(t, gotTask.TaskUID, tt.wantTask.TaskUID) - r, err := i.WaitForTask(gotTask.TaskUID) + r, err := i.WaitForTask(gotTask.TaskUID, 0) require.NoError(t, err) if tt.wantErr != "" { require.Contains(t, r.Error.Message, tt.wantErr) @@ -2980,7 +3158,7 @@ func TestIndex_UpdateSettingsEmbedders(t *testing.T) { } func TestIndex_GetEmbedders(t *testing.T) { - c := defaultClient + c := setup(t, "") t.Cleanup(cleanup(c)) indexID := "newIndexUID" @@ -3007,7 +3185,7 @@ func TestIndex_GetEmbedders(t *testing.T) { } func TestIndex_UpdateEmbedders(t *testing.T) { - c := defaultClient + c := setup(t, "") t.Cleanup(cleanup(c)) indexID := "newIndexUID" @@ -3037,7 +3215,7 @@ func 
TestIndex_UpdateEmbedders(t *testing.T) { taskInfo, err = i.UpdateEmbedders(updated) require.NoError(t, err) - task, err := i.WaitForTask(taskInfo.TaskUID) + task, err := i.WaitForTask(taskInfo.TaskUID, 0) require.NoError(t, err) require.Equal(t, TaskStatusSucceeded, task.Status) @@ -3047,7 +3225,7 @@ func TestIndex_UpdateEmbedders(t *testing.T) { } func TestIndex_ResetEmbedders(t *testing.T) { - c := defaultClient + c := setup(t, "") t.Cleanup(cleanup(c)) indexID := "newIndexUID" @@ -3069,7 +3247,7 @@ func TestIndex_ResetEmbedders(t *testing.T) { taskInfo, err = i.ResetEmbedders() require.NoError(t, err) - task, err := i.WaitForTask(taskInfo.TaskUID) + task, err := i.WaitForTask(taskInfo.TaskUID, 0) require.NoError(t, err) require.Equal(t, TaskStatusSucceeded, task.Status) @@ -3079,7 +3257,7 @@ func TestIndex_ResetEmbedders(t *testing.T) { } func Test_SearchCutoffMs(t *testing.T) { - c := defaultClient + c := setup(t, "") t.Cleanup(cleanup(c)) indexID := "newIndexUID" diff --git a/index_test.go b/index_test.go index 4c807b28..175d3fd2 100644 --- a/index_test.go +++ b/index_test.go @@ -2,25 +2,30 @@ package meilisearch import ( "context" + "crypto/tls" + "github.com/stretchr/testify/require" "testing" "time" - - "github.com/stretchr/testify/require" ) func TestIndex_Delete(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { createUid []string deleteUid []string } tests := []struct { name string - client *Client + client ServiceManager args args }{ { name: "TestIndexDeleteOneIndex", - client: defaultClient, + client: sv, args: args{ createUid: []string{"TestIndexDeleteOneIndex"}, deleteUid: []string{"TestIndexDeleteOneIndex"}, @@ -28,7 +33,7 @@ func TestIndex_Delete(t *testing.T) { }, { name: "TestIndexDeleteOneIndexWithCustomClient", - client: customClient, + client: customSv, args: args{ createUid: []string{"TestIndexDeleteOneIndexWithCustomClient"}, deleteUid: 
[]string{"TestIndexDeleteOneIndexWithCustomClient"}, @@ -36,7 +41,7 @@ func TestIndex_Delete(t *testing.T) { }, { name: "TestIndexDeleteMultipleIndex", - client: defaultClient, + client: sv, args: args{ createUid: []string{ "TestIndexDeleteMultipleIndex_1", @@ -56,7 +61,7 @@ func TestIndex_Delete(t *testing.T) { }, { name: "TestIndexDeleteNotExistingIndex", - client: defaultClient, + client: sv, args: args{ createUid: []string{}, deleteUid: []string{"TestIndexDeleteNotExistingIndex"}, @@ -64,7 +69,7 @@ func TestIndex_Delete(t *testing.T) { }, { name: "TestIndexDeleteMultipleNotExistingIndex", - client: defaultClient, + client: sv, args: args{ createUid: []string{}, deleteUid: []string{ @@ -81,7 +86,7 @@ func TestIndex_Delete(t *testing.T) { t.Cleanup(cleanup(c)) for _, uid := range tt.args.createUid { - _, err := SetUpEmptyIndex(&IndexConfig{Uid: uid}) + _, err := setUpEmptyIndex(sv, &IndexConfig{Uid: uid}) require.NoError(t, err, "CreateIndex() in DeleteTest error should be nil") } for k := range tt.args.deleteUid { @@ -95,9 +100,14 @@ func TestIndex_Delete(t *testing.T) { } func TestIndex_GetStats(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -108,7 +118,7 @@ func TestIndex_GetStats(t *testing.T) { name: "TestIndexBasicGetStats", args: args{ UID: "TestIndexBasicGetStats", - client: defaultClient, + client: sv, }, wantResp: &StatsIndex{ NumberOfDocuments: 6, @@ -120,7 +130,7 @@ func TestIndex_GetStats(t *testing.T) { name: "TestIndexGetStatsWithCustomClient", args: args{ UID: "TestIndexGetStatsWithCustomClient", - client: customClient, + client: customSv, }, wantResp: &StatsIndex{ NumberOfDocuments: 6, @@ -131,7 +141,7 @@ func TestIndex_GetStats(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpBasicIndex(tt.args.UID) + 
setUpBasicIndex(sv, tt.args.UID) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -144,36 +154,35 @@ func TestIndex_GetStats(t *testing.T) { } func Test_newIndex(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { - client *Client + client ServiceManager uid string } tests := []struct { name string args args - want *Index + want IndexManager }{ { name: "TestBasicNewIndex", args: args{ - client: defaultClient, + client: sv, uid: "TestBasicNewIndex", }, - want: &Index{ - UID: "TestBasicNewIndex", - client: defaultClient, - }, + want: sv.Index("TestBasicNewIndex"), }, { name: "TestNewIndexCustomClient", args: args{ - client: customClient, + client: sv, uid: "TestNewIndexCustomClient", }, - want: &Index{ - UID: "TestNewIndexCustomClient", - client: customClient, - }, + want: customSv.Index("TestNewIndexCustomClient"), }, } for _, tt := range tests { @@ -181,20 +190,36 @@ func Test_newIndex(t *testing.T) { c := tt.args.client t.Cleanup(cleanup(c)) - got := newIndex(c, tt.args.uid) - require.Equal(t, tt.want.UID, got.UID) - require.Equal(t, tt.want.client, got.client) + gotIdx := c.Index(tt.args.uid) + + task, err := c.CreateIndex(&IndexConfig{Uid: tt.args.uid}) + require.NoError(t, err) + + testWaitForTask(t, gotIdx, task) + + gotIdxResult, err := gotIdx.FetchInfo() + require.NoError(t, err) + + wantIdxResult, err := tt.want.FetchInfo() + require.NoError(t, err) + + require.Equal(t, gotIdxResult.UID, wantIdxResult.UID) // Timestamps should be empty unless fetched - require.Zero(t, got.CreatedAt) - require.Zero(t, got.UpdatedAt) + require.NotZero(t, gotIdxResult.CreatedAt) + require.NotZero(t, gotIdxResult.UpdatedAt) }) } } func TestIndex_GetTask(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client 
ServiceManager taskUID int64 document []docTest } @@ -206,7 +231,7 @@ func TestIndex_GetTask(t *testing.T) { name: "TestIndexBasicGetTask", args: args{ UID: "TestIndexBasicGetTask", - client: defaultClient, + client: sv, taskUID: 0, document: []docTest{ {ID: "123", Name: "Pride and Prejudice"}, @@ -217,7 +242,7 @@ func TestIndex_GetTask(t *testing.T) { name: "TestIndexGetTaskWithCustomClient", args: args{ UID: "TestIndexGetTaskWithCustomClient", - client: customClient, + client: customSv, taskUID: 0, document: []docTest{ {ID: "123", Name: "Pride and Prejudice"}, @@ -228,7 +253,7 @@ func TestIndex_GetTask(t *testing.T) { name: "TestIndexGetTask", args: args{ UID: "TestIndexGetTask", - client: defaultClient, + client: sv, taskUID: 0, document: []docTest{ {ID: "456", Name: "Le Petit Prince"}, @@ -238,7 +263,7 @@ func TestIndex_GetTask(t *testing.T) { }, } - t.Cleanup(cleanup(defaultClient)) + t.Cleanup(cleanup(sv, customSv)) for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ -249,7 +274,7 @@ func TestIndex_GetTask(t *testing.T) { task, err := i.AddDocuments(tt.args.document) require.NoError(t, err) - _, err = c.WaitForTask(task.TaskUID) + _, err = c.WaitForTask(task.TaskUID, 0) require.NoError(t, err) gotResp, err := i.GetTask(task.TaskUID) @@ -268,9 +293,14 @@ func TestIndex_GetTask(t *testing.T) { } func TestIndex_GetTasks(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager document []docTest query *TasksQuery } @@ -282,7 +312,7 @@ func TestIndex_GetTasks(t *testing.T) { name: "TestIndexBasicGetTasks", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, document: []docTest{ {ID: "123", Name: "Pride and Prejudice"}, }, @@ -292,7 +322,7 @@ func TestIndex_GetTasks(t *testing.T) { name: "TestIndexGetTasksWithCustomClient", args: args{ UID: "indexUID", - client: customClient, + client: 
customSv, document: []docTest{ {ID: "123", Name: "Pride and Prejudice"}, }, @@ -302,7 +332,7 @@ func TestIndex_GetTasks(t *testing.T) { name: "TestIndexBasicGetTasksWithFilters", args: args{ UID: "indexUID", - client: defaultClient, + client: sv, document: []docTest{ {ID: "123", Name: "Pride and Prejudice"}, }, @@ -322,7 +352,7 @@ func TestIndex_GetTasks(t *testing.T) { task, err := i.AddDocuments(tt.args.document) require.NoError(t, err) - _, err = c.WaitForTask(task.TaskUID) + _, err = c.WaitForTask(task.TaskUID, 0) require.NoError(t, err) gotResp, err := i.GetTasks(nil) @@ -335,9 +365,14 @@ func TestIndex_GetTasks(t *testing.T) { } func TestIndex_WaitForTask(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager interval time.Duration timeout time.Duration document []docTest @@ -351,7 +386,7 @@ func TestIndex_WaitForTask(t *testing.T) { name: "TestWaitForTask50", args: args{ UID: "TestWaitForTask50", - client: defaultClient, + client: sv, interval: time.Millisecond * 50, timeout: time.Second * 5, document: []docTest{ @@ -366,7 +401,7 @@ func TestIndex_WaitForTask(t *testing.T) { name: "TestWaitForTask50WithCustomClient", args: args{ UID: "TestWaitForTask50WithCustomClient", - client: customClient, + client: customSv, interval: time.Millisecond * 50, timeout: time.Second * 5, document: []docTest{ @@ -381,7 +416,7 @@ func TestIndex_WaitForTask(t *testing.T) { name: "TestWaitForTask10", args: args{ UID: "TestWaitForTask10", - client: defaultClient, + client: sv, interval: time.Millisecond * 10, timeout: time.Second * 5, document: []docTest{ @@ -396,7 +431,7 @@ func TestIndex_WaitForTask(t *testing.T) { name: "TestWaitForTaskWithTimeout", args: args{ UID: "TestWaitForTaskWithTimeout", - client: defaultClient, + client: sv, interval: time.Millisecond * 50, timeout: time.Millisecond * 10, document: []docTest{ @@ 
-420,7 +455,7 @@ func TestIndex_WaitForTask(t *testing.T) { ctx, cancelFunc := context.WithTimeout(context.Background(), tt.args.timeout) defer cancelFunc() - gotTask, err := i.WaitForTask(task.TaskUID, WaitParams{Context: ctx, Interval: tt.args.interval}) + gotTask, err := i.WaitForTaskWithContext(ctx, task.TaskUID, 0) if tt.args.timeout < tt.args.interval { require.Error(t, err) } else { @@ -432,22 +467,27 @@ func TestIndex_WaitForTask(t *testing.T) { } func TestIndex_FetchInfo(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string args args - wantResp *Index + wantResp *IndexResult }{ { name: "TestIndexBasicFetchInfo", args: args{ UID: "TestIndexBasicFetchInfo", - client: defaultClient, + client: sv, }, - wantResp: &Index{ + wantResp: &IndexResult{ UID: "TestIndexBasicFetchInfo", PrimaryKey: "book_id", }, @@ -456,9 +496,9 @@ func TestIndex_FetchInfo(t *testing.T) { name: "TestIndexFetchInfoWithCustomClient", args: args{ UID: "TestIndexFetchInfoWithCustomClient", - client: customClient, + client: customSv, }, - wantResp: &Index{ + wantResp: &IndexResult{ UID: "TestIndexFetchInfoWithCustomClient", PrimaryKey: "book_id", }, @@ -466,11 +506,13 @@ func TestIndex_FetchInfo(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpBasicIndex(tt.args.UID) + setUpBasicIndex(tt.args.client, tt.args.UID) c := tt.args.client - i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) + i, err := c.GetIndex(tt.args.UID) + require.NoError(t, err) + gotResp, err := i.FetchInfo() require.NoError(t, err) require.Equal(t, tt.wantResp.UID, gotResp.UID) @@ -485,9 +527,14 @@ func TestIndex_FetchInfo(t *testing.T) { } func TestIndex_FetchPrimaryKey(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) 
+ type args struct { UID string - client *Client + client ServiceManager } tests := []struct { name string @@ -498,7 +545,7 @@ func TestIndex_FetchPrimaryKey(t *testing.T) { name: "TestIndexBasicFetchPrimaryKey", args: args{ UID: "TestIndexBasicFetchPrimaryKey", - client: defaultClient, + client: sv, }, wantPrimaryKey: "book_id", }, @@ -506,14 +553,14 @@ func TestIndex_FetchPrimaryKey(t *testing.T) { name: "TestIndexFetchPrimaryKeyWithCustomClient", args: args{ UID: "TestIndexFetchPrimaryKeyWithCustomClient", - client: customClient, + client: customSv, }, wantPrimaryKey: "book_id", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SetUpBasicIndex(tt.args.UID) + setUpBasicIndex(tt.args.client, tt.args.UID) c := tt.args.client i := c.Index(tt.args.UID) t.Cleanup(cleanup(c)) @@ -526,26 +573,31 @@ func TestIndex_FetchPrimaryKey(t *testing.T) { } func TestIndex_UpdateIndex(t *testing.T) { + sv := setup(t, "") + customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ + InsecureSkipVerify: true, + })) + type args struct { primaryKey string config IndexConfig - client *Client + client ServiceManager } tests := []struct { name string args args - wantResp *Index + wantResp *IndexResult }{ { name: "TestIndexBasicUpdateIndex", args: args{ - client: defaultClient, + client: sv, config: IndexConfig{ Uid: "indexUID", }, primaryKey: "book_id", }, - wantResp: &Index{ + wantResp: &IndexResult{ UID: "indexUID", PrimaryKey: "book_id", }, @@ -553,13 +605,13 @@ func TestIndex_UpdateIndex(t *testing.T) { { name: "TestIndexUpdateIndexWithCustomClient", args: args{ - client: customClient, + client: customSv, config: IndexConfig{ Uid: "indexUID", }, primaryKey: "book_id", }, - wantResp: &Index{ + wantResp: &IndexResult{ UID: "indexUID", PrimaryKey: "book_id", }, @@ -570,7 +622,7 @@ func TestIndex_UpdateIndex(t *testing.T) { c := tt.args.client t.Cleanup(cleanup(c)) - i, err := SetUpEmptyIndex(&tt.args.config) + i, err := setUpEmptyIndex(tt.args.client, 
&tt.args.config) require.NoError(t, err) require.Equal(t, tt.args.config.Uid, i.UID) // Store original timestamps @@ -580,7 +632,7 @@ func TestIndex_UpdateIndex(t *testing.T) { gotResp, err := i.UpdateIndex(tt.args.primaryKey) require.NoError(t, err) - _, err = c.WaitForTask(gotResp.TaskUID) + _, err = c.WaitForTask(gotResp.TaskUID, 0) require.NoError(t, err) require.NoError(t, err) diff --git a/main_test.go b/main_test.go index cf9426cb..afedf776 100644 --- a/main_test.go +++ b/main_test.go @@ -1,17 +1,56 @@ package meilisearch import ( - "crypto/tls" + "bufio" + "context" + "encoding/csv" + "encoding/json" "fmt" + "github.com/stretchr/testify/require" + "io" "net/http" "os" "strings" "testing" +) - "github.com/stretchr/testify/require" - "github.com/valyala/fasthttp" +var ( + masterKey = "masterKey" + defaultRankingRules = []string{ + "words", "typo", "proximity", "attribute", "sort", "exactness", + } + defaultTypoTolerance = TypoTolerance{ + Enabled: true, + MinWordSizeForTypos: MinWordSizeForTypos{ + OneTypo: 5, + TwoTypos: 9, + }, + DisableOnWords: []string{}, + DisableOnAttributes: []string{}, + } + defaultPagination = Pagination{ + MaxTotalHits: 1000, + } + defaultFaceting = Faceting{ + MaxValuesPerFacet: 100, + } ) +var testNdjsonDocuments = []byte(`{"id": 1, "name": "Alice In Wonderland"} +{"id": 2, "name": "Pride and Prejudice"} +{"id": 3, "name": "Le Petit Prince"} +{"id": 4, "name": "The Great Gatsby"} +{"id": 5, "name": "Don Quixote"} +`) + +var testCsvDocuments = []byte(`id,name +1,Alice In Wonderland +2,Pride and Prejudice +3,Le Petit Prince +4,The Great Gatsby +5,Don Quixote +`) + type docTest struct { ID string `json:"id"` Name string `json:"name"` @@ -24,23 +63,65 @@ type docTestBooks struct { Year int `json:"year"` } -func getenv(key, fallback string) string { - value := os.Getenv(key) - if len(value) == 0 { - return fallback +func setup(t *testing.T, host string, options ...Option) ServiceManager { + t.Helper() + + opts := make([]Option, 0) + 
opts = append(opts, WithAPIKey(masterKey)) + opts = append(opts, options...) + + if host == "" { + host = getenv("MEILISEARCH_URL", "http://localhost:7700") } - return value + + sv := New(host, opts...) + return sv } -func deleteAllIndexes(client ClientInterface) (ok bool, err error) { - list, err := client.GetIndexes(nil) +func cleanup(services ...ServiceManager) func() { + return func() { + for _, s := range services { + _, _ = deleteAllIndexes(s) + _, _ = deleteAllKeys(s) + } + } +} + +func getPrivateKey(sv ServiceManager) (key string) { + list, err := sv.GetKeys(nil) + if err != nil { + return "" + } + for _, key := range list.Results { + if strings.Contains(key.Name, "Default Admin API Key") || (key.Description == "") { + return key.Key + } + } + return "" +} + +func getPrivateUIDKey(sv ServiceManager) (key string) { + list, err := sv.GetKeys(nil) + if err != nil { + return "" + } + for _, key := range list.Results { + if strings.Contains(key.Name, "Default Admin API Key") || (key.Description == "") { + return key.UID + } + } + return "" +} + +func deleteAllIndexes(sv ServiceManager) (ok bool, err error) { + list, err := sv.ListIndexes(nil) if err != nil { return false, err } for _, index := range list.Results { - task, _ := client.DeleteIndex(index.UID) - _, err := client.WaitForTask(task.TaskUID) + task, _ := sv.DeleteIndex(index.UID) + _, err := sv.WaitForTask(task.TaskUID, 0) if err != nil { return false, err } @@ -49,15 +130,15 @@ func deleteAllIndexes(client ClientInterface) (ok bool, err error) { return true, nil } -func deleteAllKeys(client ClientInterface) (ok bool, err error) { - list, err := client.GetKeys(nil) +func deleteAllKeys(sv ServiceManager) (ok bool, err error) { + list, err := sv.GetKeys(nil) if err != nil { return false, err } for _, key := range list.Results { if strings.Contains(key.Description, "Test") || (key.Description == "") { - _, err = client.DeleteKey(key.Key) + _, err = sv.DeleteKey(key.Key) if err != nil { return false, err } 
@@ -67,77 +148,126 @@ func deleteAllKeys(client ClientInterface) (ok bool, err error) { return true, nil } -func cleanup(c ClientInterface) func() { - return func() { - _, _ = deleteAllIndexes(c) - _, _ = deleteAllKeys(c) +func getenv(key, fallback string) string { + value := os.Getenv(key) + if len(value) == 0 { + return fallback } + return value } -func testWaitForTask(t *testing.T, i *Index, u *TaskInfo) { +func testWaitForTask(t *testing.T, i IndexManager, u *TaskInfo) { t.Helper() - r, err := i.WaitForTask(u.TaskUID) + r, err := i.WaitForTask(u.TaskUID, 0) require.NoError(t, err) require.Equal(t, TaskStatusSucceeded, r.Status, fmt.Sprintf("Task failed: %#+v", r)) } -func testWaitForBatchTask(t *testing.T, i *Index, u []TaskInfo) { +func testWaitForBatchTask(t *testing.T, i IndexManager, u []TaskInfo) { for _, id := range u { - _, err := i.WaitForTask(id.TaskUID) + _, err := i.WaitForTask(id.TaskUID, 0) require.NoError(t, err) } } -func GetPrivateKey() (key string) { - list, err := defaultClient.GetKeys(nil) +func setUpEmptyIndex(sv ServiceManager, index *IndexConfig) (resp *IndexResult, err error) { + task, err := sv.CreateIndex(index) if err != nil { - return "" + fmt.Println(err) + return nil, err } - for _, key := range list.Results { - if strings.Contains(key.Name, "Default Admin API Key") || (key.Description == "") { - return key.Key - } + finalTask, _ := sv.WaitForTask(task.TaskUID, 0) + if finalTask.Status != "succeeded" { + cleanup(sv) + return setUpEmptyIndex(sv, index) } - return "" + return sv.GetIndex(index.Uid) } -func GetPrivateUIDKey() (key string) { - list, err := defaultClient.GetKeys(nil) +func setUpBasicIndex(sv ServiceManager, indexUID string) { + index := sv.Index(indexUID) + + documents := []map[string]interface{}{ + {"book_id": 123, "title": "Pride and Prejudice"}, + {"book_id": 456, "title": "Le Petit Prince"}, + {"book_id": 1, "title": "Alice In Wonderland"}, + {"book_id": 1344, "title": "The Hobbit"}, + {"book_id": 4, "title": "Harry 
Potter and the Half-Blood Prince"}, + {"book_id": 42, "title": "The Hitchhiker's Guide to the Galaxy"}, + } + + task, err := index.AddDocuments(documents) if err != nil { - return "" + fmt.Println(err) + os.Exit(1) } - for _, key := range list.Results { - if strings.Contains(key.Name, "Default Admin API Key") || (key.Description == "") { - return key.UID - } + finalTask, _ := index.WaitForTask(task.TaskUID, 0) + if finalTask.Status != "succeeded" { + os.Exit(1) } - return "" } -func SetUpEmptyIndex(index *IndexConfig) (resp *Index, err error) { - client := NewClient(ClientConfig{ - Host: getenv("MEILISEARCH_URL", "http://localhost:7700"), - APIKey: masterKey, - }) - task, err := client.CreateIndex(index) +func setUpIndexForFaceting(client ServiceManager) { + idx := client.Index("indexUID") + + booksTest := []docTestBooks{ + {BookID: 123, Title: "Pride and Prejudice", Tag: "Romance", Year: 1813}, + {BookID: 456, Title: "Le Petit Prince", Tag: "Tale", Year: 1943}, + {BookID: 1, Title: "Alice In Wonderland", Tag: "Tale", Year: 1865}, + {BookID: 1344, Title: "The Hobbit", Tag: "Epic fantasy", Year: 1937}, + {BookID: 4, Title: "Harry Potter and the Half-Blood Prince", Tag: "Epic fantasy", Year: 2005}, + {BookID: 42, Title: "The Hitchhiker's Guide to the Galaxy", Tag: "Epic fantasy", Year: 1978}, + {BookID: 742, Title: "The Great Gatsby", Tag: "Tragedy", Year: 1925}, + {BookID: 834, Title: "One Hundred Years of Solitude", Tag: "Tragedy", Year: 1967}, + {BookID: 17, Title: "In Search of Lost Time", Tag: "Modernist literature", Year: 1913}, + {BookID: 204, Title: "Ulysses", Tag: "Novel", Year: 1922}, + {BookID: 7, Title: "Don Quixote", Tag: "Satiric", Year: 1605}, + {BookID: 10, Title: "Moby Dick", Tag: "Novel", Year: 1851}, + {BookID: 730, Title: "War and Peace", Tag: "Historical fiction", Year: 1865}, + {BookID: 69, Title: "Hamlet", Tag: "Tragedy", Year: 1598}, + {BookID: 32, Title: "The Odyssey", Tag: "Epic", Year: 1571}, + {BookID: 71, Title: "Madame Bovary", Tag: 
"Novel", Year: 1857}, + {BookID: 56, Title: "The Divine Comedy", Tag: "Epic", Year: 1303}, + {BookID: 254, Title: "Lolita", Tag: "Novel", Year: 1955}, + {BookID: 921, Title: "The Brothers Karamazov", Tag: "Novel", Year: 1879}, + {BookID: 1032, Title: "Crime and Punishment", Tag: "Crime fiction", Year: 1866}, + } + task, err := idx.AddDocuments(booksTest) if err != nil { fmt.Println(err) - return nil, err + os.Exit(1) } - finalTask, _ := client.WaitForTask(task.TaskUID) + finalTask, _ := idx.WaitForTask(task.TaskUID, 0) if finalTask.Status != "succeeded" { os.Exit(1) } - return client.GetIndex(index.Uid) } -func SetUpIndexWithVector(indexUID string) (resp *Index, err error) { - client := NewClient(ClientConfig{ - Host: getenv("MEILISEARCH_URL", "http://localhost:7700"), - APIKey: masterKey, - }) +func setUpIndexWithNestedFields(client ServiceManager, indexUID string) { + index := client.Index(indexUID) - req := internalRequest{ + documents := []map[string]interface{}{ + {"id": 1, "title": "Pride and Prejudice", "info": map[string]interface{}{"comment": "A great book", "reviewNb": 50}}, + {"id": 2, "title": "Le Petit Prince", "info": map[string]interface{}{"comment": "A french book", "reviewNb": 600}}, + {"id": 3, "title": "Le Rouge et le Noir", "info": map[string]interface{}{"comment": "Another french book", "reviewNb": 700}}, + {"id": 4, "title": "Alice In Wonderland", "comment": "A weird book", "info": map[string]interface{}{"comment": "A weird book", "reviewNb": 800}}, + {"id": 5, "title": "The Hobbit", "info": map[string]interface{}{"comment": "An awesome book", "reviewNb": 900}}, + {"id": 6, "title": "Harry Potter and the Half-Blood Prince", "info": map[string]interface{}{"comment": "The best book", "reviewNb": 1000}}, + {"id": 7, "title": "The Hitchhiker's Guide to the Galaxy"}, + } + task, err := index.AddDocuments(documents) + if err != nil { + fmt.Println(err) + os.Exit(1) + } + finalTask, _ := index.WaitForTask(task.TaskUID, 0) + if finalTask.Status != 
"succeeded" { + os.Exit(1) + } +} + +func setUpIndexWithVector(client *meilisearch, indexUID string) (resp *IndexResult, err error) { + req := &internalRequest{ endpoint: "/experimental-features", method: http.MethodPatch, contentType: "application/json", @@ -146,12 +276,12 @@ func SetUpIndexWithVector(indexUID string) (resp *Index, err error) { }, } - if err := client.executeRequest(req); err != nil { + if err := client.client.executeRequest(context.Background(), req); err != nil { return nil, err } - index := client.Index(indexUID) - taskInfo, err := index.UpdateSettings(&Settings{ + idx := client.Index(indexUID) + taskInfo, err := idx.UpdateSettings(&Settings{ Embedders: map[string]Embedder{ "default": { Source: "userProvided", @@ -162,7 +292,7 @@ func SetUpIndexWithVector(indexUID string) (resp *Index, err error) { if err != nil { return nil, err } - settingsTask, err := index.WaitForTask(taskInfo.TaskUID) + settingsTask, err := idx.WaitForTask(taskInfo.TaskUID, 0) if err != nil { return nil, err } @@ -175,12 +305,12 @@ func SetUpIndexWithVector(indexUID string) (resp *Index, err error) { {"book_id": 456, "title": "Le Petit Prince", "_vectors": map[string]interface{}{"default": []float64{2.4, 8.5, 1.6}}}, } - taskInfo, err = index.AddDocuments(documents) + taskInfo, err = idx.AddDocuments(documents) if err != nil { return nil, err } - finalTask, _ := index.WaitForTask(taskInfo.TaskUID) + finalTask, _ := idx.WaitForTask(taskInfo.TaskUID, 0) if finalTask.Status != TaskStatusSucceeded { return nil, fmt.Errorf("Add documents task failed: %#+v", finalTask) } @@ -188,47 +318,17 @@ func SetUpIndexWithVector(indexUID string) (resp *Index, err error) { return client.GetIndex(indexUID) } -func SetUpBasicIndex(indexUID string) { - client := NewClient(ClientConfig{ - Host: getenv("MEILISEARCH_URL", "http://localhost:7700"), - APIKey: masterKey, - }) - index := client.Index(indexUID) - - documents := []map[string]interface{}{ - {"book_id": 123, "title": "Pride and 
Prejudice"}, - {"book_id": 456, "title": "Le Petit Prince"}, - {"book_id": 1, "title": "Alice In Wonderland"}, - {"book_id": 1344, "title": "The Hobbit"}, - {"book_id": 4, "title": "Harry Potter and the Half-Blood Prince"}, - {"book_id": 42, "title": "The Hitchhiker's Guide to the Galaxy"}, - } - task, err := index.AddDocuments(documents) - if err != nil { - fmt.Println(err) - os.Exit(1) - } - finalTask, _ := index.WaitForTask(task.TaskUID) - if finalTask.Status != "succeeded" { - os.Exit(1) - } -} - -func SetUpDistinctIndex(indexUID string) { - client := NewClient(ClientConfig{ - Host: getenv("MEILISEARCH_URL", "http://localhost:7700"), - APIKey: masterKey, - }) - index := client.Index(indexUID) +func setUpDistinctIndex(client ServiceManager, indexUID string) { + idx := client.Index(indexUID) atters := []string{"product_id", "title", "sku", "url"} - task, err := index.UpdateFilterableAttributes(&atters) + task, err := idx.UpdateFilterableAttributes(&atters) if err != nil { fmt.Println(err) os.Exit(1) } - finalTask, _ := index.WaitForTask(task.TaskUID) + finalTask, _ := idx.WaitForTask(task.TaskUID, 0) if finalTask.Status != "succeeded" { os.Exit(1) } @@ -241,176 +341,55 @@ func SetUpDistinctIndex(indexUID string) { {"product_id": 4, "title": "yellow shirt", "sku": "sku9064", "url": "https://example.com/products/p4"}, {"product_id": 42, "title": "gray shirt", "sku": "sku964", "url": "https://example.com/products/p42"}, } - task, err = index.AddDocuments(documents) - if err != nil { - fmt.Println(err) - os.Exit(1) - } - finalTask, _ = index.WaitForTask(task.TaskUID) - if finalTask.Status != "succeeded" { - os.Exit(1) - } -} - -func SetUpIndexWithNestedFields(indexUID string) { - client := NewClient(ClientConfig{ - Host: getenv("MEILISEARCH_URL", "http://localhost:7700"), - APIKey: masterKey, - }) - index := client.Index(indexUID) - - documents := []map[string]interface{}{ - {"id": 1, "title": "Pride and Prejudice", "info": map[string]interface{}{"comment": "A great 
book", "reviewNb": 50}}, - {"id": 2, "title": "Le Petit Prince", "info": map[string]interface{}{"comment": "A french book", "reviewNb": 600}}, - {"id": 3, "title": "Le Rouge et le Noir", "info": map[string]interface{}{"comment": "Another french book", "reviewNb": 700}}, - {"id": 4, "title": "Alice In Wonderland", "comment": "A weird book", "info": map[string]interface{}{"comment": "A weird book", "reviewNb": 800}}, - {"id": 5, "title": "The Hobbit", "info": map[string]interface{}{"comment": "An awesome book", "reviewNb": 900}}, - {"id": 6, "title": "Harry Potter and the Half-Blood Prince", "info": map[string]interface{}{"comment": "The best book", "reviewNb": 1000}}, - {"id": 7, "title": "The Hitchhiker's Guide to the Galaxy"}, - } - task, err := index.AddDocuments(documents) + task, err = idx.AddDocuments(documents) if err != nil { fmt.Println(err) os.Exit(1) } - finalTask, _ := index.WaitForTask(task.TaskUID) + finalTask, _ = idx.WaitForTask(task.TaskUID, 0) if finalTask.Status != "succeeded" { os.Exit(1) } } -func SetUpIndexForFaceting() { - client := NewClient(ClientConfig{ - Host: getenv("MEILISEARCH_URL", "http://localhost:7700"), - APIKey: masterKey, - }) - index := client.Index("indexUID") - - booksTest := []docTestBooks{ - {BookID: 123, Title: "Pride and Prejudice", Tag: "Romance", Year: 1813}, - {BookID: 456, Title: "Le Petit Prince", Tag: "Tale", Year: 1943}, - {BookID: 1, Title: "Alice In Wonderland", Tag: "Tale", Year: 1865}, - {BookID: 1344, Title: "The Hobbit", Tag: "Epic fantasy", Year: 1937}, - {BookID: 4, Title: "Harry Potter and the Half-Blood Prince", Tag: "Epic fantasy", Year: 2005}, - {BookID: 42, Title: "The Hitchhiker's Guide to the Galaxy", Tag: "Epic fantasy", Year: 1978}, - {BookID: 742, Title: "The Great Gatsby", Tag: "Tragedy", Year: 1925}, - {BookID: 834, Title: "One Hundred Years of Solitude", Tag: "Tragedy", Year: 1967}, - {BookID: 17, Title: "In Search of Lost Time", Tag: "Modernist literature", Year: 1913}, - {BookID: 204, Title: 
"Ulysses", Tag: "Novel", Year: 1922}, - {BookID: 7, Title: "Don Quixote", Tag: "Satiric", Year: 1605}, - {BookID: 10, Title: "Moby Dick", Tag: "Novel", Year: 1851}, - {BookID: 730, Title: "War and Peace", Tag: "Historical fiction", Year: 1865}, - {BookID: 69, Title: "Hamlet", Tag: "Tragedy", Year: 1598}, - {BookID: 32, Title: "The Odyssey", Tag: "Epic", Year: 1571}, - {BookID: 71, Title: "Madame Bovary", Tag: "Novel", Year: 1857}, - {BookID: 56, Title: "The Divine Comedy", Tag: "Epic", Year: 1303}, - {BookID: 254, Title: "Lolita", Tag: "Novel", Year: 1955}, - {BookID: 921, Title: "The Brothers Karamazov", Tag: "Novel", Year: 1879}, - {BookID: 1032, Title: "Crime and Punishment", Tag: "Crime fiction", Year: 1866}, - } - task, err := index.AddDocuments(booksTest) - if err != nil { - fmt.Println(err) - os.Exit(1) - } - finalTask, _ := index.WaitForTask(task.TaskUID) - if finalTask.Status != "succeeded" { - os.Exit(1) - } -} - -var ( - masterKey = "masterKey" - defaultClient = NewClient(ClientConfig{ - Host: getenv("MEILISEARCH_URL", "http://localhost:7700"), - APIKey: masterKey, - }) - defaultRankingRules = []string{ - "words", "typo", "proximity", "attribute", "sort", "exactness", - } - defaultTypoTolerance = TypoTolerance{ - Enabled: true, - MinWordSizeForTypos: MinWordSizeForTypos{ - OneTypo: 5, - TwoTypos: 9, - }, - DisableOnWords: []string{}, - DisableOnAttributes: []string{}, - } - defaultPagination = Pagination{ - MaxTotalHits: 1000, - } - defaultFaceting = Faceting{ - MaxValuesPerFacet: 100, - } -) - -var customClient = NewFastHTTPCustomClient(ClientConfig{ - Host: getenv("MEILISEARCH_URL", "http://localhost:7700"), - APIKey: masterKey, -}, - &fasthttp.Client{ - TLSConfig: &tls.Config{InsecureSkipVerify: true}, - Name: "custom-client", - }) - -var brokenClient = NewFastHTTPCustomClient(ClientConfig{ - Host: getenv("MEILISEARCH_URL", "http://localhost:7700"), - APIKey: "WRONG", -}, - &fasthttp.Client{ - TLSConfig: &tls.Config{InsecureSkipVerify: true}, - Name: 
"broken-client", - }) - -var timeoutClient = NewClient(ClientConfig{ - Host: getenv("MEILISEARCH_URL", "http://localhost:7700"), - APIKey: masterKey, - Timeout: 1, -}) - -var privateClient = NewClient(ClientConfig{ - Host: getenv("MEILISEARCH_URL", "http://localhost:7700"), - APIKey: GetPrivateKey(), -}) - -func TestMain(m *testing.M) { - _, _ = deleteAllIndexes(defaultClient) - code := m.Run() - _, _ = deleteAllIndexes(defaultClient) - os.Exit(code) -} - -func Test_deleteAllIndexes(t *testing.T) { - indexUIDS := []string{ - "Test_deleteAllIndexes", - "Test_deleteAllIndexes2", - "Test_deleteAllIndexes3", - } - _, _ = deleteAllIndexes(defaultClient) - - for _, uid := range indexUIDS { - task, err := defaultClient.CreateIndex(&IndexConfig{ - Uid: uid, - }) - if err != nil { - t.Fatal(err) +func testParseCsvDocuments(t *testing.T, documents io.Reader) []map[string]interface{} { + var ( + docs []map[string]interface{} + header []string + ) + r := csv.NewReader(documents) + for { + record, err := r.Read() + if err == io.EOF { + break } - _, err = defaultClient.WaitForTask(task.TaskUID) - if err != nil { - t.Fatal(err) + require.NoError(t, err) + if header == nil { + header = record + continue + } + doc := make(map[string]interface{}) + for i, key := range header { + doc[key] = record[i] } + docs = append(docs, doc) } + return docs +} - _, _ = deleteAllIndexes(defaultClient) - - for _, uid := range indexUIDS { - resp, err := defaultClient.GetIndex(uid) - if resp != nil { - t.Fatal(resp) - } - if err == nil { - t.Fatal("deleteAllIndexes: One or more indexes were not deleted") +func testParseNdjsonDocuments(t *testing.T, documents io.Reader) []map[string]interface{} { + var docs []map[string]interface{} + scanner := bufio.NewScanner(documents) + for scanner.Scan() { + line := strings.TrimSpace(scanner.Text()) + if line == "" { + continue } + doc := make(map[string]interface{}) + err := json.Unmarshal([]byte(line), &doc) + require.NoError(t, err) + docs = append(docs, doc) 
} + require.NoError(t, scanner.Err()) + return docs } diff --git a/options.go b/options.go index c42e31b8..25f502f8 100644 --- a/options.go +++ b/options.go @@ -16,9 +16,8 @@ var ( ) type meiliOpt struct { - client *http.Client - timeout time.Duration - apiKey string + client *http.Client + apiKey string } type Option func(*meiliOpt) diff --git a/types_test.go b/types_test.go deleted file mode 100644 index 2ae781e2..00000000 --- a/types_test.go +++ /dev/null @@ -1,43 +0,0 @@ -package meilisearch - -import ( - "encoding/json" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "testing" -) - -func TestTypes_UnmarshalJSON(t *testing.T) { - var raw RawType - data := []byte(`"some data"`) - - err := json.Unmarshal(data, &raw) - require.NoError(t, err) - - expected := RawType(data) - require.Equal(t, expected, raw) -} - -func TestTypes_MarshalJSON(t *testing.T) { - raw := RawType(`"some data"`) - - data, err := json.Marshal(raw) - require.NoError(t, err) - - expected := []byte(`"some data"`) - require.Equal(t, data, expected) -} - -func TestTypes_ValidateSearchRequest(t *testing.T) { - req := &SearchRequest{ - Limit: 0, - Hybrid: &SearchRequestHybrid{ - Embedder: "", - }, - } - - req.validate() - - assert.Equal(t, req.Limit, DefaultLimit) - assert.Equal(t, req.Hybrid.Embedder, "default") -} From f526c9a05fba7c9a97a41d7e4f78a7e4fd0daf9e Mon Sep 17 00:00:00 2001 From: Javad Date: Sun, 4 Aug 2024 11:11:24 +0330 Subject: [PATCH 34/43] fix: update module version to 1.17 for fix issue testify --- go.mod | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go.mod b/go.mod index 048c77cb..934148a0 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/meilisearch/meilisearch-go -go 1.16 +go 1.17 require ( github.com/golang-jwt/jwt/v4 v4.5.0 From 67c39c8eda285a2d9c2010c90ec05e7b91ffb33d Mon Sep 17 00:00:00 2001 From: Javad Date: Sun, 4 Aug 2024 11:14:19 +0330 Subject: [PATCH 35/43] fix: update go module --- go.mod | 7 +++++++ 
1 file changed, 7 insertions(+) diff --git a/go.mod b/go.mod index 934148a0..58f90f46 100644 --- a/go.mod +++ b/go.mod @@ -7,3 +7,10 @@ require ( github.com/mailru/easyjson v0.7.7 github.com/stretchr/testify v1.9.0 ) + +require ( + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) From 3daaca60453795c591f60a2ea18656dda751aee2 Mon Sep 17 00:00:00 2001 From: Javad Date: Sun, 4 Aug 2024 11:16:37 +0330 Subject: [PATCH 36/43] fix: change testify version to stable and support go 1.16 --- go.mod | 11 ++--------- go.sum | 6 ++---- 2 files changed, 4 insertions(+), 13 deletions(-) diff --git a/go.mod b/go.mod index 58f90f46..4dec4131 100644 --- a/go.mod +++ b/go.mod @@ -1,16 +1,9 @@ module github.com/meilisearch/meilisearch-go -go 1.17 +go 1.16 require ( github.com/golang-jwt/jwt/v4 v4.5.0 github.com/mailru/easyjson v0.7.7 - github.com/stretchr/testify v1.9.0 -) - -require ( - github.com/davecgh/go-spew v1.1.1 // indirect - github.com/josharian/intern v1.0.0 // indirect - github.com/pmezard/go-difflib v1.0.0 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect + github.com/stretchr/testify v1.8.2 ) diff --git a/go.sum b/go.sum index a5f193f9..21a37433 100644 --- a/go.sum +++ b/go.sum @@ -12,12 +12,10 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify 
v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= -github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= -github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= +github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= From 621feaf7cf9c1745f4bdd2c07a70b8fc9384c234 Mon Sep 17 00:00:00 2001 From: Javad Date: Sun, 4 Aug 2024 11:20:19 +0330 Subject: [PATCH 37/43] docs: update example documentation --- doc.go | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/doc.go b/doc.go index b3f90523..73d2db58 100644 --- a/doc.go +++ b/doc.go @@ -8,11 +8,23 @@ // // Example: // -// sv, err := New("http://localhost:7700", WithAPIKey("foobar")) +// meili := New("http://localhost:7700", WithAPIKey("foobar")) +// +// idx := meili.Index("movies") +// +// documents := []map[string]interface{}{ +// {"id": 1, "title": "Carol", "genres": []string{"Romance", "Drama"}}, +// {"id": 2, "title": "Wonder Woman", "genres": []string{"Action", "Adventure"}}, +// {"id": 3, "title": "Life of Pi", "genres": []string{"Adventure", "Drama"}}, +// {"id": 4, "title": "Mad Max: Fury Road", "genres": []string{"Adventure", "Science Fiction"}}, +// {"id": 5, "title": "Moana", "genres": []string{"Fantasy", "Action"}}, +// {"id": 6, "title": "Philadelphia", "genres": []string{"Drama"}}, +// } +// task, err := idx.AddDocuments(documents) // if err != nil { // fmt.Println(err) -// return +// os.Exit(1) // } // -// fmt.Println(sv.IsHealthy(context.Background())) +// 
fmt.Println(task.TaskUID) package meilisearch From 2afede6b480c555806acabc4ebd06b20ea5ffbe2 Mon Sep 17 00:00:00 2001 From: Javad Date: Sun, 4 Aug 2024 11:41:33 +0330 Subject: [PATCH 38/43] chore: update many for coverage --- index_search_test.go | 31 ++++++++++++++++++++++++++++++- index_test.go | 35 ++++++++++++++++++++++++----------- options_test.go | 14 ++++++++++++++ 3 files changed, 68 insertions(+), 12 deletions(-) create mode 100644 options_test.go diff --git a/index_search_test.go b/index_search_test.go index 6fcf7c67..b81ea071 100644 --- a/index_search_test.go +++ b/index_search_test.go @@ -553,6 +553,21 @@ func TestIndex_Search(t *testing.T) { }, wantErr: false, }, + { + name: "TestIndexSearchWithInvalidIndex", + args: args{ + UID: "invalidIndex", + client: sv, + query: "pri", + request: &SearchRequest{ + Limit: 10, + AttributesToRetrieve: []string{"book_id", "title"}, + RankingScoreThreshold: 0.2, + }, + }, + want: nil, + wantErr: true, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ -1818,6 +1833,7 @@ func TestIndex_SearchSimilarDocuments(t *testing.T) { client ServiceManager request *SimilarDocumentQuery resp *SimilarDocumentResult + wantErr bool }{ { UID: "indexUID", @@ -1825,7 +1841,15 @@ func TestIndex_SearchSimilarDocuments(t *testing.T) { request: &SimilarDocumentQuery{ Id: "123", }, - resp: new(SimilarDocumentResult), + resp: new(SimilarDocumentResult), + wantErr: false, + }, + { + UID: "indexUID", + client: sv, + request: &SimilarDocumentQuery{}, + resp: new(SimilarDocumentResult), + wantErr: true, }, } @@ -1837,6 +1861,11 @@ func TestIndex_SearchSimilarDocuments(t *testing.T) { t.Cleanup(cleanup(c)) err = i.SearchSimilarDocuments(tt.request, tt.resp) + if tt.wantErr { + require.Error(t, err) + return + } + require.NoError(t, err) require.NotNil(t, tt.resp) }) diff --git a/index_test.go b/index_test.go index 175d3fd2..bdf6e750 100644 --- a/index_test.go +++ b/index_test.go @@ -471,6 +471,7 @@ func TestIndex_FetchInfo(t 
*testing.T) { customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ InsecureSkipVerify: true, })) + broken := setup(t, "", WithAPIKey("wrong")) type args struct { UID string @@ -503,25 +504,37 @@ func TestIndex_FetchInfo(t *testing.T) { PrimaryKey: "book_id", }, }, + { + name: "TestIndexFetchInfoWithBrokenClient", + args: args{ + UID: "TestIndexFetchInfoWithCustomClient", + client: broken, + }, + wantResp: nil, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - setUpBasicIndex(tt.args.client, tt.args.UID) + setUpBasicIndex(sv, tt.args.UID) c := tt.args.client t.Cleanup(cleanup(c)) - i, err := c.GetIndex(tt.args.UID) - require.NoError(t, err) + i := c.Index(tt.args.UID) gotResp, err := i.FetchInfo() - require.NoError(t, err) - require.Equal(t, tt.wantResp.UID, gotResp.UID) - require.Equal(t, tt.wantResp.PrimaryKey, gotResp.PrimaryKey) - // Make sure that timestamps are also fetched and are updated - require.NotZero(t, gotResp.CreatedAt) - require.NotZero(t, gotResp.UpdatedAt) - require.Equal(t, i.CreatedAt, gotResp.CreatedAt) - require.Equal(t, i.UpdatedAt, gotResp.UpdatedAt) + + if tt.wantResp == nil { + require.Error(t, err) + require.Nil(t, gotResp) + } else { + require.NoError(t, err) + require.Equal(t, tt.wantResp.UID, gotResp.UID) + require.Equal(t, tt.wantResp.PrimaryKey, gotResp.PrimaryKey) + // Make sure that timestamps are also fetched and are updated + require.NotZero(t, gotResp.CreatedAt) + require.NotZero(t, gotResp.UpdatedAt) + } + }) } } diff --git a/options_test.go b/options_test.go new file mode 100644 index 00000000..2171fd1d --- /dev/null +++ b/options_test.go @@ -0,0 +1,14 @@ +package meilisearch + +import ( + "github.com/stretchr/testify/require" + "net/http" + "testing" +) + +func TestOptions_WithCustomClient(t *testing.T) { + meili := setup(t, "", WithCustomClient(http.DefaultClient)) + v, err := meili.Version() + require.NoError(t, err) + require.NotZero(t, v.PkgVersion) +} From 
ea75d08c28d2d447f08d456133c74fc607565511 Mon Sep 17 00:00:00 2001 From: Javad Date: Sun, 4 Aug 2024 14:32:15 +0330 Subject: [PATCH 39/43] fix: improved coverage test of client --- client_test.go | 38 +++++++++++++++++++++++++++++++++++--- 1 file changed, 35 insertions(+), 3 deletions(-) diff --git a/client_test.go b/client_test.go index 2322de6b..e1d68893 100644 --- a/client_test.go +++ b/client_test.go @@ -1,6 +1,7 @@ package meilisearch import ( + "bytes" "context" "github.com/stretchr/testify/assert" "net/http" @@ -22,6 +23,9 @@ func TestExecuteRequest(t *testing.T) { } else if r.Method == http.MethodPost && r.URL.Path == "/test-post" { w.WriteHeader(http.StatusCreated) _, _ = w.Write([]byte(`{"message":"post successful"}`)) + } else if r.URL.Path == "/test-bad-request" { + w.WriteHeader(http.StatusBadRequest) + _, _ = w.Write([]byte(`{"message":"bad request"}`)) } else { w.WriteHeader(http.StatusNotFound) } @@ -71,6 +75,17 @@ func TestExecuteRequest(t *testing.T) { expectedResp: nil, expectedErr: &Error{StatusCode: http.StatusNotFound}, }, + { + name: "400 Bad Request", + internalReq: &internalRequest{ + endpoint: "/test-bad-request", + method: http.MethodGet, + withResponse: &mockResponse{}, + acceptedStatusCodes: []int{http.StatusOK}, + }, + expectedResp: nil, + expectedErr: &Error{StatusCode: http.StatusBadRequest}, + }, } for _, tt := range tests { @@ -78,9 +93,13 @@ func TestExecuteRequest(t *testing.T) { err := client.executeRequest(context.Background(), tt.internalReq) if tt.expectedErr != nil { assert.Error(t, err) - var apiErr *Error - assert.ErrorAs(t, err, &apiErr) - assert.Equal(t, tt.expectedErr.(*Error).StatusCode, apiErr.StatusCode) + if apiErr, ok := tt.expectedErr.(*Error); ok { + var actualErr *Error + assert.ErrorAs(t, err, &actualErr) + assert.Equal(t, apiErr.StatusCode, actualErr.StatusCode) + } else { + assert.Contains(t, err.Error(), tt.expectedErr.Error()) + } } else { assert.NoError(t, err) assert.Equal(t, tt.expectedResp, 
tt.internalReq.withResponse) @@ -88,3 +107,16 @@ func TestExecuteRequest(t *testing.T) { }) } } + +func TestBufferPool(t *testing.T) { + client := newClient(&http.Client{}, "http://localhost", "") + + data := "test" + + buf1 := client.bufferPool.Get().(*bytes.Buffer) + buf1.WriteString(data) + client.bufferPool.Put(buf1) + + buf2 := client.bufferPool.Get().(*bytes.Buffer) + assert.Equal(t, buf2.String(), data) +} From f0595424de4fd938b700b179ca555af5718f65f2 Mon Sep 17 00:00:00 2001 From: Javad Date: Sun, 4 Aug 2024 14:36:10 +0330 Subject: [PATCH 40/43] fix: client test except on pool --- client_test.go | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/client_test.go b/client_test.go index e1d68893..6053a4c8 100644 --- a/client_test.go +++ b/client_test.go @@ -1,7 +1,6 @@ package meilisearch import ( - "bytes" "context" "github.com/stretchr/testify/assert" "net/http" @@ -107,16 +106,3 @@ func TestExecuteRequest(t *testing.T) { }) } } - -func TestBufferPool(t *testing.T) { - client := newClient(&http.Client{}, "http://localhost", "") - - data := "test" - - buf1 := client.bufferPool.Get().(*bytes.Buffer) - buf1.WriteString(data) - client.bufferPool.Put(buf1) - - buf2 := client.bufferPool.Get().(*bytes.Buffer) - assert.Equal(t, buf2.String(), data) -} From a1e7862f56aee640ef32ef4ab6d68261a4f1d4e9 Mon Sep 17 00:00:00 2001 From: Javad Date: Wed, 7 Aug 2024 12:21:06 +0330 Subject: [PATCH 41/43] fix: change old client example to new --- .code-samples.meilisearch.yaml | 51 ++++++++-------------------------- 1 file changed, 11 insertions(+), 40 deletions(-) diff --git a/.code-samples.meilisearch.yaml b/.code-samples.meilisearch.yaml index 50d443fe..629af9fc 100644 --- a/.code-samples.meilisearch.yaml +++ b/.code-samples.meilisearch.yaml @@ -515,10 +515,7 @@ add_movies_json_1: |- client.Index("movies").AddDocuments(&movies) landing_getting_started_1: |- - client := meilisearch.NewClient(meilisearch.ClientConfig{ - Host: "http://localhost:7700", - APIKey: 
"masterKey", - }) + client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey")) documents := []map[string]interface{}{ { "id": 1, "title": "Carol" }, @@ -548,10 +545,7 @@ getting_started_add_documents_md: |- ) func main() { - client := meilisearch.NewClient(meilisearch.ClientConfig{ - Host: "http://localhost:7700", - APIKey: aSampleMasterKey, - }) + client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey")) jsonFile, _ := os.Open("movies.json") defer jsonFile.Close() @@ -576,9 +570,7 @@ getting_started_search_md: |- [About this SDK](https://github.com/meilisearch/meilisearch-go/) getting_started_add_meteorites: |- - client := meilisearch.NewClient(meilisearch.ClientConfig{ - Host: "http://localhost:7700", - }) + client := meilisearch.New("http://localhost:7700") jsonFile, _ := os.Open("meteorites.json") defer jsonFile.Close() @@ -791,25 +783,16 @@ primary_field_guide_add_document_primary_key: |- primary_field_guide_update_document_primary_key: |- client.Index("books").UpdateIndex("title") security_guide_search_key_1: |- - client := meilisearch.NewClient(meilisearch.ClientConfig{ - Host: "http://localhost:7700", - APIKey: "apiKey", - }) + client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey")) client.Index("patient_medical_records").Search(); security_guide_update_key_1: |- - client := meilisearch.NewClient(meilisearch.ClientConfig{ - Host: "http://localhost:7700", - APIKey: "masterKey", - }) + client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey")) client.UpdateKey("74c9c733-3368-4738-bbe5-1d18a5fecb37", &meilisearch.Key{ Indexes: []string{"doctors"}, }) security_guide_create_key_1: |- - client := meilisearch.NewClient(meilisearch.ClientConfig{ - Host: "http://localhost:7700", - APIKey: "masterKey", - }) + client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey")) client.CreateKey(&meilisearch.Key{ Description: 
"Search patient records key", @@ -818,22 +801,13 @@ security_guide_create_key_1: |- ExpiresAt: time.Date(2042, time.April, 02, 0, 42, 42, 0, time.UTC), }) security_guide_list_keys_1: |- - client := meilisearch.NewClient(meilisearch.ClientConfig{ - Host: "http://localhost:7700", - APIKey: "masterKey", - }) + client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey")) client.GetKeys(nil); security_guide_delete_key_1: |- - client := meilisearch.NewClient(meilisearch.ClientConfig{ - Host: "http://localhost:7700", - APIKey: "masterKey", - }) + client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey")) client.DeleteKey("74c9c733-3368-4738-bbe5-1d18a5fecb37"); authorization_header_1: |- - client := meilisearch.NewClient(meilisearch.ClientConfig{ - Host: "http://localhost:7700", - APIKey: "masterKey", - }) + client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey")) client.GetKeys(nil); tenant_token_guide_generate_sdk_1: |- searchRules := map[string]interface{}{ @@ -848,11 +822,8 @@ tenant_token_guide_generate_sdk_1: |- token, err := client.GenerateTenantToken(searchRules, options); tenant_token_guide_search_sdk_1: |- - frontEndClient := meilisearch.NewClient(meilisearch.ClientConfig{ - Host: "http://localhost:7700", - APIKey: token, - }) - frontEndClient.Index("patient_medical_records").Search("blood test", &meilisearch.SearchRequest{}); + client := meilisearch.New("http://localhost:7700", meilisearch.WithAPIKey("masterKey")) + client.Index("patient_medical_records").Search("blood test", &meilisearch.SearchRequest{}); synonyms_guide_1: |- synonyms := map[string][]string{ "great": []string{"fantastic"}, From a0234176bf5461ab4d562a834d297e31f8859c8b Mon Sep 17 00:00:00 2001 From: Javad Date: Mon, 12 Aug 2024 12:31:25 +0330 Subject: [PATCH 42/43] fix: add test for RawType to improve coverage --- types_test.go | 42 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) 
create mode 100644 types_test.go diff --git a/types_test.go b/types_test.go new file mode 100644 index 00000000..11a2ecc7 --- /dev/null +++ b/types_test.go @@ -0,0 +1,42 @@ +package meilisearch + +import ( + "github.com/stretchr/testify/assert" + "testing" +) + +func TestRawType_UnmarshalJSON(t *testing.T) { + var r RawType + + data := []byte(`"example"`) + err := r.UnmarshalJSON(data) + assert.NoError(t, err) + assert.Equal(t, RawType(`"example"`), r) + + data = []byte(`""`) + err = r.UnmarshalJSON(data) + assert.NoError(t, err) + assert.Equal(t, RawType(`""`), r) + + data = []byte(`{invalid}`) + err = r.UnmarshalJSON(data) + assert.NoError(t, err) + assert.Equal(t, RawType(`{invalid}`), r) +} + +func TestRawType_MarshalJSON(t *testing.T) { + r := RawType(`"example"`) + data, err := r.MarshalJSON() + assert.NoError(t, err) + assert.Equal(t, []byte(`"example"`), data) + + r = RawType(`""`) + data, err = r.MarshalJSON() + assert.NoError(t, err) + assert.Equal(t, []byte(`""`), data) + + r = RawType(`{random}`) + data, err = r.MarshalJSON() + assert.NoError(t, err) + assert.Equal(t, []byte(`{random}`), data) +} From cb933a83fbb07c429f2037bcfce4b88478e0eb7b Mon Sep 17 00:00:00 2001 From: Javad Date: Mon, 12 Aug 2024 12:50:13 +0330 Subject: [PATCH 43/43] fix: add update documents test for improve coverage --- index_document_test.go | 65 ++++++++++++++++++++++++++++++++++++++---- 1 file changed, 60 insertions(+), 5 deletions(-) diff --git a/index_document_test.go b/index_document_test.go index da3dd1d8..3d699254 100644 --- a/index_document_test.go +++ b/index_document_test.go @@ -7,7 +7,7 @@ import ( "testing" ) -func TestIndex_AddDocuments(t *testing.T) { +func TestIndex_AddOrUpdateDocuments(t *testing.T) { sv := setup(t, "") customSv := setup(t, "", WithCustomClientWithTLS(&tls.Config{ InsecureSkipVerify: true, @@ -209,6 +209,15 @@ func TestIndex_AddDocuments(t *testing.T) { }, &documents) require.NoError(t, err) require.Equal(t, tt.resp.documentsRes, documents) + + 
gotResp, err = i.UpdateDocuments(tt.args.documentsPtr) + require.NoError(t, err) + require.GreaterOrEqual(t, gotResp.TaskUID, tt.resp.wantResp.TaskUID) + require.Equal(t, gotResp.Status, tt.resp.wantResp.Status) + require.Equal(t, gotResp.Type, tt.resp.wantResp.Type) + require.Equal(t, gotResp.IndexUID, tt.args.UID) + require.NotZero(t, gotResp.EnqueuedAt) + require.NoError(t, err) }) } } @@ -399,7 +408,7 @@ func TestIndex_AddDocumentsWithPrimaryKey(t *testing.T) { } } -func TestIndex_AddDocumentsInBatches(t *testing.T) { +func TestIndex_AddOrUpdateDocumentsInBatches(t *testing.T) { sv := setup(t, "") type argsNoKey struct { @@ -512,6 +521,18 @@ func TestIndex_AddDocumentsInBatches(t *testing.T) { require.NoError(t, err) require.Equal(t, tt.args.documentsPtr, documents.Results) + + gotResp, err = i.UpdateDocumentsInBatches(tt.args.documentsPtr, tt.args.batchSize) + require.NoError(t, err) + for i := 0; i < 2; i++ { + require.GreaterOrEqual(t, gotResp[i].TaskUID, tt.wantResp[i].TaskUID) + require.Equal(t, gotResp[i].Status, tt.wantResp[i].Status) + require.Equal(t, gotResp[i].Type, tt.wantResp[i].Type) + require.Equal(t, gotResp[i].IndexUID, tt.args.UID) + require.NotZero(t, gotResp[i].EnqueuedAt) + } + + testWaitForBatchTask(t, i, gotResp) }) } @@ -545,7 +566,7 @@ func TestIndex_AddDocumentsInBatches(t *testing.T) { } } -func TestIndex_AddDocumentsNdjson(t *testing.T) { +func TestIndex_AddOrUpdateDocumentsNdjson(t *testing.T) { sv := setup(t, "") type args struct { @@ -618,6 +639,16 @@ func TestIndex_AddDocumentsNdjson(t *testing.T) { err = i.GetDocuments(&DocumentsQuery{}, &documents) require.NoError(t, err) require.Equal(t, wantDocs, documents.Results) + + if !testReader { + gotResp, err = i.UpdateDocumentsNdjson(tt.args.documents) + require.NoError(t, err) + require.GreaterOrEqual(t, gotResp.TaskUID, tt.wantResp.TaskUID) + require.Equal(t, tt.wantResp.Status, gotResp.Status) + require.Equal(t, tt.wantResp.Type, gotResp.Type) + require.NotZero(t, 
gotResp.EnqueuedAt) + testWaitForTask(t, i, gotResp) + } }) } @@ -628,7 +659,7 @@ func TestIndex_AddDocumentsNdjson(t *testing.T) { } } -func TestIndex_AddDocumentsCsvInBatches(t *testing.T) { +func TestIndex_AddOrUpdateDocumentsCsvInBatches(t *testing.T) { sv := setup(t, "") type args struct { @@ -718,6 +749,18 @@ func TestIndex_AddDocumentsCsvInBatches(t *testing.T) { err = i.GetDocuments(&DocumentsQuery{}, &documents) require.NoError(t, err) require.Equal(t, wantDocs, documents.Results) + + if !testReader { + gotResp, err = i.UpdateDocumentsCsvInBatches(tt.args.documents, tt.args.batchSize, nil) + require.NoError(t, err) + for i := 0; i < 2; i++ { + require.GreaterOrEqual(t, gotResp[i].TaskUID, tt.wantResp[i].TaskUID) + require.Equal(t, gotResp[i].Status, tt.wantResp[i].Status) + require.Equal(t, gotResp[i].Type, tt.wantResp[i].Type) + require.NotZero(t, gotResp[i].EnqueuedAt) + } + } + }) } @@ -931,7 +974,7 @@ func TestIndex_AddDocumentsCsvWithOptions(t *testing.T) { } } -func TestIndex_AddDocumentsNdjsonInBatches(t *testing.T) { +func TestIndex_AddOrUpdateDocumentsNdjsonInBatches(t *testing.T) { sv := setup(t, "") type args struct { @@ -1021,6 +1064,18 @@ func TestIndex_AddDocumentsNdjsonInBatches(t *testing.T) { err = i.GetDocuments(&DocumentsQuery{}, &documents) require.NoError(t, err) require.Equal(t, wantDocs, documents.Results) + + if !testReader { + gotResp, err = i.UpdateDocumentsNdjsonInBatches(tt.args.documents, tt.args.batchSize) + require.NoError(t, err) + for i := 0; i < 2; i++ { + require.GreaterOrEqual(t, gotResp[i].TaskUID, tt.wantResp[i].TaskUID) + require.Equal(t, gotResp[i].Status, tt.wantResp[i].Status) + require.Equal(t, gotResp[i].Type, tt.wantResp[i].Type) + require.NotZero(t, gotResp[i].EnqueuedAt) + } + testWaitForBatchTask(t, i, gotResp) + } }) }