diff --git a/.github/workflows/golangci-lint.yml b/.github/workflows/golangci-lint.yml index 9783f4d..f6213bf 100644 --- a/.github/workflows/golangci-lint.yml +++ b/.github/workflows/golangci-lint.yml @@ -30,4 +30,4 @@ jobs: - name: golangci-lint uses: golangci/golangci-lint-action@v6 with: - version: v1.59 + version: v1.60 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 460f0e7..b8847b1 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest services: typesense: - image: typesense/typesense:26.0 + image: typesense/typesense:27.0 ports: - 8108:8108/tcp volumes: diff --git a/typesense/analytics.go b/typesense/analytics.go new file mode 100644 index 0000000..21fe01c --- /dev/null +++ b/typesense/analytics.go @@ -0,0 +1,23 @@ +package typesense + +type AnalyticsInterface interface { + Events() AnalyticsEventsInterface + Rules() AnalyticsRulesInterface + Rule(ruleName string) AnalyticsRuleInterface +} + +type analytics struct { + apiClient APIClientInterface +} + +func (a *analytics) Events() AnalyticsEventsInterface { + return &analyticsEvents{apiClient: a.apiClient} +} + +func (a *analytics) Rules() AnalyticsRulesInterface { + return &analyticsRules{apiClient: a.apiClient} +} + +func (a *analytics) Rule(ruleName string) AnalyticsRuleInterface { + return &analyticsRule{apiClient: a.apiClient, ruleName: ruleName} +} diff --git a/typesense/analytics_events.go b/typesense/analytics_events.go new file mode 100644 index 0000000..34f2386 --- /dev/null +++ b/typesense/analytics_events.go @@ -0,0 +1,26 @@ +package typesense + +import ( + "context" + + "github.com/typesense/typesense-go/v2/typesense/api" +) + +type AnalyticsEventsInterface interface { + Create(ctx context.Context, eventSchema *api.AnalyticsEventCreateSchema) (*api.AnalyticsEventCreateResponse, error) +} + +type analyticsEvents struct { + apiClient APIClientInterface +} + +func (a *analyticsEvents) Create(ctx 
context.Context, eventSchema *api.AnalyticsEventCreateSchema) (*api.AnalyticsEventCreateResponse, error) { + response, err := a.apiClient.CreateAnalyticsEventWithResponse(ctx, api.CreateAnalyticsEventJSONRequestBody(*eventSchema)) + if err != nil { + return nil, err + } + if response.JSON201 == nil { + return nil, &HTTPError{Status: response.StatusCode(), Body: response.Body} + } + return response.JSON201, nil +} diff --git a/typesense/analytics_events_test.go b/typesense/analytics_events_test.go new file mode 100644 index 0000000..f1ef194 --- /dev/null +++ b/typesense/analytics_events_test.go @@ -0,0 +1,55 @@ +package typesense + +import ( + "context" + "encoding/json" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/typesense/typesense-go/v2/typesense/api" +) + +func TestAnalyticsEventsCreate(t *testing.T) { + expectedData := &api.AnalyticsEventCreateSchema{ + Name: "products_click_event", + Type: "click", + Data: map[string]interface{}{ + "hello": "hi", + }, + } + + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/analytics/events", http.MethodPost) + + var reqBody api.AnalyticsEventCreateSchema + err := json.NewDecoder(r.Body).Decode(&reqBody) + + assert.NoError(t, err) + assert.Equal(t, *expectedData, reqBody) + + data := jsonEncode(t, api.AnalyticsEventCreateResponse{ + Ok: true, + }) + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + w.Write(data) + }) + defer server.Close() + + res, err := client.Analytics().Events().Create(context.Background(), expectedData) + assert.NoError(t, err) + assert.True(t, res.Ok) +} + +func TestAnalyticsEventsCreateOnHttpStatusErrorCodeReturnsError(t *testing.T) { + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/analytics/events", http.MethodPost) + w.WriteHeader(http.StatusConflict) + }) + defer server.Close() 
+ + _, err := client.Analytics().Events().Create(context.Background(), &api.AnalyticsEventCreateSchema{}) + assert.ErrorContains(t, err, "status: 409") +} diff --git a/typesense/analytics_rule.go b/typesense/analytics_rule.go new file mode 100644 index 0000000..6a66313 --- /dev/null +++ b/typesense/analytics_rule.go @@ -0,0 +1,39 @@ +package typesense + +import ( + "context" + + "github.com/typesense/typesense-go/v2/typesense/api" +) + +type AnalyticsRuleInterface interface { + Delete(ctx context.Context) (*api.AnalyticsRuleDeleteResponse, error) + Retrieve(ctx context.Context) (*api.AnalyticsRuleSchema, error) +} + +type analyticsRule struct { + apiClient APIClientInterface + ruleName string +} + +func (a *analyticsRule) Delete(ctx context.Context) (*api.AnalyticsRuleDeleteResponse, error) { + response, err := a.apiClient.DeleteAnalyticsRuleWithResponse(ctx, a.ruleName) + if err != nil { + return nil, err + } + if response.JSON200 == nil { + return nil, &HTTPError{Status: response.StatusCode(), Body: response.Body} + } + return response.JSON200, nil +} + +func (a *analyticsRule) Retrieve(ctx context.Context) (*api.AnalyticsRuleSchema, error) { + response, err := a.apiClient.RetrieveAnalyticsRuleWithResponse(ctx, a.ruleName) + if err != nil { + return nil, err + } + if response.JSON200 == nil { + return nil, &HTTPError{Status: response.StatusCode(), Body: response.Body} + } + return response.JSON200, nil +} diff --git a/typesense/analytics_rule_test.go b/typesense/analytics_rule_test.go new file mode 100644 index 0000000..0a41738 --- /dev/null +++ b/typesense/analytics_rule_test.go @@ -0,0 +1,75 @@ +package typesense + +import ( + "context" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/typesense/typesense-go/v2/typesense/api" + "github.com/typesense/typesense-go/v2/typesense/api/pointer" +) + +func TestAnalyticsRuleRetrieve(t *testing.T) { + expectedData := &api.AnalyticsRuleSchema{ + Name: "test_rule", + Type: "test_type", + 
Params: api.AnalyticsRuleParameters{ + Limit: pointer.Int(10), + }, + } + + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/analytics/rules/test_rule", http.MethodGet) + data := jsonEncode(t, expectedData) + + w.Header().Set("Content-Type", "application/json") + w.Write(data) + }) + defer server.Close() + + res, err := client.Analytics().Rule(expectedData.Name).Retrieve(context.Background()) + assert.NoError(t, err) + assert.Equal(t, expectedData, res) +} + +func TestAnalyticsRuleRetrieveOnHttpStatusErrorCodeReturnsError(t *testing.T) { + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/analytics/rules/test_rule", http.MethodGet) + w.WriteHeader(http.StatusConflict) + }) + defer server.Close() + + _, err := client.Analytics().Rule("test_rule").Retrieve(context.Background()) + assert.ErrorContains(t, err, "status: 409") +} + +func TestAnalyticsRuleDelete(t *testing.T) { + expectedData := &api.AnalyticsRuleDeleteResponse{ + Name: "test_rule", + } + + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/analytics/rules/test_rule", http.MethodDelete) + data := jsonEncode(t, expectedData) + + w.Header().Set("Content-Type", "application/json") + w.Write(data) + }) + defer server.Close() + + res, err := client.Analytics().Rule("test_rule").Delete(context.Background()) + assert.NoError(t, err) + assert.Equal(t, expectedData, res) +} + +func TestAnalyticsRuleDeleteOnHttpStatusErrorCodeReturnsError(t *testing.T) { + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/analytics/rules/test_rule", http.MethodDelete) + w.WriteHeader(http.StatusConflict) + }) + defer server.Close() + + _, err := client.Analytics().Rule("test_rule").Delete(context.Background()) + assert.ErrorContains(t, err, "status: 
409") +} diff --git a/typesense/analytics_rules.go b/typesense/analytics_rules.go new file mode 100644 index 0000000..58c3d76 --- /dev/null +++ b/typesense/analytics_rules.go @@ -0,0 +1,39 @@ +package typesense + +import ( + "context" + + "github.com/typesense/typesense-go/v2/typesense/api" +) + +type AnalyticsRulesInterface interface { + Upsert(ctx context.Context, ruleName string, ruleSchema *api.AnalyticsRuleUpsertSchema) (*api.AnalyticsRuleSchema, error) + Retrieve(ctx context.Context) ([]*api.AnalyticsRuleSchema, error) +} + +type analyticsRules struct { + apiClient APIClientInterface +} + +func (a *analyticsRules) Upsert(ctx context.Context, ruleName string, ruleSchema *api.AnalyticsRuleUpsertSchema) (*api.AnalyticsRuleSchema, error) { + response, err := a.apiClient.UpsertAnalyticsRuleWithResponse(ctx, + ruleName, api.UpsertAnalyticsRuleJSONRequestBody(*ruleSchema)) + if err != nil { + return nil, err + } + if response.JSON200 == nil { + return nil, &HTTPError{Status: response.StatusCode(), Body: response.Body} + } + return response.JSON200, nil +} + +func (a *analyticsRules) Retrieve(ctx context.Context) ([]*api.AnalyticsRuleSchema, error) { + response, err := a.apiClient.RetrieveAnalyticsRulesWithResponse(ctx) + if err != nil { + return nil, err + } + if response.JSON200 == nil { + return nil, &HTTPError{Status: response.StatusCode(), Body: response.Body} + } + return *response.JSON200.Rules, nil +} diff --git a/typesense/analytics_rules_test.go b/typesense/analytics_rules_test.go new file mode 100644 index 0000000..28f7b96 --- /dev/null +++ b/typesense/analytics_rules_test.go @@ -0,0 +1,98 @@ +package typesense + +import ( + "context" + "encoding/json" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/typesense/typesense-go/v2/typesense/api" + "github.com/typesense/typesense-go/v2/typesense/api/pointer" +) + +func TestAnalyticsRulesRetrieve(t *testing.T) { + expectedData := []*api.AnalyticsRuleSchema{ + { + Name: "test_name", 
+ Type: "test_type", + Params: api.AnalyticsRuleParameters{ + Limit: pointer.Int(10), + }, + }, + } + + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/analytics/rules", http.MethodGet) + data := jsonEncode(t, api.AnalyticsRulesRetrieveSchema{ + Rules: &expectedData, + }) + w.Header().Set("Content-Type", "application/json") + w.Write(data) + }) + defer server.Close() + + res, err := client.Analytics().Rules().Retrieve(context.Background()) + assert.NoError(t, err) + assert.Equal(t, expectedData, res) +} + +func TestAnalyticsRulesRetrieveOnHttpStatusErrorCodeReturnsError(t *testing.T) { + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/analytics/rules", http.MethodGet) + w.WriteHeader(http.StatusConflict) + }) + defer server.Close() + + _, err := client.Analytics().Rules().Retrieve(context.Background()) + assert.ErrorContains(t, err, "status: 409") +} + +func TestAnalyticsRulesUpsert(t *testing.T) { + upsertData := &api.AnalyticsRuleUpsertSchema{ + Type: api.AnalyticsRuleUpsertSchemaTypeCounter, + Params: api.AnalyticsRuleParameters{ + Limit: pointer.Int(100), + }, + } + expectedData := &api.AnalyticsRuleSchema{ + Name: "test-rule", + Type: api.AnalyticsRuleSchemaType(upsertData.Type), + Params: upsertData.Params, + } + + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/analytics/rules/test-rule", http.MethodPut) + + var reqBody api.AnalyticsRuleUpsertSchema + err := json.NewDecoder(r.Body).Decode(&reqBody) + + assert.NoError(t, err) + assert.Equal(t, *upsertData, reqBody) + + data := jsonEncode(t, api.AnalyticsRuleSchema{ + Name: expectedData.Name, + Type: api.AnalyticsRuleSchemaType(upsertData.Type), + Params: upsertData.Params, + }) + + w.Header().Set("Content-Type", "application/json") + w.Write(data) + }) + defer server.Close() + + res, err := 
client.Analytics().Rules().Upsert(context.Background(), expectedData.Name, upsertData) + assert.NoError(t, err) + assert.Equal(t, expectedData, res) +} + +func TestAnalyticsRulesUpsertOnHttpStatusErrorCodeReturnsError(t *testing.T) { + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/analytics/rules/test-rule", http.MethodPut) + w.WriteHeader(http.StatusConflict) + }) + defer server.Close() + + _, err := client.Analytics().Rules().Upsert(context.Background(), "test-rule", &api.AnalyticsRuleUpsertSchema{}) + assert.ErrorContains(t, err, "status: 409") +} diff --git a/typesense/api/client.go b/typesense/api/client.go index ce03473..8ae0dfa 100644 --- a/typesense/api/client.go +++ b/typesense/api/client.go @@ -16,3 +16,9 @@ func WithAPIKey(apiKey string) ClientOption { return nil } } + +// Manually defining this unreferenced schema here instead of disabling oapi-codegen schema pruning + +type DocumentIndexParameters struct { + DirtyValues *DirtyValues `json:"dirty_values,omitempty"` +} diff --git a/typesense/api/client_gen.go b/typesense/api/client_gen.go index 04bc669..2695586 100644 --- a/typesense/api/client_gen.go +++ b/typesense/api/client_gen.go @@ -103,6 +103,11 @@ type ClientInterface interface { UpsertAlias(ctx context.Context, aliasName string, body UpsertAliasJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // CreateAnalyticsEventWithBody request with any body + CreateAnalyticsEventWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateAnalyticsEvent(ctx context.Context, body CreateAnalyticsEventJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // RetrieveAnalyticsRules request RetrieveAnalyticsRules(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -170,9 +175,9 @@ type ClientInterface interface { GetDocument(ctx context.Context, 
collectionName string, documentId string, reqEditors ...RequestEditorFn) (*http.Response, error) // UpdateDocumentWithBody request with any body - UpdateDocumentWithBody(ctx context.Context, collectionName string, documentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + UpdateDocumentWithBody(ctx context.Context, collectionName string, documentId string, params *UpdateDocumentParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) - UpdateDocument(ctx context.Context, collectionName string, documentId string, body UpdateDocumentJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + UpdateDocument(ctx context.Context, collectionName string, documentId string, params *UpdateDocumentParams, body UpdateDocumentJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) // GetSearchOverrides request GetSearchOverrides(ctx context.Context, collectionName string, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -202,6 +207,25 @@ type ClientInterface interface { UpsertSearchSynonym(ctx context.Context, collectionName string, synonymId string, body UpsertSearchSynonymJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // RetrieveAllConversationModels request + RetrieveAllConversationModels(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateConversationModelWithBody request with any body + CreateConversationModelWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateConversationModel(ctx context.Context, body CreateConversationModelJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteConversationModel request + DeleteConversationModel(ctx context.Context, modelId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // RetrieveConversationModel request + 
RetrieveConversationModel(ctx context.Context, modelId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateConversationModelWithBody request with any body + UpdateConversationModelWithBody(ctx context.Context, modelId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateConversationModel(ctx context.Context, modelId string, body UpdateConversationModelJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // Debug request Debug(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -328,6 +352,30 @@ func (c *Client) UpsertAlias(ctx context.Context, aliasName string, body UpsertA return c.Client.Do(req) } +func (c *Client) CreateAnalyticsEventWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateAnalyticsEventRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateAnalyticsEvent(ctx context.Context, body CreateAnalyticsEventJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateAnalyticsEventRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + func (c *Client) RetrieveAnalyticsRules(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { req, err := NewRetrieveAnalyticsRulesRequest(c.Server) if err != nil { @@ -616,8 +664,8 @@ func (c *Client) GetDocument(ctx context.Context, collectionName string, documen return c.Client.Do(req) } -func (c *Client) UpdateDocumentWithBody(ctx context.Context, collectionName string, documentId string, contentType 
string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewUpdateDocumentRequestWithBody(c.Server, collectionName, documentId, contentType, body) +func (c *Client) UpdateDocumentWithBody(ctx context.Context, collectionName string, documentId string, params *UpdateDocumentParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateDocumentRequestWithBody(c.Server, collectionName, documentId, params, contentType, body) if err != nil { return nil, err } @@ -628,8 +676,8 @@ func (c *Client) UpdateDocumentWithBody(ctx context.Context, collectionName stri return c.Client.Do(req) } -func (c *Client) UpdateDocument(ctx context.Context, collectionName string, documentId string, body UpdateDocumentJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewUpdateDocumentRequest(c.Server, collectionName, documentId, body) +func (c *Client) UpdateDocument(ctx context.Context, collectionName string, documentId string, params *UpdateDocumentParams, body UpdateDocumentJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateDocumentRequest(c.Server, collectionName, documentId, params, body) if err != nil { return nil, err } @@ -760,6 +808,90 @@ func (c *Client) UpsertSearchSynonym(ctx context.Context, collectionName string, return c.Client.Do(req) } +func (c *Client) RetrieveAllConversationModels(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewRetrieveAllConversationModelsRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateConversationModelWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := 
NewCreateConversationModelRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateConversationModel(ctx context.Context, body CreateConversationModelJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateConversationModelRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteConversationModel(ctx context.Context, modelId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteConversationModelRequest(c.Server, modelId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) RetrieveConversationModel(ctx context.Context, modelId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewRetrieveConversationModelRequest(c.Server, modelId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateConversationModelWithBody(ctx context.Context, modelId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateConversationModelRequestWithBody(c.Server, modelId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateConversationModel(ctx context.Context, modelId string, body 
UpdateConversationModelJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateConversationModelRequest(c.Server, modelId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + func (c *Client) Debug(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { req, err := NewDebugRequest(c.Server) if err != nil { @@ -1178,6 +1310,46 @@ func NewUpsertAliasRequestWithBody(server string, aliasName string, contentType return req, nil } +// NewCreateAnalyticsEventRequest calls the generic CreateAnalyticsEvent builder with application/json body +func NewCreateAnalyticsEventRequest(server string, body CreateAnalyticsEventJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateAnalyticsEventRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateAnalyticsEventRequestWithBody generates requests for CreateAnalyticsEvent with any type of body +func NewCreateAnalyticsEventRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/analytics/events") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + // NewRetrieveAnalyticsRulesRequest generates requests for RetrieveAnalyticsRules func NewRetrieveAnalyticsRulesRequest(server string) (*http.Request, error) { var err error @@ -1603,6 +1775,22 @@ func NewDeleteDocumentsRequest(server string, collectionName string, params *Del } + if params.IgnoreNotFound != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "ignore_not_found", runtime.ParamLocationQuery, *params.IgnoreNotFound); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + queryURL.RawQuery = queryValues.Encode() } @@ -1739,6 +1927,22 @@ func NewIndexDocumentRequestWithBody(server string, collectionName string, param } + if params.DirtyValues != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "dirty_values", runtime.ParamLocationQuery, *params.DirtyValues); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + queryURL.RawQuery = queryValues.Encode() } @@ -1885,9 +2089,9 @@ func NewImportDocumentsRequestWithBody(server string, collectionName string, par } - if params.BatchSize != nil { + if params.DirtyValues != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "batch_size", runtime.ParamLocationQuery, *params.BatchSize); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "dirty_values", runtime.ParamLocationQuery, 
*params.DirtyValues); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -1901,9 +2105,9 @@ func NewImportDocumentsRequestWithBody(server string, collectionName string, par } - if params.DirtyValues != nil { + if params.BatchSize != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "dirty_values", runtime.ParamLocationQuery, *params.DirtyValues); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "batch_size", runtime.ParamLocationQuery, *params.BatchSize); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -1933,6 +2137,38 @@ func NewImportDocumentsRequestWithBody(server string, collectionName string, par } + if params.ReturnDoc != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "return_doc", runtime.ParamLocationQuery, *params.ReturnDoc); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ReturnId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "return_id", runtime.ParamLocationQuery, *params.ReturnId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + queryURL.RawQuery = queryValues.Encode() } @@ -1991,9 +2227,9 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } - if params.DropTokensThreshold != nil { + if params.Conversation != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "drop_tokens_threshold", runtime.ParamLocationQuery, *params.DropTokensThreshold); err != nil { + if queryFrag, err := 
runtime.StyleParamWithLocation("form", true, "conversation", runtime.ParamLocationQuery, *params.Conversation); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2007,9 +2243,9 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } - if params.EnableHighlightV1 != nil { + if params.ConversationId != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "enable_highlight_v1", runtime.ParamLocationQuery, *params.EnableHighlightV1); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "conversation_id", runtime.ParamLocationQuery, *params.ConversationId); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2023,9 +2259,9 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } - if params.EnableOverrides != nil { + if params.ConversationModelId != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "enable_overrides", runtime.ParamLocationQuery, *params.EnableOverrides); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "conversation_model_id", runtime.ParamLocationQuery, *params.ConversationModelId); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2039,9 +2275,9 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } - if params.EnableTyposForNumericalTokens != nil { + if params.DropTokensMode != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "enable_typos_for_numerical_tokens", runtime.ParamLocationQuery, *params.EnableTyposForNumericalTokens); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "drop_tokens_mode", runtime.ParamLocationQuery, *params.DropTokensMode); err != nil { return nil, err } else if parsed, err := 
url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2055,9 +2291,9 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } - if params.ExcludeFields != nil { + if params.DropTokensThreshold != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "exclude_fields", runtime.ParamLocationQuery, *params.ExcludeFields); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "drop_tokens_threshold", runtime.ParamLocationQuery, *params.DropTokensThreshold); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2071,9 +2307,9 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } - if params.ExhaustiveSearch != nil { + if params.EnableHighlightV1 != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "exhaustive_search", runtime.ParamLocationQuery, *params.ExhaustiveSearch); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "enable_highlight_v1", runtime.ParamLocationQuery, *params.EnableHighlightV1); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2087,9 +2323,9 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } - if params.FacetBy != nil { + if params.EnableOverrides != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "facet_by", runtime.ParamLocationQuery, *params.FacetBy); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "enable_overrides", runtime.ParamLocationQuery, *params.EnableOverrides); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2103,9 +2339,9 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } - if params.FacetQuery != nil { + if params.EnableSynonyms != nil { - if queryFrag, err 
:= runtime.StyleParamWithLocation("form", true, "facet_query", runtime.ParamLocationQuery, *params.FacetQuery); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "enable_synonyms", runtime.ParamLocationQuery, *params.EnableSynonyms); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2119,9 +2355,9 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } - if params.FacetReturnParent != nil { + if params.EnableTyposForAlphaNumericalTokens != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "facet_return_parent", runtime.ParamLocationQuery, *params.FacetReturnParent); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "enable_typos_for_alpha_numerical_tokens", runtime.ParamLocationQuery, *params.EnableTyposForAlphaNumericalTokens); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2135,9 +2371,9 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } - if params.FacetStrategy != nil { + if params.EnableTyposForNumericalTokens != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "facet_strategy", runtime.ParamLocationQuery, *params.FacetStrategy); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "enable_typos_for_numerical_tokens", runtime.ParamLocationQuery, *params.EnableTyposForNumericalTokens); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2151,9 +2387,9 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } - if params.FilterBy != nil { + if params.ExcludeFields != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter_by", runtime.ParamLocationQuery, *params.FilterBy); err != nil { + if queryFrag, err := 
runtime.StyleParamWithLocation("form", true, "exclude_fields", runtime.ParamLocationQuery, *params.ExcludeFields); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2167,9 +2403,9 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } - if params.GroupBy != nil { + if params.ExhaustiveSearch != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "group_by", runtime.ParamLocationQuery, *params.GroupBy); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "exhaustive_search", runtime.ParamLocationQuery, *params.ExhaustiveSearch); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2183,9 +2419,9 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } - if params.GroupLimit != nil { + if params.FacetBy != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "group_limit", runtime.ParamLocationQuery, *params.GroupLimit); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "facet_by", runtime.ParamLocationQuery, *params.FacetBy); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2199,9 +2435,9 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } - if params.HiddenHits != nil { + if params.FacetQuery != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "hidden_hits", runtime.ParamLocationQuery, *params.HiddenHits); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "facet_query", runtime.ParamLocationQuery, *params.FacetQuery); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2215,9 +2451,9 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } - 
if params.HighlightAffixNumTokens != nil { + if params.FacetReturnParent != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "highlight_affix_num_tokens", runtime.ParamLocationQuery, *params.HighlightAffixNumTokens); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "facet_return_parent", runtime.ParamLocationQuery, *params.FacetReturnParent); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2231,9 +2467,9 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } - if params.HighlightEndTag != nil { + if params.FacetStrategy != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "highlight_end_tag", runtime.ParamLocationQuery, *params.HighlightEndTag); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "facet_strategy", runtime.ParamLocationQuery, *params.FacetStrategy); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2247,9 +2483,9 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } - if params.HighlightFields != nil { + if params.FilterBy != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "highlight_fields", runtime.ParamLocationQuery, *params.HighlightFields); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter_by", runtime.ParamLocationQuery, *params.FilterBy); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2263,7 +2499,135 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } - if params.HighlightFullFields != nil { + if params.FilterCuratedHits != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter_curated_hits", runtime.ParamLocationQuery, *params.FilterCuratedHits); err 
!= nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.GroupBy != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "group_by", runtime.ParamLocationQuery, *params.GroupBy); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.GroupLimit != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "group_limit", runtime.ParamLocationQuery, *params.GroupLimit); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.GroupMissingValues != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "group_missing_values", runtime.ParamLocationQuery, *params.GroupMissingValues); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.HiddenHits != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "hidden_hits", runtime.ParamLocationQuery, *params.HiddenHits); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.HighlightAffixNumTokens != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "highlight_affix_num_tokens", runtime.ParamLocationQuery, 
*params.HighlightAffixNumTokens); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.HighlightEndTag != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "highlight_end_tag", runtime.ParamLocationQuery, *params.HighlightEndTag); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.HighlightFields != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "highlight_fields", runtime.ParamLocationQuery, *params.HighlightFields); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.HighlightFullFields != nil { if queryFrag, err := runtime.StyleParamWithLocation("form", true, "highlight_full_fields", runtime.ParamLocationQuery, *params.HighlightFullFields); err != nil { return nil, err @@ -2791,6 +3155,38 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } + if params.SynonymNumTypos != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "synonym_num_typos", runtime.ParamLocationQuery, *params.SynonymNumTypos); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SynonymPrefix != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "synonym_prefix", runtime.ParamLocationQuery, *params.SynonymPrefix); err != nil { + 
return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + if params.TextMatchType != nil { if queryFrag, err := runtime.StyleParamWithLocation("form", true, "text_match_type", runtime.ParamLocationQuery, *params.TextMatchType); err != nil { @@ -2855,6 +3251,22 @@ func NewSearchCollectionRequest(server string, collectionName string, params *Se } + if params.VoiceQuery != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "voice_query", runtime.ParamLocationQuery, *params.VoiceQuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + queryURL.RawQuery = queryValues.Encode() } @@ -2949,18 +3361,18 @@ func NewGetDocumentRequest(server string, collectionName string, documentId stri } // NewUpdateDocumentRequest calls the generic UpdateDocument builder with application/json body -func NewUpdateDocumentRequest(server string, collectionName string, documentId string, body UpdateDocumentJSONRequestBody) (*http.Request, error) { +func NewUpdateDocumentRequest(server string, collectionName string, documentId string, params *UpdateDocumentParams, body UpdateDocumentJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader buf, err := json.Marshal(body) if err != nil { return nil, err } bodyReader = bytes.NewReader(buf) - return NewUpdateDocumentRequestWithBody(server, collectionName, documentId, "application/json", bodyReader) + return NewUpdateDocumentRequestWithBody(server, collectionName, documentId, params, "application/json", bodyReader) } // NewUpdateDocumentRequestWithBody generates requests for UpdateDocument with any type of body -func NewUpdateDocumentRequestWithBody(server string, collectionName string, 
documentId string, contentType string, body io.Reader) (*http.Request, error) { +func NewUpdateDocumentRequestWithBody(server string, collectionName string, documentId string, params *UpdateDocumentParams, contentType string, body io.Reader) (*http.Request, error) { var err error var pathParam0 string @@ -2992,6 +3404,28 @@ func NewUpdateDocumentRequestWithBody(server string, collectionName string, docu return nil, err } + if params != nil { + queryValues := queryURL.Query() + + if params.DirtyValues != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "dirty_values", runtime.ParamLocationQuery, *params.DirtyValues); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + req, err := http.NewRequest("PATCH", queryURL.String(), body) if err != nil { return nil, err @@ -3342,6 +3776,188 @@ func NewUpsertSearchSynonymRequestWithBody(server string, collectionName string, return req, nil } +// NewRetrieveAllConversationModelsRequest generates requests for RetrieveAllConversationModels +func NewRetrieveAllConversationModelsRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/conversations/models") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreateConversationModelRequest calls the generic CreateConversationModel builder with application/json body +func NewCreateConversationModelRequest(server string, body CreateConversationModelJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateConversationModelRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateConversationModelRequestWithBody generates requests for CreateConversationModel with any type of body +func NewCreateConversationModelRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/conversations/models") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteConversationModelRequest generates requests for DeleteConversationModel +func NewDeleteConversationModelRequest(server string, modelId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "modelId", runtime.ParamLocationPath, modelId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/conversations/models/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewRetrieveConversationModelRequest generates requests for RetrieveConversationModel +func NewRetrieveConversationModelRequest(server string, modelId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "modelId", runtime.ParamLocationPath, modelId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/conversations/models/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewUpdateConversationModelRequest calls the generic UpdateConversationModel builder with application/json body +func NewUpdateConversationModelRequest(server string, modelId string, body UpdateConversationModelJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateConversationModelRequestWithBody(server, modelId, "application/json", bodyReader) +} + +// NewUpdateConversationModelRequestWithBody generates requests for UpdateConversationModel with any type of body +func NewUpdateConversationModelRequestWithBody(server string, modelId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "modelId", runtime.ParamLocationPath, modelId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/conversations/models/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + // NewDebugRequest generates requests for Debug func NewDebugRequest(server string) (*http.Request, error) { var err error @@ -3588,12 +4204,76 @@ func NewMultiSearchRequestWithBody(server string, params *MultiSearchParams, con return nil, err } - if params != nil { - queryValues := queryURL.Query() + if params != nil { + queryValues := queryURL.Query() + + if params.CacheTtl != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "cache_ttl", runtime.ParamLocationQuery, *params.CacheTtl); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Conversation != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "conversation", runtime.ParamLocationQuery, *params.Conversation); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ConversationId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "conversation_id", runtime.ParamLocationQuery, *params.ConversationId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ConversationModelId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "conversation_model_id", runtime.ParamLocationQuery, 
*params.ConversationModelId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } - if params.CacheTtl != nil { + if params.DropTokensMode != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "cache_ttl", runtime.ParamLocationQuery, *params.CacheTtl); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "drop_tokens_mode", runtime.ParamLocationQuery, *params.DropTokensMode); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -3655,6 +4335,38 @@ func NewMultiSearchRequestWithBody(server string, params *MultiSearchParams, con } + if params.EnableSynonyms != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "enable_synonyms", runtime.ParamLocationQuery, *params.EnableSynonyms); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.EnableTyposForAlphaNumericalTokens != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "enable_typos_for_alpha_numerical_tokens", runtime.ParamLocationQuery, *params.EnableTyposForAlphaNumericalTokens); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + if params.EnableTyposForNumericalTokens != nil { if queryFrag, err := runtime.StyleParamWithLocation("form", true, "enable_typos_for_numerical_tokens", runtime.ParamLocationQuery, *params.EnableTyposForNumericalTokens); err != nil { @@ -3783,6 +4495,22 @@ func NewMultiSearchRequestWithBody(server string, 
params *MultiSearchParams, con } + if params.FilterCuratedHits != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter_curated_hits", runtime.ParamLocationQuery, *params.FilterCuratedHits); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + if params.GroupBy != nil { if queryFrag, err := runtime.StyleParamWithLocation("form", true, "group_by", runtime.ParamLocationQuery, *params.GroupBy); err != nil { @@ -3815,6 +4543,22 @@ func NewMultiSearchRequestWithBody(server string, params *MultiSearchParams, con } + if params.GroupMissingValues != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "group_missing_values", runtime.ParamLocationQuery, *params.GroupMissingValues); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + if params.HiddenHits != nil { if queryFrag, err := runtime.StyleParamWithLocation("form", true, "hidden_hits", runtime.ParamLocationQuery, *params.HiddenHits); err != nil { @@ -4407,6 +5151,38 @@ func NewMultiSearchRequestWithBody(server string, params *MultiSearchParams, con } + if params.SynonymNumTypos != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "synonym_num_typos", runtime.ParamLocationQuery, *params.SynonymNumTypos); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SynonymPrefix != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "synonym_prefix", runtime.ParamLocationQuery, *params.SynonymPrefix); err 
!= nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + if params.TextMatchType != nil { if queryFrag, err := runtime.StyleParamWithLocation("form", true, "text_match_type", runtime.ParamLocationQuery, *params.TextMatchType); err != nil { @@ -4471,6 +5247,22 @@ func NewMultiSearchRequestWithBody(server string, params *MultiSearchParams, con } + if params.VoiceQuery != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "voice_query", runtime.ParamLocationQuery, *params.VoiceQuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + queryURL.RawQuery = queryValues.Encode() } @@ -4924,6 +5716,11 @@ type ClientWithResponsesInterface interface { UpsertAliasWithResponse(ctx context.Context, aliasName string, body UpsertAliasJSONRequestBody, reqEditors ...RequestEditorFn) (*UpsertAliasResponse, error) + // CreateAnalyticsEventWithBodyWithResponse request with any body + CreateAnalyticsEventWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateAnalyticsEventResponse, error) + + CreateAnalyticsEventWithResponse(ctx context.Context, body CreateAnalyticsEventJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateAnalyticsEventResponse, error) + // RetrieveAnalyticsRulesWithResponse request RetrieveAnalyticsRulesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*RetrieveAnalyticsRulesResponse, error) @@ -4991,9 +5788,9 @@ type ClientWithResponsesInterface interface { GetDocumentWithResponse(ctx context.Context, collectionName string, documentId string, reqEditors ...RequestEditorFn) (*GetDocumentResponse, error) // 
UpdateDocumentWithBodyWithResponse request with any body - UpdateDocumentWithBodyWithResponse(ctx context.Context, collectionName string, documentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateDocumentResponse, error) + UpdateDocumentWithBodyWithResponse(ctx context.Context, collectionName string, documentId string, params *UpdateDocumentParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateDocumentResponse, error) - UpdateDocumentWithResponse(ctx context.Context, collectionName string, documentId string, body UpdateDocumentJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateDocumentResponse, error) + UpdateDocumentWithResponse(ctx context.Context, collectionName string, documentId string, params *UpdateDocumentParams, body UpdateDocumentJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateDocumentResponse, error) // GetSearchOverridesWithResponse request GetSearchOverridesWithResponse(ctx context.Context, collectionName string, reqEditors ...RequestEditorFn) (*GetSearchOverridesResponse, error) @@ -5023,6 +5820,25 @@ type ClientWithResponsesInterface interface { UpsertSearchSynonymWithResponse(ctx context.Context, collectionName string, synonymId string, body UpsertSearchSynonymJSONRequestBody, reqEditors ...RequestEditorFn) (*UpsertSearchSynonymResponse, error) + // RetrieveAllConversationModelsWithResponse request + RetrieveAllConversationModelsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*RetrieveAllConversationModelsResponse, error) + + // CreateConversationModelWithBodyWithResponse request with any body + CreateConversationModelWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateConversationModelResponse, error) + + CreateConversationModelWithResponse(ctx context.Context, body CreateConversationModelJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateConversationModelResponse, error) + + // 
DeleteConversationModelWithResponse request + DeleteConversationModelWithResponse(ctx context.Context, modelId string, reqEditors ...RequestEditorFn) (*DeleteConversationModelResponse, error) + + // RetrieveConversationModelWithResponse request + RetrieveConversationModelWithResponse(ctx context.Context, modelId string, reqEditors ...RequestEditorFn) (*RetrieveConversationModelResponse, error) + + // UpdateConversationModelWithBodyWithResponse request with any body + UpdateConversationModelWithBodyWithResponse(ctx context.Context, modelId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateConversationModelResponse, error) + + UpdateConversationModelWithResponse(ctx context.Context, modelId string, body UpdateConversationModelJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateConversationModelResponse, error) + // DebugWithResponse request DebugWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*DebugResponse, error) @@ -5181,6 +5997,29 @@ func (r UpsertAliasResponse) StatusCode() int { return 0 } +type CreateAnalyticsEventResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *AnalyticsEventCreateResponse + JSON400 *ApiResponse +} + +// Status returns HTTPResponse.Status +func (r CreateAnalyticsEventResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateAnalyticsEventResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + type RetrieveAnalyticsRulesResponse struct { Body []byte HTTPResponse *http.Response @@ -5229,7 +6068,7 @@ func (r CreateAnalyticsRuleResponse) StatusCode() int { type DeleteAnalyticsRuleResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *AnalyticsRuleSchema + JSON200 *AnalyticsRuleDeleteResponse JSON404 *ApiResponse } @@ -5275,7 +6114,7 @@ func (r 
RetrieveAnalyticsRuleResponse) StatusCode() int { type UpsertAnalyticsRuleResponse struct { Body []byte HTTPResponse *http.Response - JSON201 *AnalyticsRuleSchema + JSON200 *AnalyticsRuleSchema JSON400 *ApiResponse } @@ -5601,15 +6440,128 @@ func (r GetDocumentResponse) StatusCode() int { return 0 } -type UpdateDocumentResponse struct { +type UpdateDocumentResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON404 *ApiResponse +} + +// Status returns HTTPResponse.Status +func (r UpdateDocumentResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateDocumentResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetSearchOverridesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SearchOverridesResponse +} + +// Status returns HTTPResponse.Status +func (r GetSearchOverridesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetSearchOverridesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteSearchOverrideResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SearchOverride + JSON404 *ApiResponse +} + +// Status returns HTTPResponse.Status +func (r DeleteSearchOverrideResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteSearchOverrideResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetSearchOverrideResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SearchOverride +} + 
+// Status returns HTTPResponse.Status +func (r GetSearchOverrideResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetSearchOverrideResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UpsertSearchOverrideResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SearchOverride + JSON404 *ApiResponse +} + +// Status returns HTTPResponse.Status +func (r UpsertSearchOverrideResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpsertSearchOverrideResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetSearchSynonymsResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *map[string]interface{} + JSON200 *SearchSynonymsResponse JSON404 *ApiResponse } // Status returns HTTPResponse.Status -func (r UpdateDocumentResponse) Status() string { +func (r GetSearchSynonymsResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -5617,21 +6569,22 @@ func (r UpdateDocumentResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r UpdateDocumentResponse) StatusCode() int { +func (r GetSearchSynonymsResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type GetSearchOverridesResponse struct { +type DeleteSearchSynonymResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *SearchOverridesResponse + JSON200 *SearchSynonym + JSON404 *ApiResponse } // Status returns HTTPResponse.Status -func (r GetSearchOverridesResponse) Status() string { +func (r DeleteSearchSynonymResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ 
-5639,22 +6592,22 @@ func (r GetSearchOverridesResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r GetSearchOverridesResponse) StatusCode() int { +func (r DeleteSearchSynonymResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type DeleteSearchOverrideResponse struct { +type GetSearchSynonymResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *SearchOverride + JSON200 *SearchSynonym JSON404 *ApiResponse } // Status returns HTTPResponse.Status -func (r DeleteSearchOverrideResponse) Status() string { +func (r GetSearchSynonymResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -5662,21 +6615,22 @@ func (r DeleteSearchOverrideResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r DeleteSearchOverrideResponse) StatusCode() int { +func (r GetSearchSynonymResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type GetSearchOverrideResponse struct { +type UpsertSearchSynonymResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *SearchOverride + JSON200 *SearchSynonym + JSON404 *ApiResponse } // Status returns HTTPResponse.Status -func (r GetSearchOverrideResponse) Status() string { +func (r UpsertSearchSynonymResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -5684,22 +6638,21 @@ func (r GetSearchOverrideResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r GetSearchOverrideResponse) StatusCode() int { +func (r UpsertSearchSynonymResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type UpsertSearchOverrideResponse struct { +type RetrieveAllConversationModelsResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *SearchOverride - JSON404 *ApiResponse + JSON200 *[]*ConversationModelSchema } // Status returns 
HTTPResponse.Status -func (r UpsertSearchOverrideResponse) Status() string { +func (r RetrieveAllConversationModelsResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -5707,22 +6660,22 @@ func (r UpsertSearchOverrideResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r UpsertSearchOverrideResponse) StatusCode() int { +func (r RetrieveAllConversationModelsResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type GetSearchSynonymsResponse struct { +type CreateConversationModelResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *SearchSynonymsResponse - JSON404 *ApiResponse + JSON200 *ConversationModelSchema + JSON400 *ApiResponse } // Status returns HTTPResponse.Status -func (r GetSearchSynonymsResponse) Status() string { +func (r CreateConversationModelResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -5730,22 +6683,21 @@ func (r GetSearchSynonymsResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r GetSearchSynonymsResponse) StatusCode() int { +func (r CreateConversationModelResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type DeleteSearchSynonymResponse struct { +type DeleteConversationModelResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *SearchSynonym - JSON404 *ApiResponse + JSON200 *ConversationModelSchema } // Status returns HTTPResponse.Status -func (r DeleteSearchSynonymResponse) Status() string { +func (r DeleteConversationModelResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -5753,22 +6705,21 @@ func (r DeleteSearchSynonymResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r DeleteSearchSynonymResponse) StatusCode() int { +func (r DeleteConversationModelResponse) StatusCode() int { if r.HTTPResponse != nil { 
return r.HTTPResponse.StatusCode } return 0 } -type GetSearchSynonymResponse struct { +type RetrieveConversationModelResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *SearchSynonym - JSON404 *ApiResponse + JSON200 *ConversationModelSchema } // Status returns HTTPResponse.Status -func (r GetSearchSynonymResponse) Status() string { +func (r RetrieveConversationModelResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -5776,22 +6727,21 @@ func (r GetSearchSynonymResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r GetSearchSynonymResponse) StatusCode() int { +func (r RetrieveConversationModelResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type UpsertSearchSynonymResponse struct { +type UpdateConversationModelResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *SearchSynonym - JSON404 *ApiResponse + JSON200 *ConversationModelSchema } // Status returns HTTPResponse.Status -func (r UpsertSearchSynonymResponse) Status() string { +func (r UpdateConversationModelResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -5799,7 +6749,7 @@ func (r UpsertSearchSynonymResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r UpsertSearchSynonymResponse) StatusCode() int { +func (r UpdateConversationModelResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } @@ -6284,6 +7234,23 @@ func (c *ClientWithResponses) UpsertAliasWithResponse(ctx context.Context, alias return ParseUpsertAliasResponse(rsp) } +// CreateAnalyticsEventWithBodyWithResponse request with arbitrary body returning *CreateAnalyticsEventResponse +func (c *ClientWithResponses) CreateAnalyticsEventWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateAnalyticsEventResponse, error) { + rsp, err := 
c.CreateAnalyticsEventWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseCreateAnalyticsEventResponse(rsp) +} + +func (c *ClientWithResponses) CreateAnalyticsEventWithResponse(ctx context.Context, body CreateAnalyticsEventJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateAnalyticsEventResponse, error) { + rsp, err := c.CreateAnalyticsEvent(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseCreateAnalyticsEventResponse(rsp) +} + // RetrieveAnalyticsRulesWithResponse request returning *RetrieveAnalyticsRulesResponse func (c *ClientWithResponses) RetrieveAnalyticsRulesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*RetrieveAnalyticsRulesResponse, error) { rsp, err := c.RetrieveAnalyticsRules(ctx, reqEditors...) @@ -6495,16 +7462,16 @@ func (c *ClientWithResponses) GetDocumentWithResponse(ctx context.Context, colle } // UpdateDocumentWithBodyWithResponse request with arbitrary body returning *UpdateDocumentResponse -func (c *ClientWithResponses) UpdateDocumentWithBodyWithResponse(ctx context.Context, collectionName string, documentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateDocumentResponse, error) { - rsp, err := c.UpdateDocumentWithBody(ctx, collectionName, documentId, contentType, body, reqEditors...) +func (c *ClientWithResponses) UpdateDocumentWithBodyWithResponse(ctx context.Context, collectionName string, documentId string, params *UpdateDocumentParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateDocumentResponse, error) { + rsp, err := c.UpdateDocumentWithBody(ctx, collectionName, documentId, params, contentType, body, reqEditors...) 
if err != nil { return nil, err } return ParseUpdateDocumentResponse(rsp) } -func (c *ClientWithResponses) UpdateDocumentWithResponse(ctx context.Context, collectionName string, documentId string, body UpdateDocumentJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateDocumentResponse, error) { - rsp, err := c.UpdateDocument(ctx, collectionName, documentId, body, reqEditors...) +func (c *ClientWithResponses) UpdateDocumentWithResponse(ctx context.Context, collectionName string, documentId string, params *UpdateDocumentParams, body UpdateDocumentJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateDocumentResponse, error) { + rsp, err := c.UpdateDocument(ctx, collectionName, documentId, params, body, reqEditors...) if err != nil { return nil, err } @@ -6599,6 +7566,67 @@ func (c *ClientWithResponses) UpsertSearchSynonymWithResponse(ctx context.Contex return ParseUpsertSearchSynonymResponse(rsp) } +// RetrieveAllConversationModelsWithResponse request returning *RetrieveAllConversationModelsResponse +func (c *ClientWithResponses) RetrieveAllConversationModelsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*RetrieveAllConversationModelsResponse, error) { + rsp, err := c.RetrieveAllConversationModels(ctx, reqEditors...) + if err != nil { + return nil, err + } + return ParseRetrieveAllConversationModelsResponse(rsp) +} + +// CreateConversationModelWithBodyWithResponse request with arbitrary body returning *CreateConversationModelResponse +func (c *ClientWithResponses) CreateConversationModelWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateConversationModelResponse, error) { + rsp, err := c.CreateConversationModelWithBody(ctx, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseCreateConversationModelResponse(rsp) +} + +func (c *ClientWithResponses) CreateConversationModelWithResponse(ctx context.Context, body CreateConversationModelJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateConversationModelResponse, error) { + rsp, err := c.CreateConversationModel(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseCreateConversationModelResponse(rsp) +} + +// DeleteConversationModelWithResponse request returning *DeleteConversationModelResponse +func (c *ClientWithResponses) DeleteConversationModelWithResponse(ctx context.Context, modelId string, reqEditors ...RequestEditorFn) (*DeleteConversationModelResponse, error) { + rsp, err := c.DeleteConversationModel(ctx, modelId, reqEditors...) + if err != nil { + return nil, err + } + return ParseDeleteConversationModelResponse(rsp) +} + +// RetrieveConversationModelWithResponse request returning *RetrieveConversationModelResponse +func (c *ClientWithResponses) RetrieveConversationModelWithResponse(ctx context.Context, modelId string, reqEditors ...RequestEditorFn) (*RetrieveConversationModelResponse, error) { + rsp, err := c.RetrieveConversationModel(ctx, modelId, reqEditors...) + if err != nil { + return nil, err + } + return ParseRetrieveConversationModelResponse(rsp) +} + +// UpdateConversationModelWithBodyWithResponse request with arbitrary body returning *UpdateConversationModelResponse +func (c *ClientWithResponses) UpdateConversationModelWithBodyWithResponse(ctx context.Context, modelId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateConversationModelResponse, error) { + rsp, err := c.UpdateConversationModelWithBody(ctx, modelId, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseUpdateConversationModelResponse(rsp) +} + +func (c *ClientWithResponses) UpdateConversationModelWithResponse(ctx context.Context, modelId string, body UpdateConversationModelJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateConversationModelResponse, error) { + rsp, err := c.UpdateConversationModel(ctx, modelId, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseUpdateConversationModelResponse(rsp) +} + // DebugWithResponse request returning *DebugResponse func (c *ClientWithResponses) DebugWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*DebugResponse, error) { rsp, err := c.Debug(ctx, reqEditors...) @@ -6934,6 +7962,39 @@ func ParseUpsertAliasResponse(rsp *http.Response) (*UpsertAliasResponse, error) return response, nil } +// ParseCreateAnalyticsEventResponse parses an HTTP response from a CreateAnalyticsEventWithResponse call +func ParseCreateAnalyticsEventResponse(rsp *http.Response) (*CreateAnalyticsEventResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &CreateAnalyticsEventResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest AnalyticsEventCreateResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest ApiResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + } + + return response, nil +} + // ParseRetrieveAnalyticsRulesResponse parses an HTTP response from a RetrieveAnalyticsRulesWithResponse call func ParseRetrieveAnalyticsRulesResponse(rsp *http.Response) (*RetrieveAnalyticsRulesResponse, error) { bodyBytes, err 
:= io.ReadAll(rsp.Body) @@ -7008,7 +8069,7 @@ func ParseDeleteAnalyticsRuleResponse(rsp *http.Response) (*DeleteAnalyticsRuleR switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest AnalyticsRuleSchema + var dest AnalyticsRuleDeleteResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -7073,12 +8134,12 @@ func ParseUpsertAnalyticsRuleResponse(rsp *http.Response) (*UpsertAnalyticsRuleR } switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest AnalyticsRuleSchema if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON201 = &dest + response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: var dest ApiResponse @@ -7823,6 +8884,143 @@ func ParseUpsertSearchSynonymResponse(rsp *http.Response) (*UpsertSearchSynonymR return response, nil } +// ParseRetrieveAllConversationModelsResponse parses an HTTP response from a RetrieveAllConversationModelsWithResponse call +func ParseRetrieveAllConversationModelsResponse(rsp *http.Response) (*RetrieveAllConversationModelsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &RetrieveAllConversationModelsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []*ConversationModelSchema + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParseCreateConversationModelResponse parses an HTTP response from a CreateConversationModelWithResponse call +func ParseCreateConversationModelResponse(rsp *http.Response) 
(*CreateConversationModelResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &CreateConversationModelResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ConversationModelSchema + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest ApiResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + } + + return response, nil +} + +// ParseDeleteConversationModelResponse parses an HTTP response from a DeleteConversationModelWithResponse call +func ParseDeleteConversationModelResponse(rsp *http.Response) (*DeleteConversationModelResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &DeleteConversationModelResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ConversationModelSchema + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParseRetrieveConversationModelResponse parses an HTTP response from a RetrieveConversationModelWithResponse call +func ParseRetrieveConversationModelResponse(rsp *http.Response) (*RetrieveConversationModelResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &RetrieveConversationModelResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case 
strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ConversationModelSchema + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParseUpdateConversationModelResponse parses an HTTP response from a UpdateConversationModelWithResponse call +func ParseUpdateConversationModelResponse(rsp *http.Response) (*UpdateConversationModelResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &UpdateConversationModelResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ConversationModelSchema + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + // ParseDebugResponse parses an HTTP response from a DebugWithResponse call func ParseDebugResponse(rsp *http.Response) (*DebugResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) diff --git a/typesense/api/generator/generator.yml b/typesense/api/generator/generator.yml index 8b38c18..804f2cf 100644 --- a/typesense/api/generator/generator.yml +++ b/typesense/api/generator/generator.yml @@ -42,37 +42,101 @@ components: format: double type: number type: object + AnalyticsEventCreateResponse: + properties: + ok: + type: boolean + required: + - ok + type: object + AnalyticsEventCreateSchema: + properties: + data: + type: object + name: + type: string + type: + type: string + required: + - type + - name + - data + type: object + AnalyticsRuleDeleteResponse: + properties: + name: + type: string + required: + - name + type: object AnalyticsRuleParameters: properties: destination: - properties: - collection: - type: string - type: object + $ref: '#/components/schemas/AnalyticsRuleParametersDestination' + 
expand_query: + type: boolean limit: type: integer source: - properties: - collections: - items: - type: string - type: array - type: object + $ref: '#/components/schemas/AnalyticsRuleParametersSource' required: - source - destination - - limit type: object - AnalyticsRuleSchema: + AnalyticsRuleParametersDestination: properties: - name: + collection: + type: string + counter_field: type: string + required: + - collection + type: object + AnalyticsRuleParametersSource: + properties: + collections: + items: + type: string + type: array + events: + items: + properties: + name: + type: string + type: + type: string + weight: + format: float + type: number + required: + - type + - weight + - name + type: object + type: array + required: + - collections + type: object + AnalyticsRuleSchema: + allOf: + - $ref: '#/components/schemas/AnalyticsRuleUpsertSchema' + - properties: + name: + type: string + required: + - name + type: object + AnalyticsRuleUpsertSchema: + properties: params: $ref: '#/components/schemas/AnalyticsRuleParameters' type: + enum: + - popular_queries + - nohits_queries + - counter type: string required: - - name - type - params type: object @@ -82,6 +146,7 @@ components: items: $ref: '#/components/schemas/AnalyticsRuleSchema' type: array + x-go-type: '[]*AnalyticsRuleSchema' type: object ApiKey: allOf: @@ -232,6 +297,8 @@ components: minLength: 1 type: string type: array + voice_query_model: + $ref: '#/components/schemas/VoiceQueryModelCollectionConfig' required: - name - fields @@ -256,6 +323,90 @@ components: required: - fields type: object + ConversationModelCreateSchema: + allOf: + - $ref: '#/components/schemas/ConversationModelUpdateSchema' + - properties: + history_collection: + description: Typesense collection that stores the historical conversations + type: string + max_bytes: + description: | + The maximum number of bytes to send to the LLM in every API call. Consult the LLM's documentation on the number of bytes supported in the context window. 
+ type: integer + model_name: + description: Name of the LLM model offered by OpenAI, Cloudflare or vLLM + type: string + required: + - model_name + - max_bytes + - history_collection + type: object + required: + - model_name + - max_bytes + ConversationModelSchema: + allOf: + - $ref: '#/components/schemas/ConversationModelCreateSchema' + - properties: + id: + description: An explicit id for the model, otherwise the API will return a response with an auto-generated conversation model id. + type: string + required: + - id + type: object + ConversationModelUpdateSchema: + properties: + account_id: + description: LLM service's account ID (only applicable for Cloudflare) + type: string + api_key: + description: The LLM service's API Key + type: string + history_collection: + description: Typesense collection that stores the historical conversations + type: string + id: + description: An explicit id for the model, otherwise the API will return a response with an auto-generated conversation model id. + type: string + max_bytes: + description: | + The maximum number of bytes to send to the LLM in every API call. Consult the LLM's documentation on the number of bytes supported in the context window. + type: integer + model_name: + description: Name of the LLM model offered by OpenAI, Cloudflare or vLLM + type: string + system_prompt: + description: The system prompt that contains special instructions to the LLM + type: string + ttl: + description: | + Time interval in seconds after which the messages would be deleted. 
Default: 86400 (24 hours) + type: integer + vllm_url: + description: URL of vLLM service + type: string + type: object + DirtyValues: + enum: + - coerce_or_reject + - coerce_or_drop + - drop + - reject + type: string + DocumentIndexParameters: + properties: + dirty_values: + $ref: '#/components/schemas/DirtyValues' + type: object + DropTokensMode: + description: | + Dictates the direction in which the words in the query must be dropped when the original words in the query do not appear in any document. Values: right_to_left (default), left_to_right, both_sides:3 A note on both_sides:3 - for queries upto 3 tokens (words) in length, this mode will drop tokens from both sides and exhaustively rank all matching results. If query length is greater than 3 words, Typesense will just fallback to default behavior of right_to_left + enum: + - right_to_left + - left_to_right + - both_sides:3 + type: string ErrorResponse: properties: message: @@ -321,6 +472,8 @@ components: type: string project_id: type: string + url: + type: string required: - model_name type: object @@ -351,15 +504,32 @@ components: optional: example: true type: boolean + range_index: + description: | + Enables an index optimized for range filtering on numerical fields (e.g. rating:>3.5). Default: false. + type: boolean reference: - example: string + description: | + Name of a field in another collection that should be linked to this collection so that it can be joined during query. type: string sort: example: true type: boolean + stem: + description: | + Values are stemmed before indexing in-memory. Default: false. + type: boolean + store: + description: | + When set to false, the field value will not be stored on disk. Default: true. + type: boolean type: example: string type: string + vec_dist: + description: | + The distance metric to be used for vector search. Default: `cosine`. You can also use `ip` for inner product. 
+ type: string required: - name - type @@ -371,6 +541,13 @@ components: required: - ok type: object + IndexAction: + enum: + - create + - update + - upsert + - emplace + type: string MultiSearchCollectionParameters: allOf: - $ref: '#/components/schemas/MultiSearchParameters' @@ -390,6 +567,20 @@ components: description: | The duration (in seconds) that determines how long the search query is cached. This value can be set on a per-query basis. Default: 60. type: integer + conversation: + description: | + Enable conversational search. + type: boolean + conversation_id: + description: | + The Id of a previous conversation to continue, this tells Typesense to include prior context when communicating with the LLM. + type: string + conversation_model_id: + description: | + The Id of Conversation Model to be used. + type: string + drop_tokens_mode: + $ref: '#/components/schemas/DropTokensMode' drop_tokens_threshold: description: | If the number of results found for a specific query is less than this number, Typesense will attempt to drop the tokens in the query until enough results are found. Tokens that have the least individual hits are dropped first. Set to 0 to disable. Default: 10 @@ -399,6 +590,14 @@ components: description: | If you have some overrides defined but want to disable all of them during query time, you can do that by setting this parameter to false type: boolean + enable_synonyms: + description: | + If you have some synonyms defined but want to disable all of them for a particular search query, set enable_synonyms to false. Default: true + type: boolean + enable_typos_for_alpha_numerical_tokens: + description: | + Set this parameter to false to disable typos on alphanumerical query tokens. Default: true. + type: boolean enable_typos_for_numerical_tokens: default: true description: | @@ -429,6 +628,10 @@ components: description: Filter conditions for refining youropen api validator search results. Separate multiple conditions with &&. 
example: 'num_employees:>100 && country: [USA, UK]' type: string + filter_curated_hits: + description: | + Whether the filter_by condition of the search query should be applicable to curated results (override definitions, pinned hits, hidden hits, etc.). Default: false + type: boolean group_by: description: You can aggregate search results into groups or buckets by specify one or more `group_by` fields. Separate multiple fields with a comma. To group on a particular field, it must be a faceted field. type: string @@ -436,6 +639,10 @@ components: description: | Maximum number of hits to be returned for every group. If the `group_limit` is set as `K` then only the top K hits in each group are returned in the response. Default: 3 type: integer + group_missing_values: + description: | + Setting this parameter to true will place all documents that have a null value in the group_by field, into a single group. Setting this parameter to false, will cause each document with a null value in the group_by field to not be grouped with other documents. Default: true + type: boolean hidden_hits: description: | A list of records to unconditionally hide from search results. A list of `record_id`s to hide. Eg: to hide records with IDs 123 and 456, you'd specify `123,456`. @@ -568,6 +775,14 @@ components: description: | Name of the stopwords set to apply for this search, the keywords present in the set will be removed from the search query. type: string + synonym_num_typos: + description: | + Allow synonym resolution on typo-corrected words in the query. Default: 0 + type: integer + synonym_prefix: + description: | + Allow synonym resolution on word prefixes in the query. Default: false + type: boolean text_match_type: description: In a multi-field matching context, this parameter determines how the representative text match score of a record is calculated. Possible values are max_score (default) or max_weight. 
type: string @@ -583,9 +798,15 @@ components: description: | Vector query expression for fetching documents "closest" to a given query/document vector. type: string + voice_query: + description: | + The base64 encoded audio file in 16 khz 16-bit WAV format. + type: string type: object MultiSearchResult: properties: + conversation: + $ref: '#/components/schemas/SearchResultConversation' results: items: $ref: '#/components/schemas/SearchResult' @@ -733,6 +954,10 @@ components: type: object SearchOverrideRule: properties: + filter_by: + description: | + Indicates that the override should apply when the filter_by parameter in a search query exactly matches the string specified here (including backticks, spaces, brackets, etc). + type: string match: description: | Indicates whether the match on the query term should be `exact` or `contains`. If we want to match all queries that contained the word `apple`, we will use the `contains` match instead. @@ -744,16 +969,21 @@ components: description: Indicates what search queries should be overridden type: string tags: - description: List of tags. + description: List of tag values to associate with this override rule. items: type: string type: array - required: - - query - - match type: object SearchOverrideSchema: properties: + effective_from_ts: + description: | + A Unix timestamp that indicates the date/time from which the override will be active. You can use this to create override rules that start applying from a future point in time. + type: integer + effective_to_ts: + description: | + A Unix timestamp that indicates the date/time until which the override will be active. You can use this to create override rules that stop applying after a period of time. + type: integer excludes: description: List of document `id`s that should be excluded from the search results. items: @@ -763,17 +993,37 @@ components: description: | A filter by clause that is applied to any search query that matches the override rule. 
type: string + filter_curated_hits: + description: | + When set to true, the filter conditions of the query is applied to the curated records as well. Default: false. + type: boolean includes: description: List of document `id`s that should be included in the search results with their corresponding `position`s. items: $ref: '#/components/schemas/SearchOverrideInclude' type: array + metadata: + description: | + Return a custom JSON object in the Search API response, when this rule is triggered. This can can be used to display a pre-defined message (eg: a promotion banner) on the front-end when a particular rule is triggered. + type: object remove_matched_tokens: description: | Indicates whether search query tokens that exist in the override's rule should be removed from the search query. type: boolean + replace_query: + description: | + Replaces the current search query with this value, when the search query matches the override rule. + type: string rule: $ref: '#/components/schemas/SearchOverrideRule' + sort_by: + description: | + A sort by clause that is applied to any search query that matches the override rule. + type: string + stop_processing: + description: | + When set to true, override processing will stop at the first matching rule. When set to false override processing will continue and multiple override actions will be triggered in sequence. Overrides are processed in the lexical sort order of their id field. Default: true. + type: boolean required: - rule type: object @@ -793,6 +1043,20 @@ components: description: | The duration (in seconds) that determines how long the search query is cached. This value can be set on a per-query basis. Default: 60. type: integer + conversation: + description: | + Enable conversational search. + type: boolean + conversation_id: + description: | + The Id of a previous conversation to continue, this tells Typesense to include prior context when communicating with the LLM. 
+ type: string + conversation_model_id: + description: | + The Id of Conversation Model to be used. + type: string + drop_tokens_mode: + $ref: '#/components/schemas/DropTokensMode' drop_tokens_threshold: description: | If the number of results found for a specific query is less than this number, Typesense will attempt to drop the tokens in the query until enough results are found. Tokens that have the least individual hits are dropped first. Set to 0 to disable. Default: 10 @@ -807,6 +1071,14 @@ components: description: | If you have some overrides defined but want to disable all of them during query time, you can do that by setting this parameter to false type: boolean + enable_synonyms: + description: | + If you have some synonyms defined but want to disable all of them for a particular search query, set enable_synonyms to false. Default: true + type: boolean + enable_typos_for_alpha_numerical_tokens: + description: | + Set this parameter to false to disable typos on alphanumerical query tokens. Default: true. + type: boolean enable_typos_for_numerical_tokens: default: true description: | @@ -837,6 +1109,10 @@ components: description: Filter conditions for refining youropen api validator search results. Separate multiple conditions with &&. example: 'num_employees:>100 && country: [USA, UK]' type: string + filter_curated_hits: + description: | + Whether the filter_by condition of the search query should be applicable to curated results (override definitions, pinned hits, hidden hits, etc.). Default: false + type: boolean group_by: description: You can aggregate search results into groups or buckets by specify one or more `group_by` fields. Separate multiple fields with a comma. To group on a particular field, it must be a faceted field. type: string @@ -844,6 +1120,10 @@ components: description: | Maximum number of hits to be returned for every group. If the `group_limit` is set as `K` then only the top K hits in each group are returned in the response. 
Default: 3 type: integer + group_missing_values: + description: | + Setting this parameter to true will place all documents that have a null value in the group_by field, into a single group. Setting this parameter to false, will cause each document with a null value in the group_by field to not be grouped with other documents. Default: true + type: boolean hidden_hits: description: | A list of records to unconditionally hide from search results. A list of `record_id`s to hide. Eg: to hide records with IDs 123 and 456, you'd specify `123,456`. @@ -984,6 +1264,14 @@ components: description: | Name of the stopwords set to apply for this search, the keywords present in the set will be removed from the search query. type: string + synonym_num_typos: + description: | + Allow synonym resolution on typo-corrected words in the query. Default: 0 + type: integer + synonym_prefix: + description: | + Allow synonym resolution on word prefixes in the query. Default: false + type: boolean text_match_type: description: In a multi-field matching context, this parameter determines how the representative text match score of a record is calculated. Possible values are max_score (default) or max_weight. type: string @@ -999,12 +1287,18 @@ components: description: | Vector query expression for fetching documents "closest" to a given query/document vector. type: string + voice_query: + description: | + The base64 encoded audio file in 16 khz 16-bit WAV format. 
+ type: string required: - q - query_by type: object SearchResult: properties: + conversation: + $ref: '#/components/schemas/SearchResultConversation' facet_counts: items: $ref: '#/components/schemas/FacetCounts' @@ -1012,6 +1306,8 @@ components: found: description: The number of documents found type: integer + found_docs: + type: integer grouped_hits: items: $ref: '#/components/schemas/SearchGroupedHit' @@ -1035,6 +1331,11 @@ components: type: integer q: type: string + voice_query: + properties: + transcribed_query: + type: string + type: object required: - collection_name - q @@ -1047,6 +1348,24 @@ components: description: The number of milliseconds the search took type: integer type: object + SearchResultConversation: + properties: + answer: + type: string + conversation_history: + items: + type: object + type: array + conversation_id: + type: string + query: + type: string + required: + - answer + - conversation_history + - conversation_id + - query + type: object SearchResultHit: example: document: @@ -1097,9 +1416,17 @@ components: type: object SearchSynonymSchema: properties: + locale: + description: Locale for the synonym, leave blank to use the standard tokenizer. + type: string root: description: For 1-way synonyms, indicates the root word that words in the `synonyms` parameter map to. type: string + symbols_to_index: + description: By default, special characters are dropped from synonyms. Use this attribute to specify which special characters should be indexed as is. + items: + type: string + type: array synonyms: description: Array of words that should be considered as synonyms. 
items: @@ -1179,6 +1506,14 @@ components: required: - success type: object + VoiceQueryModelCollectionConfig: + description: | + Configuration for the voice query model + properties: + model_name: + example: ts/whisper/base.en + type: string + type: object securitySchemes: api_key_header: in: header @@ -1190,7 +1525,7 @@ externalDocs: info: description: An open source search engine for building delightful search experiences. title: Typesense API - version: "26.0" + version: "27.0" openapi: 3.0.3 paths: /aliases: @@ -1297,6 +1632,33 @@ paths: summary: Create or update a collection alias tags: - collections + /analytics/events: + post: + description: Sending events for analytics e.g rank search results based on popularity. + operationId: createAnalyticsEvent + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/AnalyticsEventCreateSchema' + description: The Analytics event to be created + required: true + responses: + 201: + content: + application/json: + schema: + $ref: '#/components/schemas/AnalyticsEventCreateResponse' + description: Analytics event successfully created + 400: + content: + application/json: + schema: + $ref: '#/components/schemas/ApiResponse' + description: Bad request, see error message for details + summary: Create an analytics event + tags: + - analytics /analytics/rules: get: description: Retrieve the details of all analytics rules @@ -1353,7 +1715,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/AnalyticsRuleSchema' + $ref: '#/components/schemas/AnalyticsRuleDeleteResponse' description: Analytics rule deleted 404: content: @@ -1404,11 +1766,11 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/AnalyticsRuleSchema' + $ref: '#/components/schemas/AnalyticsRuleUpsertSchema' description: The Analytics rule to be upserted required: true responses: - 201: + 200: content: application/json: schema: @@ -1583,6 +1945,10 @@ paths: name: filter_by schema: type: string + - 
in: query + name: ignore_not_found + schema: + type: boolean responses: 200: content: @@ -1670,10 +2036,14 @@ paths: in: query name: action schema: - enum: - - upsert + $ref: '#/components/schemas/IndexAction' example: upsert type: string + - description: Dealing with Dirty Data + in: query + name: dirty_values + schema: + $ref: '#/components/schemas/DirtyValues' requestBody: content: application/json: @@ -1783,6 +2153,11 @@ paths: required: true schema: type: string + - description: Dealing with Dirty Data + in: query + name: dirty_values + schema: + $ref: '#/components/schemas/DirtyValues' requestBody: content: application/json: @@ -1866,24 +2241,27 @@ paths: - in: query name: action schema: - type: string + $ref: '#/components/schemas/IndexAction' - in: query - name: batch_size + name: dirty_values schema: - type: integer + $ref: '#/components/schemas/DirtyValues' - in: query - name: dirty_values + name: batch_size schema: - enum: - - coerce_or_reject - - coerce_or_drop - - drop - - reject - type: string + type: integer - in: query name: remote_embedding_batch_size schema: type: integer + - in: query + name: return_doc + schema: + type: boolean + - in: query + name: return_id + schema: + type: boolean requestBody: content: application/octet-stream: @@ -1932,6 +2310,22 @@ paths: name: cache_ttl schema: type: integer + - in: query + name: conversation + schema: + type: boolean + - in: query + name: conversation_id + schema: + type: string + - in: query + name: conversation_model_id + schema: + type: string + - in: query + name: drop_tokens_mode + schema: + $ref: '#/components/schemas/DropTokensMode' - in: query name: drop_tokens_threshold schema: @@ -1944,6 +2338,14 @@ paths: name: enable_overrides schema: type: boolean + - in: query + name: enable_synonyms + schema: + type: boolean + - in: query + name: enable_typos_for_alpha_numerical_tokens + schema: + type: boolean - in: query name: enable_typos_for_numerical_tokens schema: @@ -1976,6 +2378,10 @@ paths: name: 
filter_by schema: type: string + - in: query + name: filter_curated_hits + schema: + type: boolean - in: query name: group_by schema: @@ -1984,6 +2390,10 @@ paths: name: group_limit schema: type: integer + - in: query + name: group_missing_values + schema: + type: boolean - in: query name: hidden_hits schema: @@ -2132,6 +2542,14 @@ paths: name: stopwords schema: type: string + - in: query + name: synonym_num_typos + schema: + type: integer + - in: query + name: synonym_prefix + schema: + type: boolean - in: query name: text_match_type schema: @@ -2148,6 +2566,10 @@ paths: name: vector_query schema: type: string + - in: query + name: voice_query + schema: + type: string responses: 200: content: @@ -2190,7 +2612,7 @@ paths: summary: List all collection overrides tags: - documents - - promote + - curation /collections/{collectionName}/overrides/{overrideId}: delete: operationId: deleteSearchOverride @@ -2223,7 +2645,7 @@ paths: summary: Delete an override associated with a collection tags: - documents - - promote + - curation get: description: Retrieve the details of a search override, given its id. 
operationId: getSearchOverride @@ -2290,7 +2712,7 @@ paths: summary: Create or update an override to promote certain documents over others tags: - documents - - promote + - curation /collections/{collectionName}/synonyms: get: operationId: getSearchSynonyms @@ -2420,6 +2842,114 @@ paths: summary: Create or update a synonym tags: - documents + /conversations/models: + get: + description: Retrieve all conversation models + operationId: retrieveAllConversationModels + responses: + 200: + content: + application/json: + schema: + items: + $ref: '#/components/schemas/ConversationModelSchema' + type: array + x-go-type: '[]*ConversationModelSchema' + description: List of all conversation models + summary: List all conversation models + tags: + - conversations + post: + description: Create a Conversation Model + operationId: createConversationModel + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ConversationModelCreateSchema' + required: true + responses: + 200: + content: + application/json: + schema: + $ref: '#/components/schemas/ConversationModelSchema' + description: Created Conversation Model + 400: + content: + application/json: + schema: + $ref: '#/components/schemas/ApiResponse' + description: Bad request, see error message for details + tags: + - conversations + /conversations/models/{modelId}: + delete: + description: Delete a conversation model + operationId: deleteConversationModel + parameters: + - description: The id of the conversation model to delete + in: path + name: modelId + required: true + schema: + type: string + responses: + 200: + content: + application/json: + schema: + $ref: '#/components/schemas/ConversationModelSchema' + description: The conversation model was successfully deleted + summary: Delete a conversation model + tags: + - conversations + get: + description: Retrieve a conversation model + operationId: retrieveConversationModel + parameters: + - description: The id of the conversation model to 
retrieve + in: path + name: modelId + required: true + schema: + type: string + responses: + 200: + content: + application/json: + schema: + $ref: '#/components/schemas/ConversationModelSchema' + description: A conversation model + summary: Retrieve a conversation model + tags: + - conversations + put: + description: Update a conversation model + operationId: updateConversationModel + parameters: + - description: The id of the conversation model to update + in: path + name: modelId + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ConversationModelUpdateSchema' + required: true + responses: + 200: + content: + application/json: + schema: + $ref: '#/components/schemas/ConversationModelSchema' + description: The conversation model was successfully updated + summary: Update a conversation model + tags: + - conversations /debug: get: description: Print debugging information @@ -2578,6 +3108,22 @@ paths: name: cache_ttl schema: type: integer + - in: query + name: conversation + schema: + type: boolean + - in: query + name: conversation_id + schema: + type: string + - in: query + name: conversation_model_id + schema: + type: string + - in: query + name: drop_tokens_mode + schema: + $ref: '#/components/schemas/DropTokensMode' - in: query name: drop_tokens_threshold schema: @@ -2590,6 +3136,14 @@ paths: name: enable_overrides schema: type: boolean + - in: query + name: enable_synonyms + schema: + type: boolean + - in: query + name: enable_typos_for_alpha_numerical_tokens + schema: + type: boolean - in: query name: enable_typos_for_numerical_tokens schema: @@ -2622,6 +3176,10 @@ paths: name: filter_by schema: type: string + - in: query + name: filter_curated_hits + schema: + type: boolean - in: query name: group_by schema: @@ -2630,6 +3188,10 @@ paths: name: group_limit schema: type: integer + - in: query + name: group_missing_values + schema: + type: boolean - in: query name: hidden_hits schema: 
@@ -2778,6 +3340,14 @@ paths: name: stopwords schema: type: string + - in: query + name: synonym_num_typos + schema: + type: integer + - in: query + name: synonym_prefix + schema: + type: boolean - in: query name: text_match_type schema: @@ -2794,6 +3364,10 @@ paths: name: vector_query schema: type: string + - in: query + name: voice_query + schema: + type: string requestBody: content: application/json: @@ -3090,15 +3664,15 @@ tags: description: Find out more url: https://typesense.org/api/#index-document name: documents - - description: Promote certain documents over others + - description: Hand-curate search results based on conditional business rules externalDocs: description: Find out more url: https://typesense.org/docs/0.23.0/api/#curation - name: promote + name: curation - description: Typesense can aggregate search queries for both analytics purposes and for query suggestions. externalDocs: description: Find out more - url: https://typesense.org/docs/0.25.0/api/analytics-query-suggestions.html + url: https://typesense.org/docs/26.0/api/analytics-query-suggestions.html name: analytics - description: Manage API Keys with fine-grain access control externalDocs: @@ -3122,3 +3696,8 @@ tags: description: Find out more url: https://typesense.org/docs/26.0/api/search.html#presets name: presets + - description: Conversational Search (RAG) + externalDocs: + description: Find out more + url: https://typesense.org/docs/27.0/api/conversational-search-rag.html + name: conversations diff --git a/typesense/api/generator/main.go b/typesense/api/generator/main.go index d932b09..ced3ccc 100644 --- a/typesense/api/generator/main.go +++ b/typesense/api/generator/main.go @@ -215,10 +215,14 @@ func unwrapSearchParameters(m *yml) { newMap["in"] = query newMap["schema"] = make(yml) if obj.Value.(yml)["oneOf"] == nil { - if obj.Value.(yml)["type"].(string) == array { + switch { + // if the param is referencing a schema + case obj.Value.(yml)["type"] == nil: + 
newMap["schema"].(yml)["$ref"] = obj.Value.(yml)["$ref"].(string) + case obj.Value.(yml)["type"].(string) == array: newMap["schema"].(yml)["type"] = array newMap["schema"].(yml)["items"] = obj.Value.(yml)["items"] - } else { + default: newMap["schema"].(yml)["type"] = obj.Value.(yml)["type"].(string) } } else { @@ -241,10 +245,14 @@ func unwrapMultiSearchParameters(m *yml) { newMap["in"] = query newMap["schema"] = make(yml) if obj.Value.(yml)["oneOf"] == nil { - if obj.Value.(yml)["type"].(string) == array { + switch { + // if the param is referencing a schema + case obj.Value.(yml)["type"] == nil: + newMap["schema"].(yml)["$ref"] = obj.Value.(yml)["$ref"].(string) + case obj.Value.(yml)["type"].(string) == array: newMap["schema"].(yml)["type"] = array newMap["schema"].(yml)["items"] = obj.Value.(yml)["items"] - } else { + default: newMap["schema"].(yml)["type"] = obj.Value.(yml)["type"].(string) } } else { diff --git a/typesense/api/generator/openapi.yml b/typesense/api/generator/openapi.yml index 249a696..296c508 100644 --- a/typesense/api/generator/openapi.yml +++ b/typesense/api/generator/openapi.yml @@ -2,7 +2,7 @@ openapi: 3.0.3 info: title: Typesense API description: "An open source search engine for building delightful search experiences." - version: '26.0' + version: '27.0' externalDocs: description: Find out more about Typsesense url: https://typesense.org @@ -19,8 +19,8 @@ tags: externalDocs: description: Find out more url: https://typesense.org/api/#index-document - - name: promote - description: Promote certain documents over others + - name: curation + description: Hand-curate search results based on conditional business rules externalDocs: description: Find out more url: https://typesense.org/docs/0.23.0/api/#curation @@ -28,7 +28,7 @@ tags: description: Typesense can aggregate search queries for both analytics purposes and for query suggestions. 
externalDocs: description: Find out more - url: https://typesense.org/docs/0.25.0/api/analytics-query-suggestions.html + url: https://typesense.org/docs/26.0/api/analytics-query-suggestions.html - name: keys description: Manage API Keys with fine-grain access control externalDocs: @@ -51,6 +51,11 @@ tags: externalDocs: description: Find out more url: https://typesense.org/docs/26.0/api/search.html#presets + - name: conversations + description: Conversational Search (RAG) + externalDocs: + description: Find out more + url: https://typesense.org/docs/27.0/api/conversational-search-rag.html paths: /collections: get: @@ -223,8 +228,12 @@ paths: schema: type: string example: upsert - enum: - - upsert + $ref: "#/components/schemas/IndexAction" + - name: dirty_values + in: query + description: Dealing with Dirty Data + schema: + $ref: "#/components/schemas/DirtyValues" requestBody: description: The document object to be indexed content: @@ -329,6 +338,8 @@ paths: in: query schema: type: object + required: + - filter_by properties: filter_by: type: string @@ -339,6 +350,8 @@ paths: at a time. A larger value will speed up deletions, but will impact performance of other operations running on the server. type: integer + ignore_not_found: + type: boolean responses: 200: description: Documents successfully deleted @@ -399,7 +412,7 @@ paths: get: tags: - documents - - promote + - curation summary: List all collection overrides operationId: getSearchOverrides parameters: @@ -447,7 +460,7 @@ paths: put: tags: - documents - - promote + - curation summary: Create or update an override to promote certain documents over others description: Create or update an override to promote certain documents over others. 
@@ -489,7 +502,7 @@ paths: delete: tags: - documents - - promote + - curation summary: Delete an override associated with a collection operationId: deleteSearchOverride parameters: @@ -666,9 +679,6 @@ paths: in: query schema: type: object - required: - - include_fields - - exclude_fields properties: filter_by: description: @@ -716,24 +726,32 @@ paths: required: true schema: type: string + # Do not change the index position of this param - name: importDocumentsParameters in: query schema: type: object properties: - action: - type: string batch_size: type: integer - dirty_values: - type: string - enum: - - coerce_or_reject - - coerce_or_drop - - drop - - reject + return_id: + type: boolean + description: + Returning the id of the imported documents. If you want the + import response to return the ingested document's id in the + response, you can use the return_id parameter. remote_embedding_batch_size: type: integer + return_doc: + type: boolean + - name: action + in: query + schema: + $ref: "#/components/schemas/IndexAction" + - name: dirty_values + in: query + schema: + $ref: "#/components/schemas/DirtyValues" requestBody: description: The json array of documents or the JSONL file to import content: @@ -824,6 +842,11 @@ paths: required: true schema: type: string + - name: dirty_values + in: query + description: Dealing with Dirty Data + schema: + $ref: "#/components/schemas/DirtyValues" requestBody: description: The document object with fields to be updated content: @@ -880,6 +903,114 @@ paths: application/json: schema: $ref: "#/components/schemas/ApiResponse" + /conversations/models: + get: + description: Retrieve all conversation models + operationId: retrieveAllConversationModels + responses: + 200: + content: + application/json: + schema: + items: + $ref: '#/components/schemas/ConversationModelSchema' + type: array + x-go-type: '[]*ConversationModelSchema' + description: List of all conversation models + summary: List all conversation models + tags: + - 
conversations + post: + description: Create a Conversation Model + operationId: createConversationModel + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ConversationModelCreateSchema' + required: true + responses: + 200: + content: + application/json: + schema: + $ref: '#/components/schemas/ConversationModelSchema' + description: Created Conversation Model + 400: + content: + application/json: + schema: + $ref: '#/components/schemas/ApiResponse' + description: Bad request, see error message for details + tags: + - conversations + /conversations/models/{modelId}: + get: + description: Retrieve a conversation model + operationId: retrieveConversationModel + parameters: + - name: modelId + in: path + description: The id of the conversation model to retrieve + required: true + schema: + type: string + responses: + 200: + content: + application/json: + schema: + $ref: '#/components/schemas/ConversationModelSchema' + description: A conversation model + summary: Retrieve a conversation model + tags: + - conversations + put: + description: Update a conversation model + operationId: updateConversationModel + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ConversationModelUpdateSchema' + required: true + parameters: + - name: modelId + in: path + description: The id of the conversation model to update + required: true + schema: + type: string + responses: + 200: + content: + application/json: + schema: + $ref: '#/components/schemas/ConversationModelSchema' + description: The conversation model was successfully updated + summary: Update a conversation model + tags: + - conversations + delete: + description: Delete a conversation model + operationId: deleteConversationModel + parameters: + - name: modelId + in: path + description: The id of the conversation model to delete + required: true + schema: + type: string + responses: + 200: + content: + application/json: + schema: + $ref: 
'#/components/schemas/ConversationModelSchema' + description: The conversation model was successfully deleted + summary: Delete a conversation model + tags: + - conversations /keys: get: tags: @@ -1205,6 +1336,33 @@ paths: application/json: schema: $ref: "#/components/schemas/ApiResponse" + /analytics/events: + post: + tags: + - analytics + summary: Create an analytics event + description: Sending events for analytics e.g rank search results based on popularity. + operationId: createAnalyticsEvent + requestBody: + description: The Analytics event to be created + content: + application/json: + schema: + $ref: '#/components/schemas/AnalyticsEventCreateSchema' + required: true + responses: + 201: + description: Analytics event successfully created + content: + application/json: + schema: + $ref: '#/components/schemas/AnalyticsEventCreateResponse' + 400: + description: Bad request, see error message for details + content: + application/json: + schema: + $ref: '#/components/schemas/ApiResponse' /analytics/rules: post: tags: @@ -1267,10 +1425,10 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/AnalyticsRuleSchema" + $ref: "#/components/schemas/AnalyticsRuleUpsertSchema" required: true responses: - 201: + 200: description: Analytics rule successfully upserted content: application/json: @@ -1329,7 +1487,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/AnalyticsRuleSchema" + $ref: "#/components/schemas/AnalyticsRuleDeleteResponse" 404: description: Analytics rule not found content: @@ -1631,12 +1789,12 @@ components: maxLength: 1 default: [] enable_nested_fields: - type: boolean - description: - Enables experimental support at a collection level for nested object or object array fields. - This field is only available if the Typesense server is version `0.24.0.rcn34` or later. 
- default: false - example: true + type: boolean + description: + Enables experimental support at a collection level for nested object or object array fields. + This field is only available if the Typesense server is version `0.24.0.rcn34` or later. + default: false + example: true symbols_to_index: type: array description: > @@ -1648,6 +1806,8 @@ components: minLength: 1 maxLength: 1 default: [] + voice_query_model: + $ref: "#/components/schemas/VoiceQueryModelCollectionConfig" CollectionUpdateSchema: required: - fields @@ -1720,7 +1880,8 @@ components: default: false reference: type: string - example: string + description: > + Name of a field in another collection that should be linked to this collection so that it can be joined during query. num_dim: type: integer example: 256 @@ -1728,6 +1889,22 @@ components: type: boolean example: true # omitting default value since we want it to be null + store: + type: boolean + description: > + When set to false, the field value will not be stored on disk. Default: true. + vec_dist: + type: string + description: > + The distance metric to be used for vector search. Default: `cosine`. You can also use `ip` for inner product. + range_index: + type: boolean + description: > + Enables an index optimized for range filtering on numerical fields (e.g. rating:>3.5). Default: false. + stem: + type: boolean + description: > + Values are stemmed before indexing in-memory. Default: false. 
embed: type: object required: @@ -1747,6 +1924,8 @@ components: type: string api_key: type: string + url: + type: string access_token: type: string client_id: @@ -1755,6 +1934,14 @@ components: type: string project_id: type: string + VoiceQueryModelCollectionConfig: + type: object + description: > + Configuration for the voice query model + properties: + model_name: + type: string + example: "ts/whisper/base.en" CollectionAliasSchema: type: object required: @@ -1796,6 +1983,8 @@ components: found: type: integer description: The number of documents found + found_docs: + type: integer search_time_ms: type: integer description: The number of milliseconds the search took @@ -1830,7 +2019,31 @@ components: type: string per_page: type: integer - + voice_query: + type: object + properties: + transcribed_query: + type: string + conversation: + $ref: "#/components/schemas/SearchResultConversation" + SearchResultConversation: + type: object + required: + - answer + - conversation_history + - conversation_id + - query + properties: + answer: + type: string + conversation_history: + type: array + items: + type: object + conversation_id: + type: string + query: + type: string SearchGroupedHit: type: object required: @@ -1957,6 +2170,37 @@ components: type: boolean description: > Indicates whether search query tokens that exist in the override's rule should be removed from the search query. + metadata: + type: object + description: > + Return a custom JSON object in the Search API response, when this rule is triggered. This can can be used to display a pre-defined message (eg: a promotion banner) on the front-end when a particular rule is triggered. + sort_by: + type: string + description: > + A sort by clause that is applied to any search query that matches the override rule. + replace_query: + type: string + description: > + Replaces the current search query with this value, when the search query matches the override rule. 
+ filter_curated_hits: + type: boolean + description: > + When set to true, the filter conditions of the query is applied to the curated records as well. + Default: false. + effective_from_ts: + type: integer + description: > + A Unix timestamp that indicates the date/time from which the override will be active. You can use this to create override rules that start applying from a future point in time. + effective_to_ts: + type: integer + description: > + A Unix timestamp that indicates the date/time until which the override will be active. You can use this to create override rules that stop applying after a period of time. + stop_processing: + type: boolean + description: > + When set to true, override processing will stop at the first matching rule. When set to false override processing will continue and multiple override actions will be triggered in sequence. + Overrides are processed in the lexical sort order of their id field. + Default: true. SearchOverride: allOf: - $ref: "#/components/schemas/SearchOverrideSchema" @@ -1969,10 +2213,12 @@ components: readOnly: true SearchOverrideRule: type: object - required: - - query - - match properties: + tags: + type: array + description: List of tag values to associate with this override rule. + items: + type: string query: type: string description: Indicates what search queries should be overridden @@ -1985,11 +2231,10 @@ components: enum: - exact - contains - tags: - type: array - description: List of tags. - items: - type: string + filter_by: + type: string + description: > + Indicates that the override should apply when the filter_by parameter in a search query exactly matches the string specified here (including backticks, spaces, brackets, etc). SearchOverrideInclude: type: object required: @@ -2033,6 +2278,14 @@ components: description: Array of words that should be considered as synonyms. 
items: type: string + locale: + type: string + description: Locale for the synonym, leave blank to use the standard tokenizer. + symbols_to_index: + type: array + description: By default, special characters are dropped from synonyms. Use this attribute to specify which special characters should be indexed as is. + items: + type: string SearchSynonym: allOf: - $ref: "#/components/schemas/SearchSynonymSchema" @@ -2145,6 +2398,8 @@ components: type: array items: $ref: "#/components/schemas/SearchResult" + conversation: + $ref: "#/components/schemas/SearchResultConversation" SearchParameters: type: object required: @@ -2188,8 +2443,8 @@ components: If infix index is enabled for this field, infix searching can be done on a per-field basis by sending a comma separated string parameter called infix to the search query. This parameter can have 3 values; `off` infix search is disabled, which is default - `always` infix search is performed along with regular search - `fallback` infix search is performed if regular search does not produce results + `always` infix search is performed along with regular search + `fallback` infix search is performed if regular search does not produce results type: string max_extra_prefix: @@ -2283,6 +2538,13 @@ components: Default: 3 type: integer + group_missing_values: + description: > + Setting this parameter to true will place all documents that have a null value in the group_by field, into a single group. + Setting this parameter to false, will cause each document with a null value in the group_by field to not be grouped with other documents. + Default: true + type: boolean + include_fields: description: List of fields from the document to include in the search result type: string @@ -2332,12 +2594,35 @@ components: enough results are found. Tokens that have the least individual hits are dropped first. Set to 0 to disable. 
Default: 10 type: integer + drop_tokens_mode: + $ref: "#/components/schemas/DropTokensMode" typo_tokens_threshold: description: > If the number of results found for a specific query is less than this number, Typesense will attempt to look for tokens with more typos until enough results are found. Default: 100 type: integer + enable_typos_for_alpha_numerical_tokens: + type: boolean + description: > + Set this parameter to false to disable typos on alphanumerical query tokens. Default: true. + + filter_curated_hits: + type: boolean + description: > + Whether the filter_by condition of the search query should be applicable to curated results (override definitions, pinned hits, hidden hits, etc.). Default: false + enable_synonyms: + type: boolean + description: > + If you have some synonyms defined but want to disable all of them for a particular search query, set enable_synonyms to false. Default: true + synonym_prefix: + type: boolean + description: > + Allow synonym resolution on word prefixes in the query. Default: false + synonym_num_typos: + type: integer + description: > + Allow synonym resolution on typo-corrected words in the query. Default: 0 pinned_hits: description: > @@ -2485,6 +2770,22 @@ components: description: > Comma separated string of nested facet fields whose parent object should be returned in facet response. type: string + voice_query: + description: > + The base64 encoded audio file in 16 khz 16-bit WAV format. + type: string + conversation: + description: > + Enable conversational search. + type: boolean + conversation_model_id: + description: > + The Id of Conversation Model to be used. + type: string + conversation_id: + description: > + The Id of a previous conversation to continue, this tells Typesense to include prior context when communicating with the LLM. 
+ type: string MultiSearchParameters: description: > @@ -2527,8 +2828,8 @@ components: If infix index is enabled for this field, infix searching can be done on a per-field basis by sending a comma separated string parameter called infix to the search query. This parameter can have 3 values; `off` infix search is disabled, which is default - `always` infix search is performed along with regular search - `fallback` infix search is performed if regular search does not produce results + `always` infix search is performed along with regular search + `fallback` infix search is performed if regular search does not produce results type: string max_extra_prefix: @@ -2621,6 +2922,13 @@ components: Default: 3 type: integer + group_missing_values: + description: > + Setting this parameter to true will place all documents that have a null value in the group_by field, into a single group. + Setting this parameter to false, will cause each document with a null value in the group_by field to not be grouped with other documents. + Default: true + type: boolean + include_fields: description: List of fields from the document to include in the search result type: string @@ -2663,12 +2971,35 @@ components: enough results are found. Tokens that have the least individual hits are dropped first. Set to 0 to disable. Default: 10 type: integer + drop_tokens_mode: + $ref: "#/components/schemas/DropTokensMode" typo_tokens_threshold: description: > If the number of results found for a specific query is less than this number, Typesense will attempt to look for tokens with more typos until enough results are found. Default: 100 type: integer + enable_typos_for_alpha_numerical_tokens: + type: boolean + description: > + Set this parameter to false to disable typos on alphanumerical query tokens. Default: true. 
+ + filter_curated_hits: + type: boolean + description: > + Whether the filter_by condition of the search query should be applicable to curated results (override definitions, pinned hits, hidden hits, etc.). Default: false + enable_synonyms: + type: boolean + description: > + If you have some synonyms defined but want to disable all of them for a particular search query, set enable_synonyms to false. Default: true + synonym_prefix: + type: boolean + description: > + Allow synonym resolution on word prefixes in the query. Default: false + synonym_num_typos: + type: integer + description: > + Allow synonym resolution on typo-corrected words in the query. Default: 0 pinned_hits: description: > @@ -2809,6 +3140,22 @@ components: description: > Comma separated string of nested facet fields whose parent object should be returned in facet response. type: string + voice_query: + description: > + The base64 encoded audio file in 16 khz 16-bit WAV format. + type: string + conversation: + description: > + Enable conversational search. + type: boolean + conversation_model_id: + description: > + The Id of Conversation Model to be used. + type: string + conversation_id: + description: > + The Id of a previous conversation to continue, this tells Typesense to include prior context when communicating with the LLM. 
+ type: string MultiSearchSearchesParameter: type: object required: @@ -2864,17 +3211,38 @@ components: avg: type: number format: double - AnalyticsRuleSchema: + AnalyticsEventCreateResponse: + type: object + required: + - ok + properties: + ok: + type: boolean + AnalyticsEventCreateSchema: type: object required: - - name - type - - params + - name + - data properties: + type: + type: string name: type: string + data: + type: object + AnalyticsRuleUpsertSchema: + type: object + required: + - type + - params + properties: type: type: string + enum: + - popular_queries + - nohits_queries + - counter params: $ref: "#/components/schemas/AnalyticsRuleParameters" AnalyticsRuleParameters: @@ -2882,22 +3250,65 @@ components: required: - source - destination - - limit properties: source: - type: object - properties: - collections: - type: array - items: - type: string + $ref: '#/components/schemas/AnalyticsRuleParametersSource' destination: - type: object - properties: - collection: - type: string + $ref: '#/components/schemas/AnalyticsRuleParametersDestination' limit: type: integer + expand_query: + type: boolean + AnalyticsRuleParametersSource: + type: object + required: + - collections + properties: + collections: + type: array + items: + type: string + events: + type: array + items: + type: object + required: + - type + - weight + - name + properties: + type: + type: string + weight: + type: number + format: float + name: + type: string + AnalyticsRuleParametersDestination: + type: object + required: + - collection + properties: + collection: + type: string + counter_field: + type: string + AnalyticsRuleDeleteResponse: + type: object + required: + - name + properties: + name: + type: string + AnalyticsRuleSchema: + allOf: + - $ref: '#/components/schemas/AnalyticsRuleUpsertSchema' + - type: object + required: + - name + properties: + name: + type: string AnalyticsRulesRetrieveSchema: type: object properties: @@ -2905,6 +3316,7 @@ components: type: array items: $ref: 
"#/components/schemas/AnalyticsRuleSchema" + x-go-type: '[]*AnalyticsRuleSchema' APIStatsResponse: type: object properties: @@ -3030,6 +3442,90 @@ components: properties: name: type: string + # client libraries already have .create, .upsert,... methods so we omit the `action` param + DocumentIndexParameters: + type: object + properties: + dirty_values: + $ref: "#/components/schemas/DirtyValues" + DirtyValues: + type: string + enum: [coerce_or_reject, coerce_or_drop, drop, reject] + IndexAction: + type: string + enum: [create, update, upsert, emplace] + DropTokensMode: + type: string + enum: [right_to_left, left_to_right, both_sides:3] + description: > + Dictates the direction in which the words in the query must be dropped when the original words in the query do not appear in any document. + Values: right_to_left (default), left_to_right, both_sides:3 + A note on both_sides:3 - for queries upto 3 tokens (words) in length, this mode will drop tokens from both sides and exhaustively rank all matching results. + If query length is greater than 3 words, Typesense will just fallback to default behavior of right_to_left + ConversationModelCreateSchema: + required: + - model_name + - max_bytes + allOf: + - $ref: '#/components/schemas/ConversationModelUpdateSchema' + - type: object + required: + - model_name + - max_bytes + - history_collection + properties: + model_name: + description: Name of the LLM model offered by OpenAI, Cloudflare or vLLM + type: string + max_bytes: + description: | + The maximum number of bytes to send to the LLM in every API call. Consult the LLM's documentation on the number of bytes supported in the context window. 
+ type: integer + history_collection: + type: string + description: Typesense collection that stores the historical conversations + ConversationModelUpdateSchema: + type: object + properties: + id: + type: string + description: An explicit id for the model, otherwise the API will return a response with an auto-generated conversation model id. + model_name: + description: Name of the LLM model offered by OpenAI, Cloudflare or vLLM + type: string + api_key: + description: The LLM service's API Key + type: string + history_collection: + type: string + description: Typesense collection that stores the historical conversations + account_id: + description: LLM service's account ID (only applicable for Cloudflare) + type: string + system_prompt: + description: The system prompt that contains special instructions to the LLM + type: string + ttl: + type: integer + description: | + Time interval in seconds after which the messages would be deleted. Default: 86400 (24 hours) + max_bytes: + description: | + The maximum number of bytes to send to the LLM in every API call. Consult the LLM's documentation on the number of bytes supported in the context window. + type: integer + vllm_url: + description: URL of vLLM service + type: string + ConversationModelSchema: + allOf: + - $ref: '#/components/schemas/ConversationModelCreateSchema' + - type: object + required: + - id + properties: + id: + type: string + description: An explicit id for the model, otherwise the API will return a response with an auto-generated conversation model id. 
securitySchemes: api_key_header: type: apiKey diff --git a/typesense/api/pointer/pointer.go b/typesense/api/pointer/pointer.go index b53887c..d6feb14 100644 --- a/typesense/api/pointer/pointer.go +++ b/typesense/api/pointer/pointer.go @@ -33,3 +33,7 @@ func Interface(v interface{}) *interface{} { func String(v string) *string { return &v } + +func Any[T any](v T) *T { + return &v +} diff --git a/typesense/api/types_gen.go b/typesense/api/types_gen.go index 309d760..b425961 100644 --- a/typesense/api/types_gen.go +++ b/typesense/api/types_gen.go @@ -13,23 +13,47 @@ const ( Api_key_headerScopes = "api_key_header.Scopes" ) -// Defines values for SearchOverrideRuleMatch. +// Defines values for AnalyticsRuleSchemaType. const ( - Contains SearchOverrideRuleMatch = "contains" - Exact SearchOverrideRuleMatch = "exact" + AnalyticsRuleSchemaTypeCounter AnalyticsRuleSchemaType = "counter" + AnalyticsRuleSchemaTypeNohitsQueries AnalyticsRuleSchemaType = "nohits_queries" + AnalyticsRuleSchemaTypePopularQueries AnalyticsRuleSchemaType = "popular_queries" +) + +// Defines values for AnalyticsRuleUpsertSchemaType. +const ( + AnalyticsRuleUpsertSchemaTypeCounter AnalyticsRuleUpsertSchemaType = "counter" + AnalyticsRuleUpsertSchemaTypeNohitsQueries AnalyticsRuleUpsertSchemaType = "nohits_queries" + AnalyticsRuleUpsertSchemaTypePopularQueries AnalyticsRuleUpsertSchemaType = "popular_queries" +) + +// Defines values for DirtyValues. +const ( + CoerceOrDrop DirtyValues = "coerce_or_drop" + CoerceOrReject DirtyValues = "coerce_or_reject" + Drop DirtyValues = "drop" + Reject DirtyValues = "reject" ) -// Defines values for IndexDocumentParamsAction. +// Defines values for DropTokensMode. const ( - Upsert IndexDocumentParamsAction = "upsert" + BothSides3 DropTokensMode = "both_sides:3" + LeftToRight DropTokensMode = "left_to_right" + RightToLeft DropTokensMode = "right_to_left" ) -// Defines values for ImportDocumentsParamsDirtyValues. +// Defines values for IndexAction. 
const ( - CoerceOrDrop ImportDocumentsParamsDirtyValues = "coerce_or_drop" - CoerceOrReject ImportDocumentsParamsDirtyValues = "coerce_or_reject" - Drop ImportDocumentsParamsDirtyValues = "drop" - Reject ImportDocumentsParamsDirtyValues = "reject" + Create IndexAction = "create" + Emplace IndexAction = "emplace" + Update IndexAction = "update" + Upsert IndexAction = "upsert" +) + +// Defines values for SearchOverrideRuleMatch. +const ( + Contains SearchOverrideRuleMatch = "contains" + Exact SearchOverrideRuleMatch = "exact" ) // APIStatsResponse defines model for APIStatsResponse. @@ -49,27 +73,69 @@ type APIStatsResponse struct { WriteRequestsPerSecond *float64 `json:"write_requests_per_second,omitempty"` } +// AnalyticsEventCreateResponse defines model for AnalyticsEventCreateResponse. +type AnalyticsEventCreateResponse struct { + Ok bool `json:"ok"` +} + +// AnalyticsEventCreateSchema defines model for AnalyticsEventCreateSchema. +type AnalyticsEventCreateSchema struct { + Data map[string]interface{} `json:"data"` + Name string `json:"name"` + Type string `json:"type"` +} + +// AnalyticsRuleDeleteResponse defines model for AnalyticsRuleDeleteResponse. +type AnalyticsRuleDeleteResponse struct { + Name string `json:"name"` +} + // AnalyticsRuleParameters defines model for AnalyticsRuleParameters. type AnalyticsRuleParameters struct { - Destination struct { - Collection *string `json:"collection,omitempty"` - } `json:"destination"` - Limit int `json:"limit"` - Source struct { - Collections *[]string `json:"collections,omitempty"` - } `json:"source"` + Destination AnalyticsRuleParametersDestination `json:"destination"` + ExpandQuery *bool `json:"expand_query,omitempty"` + Limit *int `json:"limit,omitempty"` + Source AnalyticsRuleParametersSource `json:"source"` +} + +// AnalyticsRuleParametersDestination defines model for AnalyticsRuleParametersDestination. 
+type AnalyticsRuleParametersDestination struct { + Collection string `json:"collection"` + CounterField *string `json:"counter_field,omitempty"` +} + +// AnalyticsRuleParametersSource defines model for AnalyticsRuleParametersSource. +type AnalyticsRuleParametersSource struct { + Collections []string `json:"collections"` + Events *[]struct { + Name string `json:"name"` + Type string `json:"type"` + Weight float32 `json:"weight"` + } `json:"events,omitempty"` } // AnalyticsRuleSchema defines model for AnalyticsRuleSchema. type AnalyticsRuleSchema struct { Name string `json:"name"` Params AnalyticsRuleParameters `json:"params"` - Type string `json:"type"` + Type AnalyticsRuleSchemaType `json:"type"` } +// AnalyticsRuleSchemaType defines model for AnalyticsRuleSchema.Type. +type AnalyticsRuleSchemaType string + +// AnalyticsRuleUpsertSchema defines model for AnalyticsRuleUpsertSchema. +type AnalyticsRuleUpsertSchema struct { + Params AnalyticsRuleParameters `json:"params"` + Type AnalyticsRuleUpsertSchemaType `json:"type"` +} + +// AnalyticsRuleUpsertSchemaType defines model for AnalyticsRuleUpsertSchema.Type. +type AnalyticsRuleUpsertSchemaType string + // AnalyticsRulesRetrieveSchema defines model for AnalyticsRulesRetrieveSchema. type AnalyticsRulesRetrieveSchema struct { - Rules *[]AnalyticsRuleSchema `json:"rules,omitempty"` + Rules *[]*AnalyticsRuleSchema `json:"rules,omitempty"` } // ApiKey defines model for ApiKey. @@ -147,6 +213,9 @@ type CollectionResponse struct { // TokenSeparators List of symbols or special characters to be used for splitting the text into individual words in addition to space and new-line characters. TokenSeparators *[]string `json:"token_separators,omitempty"` + + // VoiceQueryModel Configuration for the voice query model + VoiceQueryModel *VoiceQueryModelCollectionConfig `json:"voice_query_model,omitempty"` } // CollectionSchema defines model for CollectionSchema. 
@@ -168,6 +237,9 @@ type CollectionSchema struct { // TokenSeparators List of symbols or special characters to be used for splitting the text into individual words in addition to space and new-line characters. TokenSeparators *[]string `json:"token_separators,omitempty"` + + // VoiceQueryModel Configuration for the voice query model + VoiceQueryModel *VoiceQueryModelCollectionConfig `json:"voice_query_model,omitempty"` } // CollectionUpdateSchema defines model for CollectionUpdateSchema. @@ -176,6 +248,102 @@ type CollectionUpdateSchema struct { Fields []Field `json:"fields"` } +// ConversationModelCreateSchema defines model for ConversationModelCreateSchema. +type ConversationModelCreateSchema struct { + // AccountId LLM service's account ID (only applicable for Cloudflare) + AccountId *string `json:"account_id,omitempty"` + + // ApiKey The LLM service's API Key + ApiKey *string `json:"api_key,omitempty"` + + // HistoryCollection Typesense collection that stores the historical conversations + HistoryCollection string `json:"history_collection"` + + // Id An explicit id for the model, otherwise the API will return a response with an auto-generated conversation model id. + Id *string `json:"id,omitempty"` + + // MaxBytes The maximum number of bytes to send to the LLM in every API call. Consult the LLM's documentation on the number of bytes supported in the context window. + MaxBytes int `json:"max_bytes"` + + // ModelName Name of the LLM model offered by OpenAI, Cloudflare or vLLM + ModelName string `json:"model_name"` + + // SystemPrompt The system prompt that contains special instructions to the LLM + SystemPrompt *string `json:"system_prompt,omitempty"` + + // Ttl Time interval in seconds after which the messages would be deleted. Default: 86400 (24 hours) + Ttl *int `json:"ttl,omitempty"` + + // VllmUrl URL of vLLM service + VllmUrl *string `json:"vllm_url,omitempty"` +} + +// ConversationModelSchema defines model for ConversationModelSchema. 
+type ConversationModelSchema struct { + // AccountId LLM service's account ID (only applicable for Cloudflare) + AccountId *string `json:"account_id,omitempty"` + + // ApiKey The LLM service's API Key + ApiKey *string `json:"api_key,omitempty"` + + // HistoryCollection Typesense collection that stores the historical conversations + HistoryCollection string `json:"history_collection"` + + // Id An explicit id for the model, otherwise the API will return a response with an auto-generated conversation model id. + Id string `json:"id"` + + // MaxBytes The maximum number of bytes to send to the LLM in every API call. Consult the LLM's documentation on the number of bytes supported in the context window. + MaxBytes int `json:"max_bytes"` + + // ModelName Name of the LLM model offered by OpenAI, Cloudflare or vLLM + ModelName string `json:"model_name"` + + // SystemPrompt The system prompt that contains special instructions to the LLM + SystemPrompt *string `json:"system_prompt,omitempty"` + + // Ttl Time interval in seconds after which the messages would be deleted. Default: 86400 (24 hours) + Ttl *int `json:"ttl,omitempty"` + + // VllmUrl URL of vLLM service + VllmUrl *string `json:"vllm_url,omitempty"` +} + +// ConversationModelUpdateSchema defines model for ConversationModelUpdateSchema. +type ConversationModelUpdateSchema struct { + // AccountId LLM service's account ID (only applicable for Cloudflare) + AccountId *string `json:"account_id,omitempty"` + + // ApiKey The LLM service's API Key + ApiKey *string `json:"api_key,omitempty"` + + // HistoryCollection Typesense collection that stores the historical conversations + HistoryCollection *string `json:"history_collection,omitempty"` + + // Id An explicit id for the model, otherwise the API will return a response with an auto-generated conversation model id. + Id *string `json:"id,omitempty"` + + // MaxBytes The maximum number of bytes to send to the LLM in every API call. 
Consult the LLM's documentation on the number of bytes supported in the context window. + MaxBytes *int `json:"max_bytes,omitempty"` + + // ModelName Name of the LLM model offered by OpenAI, Cloudflare or vLLM + ModelName *string `json:"model_name,omitempty"` + + // SystemPrompt The system prompt that contains special instructions to the LLM + SystemPrompt *string `json:"system_prompt,omitempty"` + + // Ttl Time interval in seconds after which the messages would be deleted. Default: 86400 (24 hours) + Ttl *int `json:"ttl,omitempty"` + + // VllmUrl URL of vLLM service + VllmUrl *string `json:"vllm_url,omitempty"` +} + +// DirtyValues defines model for DirtyValues. +type DirtyValues string + +// DropTokensMode Dictates the direction in which the words in the query must be dropped when the original words in the query do not appear in any document. Values: right_to_left (default), left_to_right, both_sides:3 A note on both_sides:3 - for queries upto 3 tokens (words) in length, this mode will drop tokens from both sides and exhaustively rank all matching results. If query length is greater than 3 words, Typesense will just fallback to default behavior of right_to_left +type DropTokensMode string + // FacetCounts defines model for FacetCounts. 
type FacetCounts struct { Counts *[]struct { @@ -206,18 +374,33 @@ type Field struct { ClientSecret *string `json:"client_secret,omitempty"` ModelName string `json:"model_name"` ProjectId *string `json:"project_id,omitempty"` + Url *string `json:"url,omitempty"` } `json:"model_config"` } `json:"embed,omitempty"` - Facet *bool `json:"facet,omitempty"` - Index *bool `json:"index,omitempty"` - Infix *bool `json:"infix,omitempty"` - Locale *string `json:"locale,omitempty"` - Name string `json:"name"` - NumDim *int `json:"num_dim,omitempty"` - Optional *bool `json:"optional,omitempty"` + Facet *bool `json:"facet,omitempty"` + Index *bool `json:"index,omitempty"` + Infix *bool `json:"infix,omitempty"` + Locale *string `json:"locale,omitempty"` + Name string `json:"name"` + NumDim *int `json:"num_dim,omitempty"` + Optional *bool `json:"optional,omitempty"` + + // RangeIndex Enables an index optimized for range filtering on numerical fields (e.g. rating:>3.5). Default: false. + RangeIndex *bool `json:"range_index,omitempty"` + + // Reference Name of a field in another collection that should be linked to this collection so that it can be joined during query. Reference *string `json:"reference,omitempty"` Sort *bool `json:"sort,omitempty"` - Type string `json:"type"` + + // Stem Values are stemmed before indexing in-memory. Default: false. + Stem *bool `json:"stem,omitempty"` + + // Store When set to false, the field value will not be stored on disk. Default: true. + Store *bool `json:"store,omitempty"` + Type string `json:"type"` + + // VecDist The distance metric to be used for vector search. Default: `cosine`. You can also use `ip` for inner product. + VecDist *string `json:"vec_dist,omitempty"` } // HealthStatus defines model for HealthStatus. @@ -225,6 +408,9 @@ type HealthStatus struct { Ok bool `json:"ok"` } +// IndexAction defines model for IndexAction. +type IndexAction string + // MultiSearchCollectionParameters defines model for MultiSearchCollectionParameters. 
type MultiSearchCollectionParameters struct { // CacheTtl The duration (in seconds) that determines how long the search query is cached. This value can be set on a per-query basis. Default: 60. @@ -233,12 +419,30 @@ type MultiSearchCollectionParameters struct { // Collection The collection to search in. Collection string `json:"collection"` + // Conversation Enable conversational search. + Conversation *bool `json:"conversation,omitempty"` + + // ConversationId The Id of a previous conversation to continue, this tells Typesense to include prior context when communicating with the LLM. + ConversationId *string `json:"conversation_id,omitempty"` + + // ConversationModelId The Id of Conversation Model to be used. + ConversationModelId *string `json:"conversation_model_id,omitempty"` + + // DropTokensMode Dictates the direction in which the words in the query must be dropped when the original words in the query do not appear in any document. Values: right_to_left (default), left_to_right, both_sides:3 A note on both_sides:3 - for queries upto 3 tokens (words) in length, this mode will drop tokens from both sides and exhaustively rank all matching results. If query length is greater than 3 words, Typesense will just fallback to default behavior of right_to_left + DropTokensMode *DropTokensMode `json:"drop_tokens_mode,omitempty"` + // DropTokensThreshold If the number of results found for a specific query is less than this number, Typesense will attempt to drop the tokens in the query until enough results are found. Tokens that have the least individual hits are dropped first. Set to 0 to disable. 
Default: 10 DropTokensThreshold *int `json:"drop_tokens_threshold,omitempty"` // EnableOverrides If you have some overrides defined but want to disable all of them during query time, you can do that by setting this parameter to false EnableOverrides *bool `json:"enable_overrides,omitempty"` + // EnableSynonyms If you have some synonyms defined but want to disable all of them for a particular search query, set enable_synonyms to false. Default: true + EnableSynonyms *bool `json:"enable_synonyms,omitempty"` + + // EnableTyposForAlphaNumericalTokens Set this parameter to false to disable typos on alphanumerical query tokens. Default: true. + EnableTyposForAlphaNumericalTokens *bool `json:"enable_typos_for_alpha_numerical_tokens,omitempty"` + // EnableTyposForNumericalTokens Make Typesense disable typos for numerical tokens. EnableTyposForNumericalTokens *bool `json:"enable_typos_for_numerical_tokens,omitempty"` @@ -263,12 +467,18 @@ type MultiSearchCollectionParameters struct { // FilterBy Filter conditions for refining youropen api validator search results. Separate multiple conditions with &&. FilterBy *string `json:"filter_by,omitempty"` + // FilterCuratedHits Whether the filter_by condition of the search query should be applicable to curated results (override definitions, pinned hits, hidden hits, etc.). Default: false + FilterCuratedHits *bool `json:"filter_curated_hits,omitempty"` + // GroupBy You can aggregate search results into groups or buckets by specify one or more `group_by` fields. Separate multiple fields with a comma. To group on a particular field, it must be a faceted field. GroupBy *string `json:"group_by,omitempty"` // GroupLimit Maximum number of hits to be returned for every group. If the `group_limit` is set as `K` then only the top K hits in each group are returned in the response. 
Default: 3 GroupLimit *int `json:"group_limit,omitempty"` + // GroupMissingValues Setting this parameter to true will place all documents that have a null value in the group_by field, into a single group. Setting this parameter to false, will cause each document with a null value in the group_by field to not be grouped with other documents. Default: true + GroupMissingValues *bool `json:"group_missing_values,omitempty"` + // HiddenHits A list of records to unconditionally hide from search results. A list of `record_id`s to hide. Eg: to hide records with IDs 123 and 456, you'd specify `123,456`. // You could also use the Overrides feature to override search results based on rules. Overrides are applied first, followed by `pinned_hits` and finally `hidden_hits`. HiddenHits *string `json:"hidden_hits,omitempty"` @@ -377,6 +587,12 @@ type MultiSearchCollectionParameters struct { // Stopwords Name of the stopwords set to apply for this search, the keywords present in the set will be removed from the search query. Stopwords *string `json:"stopwords,omitempty"` + // SynonymNumTypos Allow synonym resolution on typo-corrected words in the query. Default: 0 + SynonymNumTypos *int `json:"synonym_num_typos,omitempty"` + + // SynonymPrefix Allow synonym resolution on word prefixes in the query. Default: false + SynonymPrefix *bool `json:"synonym_prefix,omitempty"` + // TextMatchType In a multi-field matching context, this parameter determines how the representative text match score of a record is calculated. Possible values are max_score (default) or max_weight. TextMatchType *string `json:"text_match_type,omitempty"` @@ -388,6 +604,9 @@ type MultiSearchCollectionParameters struct { // VectorQuery Vector query expression for fetching documents "closest" to a given query/document vector. VectorQuery *string `json:"vector_query,omitempty"` + + // VoiceQuery The base64 encoded audio file in 16 khz 16-bit WAV format. 
+ VoiceQuery *string `json:"voice_query,omitempty"` } // MultiSearchParameters Parameters for the multi search API. @@ -395,12 +614,30 @@ type MultiSearchParameters struct { // CacheTtl The duration (in seconds) that determines how long the search query is cached. This value can be set on a per-query basis. Default: 60. CacheTtl *int `json:"cache_ttl,omitempty"` + // Conversation Enable conversational search. + Conversation *bool `json:"conversation,omitempty"` + + // ConversationId The Id of a previous conversation to continue, this tells Typesense to include prior context when communicating with the LLM. + ConversationId *string `json:"conversation_id,omitempty"` + + // ConversationModelId The Id of Conversation Model to be used. + ConversationModelId *string `json:"conversation_model_id,omitempty"` + + // DropTokensMode Dictates the direction in which the words in the query must be dropped when the original words in the query do not appear in any document. Values: right_to_left (default), left_to_right, both_sides:3 A note on both_sides:3 - for queries upto 3 tokens (words) in length, this mode will drop tokens from both sides and exhaustively rank all matching results. If query length is greater than 3 words, Typesense will just fallback to default behavior of right_to_left + DropTokensMode *DropTokensMode `json:"drop_tokens_mode,omitempty"` + // DropTokensThreshold If the number of results found for a specific query is less than this number, Typesense will attempt to drop the tokens in the query until enough results are found. Tokens that have the least individual hits are dropped first. Set to 0 to disable. 
Default: 10 DropTokensThreshold *int `json:"drop_tokens_threshold,omitempty"` // EnableOverrides If you have some overrides defined but want to disable all of them during query time, you can do that by setting this parameter to false EnableOverrides *bool `json:"enable_overrides,omitempty"` + // EnableSynonyms If you have some synonyms defined but want to disable all of them for a particular search query, set enable_synonyms to false. Default: true + EnableSynonyms *bool `json:"enable_synonyms,omitempty"` + + // EnableTyposForAlphaNumericalTokens Set this parameter to false to disable typos on alphanumerical query tokens. Default: true. + EnableTyposForAlphaNumericalTokens *bool `json:"enable_typos_for_alpha_numerical_tokens,omitempty"` + // EnableTyposForNumericalTokens Make Typesense disable typos for numerical tokens. EnableTyposForNumericalTokens *bool `json:"enable_typos_for_numerical_tokens,omitempty"` @@ -425,12 +662,18 @@ type MultiSearchParameters struct { // FilterBy Filter conditions for refining youropen api validator search results. Separate multiple conditions with &&. FilterBy *string `json:"filter_by,omitempty"` + // FilterCuratedHits Whether the filter_by condition of the search query should be applicable to curated results (override definitions, pinned hits, hidden hits, etc.). Default: false + FilterCuratedHits *bool `json:"filter_curated_hits,omitempty"` + // GroupBy You can aggregate search results into groups or buckets by specify one or more `group_by` fields. Separate multiple fields with a comma. To group on a particular field, it must be a faceted field. GroupBy *string `json:"group_by,omitempty"` // GroupLimit Maximum number of hits to be returned for every group. If the `group_limit` is set as `K` then only the top K hits in each group are returned in the response. 
Default: 3 GroupLimit *int `json:"group_limit,omitempty"` + // GroupMissingValues Setting this parameter to true will place all documents that have a null value in the group_by field, into a single group. Setting this parameter to false, will cause each document with a null value in the group_by field to not be grouped with other documents. Default: true + GroupMissingValues *bool `json:"group_missing_values,omitempty"` + // HiddenHits A list of records to unconditionally hide from search results. A list of `record_id`s to hide. Eg: to hide records with IDs 123 and 456, you'd specify `123,456`. // You could also use the Overrides feature to override search results based on rules. Overrides are applied first, followed by `pinned_hits` and finally `hidden_hits`. HiddenHits *string `json:"hidden_hits,omitempty"` @@ -539,6 +782,12 @@ type MultiSearchParameters struct { // Stopwords Name of the stopwords set to apply for this search, the keywords present in the set will be removed from the search query. Stopwords *string `json:"stopwords,omitempty"` + // SynonymNumTypos Allow synonym resolution on typo-corrected words in the query. Default: 0 + SynonymNumTypos *int `json:"synonym_num_typos,omitempty"` + + // SynonymPrefix Allow synonym resolution on word prefixes in the query. Default: false + SynonymPrefix *bool `json:"synonym_prefix,omitempty"` + // TextMatchType In a multi-field matching context, this parameter determines how the representative text match score of a record is calculated. Possible values are max_score (default) or max_weight. TextMatchType *string `json:"text_match_type,omitempty"` @@ -550,11 +799,15 @@ type MultiSearchParameters struct { // VectorQuery Vector query expression for fetching documents "closest" to a given query/document vector. VectorQuery *string `json:"vector_query,omitempty"` + + // VoiceQuery The base64 encoded audio file in 16 khz 16-bit WAV format. 
+ VoiceQuery *string `json:"voice_query,omitempty"` } // MultiSearchResult defines model for MultiSearchResult. type MultiSearchResult struct { - Results []SearchResult `json:"results"` + Conversation *SearchResultConversation `json:"conversation,omitempty"` + Results []SearchResult `json:"results"` } // MultiSearchSearchesParameter defines model for MultiSearchSearchesParameter. @@ -625,19 +878,40 @@ type SearchHighlight struct { // SearchOverride defines model for SearchOverride. type SearchOverride struct { + // EffectiveFromTs A Unix timestamp that indicates the date/time from which the override will be active. You can use this to create override rules that start applying from a future point in time. + EffectiveFromTs *int `json:"effective_from_ts,omitempty"` + + // EffectiveToTs A Unix timestamp that indicates the date/time until which the override will be active. You can use this to create override rules that stop applying after a period of time. + EffectiveToTs *int `json:"effective_to_ts,omitempty"` + // Excludes List of document `id`s that should be excluded from the search results. Excludes *[]SearchOverrideExclude `json:"excludes,omitempty"` // FilterBy A filter by clause that is applied to any search query that matches the override rule. FilterBy *string `json:"filter_by,omitempty"` - Id *string `json:"id,omitempty"` + + // FilterCuratedHits When set to true, the filter conditions of the query is applied to the curated records as well. Default: false. + FilterCuratedHits *bool `json:"filter_curated_hits,omitempty"` + Id *string `json:"id,omitempty"` // Includes List of document `id`s that should be included in the search results with their corresponding `position`s. Includes *[]SearchOverrideInclude `json:"includes,omitempty"` + // Metadata Return a custom JSON object in the Search API response, when this rule is triggered. 
This can be used to display a pre-defined message (eg: a promotion banner) on the front-end when a particular rule is triggered. + Metadata *map[string]interface{} `json:"metadata,omitempty"` + // RemoveMatchedTokens Indicates whether search query tokens that exist in the override's rule should be removed from the search query. - RemoveMatchedTokens *bool `json:"remove_matched_tokens,omitempty"` - Rule SearchOverrideRule `json:"rule"` + RemoveMatchedTokens *bool `json:"remove_matched_tokens,omitempty"` + + // ReplaceQuery Replaces the current search query with this value, when the search query matches the override rule. + ReplaceQuery *string `json:"replace_query,omitempty"` + Rule SearchOverrideRule `json:"rule"` + + // SortBy A sort by clause that is applied to any search query that matches the override rule. + SortBy *string `json:"sort_by,omitempty"` + + // StopProcessing When set to true, override processing will stop at the first matching rule. When set to false override processing will continue and multiple override actions will be triggered in sequence. Overrides are processed in the lexical sort order of their id field. Default: true. + StopProcessing *bool `json:"stop_processing,omitempty"` } // SearchOverrideExclude defines model for SearchOverrideExclude. @@ -657,13 +931,16 @@ type SearchOverrideInclude struct { // SearchOverrideRule defines model for SearchOverrideRule. type SearchOverrideRule struct { + // FilterBy Indicates that the override should apply when the filter_by parameter in a search query exactly matches the string specified here (including backticks, spaces, brackets, etc). + FilterBy *string `json:"filter_by,omitempty"` + // Match Indicates whether the match on the query term should be `exact` or `contains`. If we want to match all queries that contained the word `apple`, we will use the `contains` match instead. 
- Match SearchOverrideRuleMatch `json:"match"` + Match *SearchOverrideRuleMatch `json:"match,omitempty"` // Query Indicates what search queries should be overridden - Query string `json:"query"` + Query *string `json:"query,omitempty"` - // Tags List of tags. + // Tags List of tag values to associate with this override rule. Tags *[]string `json:"tags,omitempty"` } @@ -672,18 +949,39 @@ type SearchOverrideRuleMatch string // SearchOverrideSchema defines model for SearchOverrideSchema. type SearchOverrideSchema struct { + // EffectiveFromTs A Unix timestamp that indicates the date/time from which the override will be active. You can use this to create override rules that start applying from a future point in time. + EffectiveFromTs *int `json:"effective_from_ts,omitempty"` + + // EffectiveToTs A Unix timestamp that indicates the date/time until which the override will be active. You can use this to create override rules that stop applying after a period of time. + EffectiveToTs *int `json:"effective_to_ts,omitempty"` + // Excludes List of document `id`s that should be excluded from the search results. Excludes *[]SearchOverrideExclude `json:"excludes,omitempty"` // FilterBy A filter by clause that is applied to any search query that matches the override rule. FilterBy *string `json:"filter_by,omitempty"` + // FilterCuratedHits When set to true, the filter conditions of the query is applied to the curated records as well. Default: false. + FilterCuratedHits *bool `json:"filter_curated_hits,omitempty"` + // Includes List of document `id`s that should be included in the search results with their corresponding `position`s. Includes *[]SearchOverrideInclude `json:"includes,omitempty"` + // Metadata Return a custom JSON object in the Search API response, when this rule is triggered. This can be used to display a pre-defined message (eg: a promotion banner) on the front-end when a particular rule is triggered. 
+ Metadata *map[string]interface{} `json:"metadata,omitempty"` + // RemoveMatchedTokens Indicates whether search query tokens that exist in the override's rule should be removed from the search query. - RemoveMatchedTokens *bool `json:"remove_matched_tokens,omitempty"` - Rule SearchOverrideRule `json:"rule"` + RemoveMatchedTokens *bool `json:"remove_matched_tokens,omitempty"` + + // ReplaceQuery Replaces the current search query with this value, when the search query matches the override rule. + ReplaceQuery *string `json:"replace_query,omitempty"` + Rule SearchOverrideRule `json:"rule"` + + // SortBy A sort by clause that is applied to any search query that matches the override rule. + SortBy *string `json:"sort_by,omitempty"` + + // StopProcessing When set to true, override processing will stop at the first matching rule. When set to false override processing will continue and multiple override actions will be triggered in sequence. Overrides are processed in the lexical sort order of their id field. Default: true. + StopProcessing *bool `json:"stop_processing,omitempty"` } // SearchOverridesResponse defines model for SearchOverridesResponse. @@ -696,6 +994,18 @@ type SearchParameters struct { // CacheTtl The duration (in seconds) that determines how long the search query is cached. This value can be set on a per-query basis. Default: 60. CacheTtl *int `json:"cache_ttl,omitempty"` + // Conversation Enable conversational search. + Conversation *bool `json:"conversation,omitempty"` + + // ConversationId The Id of a previous conversation to continue, this tells Typesense to include prior context when communicating with the LLM. + ConversationId *string `json:"conversation_id,omitempty"` + + // ConversationModelId The Id of Conversation Model to be used. 
+ ConversationModelId *string `json:"conversation_model_id,omitempty"` + + // DropTokensMode Dictates the direction in which the words in the query must be dropped when the original words in the query do not appear in any document. Values: right_to_left (default), left_to_right, both_sides:3 A note on both_sides:3 - for queries upto 3 tokens (words) in length, this mode will drop tokens from both sides and exhaustively rank all matching results. If query length is greater than 3 words, Typesense will just fallback to default behavior of right_to_left + DropTokensMode *DropTokensMode `json:"drop_tokens_mode,omitempty"` + // DropTokensThreshold If the number of results found for a specific query is less than this number, Typesense will attempt to drop the tokens in the query until enough results are found. Tokens that have the least individual hits are dropped first. Set to 0 to disable. Default: 10 DropTokensThreshold *int `json:"drop_tokens_threshold,omitempty"` @@ -705,6 +1015,12 @@ type SearchParameters struct { // EnableOverrides If you have some overrides defined but want to disable all of them during query time, you can do that by setting this parameter to false EnableOverrides *bool `json:"enable_overrides,omitempty"` + // EnableSynonyms If you have some synonyms defined but want to disable all of them for a particular search query, set enable_synonyms to false. Default: true + EnableSynonyms *bool `json:"enable_synonyms,omitempty"` + + // EnableTyposForAlphaNumericalTokens Set this parameter to false to disable typos on alphanumerical query tokens. Default: true. + EnableTyposForAlphaNumericalTokens *bool `json:"enable_typos_for_alpha_numerical_tokens,omitempty"` + // EnableTyposForNumericalTokens Make Typesense disable typos for numerical tokens. 
EnableTyposForNumericalTokens *bool `json:"enable_typos_for_numerical_tokens,omitempty"` @@ -729,12 +1045,18 @@ type SearchParameters struct { // FilterBy Filter conditions for refining youropen api validator search results. Separate multiple conditions with &&. FilterBy *string `json:"filter_by,omitempty"` + // FilterCuratedHits Whether the filter_by condition of the search query should be applicable to curated results (override definitions, pinned hits, hidden hits, etc.). Default: false + FilterCuratedHits *bool `json:"filter_curated_hits,omitempty"` + // GroupBy You can aggregate search results into groups or buckets by specify one or more `group_by` fields. Separate multiple fields with a comma. To group on a particular field, it must be a faceted field. GroupBy *string `json:"group_by,omitempty"` // GroupLimit Maximum number of hits to be returned for every group. If the `group_limit` is set as `K` then only the top K hits in each group are returned in the response. Default: 3 GroupLimit *int `json:"group_limit,omitempty"` + // GroupMissingValues Setting this parameter to true will place all documents that have a null value in the group_by field, into a single group. Setting this parameter to false, will cause each document with a null value in the group_by field to not be grouped with other documents. Default: true + GroupMissingValues *bool `json:"group_missing_values,omitempty"` + // HiddenHits A list of records to unconditionally hide from search results. A list of `record_id`s to hide. Eg: to hide records with IDs 123 and 456, you'd specify `123,456`. // You could also use the Overrides feature to override search results based on rules. Overrides are applied first, followed by `pinned_hits` and finally `hidden_hits`. HiddenHits *string `json:"hidden_hits,omitempty"` @@ -849,6 +1171,12 @@ type SearchParameters struct { // Stopwords Name of the stopwords set to apply for this search, the keywords present in the set will be removed from the search query. 
Stopwords *string `json:"stopwords,omitempty"` + // SynonymNumTypos Allow synonym resolution on typo-corrected words in the query. Default: 0 + SynonymNumTypos *int `json:"synonym_num_typos,omitempty"` + + // SynonymPrefix Allow synonym resolution on word prefixes in the query. Default: false + SynonymPrefix *bool `json:"synonym_prefix,omitempty"` + // TextMatchType In a multi-field matching context, this parameter determines how the representative text match score of a record is calculated. Possible values are max_score (default) or max_weight. TextMatchType *string `json:"text_match_type,omitempty"` @@ -860,14 +1188,19 @@ type SearchParameters struct { // VectorQuery Vector query expression for fetching documents "closest" to a given query/document vector. VectorQuery *string `json:"vector_query,omitempty"` + + // VoiceQuery The base64 encoded audio file in 16 khz 16-bit WAV format. + VoiceQuery *string `json:"voice_query,omitempty"` } // SearchResult defines model for SearchResult. type SearchResult struct { - FacetCounts *[]FacetCounts `json:"facet_counts,omitempty"` + Conversation *SearchResultConversation `json:"conversation,omitempty"` + FacetCounts *[]FacetCounts `json:"facet_counts,omitempty"` // Found The number of documents found Found *int `json:"found,omitempty"` + FoundDocs *int `json:"found_docs,omitempty"` GroupedHits *[]SearchGroupedHit `json:"grouped_hits,omitempty"` // Hits The documents that matched the search query @@ -882,6 +1215,9 @@ type SearchResult struct { CollectionName string `json:"collection_name"` PerPage int `json:"per_page"` Q string `json:"q"` + VoiceQuery *struct { + TranscribedQuery *string `json:"transcribed_query,omitempty"` + } `json:"voice_query,omitempty"` } `json:"request_params,omitempty"` // SearchCutoff Whether the search was cut off @@ -891,6 +1227,14 @@ type SearchResult struct { SearchTimeMs *int `json:"search_time_ms,omitempty"` } +// SearchResultConversation defines model for SearchResultConversation. 
+type SearchResultConversation struct { + Answer string `json:"answer"` + ConversationHistory []map[string]interface{} `json:"conversation_history"` + ConversationId string `json:"conversation_id"` + Query string `json:"query"` +} + // SearchResultHit defines model for SearchResultHit. type SearchResultHit struct { // Document Can be any key-value pair @@ -914,18 +1258,30 @@ type SearchResultHit struct { type SearchSynonym struct { Id *string `json:"id,omitempty"` + // Locale Locale for the synonym, leave blank to use the standard tokenizer. + Locale *string `json:"locale,omitempty"` + // Root For 1-way synonyms, indicates the root word that words in the `synonyms` parameter map to. Root *string `json:"root,omitempty"` + // SymbolsToIndex By default, special characters are dropped from synonyms. Use this attribute to specify which special characters should be indexed as is. + SymbolsToIndex *[]string `json:"symbols_to_index,omitempty"` + // Synonyms Array of words that should be considered as synonyms. Synonyms []string `json:"synonyms"` } // SearchSynonymSchema defines model for SearchSynonymSchema. type SearchSynonymSchema struct { + // Locale Locale for the synonym, leave blank to use the standard tokenizer. + Locale *string `json:"locale,omitempty"` + // Root For 1-way synonyms, indicates the root word that words in the `synonyms` parameter map to. Root *string `json:"root,omitempty"` + // SymbolsToIndex By default, special characters are dropped from synonyms. Use this attribute to specify which special characters should be indexed as is. + SymbolsToIndex *[]string `json:"symbols_to_index,omitempty"` + // Synonyms Array of words that should be considered as synonyms. 
Synonyms []string `json:"synonyms"` } @@ -963,10 +1319,16 @@ type SuccessStatus struct { Success bool `json:"success"` } +// VoiceQueryModelCollectionConfig Configuration for the voice query model +type VoiceQueryModelCollectionConfig struct { + ModelName *string `json:"model_name,omitempty"` +} + // DeleteDocumentsParams defines parameters for DeleteDocuments. type DeleteDocumentsParams struct { - BatchSize *int `form:"batch_size,omitempty" json:"batch_size,omitempty"` - FilterBy *string `form:"filter_by,omitempty" json:"filter_by,omitempty"` + BatchSize *int `form:"batch_size,omitempty" json:"batch_size,omitempty"` + FilterBy *string `form:"filter_by,omitempty" json:"filter_by,omitempty"` + IgnoreNotFound *bool `form:"ignore_not_found,omitempty" json:"ignore_not_found,omitempty"` } // UpdateDocumentsJSONBody defines parameters for UpdateDocuments. @@ -983,11 +1345,11 @@ type IndexDocumentJSONBody = interface{} // IndexDocumentParams defines parameters for IndexDocument. type IndexDocumentParams struct { // Action Additional action to perform - Action *IndexDocumentParamsAction `form:"action,omitempty" json:"action,omitempty"` -} + Action *IndexAction `form:"action,omitempty" json:"action,omitempty"` -// IndexDocumentParamsAction defines parameters for IndexDocument. -type IndexDocumentParamsAction string + // DirtyValues Dealing with Dirty Data + DirtyValues *DirtyValues `form:"dirty_values,omitempty" json:"dirty_values,omitempty"` +} // ExportDocumentsParams defines parameters for ExportDocuments. type ExportDocumentsParams struct { @@ -998,134 +1360,161 @@ type ExportDocumentsParams struct { // ImportDocumentsParams defines parameters for ImportDocuments. 
type ImportDocumentsParams struct { - Action *string `form:"action,omitempty" json:"action,omitempty"` - BatchSize *int `form:"batch_size,omitempty" json:"batch_size,omitempty"` - DirtyValues *ImportDocumentsParamsDirtyValues `form:"dirty_values,omitempty" json:"dirty_values,omitempty"` - RemoteEmbeddingBatchSize *int `form:"remote_embedding_batch_size,omitempty" json:"remote_embedding_batch_size,omitempty"` + Action *IndexAction `form:"action,omitempty" json:"action,omitempty"` + DirtyValues *DirtyValues `form:"dirty_values,omitempty" json:"dirty_values,omitempty"` + BatchSize *int `form:"batch_size,omitempty" json:"batch_size,omitempty"` + RemoteEmbeddingBatchSize *int `form:"remote_embedding_batch_size,omitempty" json:"remote_embedding_batch_size,omitempty"` + ReturnDoc *bool `form:"return_doc,omitempty" json:"return_doc,omitempty"` + ReturnId *bool `form:"return_id,omitempty" json:"return_id,omitempty"` } -// ImportDocumentsParamsDirtyValues defines parameters for ImportDocuments. -type ImportDocumentsParamsDirtyValues string - // SearchCollectionParams defines parameters for SearchCollection. 
type SearchCollectionParams struct { - CacheTtl *int `form:"cache_ttl,omitempty" json:"cache_ttl,omitempty"` - DropTokensThreshold *int `form:"drop_tokens_threshold,omitempty" json:"drop_tokens_threshold,omitempty"` - EnableHighlightV1 *bool `form:"enable_highlight_v1,omitempty" json:"enable_highlight_v1,omitempty"` - EnableOverrides *bool `form:"enable_overrides,omitempty" json:"enable_overrides,omitempty"` - EnableTyposForNumericalTokens *bool `form:"enable_typos_for_numerical_tokens,omitempty" json:"enable_typos_for_numerical_tokens,omitempty"` - ExcludeFields *string `form:"exclude_fields,omitempty" json:"exclude_fields,omitempty"` - ExhaustiveSearch *bool `form:"exhaustive_search,omitempty" json:"exhaustive_search,omitempty"` - FacetBy *string `form:"facet_by,omitempty" json:"facet_by,omitempty"` - FacetQuery *string `form:"facet_query,omitempty" json:"facet_query,omitempty"` - FacetReturnParent *string `form:"facet_return_parent,omitempty" json:"facet_return_parent,omitempty"` - FacetStrategy *string `form:"facet_strategy,omitempty" json:"facet_strategy,omitempty"` - FilterBy *string `form:"filter_by,omitempty" json:"filter_by,omitempty"` - GroupBy *string `form:"group_by,omitempty" json:"group_by,omitempty"` - GroupLimit *int `form:"group_limit,omitempty" json:"group_limit,omitempty"` - HiddenHits *string `form:"hidden_hits,omitempty" json:"hidden_hits,omitempty"` - HighlightAffixNumTokens *int `form:"highlight_affix_num_tokens,omitempty" json:"highlight_affix_num_tokens,omitempty"` - HighlightEndTag *string `form:"highlight_end_tag,omitempty" json:"highlight_end_tag,omitempty"` - HighlightFields *string `form:"highlight_fields,omitempty" json:"highlight_fields,omitempty"` - HighlightFullFields *string `form:"highlight_full_fields,omitempty" json:"highlight_full_fields,omitempty"` - HighlightStartTag *string `form:"highlight_start_tag,omitempty" json:"highlight_start_tag,omitempty"` - IncludeFields *string `form:"include_fields,omitempty" 
json:"include_fields,omitempty"` - Infix *string `form:"infix,omitempty" json:"infix,omitempty"` - Limit *int `form:"limit,omitempty" json:"limit,omitempty"` - MaxCandidates *int `form:"max_candidates,omitempty" json:"max_candidates,omitempty"` - MaxExtraPrefix *int `form:"max_extra_prefix,omitempty" json:"max_extra_prefix,omitempty"` - MaxExtraSuffix *int `form:"max_extra_suffix,omitempty" json:"max_extra_suffix,omitempty"` - MaxFacetValues *int `form:"max_facet_values,omitempty" json:"max_facet_values,omitempty"` - MinLen1typo *int `form:"min_len_1typo,omitempty" json:"min_len_1typo,omitempty"` - MinLen2typo *int `form:"min_len_2typo,omitempty" json:"min_len_2typo,omitempty"` - NumTypos *string `form:"num_typos,omitempty" json:"num_typos,omitempty"` - Offset *int `form:"offset,omitempty" json:"offset,omitempty"` - OverrideTags *string `form:"override_tags,omitempty" json:"override_tags,omitempty"` - Page *int `form:"page,omitempty" json:"page,omitempty"` - PerPage *int `form:"per_page,omitempty" json:"per_page,omitempty"` - PinnedHits *string `form:"pinned_hits,omitempty" json:"pinned_hits,omitempty"` - PreSegmentedQuery *bool `form:"pre_segmented_query,omitempty" json:"pre_segmented_query,omitempty"` - Prefix *string `form:"prefix,omitempty" json:"prefix,omitempty"` - Preset *string `form:"preset,omitempty" json:"preset,omitempty"` - PrioritizeExactMatch *bool `form:"prioritize_exact_match,omitempty" json:"prioritize_exact_match,omitempty"` - PrioritizeNumMatchingFields *bool `form:"prioritize_num_matching_fields,omitempty" json:"prioritize_num_matching_fields,omitempty"` - PrioritizeTokenPosition *bool `form:"prioritize_token_position,omitempty" json:"prioritize_token_position,omitempty"` - Q *string `form:"q,omitempty" json:"q,omitempty"` - QueryBy *string `form:"query_by,omitempty" json:"query_by,omitempty"` - QueryByWeights *string `form:"query_by_weights,omitempty" json:"query_by_weights,omitempty"` - RemoteEmbeddingNumTries *int 
`form:"remote_embedding_num_tries,omitempty" json:"remote_embedding_num_tries,omitempty"` - RemoteEmbeddingTimeoutMs *int `form:"remote_embedding_timeout_ms,omitempty" json:"remote_embedding_timeout_ms,omitempty"` - SearchCutoffMs *int `form:"search_cutoff_ms,omitempty" json:"search_cutoff_ms,omitempty"` - SnippetThreshold *int `form:"snippet_threshold,omitempty" json:"snippet_threshold,omitempty"` - SortBy *string `form:"sort_by,omitempty" json:"sort_by,omitempty"` - SplitJoinTokens *string `form:"split_join_tokens,omitempty" json:"split_join_tokens,omitempty"` - Stopwords *string `form:"stopwords,omitempty" json:"stopwords,omitempty"` - TextMatchType *string `form:"text_match_type,omitempty" json:"text_match_type,omitempty"` - TypoTokensThreshold *int `form:"typo_tokens_threshold,omitempty" json:"typo_tokens_threshold,omitempty"` - UseCache *bool `form:"use_cache,omitempty" json:"use_cache,omitempty"` - VectorQuery *string `form:"vector_query,omitempty" json:"vector_query,omitempty"` + CacheTtl *int `form:"cache_ttl,omitempty" json:"cache_ttl,omitempty"` + Conversation *bool `form:"conversation,omitempty" json:"conversation,omitempty"` + ConversationId *string `form:"conversation_id,omitempty" json:"conversation_id,omitempty"` + ConversationModelId *string `form:"conversation_model_id,omitempty" json:"conversation_model_id,omitempty"` + DropTokensMode *DropTokensMode `form:"drop_tokens_mode,omitempty" json:"drop_tokens_mode,omitempty"` + DropTokensThreshold *int `form:"drop_tokens_threshold,omitempty" json:"drop_tokens_threshold,omitempty"` + EnableHighlightV1 *bool `form:"enable_highlight_v1,omitempty" json:"enable_highlight_v1,omitempty"` + EnableOverrides *bool `form:"enable_overrides,omitempty" json:"enable_overrides,omitempty"` + EnableSynonyms *bool `form:"enable_synonyms,omitempty" json:"enable_synonyms,omitempty"` + EnableTyposForAlphaNumericalTokens *bool `form:"enable_typos_for_alpha_numerical_tokens,omitempty" 
json:"enable_typos_for_alpha_numerical_tokens,omitempty"` + EnableTyposForNumericalTokens *bool `form:"enable_typos_for_numerical_tokens,omitempty" json:"enable_typos_for_numerical_tokens,omitempty"` + ExcludeFields *string `form:"exclude_fields,omitempty" json:"exclude_fields,omitempty"` + ExhaustiveSearch *bool `form:"exhaustive_search,omitempty" json:"exhaustive_search,omitempty"` + FacetBy *string `form:"facet_by,omitempty" json:"facet_by,omitempty"` + FacetQuery *string `form:"facet_query,omitempty" json:"facet_query,omitempty"` + FacetReturnParent *string `form:"facet_return_parent,omitempty" json:"facet_return_parent,omitempty"` + FacetStrategy *string `form:"facet_strategy,omitempty" json:"facet_strategy,omitempty"` + FilterBy *string `form:"filter_by,omitempty" json:"filter_by,omitempty"` + FilterCuratedHits *bool `form:"filter_curated_hits,omitempty" json:"filter_curated_hits,omitempty"` + GroupBy *string `form:"group_by,omitempty" json:"group_by,omitempty"` + GroupLimit *int `form:"group_limit,omitempty" json:"group_limit,omitempty"` + GroupMissingValues *bool `form:"group_missing_values,omitempty" json:"group_missing_values,omitempty"` + HiddenHits *string `form:"hidden_hits,omitempty" json:"hidden_hits,omitempty"` + HighlightAffixNumTokens *int `form:"highlight_affix_num_tokens,omitempty" json:"highlight_affix_num_tokens,omitempty"` + HighlightEndTag *string `form:"highlight_end_tag,omitempty" json:"highlight_end_tag,omitempty"` + HighlightFields *string `form:"highlight_fields,omitempty" json:"highlight_fields,omitempty"` + HighlightFullFields *string `form:"highlight_full_fields,omitempty" json:"highlight_full_fields,omitempty"` + HighlightStartTag *string `form:"highlight_start_tag,omitempty" json:"highlight_start_tag,omitempty"` + IncludeFields *string `form:"include_fields,omitempty" json:"include_fields,omitempty"` + Infix *string `form:"infix,omitempty" json:"infix,omitempty"` + Limit *int `form:"limit,omitempty" json:"limit,omitempty"` + 
MaxCandidates *int `form:"max_candidates,omitempty" json:"max_candidates,omitempty"` + MaxExtraPrefix *int `form:"max_extra_prefix,omitempty" json:"max_extra_prefix,omitempty"` + MaxExtraSuffix *int `form:"max_extra_suffix,omitempty" json:"max_extra_suffix,omitempty"` + MaxFacetValues *int `form:"max_facet_values,omitempty" json:"max_facet_values,omitempty"` + MinLen1typo *int `form:"min_len_1typo,omitempty" json:"min_len_1typo,omitempty"` + MinLen2typo *int `form:"min_len_2typo,omitempty" json:"min_len_2typo,omitempty"` + NumTypos *string `form:"num_typos,omitempty" json:"num_typos,omitempty"` + Offset *int `form:"offset,omitempty" json:"offset,omitempty"` + OverrideTags *string `form:"override_tags,omitempty" json:"override_tags,omitempty"` + Page *int `form:"page,omitempty" json:"page,omitempty"` + PerPage *int `form:"per_page,omitempty" json:"per_page,omitempty"` + PinnedHits *string `form:"pinned_hits,omitempty" json:"pinned_hits,omitempty"` + PreSegmentedQuery *bool `form:"pre_segmented_query,omitempty" json:"pre_segmented_query,omitempty"` + Prefix *string `form:"prefix,omitempty" json:"prefix,omitempty"` + Preset *string `form:"preset,omitempty" json:"preset,omitempty"` + PrioritizeExactMatch *bool `form:"prioritize_exact_match,omitempty" json:"prioritize_exact_match,omitempty"` + PrioritizeNumMatchingFields *bool `form:"prioritize_num_matching_fields,omitempty" json:"prioritize_num_matching_fields,omitempty"` + PrioritizeTokenPosition *bool `form:"prioritize_token_position,omitempty" json:"prioritize_token_position,omitempty"` + Q *string `form:"q,omitempty" json:"q,omitempty"` + QueryBy *string `form:"query_by,omitempty" json:"query_by,omitempty"` + QueryByWeights *string `form:"query_by_weights,omitempty" json:"query_by_weights,omitempty"` + RemoteEmbeddingNumTries *int `form:"remote_embedding_num_tries,omitempty" json:"remote_embedding_num_tries,omitempty"` + RemoteEmbeddingTimeoutMs *int `form:"remote_embedding_timeout_ms,omitempty" 
json:"remote_embedding_timeout_ms,omitempty"` + SearchCutoffMs *int `form:"search_cutoff_ms,omitempty" json:"search_cutoff_ms,omitempty"` + SnippetThreshold *int `form:"snippet_threshold,omitempty" json:"snippet_threshold,omitempty"` + SortBy *string `form:"sort_by,omitempty" json:"sort_by,omitempty"` + SplitJoinTokens *string `form:"split_join_tokens,omitempty" json:"split_join_tokens,omitempty"` + Stopwords *string `form:"stopwords,omitempty" json:"stopwords,omitempty"` + SynonymNumTypos *int `form:"synonym_num_typos,omitempty" json:"synonym_num_typos,omitempty"` + SynonymPrefix *bool `form:"synonym_prefix,omitempty" json:"synonym_prefix,omitempty"` + TextMatchType *string `form:"text_match_type,omitempty" json:"text_match_type,omitempty"` + TypoTokensThreshold *int `form:"typo_tokens_threshold,omitempty" json:"typo_tokens_threshold,omitempty"` + UseCache *bool `form:"use_cache,omitempty" json:"use_cache,omitempty"` + VectorQuery *string `form:"vector_query,omitempty" json:"vector_query,omitempty"` + VoiceQuery *string `form:"voice_query,omitempty" json:"voice_query,omitempty"` } // UpdateDocumentJSONBody defines parameters for UpdateDocument. type UpdateDocumentJSONBody = interface{} +// UpdateDocumentParams defines parameters for UpdateDocument. +type UpdateDocumentParams struct { + // DirtyValues Dealing with Dirty Data + DirtyValues *DirtyValues `form:"dirty_values,omitempty" json:"dirty_values,omitempty"` +} + // MultiSearchParams defines parameters for MultiSearch. 
type MultiSearchParams struct { - CacheTtl *int `form:"cache_ttl,omitempty" json:"cache_ttl,omitempty"` - DropTokensThreshold *int `form:"drop_tokens_threshold,omitempty" json:"drop_tokens_threshold,omitempty"` - EnableHighlightV1 *bool `form:"enable_highlight_v1,omitempty" json:"enable_highlight_v1,omitempty"` - EnableOverrides *bool `form:"enable_overrides,omitempty" json:"enable_overrides,omitempty"` - EnableTyposForNumericalTokens *bool `form:"enable_typos_for_numerical_tokens,omitempty" json:"enable_typos_for_numerical_tokens,omitempty"` - ExcludeFields *string `form:"exclude_fields,omitempty" json:"exclude_fields,omitempty"` - ExhaustiveSearch *bool `form:"exhaustive_search,omitempty" json:"exhaustive_search,omitempty"` - FacetBy *string `form:"facet_by,omitempty" json:"facet_by,omitempty"` - FacetQuery *string `form:"facet_query,omitempty" json:"facet_query,omitempty"` - FacetReturnParent *string `form:"facet_return_parent,omitempty" json:"facet_return_parent,omitempty"` - FacetStrategy *string `form:"facet_strategy,omitempty" json:"facet_strategy,omitempty"` - FilterBy *string `form:"filter_by,omitempty" json:"filter_by,omitempty"` - GroupBy *string `form:"group_by,omitempty" json:"group_by,omitempty"` - GroupLimit *int `form:"group_limit,omitempty" json:"group_limit,omitempty"` - HiddenHits *string `form:"hidden_hits,omitempty" json:"hidden_hits,omitempty"` - HighlightAffixNumTokens *int `form:"highlight_affix_num_tokens,omitempty" json:"highlight_affix_num_tokens,omitempty"` - HighlightEndTag *string `form:"highlight_end_tag,omitempty" json:"highlight_end_tag,omitempty"` - HighlightFields *string `form:"highlight_fields,omitempty" json:"highlight_fields,omitempty"` - HighlightFullFields *string `form:"highlight_full_fields,omitempty" json:"highlight_full_fields,omitempty"` - HighlightStartTag *string `form:"highlight_start_tag,omitempty" json:"highlight_start_tag,omitempty"` - IncludeFields *string `form:"include_fields,omitempty" 
json:"include_fields,omitempty"` - Infix *string `form:"infix,omitempty" json:"infix,omitempty"` - Limit *int `form:"limit,omitempty" json:"limit,omitempty"` - MaxCandidates *int `form:"max_candidates,omitempty" json:"max_candidates,omitempty"` - MaxExtraPrefix *int `form:"max_extra_prefix,omitempty" json:"max_extra_prefix,omitempty"` - MaxExtraSuffix *int `form:"max_extra_suffix,omitempty" json:"max_extra_suffix,omitempty"` - MaxFacetValues *int `form:"max_facet_values,omitempty" json:"max_facet_values,omitempty"` - MinLen1typo *int `form:"min_len_1typo,omitempty" json:"min_len_1typo,omitempty"` - MinLen2typo *int `form:"min_len_2typo,omitempty" json:"min_len_2typo,omitempty"` - NumTypos *string `form:"num_typos,omitempty" json:"num_typos,omitempty"` - Offset *int `form:"offset,omitempty" json:"offset,omitempty"` - OverrideTags *string `form:"override_tags,omitempty" json:"override_tags,omitempty"` - Page *int `form:"page,omitempty" json:"page,omitempty"` - PerPage *int `form:"per_page,omitempty" json:"per_page,omitempty"` - PinnedHits *string `form:"pinned_hits,omitempty" json:"pinned_hits,omitempty"` - PreSegmentedQuery *bool `form:"pre_segmented_query,omitempty" json:"pre_segmented_query,omitempty"` - Prefix *string `form:"prefix,omitempty" json:"prefix,omitempty"` - Preset *string `form:"preset,omitempty" json:"preset,omitempty"` - PrioritizeExactMatch *bool `form:"prioritize_exact_match,omitempty" json:"prioritize_exact_match,omitempty"` - PrioritizeNumMatchingFields *bool `form:"prioritize_num_matching_fields,omitempty" json:"prioritize_num_matching_fields,omitempty"` - PrioritizeTokenPosition *bool `form:"prioritize_token_position,omitempty" json:"prioritize_token_position,omitempty"` - Q *string `form:"q,omitempty" json:"q,omitempty"` - QueryBy *string `form:"query_by,omitempty" json:"query_by,omitempty"` - QueryByWeights *string `form:"query_by_weights,omitempty" json:"query_by_weights,omitempty"` - RemoteEmbeddingNumTries *int 
`form:"remote_embedding_num_tries,omitempty" json:"remote_embedding_num_tries,omitempty"` - RemoteEmbeddingTimeoutMs *int `form:"remote_embedding_timeout_ms,omitempty" json:"remote_embedding_timeout_ms,omitempty"` - SearchCutoffMs *int `form:"search_cutoff_ms,omitempty" json:"search_cutoff_ms,omitempty"` - SnippetThreshold *int `form:"snippet_threshold,omitempty" json:"snippet_threshold,omitempty"` - SortBy *string `form:"sort_by,omitempty" json:"sort_by,omitempty"` - SplitJoinTokens *string `form:"split_join_tokens,omitempty" json:"split_join_tokens,omitempty"` - Stopwords *string `form:"stopwords,omitempty" json:"stopwords,omitempty"` - TextMatchType *string `form:"text_match_type,omitempty" json:"text_match_type,omitempty"` - TypoTokensThreshold *int `form:"typo_tokens_threshold,omitempty" json:"typo_tokens_threshold,omitempty"` - UseCache *bool `form:"use_cache,omitempty" json:"use_cache,omitempty"` - VectorQuery *string `form:"vector_query,omitempty" json:"vector_query,omitempty"` + CacheTtl *int `form:"cache_ttl,omitempty" json:"cache_ttl,omitempty"` + Conversation *bool `form:"conversation,omitempty" json:"conversation,omitempty"` + ConversationId *string `form:"conversation_id,omitempty" json:"conversation_id,omitempty"` + ConversationModelId *string `form:"conversation_model_id,omitempty" json:"conversation_model_id,omitempty"` + DropTokensMode *DropTokensMode `form:"drop_tokens_mode,omitempty" json:"drop_tokens_mode,omitempty"` + DropTokensThreshold *int `form:"drop_tokens_threshold,omitempty" json:"drop_tokens_threshold,omitempty"` + EnableHighlightV1 *bool `form:"enable_highlight_v1,omitempty" json:"enable_highlight_v1,omitempty"` + EnableOverrides *bool `form:"enable_overrides,omitempty" json:"enable_overrides,omitempty"` + EnableSynonyms *bool `form:"enable_synonyms,omitempty" json:"enable_synonyms,omitempty"` + EnableTyposForAlphaNumericalTokens *bool `form:"enable_typos_for_alpha_numerical_tokens,omitempty" 
json:"enable_typos_for_alpha_numerical_tokens,omitempty"` + EnableTyposForNumericalTokens *bool `form:"enable_typos_for_numerical_tokens,omitempty" json:"enable_typos_for_numerical_tokens,omitempty"` + ExcludeFields *string `form:"exclude_fields,omitempty" json:"exclude_fields,omitempty"` + ExhaustiveSearch *bool `form:"exhaustive_search,omitempty" json:"exhaustive_search,omitempty"` + FacetBy *string `form:"facet_by,omitempty" json:"facet_by,omitempty"` + FacetQuery *string `form:"facet_query,omitempty" json:"facet_query,omitempty"` + FacetReturnParent *string `form:"facet_return_parent,omitempty" json:"facet_return_parent,omitempty"` + FacetStrategy *string `form:"facet_strategy,omitempty" json:"facet_strategy,omitempty"` + FilterBy *string `form:"filter_by,omitempty" json:"filter_by,omitempty"` + FilterCuratedHits *bool `form:"filter_curated_hits,omitempty" json:"filter_curated_hits,omitempty"` + GroupBy *string `form:"group_by,omitempty" json:"group_by,omitempty"` + GroupLimit *int `form:"group_limit,omitempty" json:"group_limit,omitempty"` + GroupMissingValues *bool `form:"group_missing_values,omitempty" json:"group_missing_values,omitempty"` + HiddenHits *string `form:"hidden_hits,omitempty" json:"hidden_hits,omitempty"` + HighlightAffixNumTokens *int `form:"highlight_affix_num_tokens,omitempty" json:"highlight_affix_num_tokens,omitempty"` + HighlightEndTag *string `form:"highlight_end_tag,omitempty" json:"highlight_end_tag,omitempty"` + HighlightFields *string `form:"highlight_fields,omitempty" json:"highlight_fields,omitempty"` + HighlightFullFields *string `form:"highlight_full_fields,omitempty" json:"highlight_full_fields,omitempty"` + HighlightStartTag *string `form:"highlight_start_tag,omitempty" json:"highlight_start_tag,omitempty"` + IncludeFields *string `form:"include_fields,omitempty" json:"include_fields,omitempty"` + Infix *string `form:"infix,omitempty" json:"infix,omitempty"` + Limit *int `form:"limit,omitempty" json:"limit,omitempty"` + 
MaxCandidates *int `form:"max_candidates,omitempty" json:"max_candidates,omitempty"` + MaxExtraPrefix *int `form:"max_extra_prefix,omitempty" json:"max_extra_prefix,omitempty"` + MaxExtraSuffix *int `form:"max_extra_suffix,omitempty" json:"max_extra_suffix,omitempty"` + MaxFacetValues *int `form:"max_facet_values,omitempty" json:"max_facet_values,omitempty"` + MinLen1typo *int `form:"min_len_1typo,omitempty" json:"min_len_1typo,omitempty"` + MinLen2typo *int `form:"min_len_2typo,omitempty" json:"min_len_2typo,omitempty"` + NumTypos *string `form:"num_typos,omitempty" json:"num_typos,omitempty"` + Offset *int `form:"offset,omitempty" json:"offset,omitempty"` + OverrideTags *string `form:"override_tags,omitempty" json:"override_tags,omitempty"` + Page *int `form:"page,omitempty" json:"page,omitempty"` + PerPage *int `form:"per_page,omitempty" json:"per_page,omitempty"` + PinnedHits *string `form:"pinned_hits,omitempty" json:"pinned_hits,omitempty"` + PreSegmentedQuery *bool `form:"pre_segmented_query,omitempty" json:"pre_segmented_query,omitempty"` + Prefix *string `form:"prefix,omitempty" json:"prefix,omitempty"` + Preset *string `form:"preset,omitempty" json:"preset,omitempty"` + PrioritizeExactMatch *bool `form:"prioritize_exact_match,omitempty" json:"prioritize_exact_match,omitempty"` + PrioritizeNumMatchingFields *bool `form:"prioritize_num_matching_fields,omitempty" json:"prioritize_num_matching_fields,omitempty"` + PrioritizeTokenPosition *bool `form:"prioritize_token_position,omitempty" json:"prioritize_token_position,omitempty"` + Q *string `form:"q,omitempty" json:"q,omitempty"` + QueryBy *string `form:"query_by,omitempty" json:"query_by,omitempty"` + QueryByWeights *string `form:"query_by_weights,omitempty" json:"query_by_weights,omitempty"` + RemoteEmbeddingNumTries *int `form:"remote_embedding_num_tries,omitempty" json:"remote_embedding_num_tries,omitempty"` + RemoteEmbeddingTimeoutMs *int `form:"remote_embedding_timeout_ms,omitempty" 
json:"remote_embedding_timeout_ms,omitempty"` + SearchCutoffMs *int `form:"search_cutoff_ms,omitempty" json:"search_cutoff_ms,omitempty"` + SnippetThreshold *int `form:"snippet_threshold,omitempty" json:"snippet_threshold,omitempty"` + SortBy *string `form:"sort_by,omitempty" json:"sort_by,omitempty"` + SplitJoinTokens *string `form:"split_join_tokens,omitempty" json:"split_join_tokens,omitempty"` + Stopwords *string `form:"stopwords,omitempty" json:"stopwords,omitempty"` + SynonymNumTypos *int `form:"synonym_num_typos,omitempty" json:"synonym_num_typos,omitempty"` + SynonymPrefix *bool `form:"synonym_prefix,omitempty" json:"synonym_prefix,omitempty"` + TextMatchType *string `form:"text_match_type,omitempty" json:"text_match_type,omitempty"` + TypoTokensThreshold *int `form:"typo_tokens_threshold,omitempty" json:"typo_tokens_threshold,omitempty"` + UseCache *bool `form:"use_cache,omitempty" json:"use_cache,omitempty"` + VectorQuery *string `form:"vector_query,omitempty" json:"vector_query,omitempty"` + VoiceQuery *string `form:"voice_query,omitempty" json:"voice_query,omitempty"` } // TakeSnapshotParams defines parameters for TakeSnapshot. @@ -1137,11 +1526,14 @@ type TakeSnapshotParams struct { // UpsertAliasJSONRequestBody defines body for UpsertAlias for application/json ContentType. type UpsertAliasJSONRequestBody = CollectionAliasSchema +// CreateAnalyticsEventJSONRequestBody defines body for CreateAnalyticsEvent for application/json ContentType. +type CreateAnalyticsEventJSONRequestBody = AnalyticsEventCreateSchema + // CreateAnalyticsRuleJSONRequestBody defines body for CreateAnalyticsRule for application/json ContentType. type CreateAnalyticsRuleJSONRequestBody = AnalyticsRuleSchema // UpsertAnalyticsRuleJSONRequestBody defines body for UpsertAnalyticsRule for application/json ContentType. 
-type UpsertAnalyticsRuleJSONRequestBody = AnalyticsRuleSchema +type UpsertAnalyticsRuleJSONRequestBody = AnalyticsRuleUpsertSchema // CreateCollectionJSONRequestBody defines body for CreateCollection for application/json ContentType. type CreateCollectionJSONRequestBody = CollectionSchema @@ -1164,6 +1556,12 @@ type UpsertSearchOverrideJSONRequestBody = SearchOverrideSchema // UpsertSearchSynonymJSONRequestBody defines body for UpsertSearchSynonym for application/json ContentType. type UpsertSearchSynonymJSONRequestBody = SearchSynonymSchema +// CreateConversationModelJSONRequestBody defines body for CreateConversationModel for application/json ContentType. +type CreateConversationModelJSONRequestBody = ConversationModelCreateSchema + +// UpdateConversationModelJSONRequestBody defines body for UpdateConversationModel for application/json ContentType. +type UpdateConversationModelJSONRequestBody = ConversationModelUpdateSchema + // CreateKeyJSONRequestBody defines body for CreateKey for application/json ContentType. 
type CreateKeyJSONRequestBody = ApiKeySchema diff --git a/typesense/client.go b/typesense/client.go index fe714c1..9b35eff 100644 --- a/typesense/client.go +++ b/typesense/client.go @@ -44,6 +44,14 @@ func (c *Client) Alias(aliasName string) AliasInterface { return &alias{apiClient: c.apiClient, name: aliasName} } +func (c *Client) Analytics() AnalyticsInterface { + return &analytics{apiClient: c.apiClient} +} + +func (c *Client) Conversations() ConversationsInterface { + return &conversations{apiClient: c.apiClient} +} + func (c *Client) Keys() KeysInterface { return &keys{apiClient: c.apiClient} } diff --git a/typesense/conversation_model.go b/typesense/conversation_model.go new file mode 100644 index 0000000..ce888b6 --- /dev/null +++ b/typesense/conversation_model.go @@ -0,0 +1,51 @@ +package typesense + +import ( + "context" + + "github.com/typesense/typesense-go/v2/typesense/api" +) + +type ConversationModelInterface interface { + Retrieve(ctx context.Context) (*api.ConversationModelSchema, error) + Update(ctx context.Context, schema *api.ConversationModelUpdateSchema) (*api.ConversationModelSchema, error) + Delete(ctx context.Context) (*api.ConversationModelSchema, error) +} + +type conversationModel struct { + apiClient APIClientInterface + modelId string +} + +func (c *conversationModel) Retrieve(ctx context.Context) (*api.ConversationModelSchema, error) { + response, err := c.apiClient.RetrieveConversationModelWithResponse(ctx, c.modelId) + if err != nil { + return nil, err + } + if response.JSON200 == nil { + return nil, &HTTPError{Status: response.StatusCode(), Body: response.Body} + } + return response.JSON200, nil +} + +func (c *conversationModel) Update(ctx context.Context, schema *api.ConversationModelUpdateSchema) (*api.ConversationModelSchema, error) { + response, err := c.apiClient.UpdateConversationModelWithResponse(ctx, c.modelId, api.UpdateConversationModelJSONRequestBody(*schema)) + if err != nil { + return nil, err + } + if response.JSON200 
== nil { + return nil, &HTTPError{Status: response.StatusCode(), Body: response.Body} + } + return response.JSON200, nil +} + +func (c *conversationModel) Delete(ctx context.Context) (*api.ConversationModelSchema, error) { + response, err := c.apiClient.DeleteConversationModelWithResponse(ctx, c.modelId) + if err != nil { + return nil, err + } + if response.JSON200 == nil { + return nil, &HTTPError{Status: response.StatusCode(), Body: response.Body} + } + return response.JSON200, nil +} diff --git a/typesense/conversation_model_test.go b/typesense/conversation_model_test.go new file mode 100644 index 0000000..278f6d4 --- /dev/null +++ b/typesense/conversation_model_test.go @@ -0,0 +1,131 @@ +package typesense + +import ( + "context" + "encoding/json" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/typesense/typesense-go/v2/typesense/api" + "github.com/typesense/typesense-go/v2/typesense/api/pointer" +) + +func TestConversationModelRetrieve(t *testing.T) { + expectedData := &api.ConversationModelSchema{ + Id: "123", + ModelName: "cloudflare/@cf/mistral/mistral-7b-instruct-v0.1", + ApiKey: pointer.String("CLOUDFLARE_API_KEY"), + AccountId: pointer.String("CLOUDFLARE_ACCOUNT_ID"), + SystemPrompt: pointer.String("..."), + MaxBytes: 16384, + } + + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/conversations/models/123", http.MethodGet) + data := jsonEncode(t, expectedData) + w.Header().Set("Content-Type", "application/json") + w.Write(data) + }) + defer server.Close() + + res, err := client.Conversations().Model("123").Retrieve(context.Background()) + assert.NoError(t, err) + assert.Equal(t, expectedData, res) +} + +func TestConversationModelRetrieveOnHttpStatusErrorCodeReturnsError(t *testing.T) { + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/conversations/models/123", http.MethodGet) + 
w.WriteHeader(http.StatusConflict) + }) + defer server.Close() + + _, err := client.Conversations().Model("123").Retrieve(context.Background()) + assert.ErrorContains(t, err, "status: 409") +} + +func TestConversationModelUpdate(t *testing.T) { + model := &api.ConversationModelUpdateSchema{ + ModelName: pointer.String("cf/mistral/mistral-7b-instruct-v0.1"), + ApiKey: pointer.String("CLOUDFLARE_API_KEY"), + AccountId: pointer.String("CLOUDFLARE_ACCOUNT_ID"), + SystemPrompt: pointer.String("..."), + MaxBytes: pointer.Int(16384), + } + expectedData := &api.ConversationModelSchema{ + Id: "123", + ModelName: *model.ModelName, + ApiKey: model.ApiKey, + AccountId: model.AccountId, + SystemPrompt: model.SystemPrompt, + MaxBytes: *model.MaxBytes, + } + + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/conversations/models/123", http.MethodPut) + + var reqBody api.ConversationModelUpdateSchema + err := json.NewDecoder(r.Body).Decode(&reqBody) + + assert.NoError(t, err) + assert.Equal(t, model, &reqBody) + + data := jsonEncode(t, expectedData) + w.Header().Set("Content-Type", "application/json") + w.Write(data) + }) + defer server.Close() + + res, err := client.Conversations().Model("123").Update(context.Background(), model) + assert.NoError(t, err) + assert.Equal(t, expectedData, res) +} + +func TestConversationModelUpdateOnHttpStatusErrorCodeReturnsError(t *testing.T) { + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/conversations/models/123", http.MethodPut) + w.WriteHeader(http.StatusConflict) + }) + defer server.Close() + + _, err := client.Conversations().Model("123").Update(context.Background(), &api.ConversationModelUpdateSchema{}) + assert.ErrorContains(t, err, "status: 409") +} + +func TestConversationModelDelete(t *testing.T) { + expectedData := &api.ConversationModelSchema{ + Id: "123", + ModelName: 
"cf/mistral/mistral-7b-instruct-v0.1", + ApiKey: pointer.String("CLOUDFLARE_API_KEY"), + AccountId: pointer.String("CLOUDFLARE_ACCOUNT_ID"), + SystemPrompt: pointer.String("..."), + MaxBytes: 16384, + HistoryCollection: "conversation-store", + Ttl: pointer.Int(10000), + } + + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/conversations/models/123", http.MethodDelete) + + data := jsonEncode(t, expectedData) + w.Header().Set("Content-Type", "application/json") + w.Write(data) + }) + defer server.Close() + + res, err := client.Conversations().Model("123").Delete(context.Background()) + assert.NoError(t, err) + assert.Equal(t, expectedData, res) +} + +func TestConversationModelDeleteOnHttpStatusErrorCodeReturnsError(t *testing.T) { + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/conversations/models/123", http.MethodDelete) + w.WriteHeader(http.StatusConflict) + }) + defer server.Close() + + _, err := client.Conversations().Model("123").Delete(context.Background()) + assert.ErrorContains(t, err, "status: 409") +} diff --git a/typesense/conversation_models.go b/typesense/conversation_models.go new file mode 100644 index 0000000..bda1d89 --- /dev/null +++ b/typesense/conversation_models.go @@ -0,0 +1,40 @@ +package typesense + +import ( + "context" + + "github.com/typesense/typesense-go/v2/typesense/api" +) + +// ConversationModelsInterface is a type for ConversationModels API operations +type ConversationModelsInterface interface { + Create(ctx context.Context, schema *api.ConversationModelCreateSchema) (*api.ConversationModelSchema, error) + Retrieve(ctx context.Context) ([]*api.ConversationModelSchema, error) +} + +// conversationModels is internal implementation of ConversationModelsInterface +type conversationModels struct { + apiClient APIClientInterface +} + +func (c *conversationModels) Create(ctx context.Context, 
schema *api.ConversationModelCreateSchema) (*api.ConversationModelSchema, error) { + response, err := c.apiClient.CreateConversationModelWithResponse(ctx, api.CreateConversationModelJSONRequestBody(*schema)) + if err != nil { + return nil, err + } + if response.JSON200 == nil { + return nil, &HTTPError{Status: response.StatusCode(), Body: response.Body} + } + return response.JSON200, nil +} + +func (c *conversationModels) Retrieve(ctx context.Context) ([]*api.ConversationModelSchema, error) { + response, err := c.apiClient.RetrieveAllConversationModelsWithResponse(ctx) + if err != nil { + return nil, err + } + if response.JSON200 == nil { + return nil, &HTTPError{Status: response.StatusCode(), Body: response.Body} + } + return *response.JSON200, nil +} diff --git a/typesense/conversation_models_test.go b/typesense/conversation_models_test.go new file mode 100644 index 0000000..41f9236 --- /dev/null +++ b/typesense/conversation_models_test.go @@ -0,0 +1,100 @@ +package typesense + +import ( + "context" + "encoding/json" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/typesense/typesense-go/v2/typesense/api" + "github.com/typesense/typesense-go/v2/typesense/api/pointer" +) + +func TestConversationModelsRetrieve(t *testing.T) { + expectedData := []*api.ConversationModelSchema{{ + Id: "123", + ModelName: "cf/mistral/mistral-7b-instruct-v0.1", + ApiKey: pointer.String("CLOUDFLARE_API_KEY"), + AccountId: pointer.String("CLOUDFLARE_ACCOUNT_ID"), + SystemPrompt: pointer.String("..."), + MaxBytes: 16384, + HistoryCollection: "conversation-store", + Ttl: pointer.Int(10000), + }} + + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/conversations/models", http.MethodGet) + data := jsonEncode(t, expectedData) + w.Header().Set("Content-Type", "application/json") + w.Write(data) + }) + defer server.Close() + + res, err := 
client.Conversations().Models().Retrieve(context.Background()) + assert.NoError(t, err) + assert.Equal(t, expectedData, res) +} + +func TestConversationModelsRetrieveOnHttpStatusErrorCodeReturnsError(t *testing.T) { + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/conversations/models", http.MethodGet) + w.WriteHeader(http.StatusConflict) + }) + defer server.Close() + + _, err := client.Conversations().Models().Retrieve(context.Background()) + assert.ErrorContains(t, err, "status: 409") +} + +func TestConversationModelsCreate(t *testing.T) { + model := &api.ConversationModelCreateSchema{ + ModelName: "cf/mistral/mistral-7b-instruct-v0.1", + ApiKey: pointer.String("CLOUDFLARE_API_KEY"), + AccountId: pointer.String("CLOUDFLARE_ACCOUNT_ID"), + SystemPrompt: pointer.String("..."), + MaxBytes: 16384, + HistoryCollection: "conversation-store", + Ttl: pointer.Int(10000), + } + expectedData := &api.ConversationModelSchema{ + Id: "123", + ModelName: model.ModelName, + ApiKey: model.ApiKey, + AccountId: model.AccountId, + SystemPrompt: model.SystemPrompt, + MaxBytes: model.MaxBytes, + HistoryCollection: model.HistoryCollection, + Ttl: pointer.Int(10000), + } + + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/conversations/models", http.MethodPost) + + var reqBody api.ConversationModelCreateSchema + err := json.NewDecoder(r.Body).Decode(&reqBody) + + assert.NoError(t, err) + assert.Equal(t, model, &reqBody) + + data := jsonEncode(t, expectedData) + w.Header().Set("Content-Type", "application/json") + w.Write(data) + }) + defer server.Close() + + res, err := client.Conversations().Models().Create(context.Background(), model) + assert.NoError(t, err) + assert.Equal(t, expectedData, res) +} + +func TestConversationModelsCreateOnHttpStatusErrorCodeReturnsError(t *testing.T) { + server, client := newTestServerAndClient(func(w 
http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/conversations/models", http.MethodPost) + w.WriteHeader(http.StatusConflict) + }) + defer server.Close() + + _, err := client.Conversations().Models().Create(context.Background(), &api.ConversationModelCreateSchema{}) + assert.ErrorContains(t, err, "status: 409") +} diff --git a/typesense/conversations.go b/typesense/conversations.go new file mode 100644 index 0000000..a00c9f1 --- /dev/null +++ b/typesense/conversations.go @@ -0,0 +1,20 @@ +package typesense + +// ConversationsInterface is a type for Conversations API operations +type ConversationsInterface interface { + Models() ConversationModelsInterface + Model(modelId string) ConversationModelInterface +} + +// conversations is internal implementation of ConversationsInterface +type conversations struct { + apiClient APIClientInterface +} + +func (c *conversations) Models() ConversationModelsInterface { + return &conversationModels{apiClient: c.apiClient} +} + +func (c *conversations) Model(modelId string) ConversationModelInterface { + return &conversationModel{apiClient: c.apiClient, modelId: modelId} +} diff --git a/typesense/document.go b/typesense/document.go index 360f3de..e244d0f 100644 --- a/typesense/document.go +++ b/typesense/document.go @@ -5,11 +5,13 @@ import ( "encoding/json" "io" "strings" + + "github.com/typesense/typesense-go/v2/typesense/api" ) type DocumentInterface[T any] interface { Retrieve(ctx context.Context) (T, error) - Update(ctx context.Context, document any) (T, error) + Update(ctx context.Context, document any, params *api.DocumentIndexParameters) (T, error) Delete(ctx context.Context) (T, error) } @@ -39,9 +41,9 @@ func (d *document[T]) Retrieve(ctx context.Context) (resp T, err error) { return resp, nil } -func (d *document[T]) Update(ctx context.Context, document any) (resp T, err error) { +func (d *document[T]) Update(ctx context.Context, document any, params *api.DocumentIndexParameters) (resp T, err 
error) { response, err := d.apiClient.UpdateDocument(ctx, - d.collectionName, d.documentID, document) + d.collectionName, d.documentID, &api.UpdateDocumentParams{DirtyValues: params.DirtyValues}, document) if err != nil { return resp, err } diff --git a/typesense/document_test.go b/typesense/document_test.go index 6ee38bf..c968fb3 100644 --- a/typesense/document_test.go +++ b/typesense/document_test.go @@ -6,6 +6,8 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/typesense/typesense-go/v2/typesense/api" + "github.com/typesense/typesense-go/v2/typesense/api/pointer" "github.com/typesense/typesense-go/v2/typesense/mocks" "go.uber.org/mock/gomock" ) @@ -71,13 +73,13 @@ func TestDocumentUpdate(t *testing.T) { notNill := gomock.Not(gomock.Nil()) mockAPIClient.EXPECT(). - UpdateDocument(notNill, "companies", "123", expectedDocument). + UpdateDocument(notNill, "companies", "123", &api.UpdateDocumentParams{DirtyValues: pointer.Any(api.CoerceOrDrop)}, expectedDocument). Return(createResponse(200, "", mockedResult), nil). Times(1) client := NewClient(WithAPIClient(mockAPIClient)) document := createNewDocument() - result, err := client.Collection("companies").Document("123").Update(context.Background(), document) + result, err := client.Collection("companies").Document("123").Update(context.Background(), document, &api.DocumentIndexParameters{DirtyValues: pointer.Any(api.CoerceOrDrop)}) assert.Nil(t, err) assert.Equal(t, expectedResult, result) @@ -92,13 +94,13 @@ func TestDocumentUpdateOnApiClientErrorReturnsError(t *testing.T) { notNill := gomock.Not(gomock.Nil()) mockAPIClient.EXPECT(). - UpdateDocument(notNill, "companies", "123", expectedDocument). + UpdateDocument(notNill, "companies", "123", &api.UpdateDocumentParams{}, expectedDocument). Return(nil, errors.New("failed request")). 
Times(1) client := NewClient(WithAPIClient(mockAPIClient)) document := createNewDocument() - _, err := client.Collection("companies").Document("123").Update(context.Background(), document) + _, err := client.Collection("companies").Document("123").Update(context.Background(), document, &api.DocumentIndexParameters{}) assert.NotNil(t, err) } @@ -111,13 +113,13 @@ func TestDocumentUpdateOnHttpStatusErrorCodeReturnsError(t *testing.T) { notNill := gomock.Not(gomock.Nil()) mockAPIClient.EXPECT(). - UpdateDocument(notNill, "companies", "123", expectedDocument). + UpdateDocument(notNill, "companies", "123", &api.UpdateDocumentParams{}, expectedDocument). Return(createResponse(500, "Internal server error", nil), nil). Times(1) client := NewClient(WithAPIClient(mockAPIClient)) document := createNewDocument() - _, err := client.Collection("companies").Document("123").Update(context.Background(), document) + _, err := client.Collection("companies").Document("123").Update(context.Background(), document, &api.DocumentIndexParameters{}) assert.NotNil(t, err) } diff --git a/typesense/documents.go b/typesense/documents.go index 9d8134e..8bfd56c 100644 --- a/typesense/documents.go +++ b/typesense/documents.go @@ -9,29 +9,27 @@ import ( "net/http" "github.com/typesense/typesense-go/v2/typesense/api" + "github.com/typesense/typesense-go/v2/typesense/api/pointer" ) -var upsertAction api.IndexDocumentParamsAction = "upsert" - const ( defaultImportBatchSize = 40 - defaultImportAction = "create" ) // DocumentsInterface is a type for Documents API operations type DocumentsInterface interface { // Create returns indexed document - Create(ctx context.Context, document interface{}) (map[string]interface{}, error) + Create(ctx context.Context, document interface{}, params *api.DocumentIndexParameters) (map[string]interface{}, error) // Update updates documents matching the filter_by condition Update(ctx context.Context, updateFields interface{}, params *api.UpdateDocumentsParams) (int, 
error) // Upsert returns indexed/updated document - Upsert(context.Context, interface{}) (map[string]interface{}, error) + Upsert(ctx context.Context, document interface{}, params *api.DocumentIndexParameters) (map[string]interface{}, error) // Delete returns number of deleted documents Delete(ctx context.Context, filter *api.DeleteDocumentsParams) (int, error) // Search performs document search in collection Search(ctx context.Context, params *api.SearchCollectionParams) (*api.SearchResult, error) // Export returns all documents from index in jsonl format - Export(ctx context.Context) (io.ReadCloser, error) + Export(ctx context.Context, params *api.ExportDocumentsParams) (io.ReadCloser, error) // Import returns json array. Each item of the response indicates // the result of each document present in the request body (in the same order). Import(ctx context.Context, documents []interface{}, params *api.ImportDocumentsParams) ([]*api.ImportDocumentResponse, error) @@ -59,8 +57,8 @@ func (d *documents) indexDocument(ctx context.Context, document interface{}, par return *response.JSON201, nil } -func (d *documents) Create(ctx context.Context, document interface{}) (map[string]interface{}, error) { - return d.indexDocument(ctx, document, &api.IndexDocumentParams{}) +func (d *documents) Create(ctx context.Context, document interface{}, params *api.DocumentIndexParameters) (map[string]interface{}, error) { + return d.indexDocument(ctx, document, &api.IndexDocumentParams{DirtyValues: params.DirtyValues}) } func (d *documents) Update(ctx context.Context, updateFields interface{}, params *api.UpdateDocumentsParams) (int, error) { @@ -75,8 +73,8 @@ func (d *documents) Update(ctx context.Context, updateFields interface{}, params return response.JSON200.NumUpdated, nil } -func (d *documents) Upsert(ctx context.Context, document interface{}) (map[string]interface{}, error) { - return d.indexDocument(ctx, document, &api.IndexDocumentParams{Action: &upsertAction}) +func (d 
*documents) Upsert(ctx context.Context, document interface{}, params *api.DocumentIndexParameters) (map[string]interface{}, error) { + return d.indexDocument(ctx, document, &api.IndexDocumentParams{Action: pointer.Any(api.Upsert), DirtyValues: params.DirtyValues}) } func (d *documents) Delete(ctx context.Context, filter *api.DeleteDocumentsParams) (int, error) { @@ -103,8 +101,8 @@ func (d *documents) Search(ctx context.Context, params *api.SearchCollectionPara return response.JSON200, nil } -func (d *documents) Export(ctx context.Context) (io.ReadCloser, error) { - response, err := d.apiClient.ExportDocuments(ctx, d.collectionName, &api.ExportDocumentsParams{}) +func (d *documents) Export(ctx context.Context, params *api.ExportDocumentsParams) (io.ReadCloser, error) { + response, err := d.apiClient.ExportDocuments(ctx, d.collectionName, params) if err != nil { return nil, err } @@ -122,8 +120,7 @@ func initImportParams(params *api.ImportDocumentsParams) { *params.BatchSize = defaultImportBatchSize } if params.Action == nil { - params.Action = new(string) - *params.Action = defaultImportAction + params.Action = pointer.Any(api.Create) } } diff --git a/typesense/documents_test.go b/typesense/documents_test.go index b8e09c0..dbd6150 100644 --- a/typesense/documents_test.go +++ b/typesense/documents_test.go @@ -4,7 +4,6 @@ import ( "context" "errors" "io" - "io/ioutil" "net/http" "strings" "testing" @@ -54,7 +53,7 @@ func TestDocumentCreate(t *testing.T) { mockedResult := createNewDocumentResponse() notNill := gomock.Not(gomock.Nil()) - indexParams := &api.IndexDocumentParams{} + indexParams := &api.IndexDocumentParams{DirtyValues: pointer.Any(api.CoerceOrDrop)} mockAPIClient.EXPECT(). IndexDocumentWithResponse(notNill, "companies", indexParams, expectedDocument). 
Return(&api.IndexDocumentResponse{ @@ -64,7 +63,7 @@ func TestDocumentCreate(t *testing.T) { client := NewClient(WithAPIClient(mockAPIClient)) document := createNewDocument() - result, err := client.Collection("companies").Documents().Create(context.Background(), document) + result, err := client.Collection("companies").Documents().Create(context.Background(), document, &api.DocumentIndexParameters{DirtyValues: pointer.Any(api.CoerceOrDrop)}) assert.Nil(t, err) assert.Equal(t, expectedResult, result) @@ -85,7 +84,7 @@ func TestDocumentCreateOnApiClientErrorReturnsError(t *testing.T) { Times(1) client := NewClient(WithAPIClient(mockAPIClient)) - _, err := client.Collection("companies").Documents().Create(context.Background(), newDocument) + _, err := client.Collection("companies").Documents().Create(context.Background(), newDocument, &api.DocumentIndexParameters{}) assert.NotNil(t, err) } @@ -109,7 +108,7 @@ func TestDocumentCreateOnHttpStatusErrorCodeReturnsError(t *testing.T) { Times(1) client := NewClient(WithAPIClient(mockAPIClient)) - _, err := client.Collection("companies").Documents().Create(context.Background(), newDocument) + _, err := client.Collection("companies").Documents().Create(context.Background(), newDocument, &api.DocumentIndexParameters{}) assert.NotNil(t, err) } @@ -123,7 +122,7 @@ func TestDocumentUpsert(t *testing.T) { mockedResult := createNewDocumentResponse() notNill := gomock.Not(gomock.Nil()) - indexParams := &api.IndexDocumentParams{Action: &upsertAction} + indexParams := &api.IndexDocumentParams{Action: pointer.Any(api.Upsert), DirtyValues: pointer.Any(api.CoerceOrDrop)} mockAPIClient.EXPECT(). IndexDocumentWithResponse(notNill, "companies", indexParams, newDocument). 
Return(&api.IndexDocumentResponse{ @@ -132,7 +131,7 @@ func TestDocumentUpsert(t *testing.T) { Times(1) client := NewClient(WithAPIClient(mockAPIClient)) - result, err := client.Collection("companies").Documents().Upsert(context.Background(), newDocument) + result, err := client.Collection("companies").Documents().Upsert(context.Background(), newDocument, &api.DocumentIndexParameters{DirtyValues: pointer.Any(api.CoerceOrDrop)}) assert.Nil(t, err) assert.Equal(t, expectedResult, result) @@ -146,14 +145,14 @@ func TestDocumentUpsertOnApiClientErrorReturnsError(t *testing.T) { mockAPIClient := mocks.NewMockAPIClientInterface(ctrl) notNill := gomock.Not(gomock.Nil()) - indexParams := &api.IndexDocumentParams{Action: &upsertAction} + indexParams := &api.IndexDocumentParams{Action: pointer.Any(api.Upsert)} mockAPIClient.EXPECT(). IndexDocumentWithResponse(notNill, "companies", indexParams, newDocument). Return(nil, errors.New("failed request")). Times(1) client := NewClient(WithAPIClient(mockAPIClient)) - _, err := client.Collection("companies").Documents().Upsert(context.Background(), newDocument) + _, err := client.Collection("companies").Documents().Upsert(context.Background(), newDocument, &api.DocumentIndexParameters{}) assert.NotNil(t, err) } @@ -165,7 +164,7 @@ func TestDocumentUpsertOnHttpStatusErrorCodeReturnsError(t *testing.T) { mockAPIClient := mocks.NewMockAPIClientInterface(ctrl) notNill := gomock.Not(gomock.Nil()) - indexParams := &api.IndexDocumentParams{Action: &upsertAction} + indexParams := &api.IndexDocumentParams{Action: pointer.Any(api.Upsert)} mockAPIClient.EXPECT(). IndexDocumentWithResponse(notNill, "companies", indexParams, newDocument). 
Return(&api.IndexDocumentResponse{ @@ -177,7 +176,7 @@ func TestDocumentUpsertOnHttpStatusErrorCodeReturnsError(t *testing.T) { Times(1) client := NewClient(WithAPIClient(mockAPIClient)) - _, err := client.Collection("companies").Documents().Upsert(context.Background(), newDocument) + _, err := client.Collection("companies").Documents().Upsert(context.Background(), newDocument, &api.DocumentIndexParameters{}) assert.NotNil(t, err) } @@ -277,11 +276,11 @@ func TestDocumentsDeleteOnHttpStatusErrorCodeReturnsError(t *testing.T) { } func createDocumentStream() io.ReadCloser { - return ioutil.NopCloser(strings.NewReader(`{"id": "125","company_name":"Future Technology","num_employees":1232,"country":"UK"}`)) + return io.NopCloser(strings.NewReader(`{"id": "125","company_name":"Future Technology","num_employees":1232,"country":"UK"}`)) } func TestDocumentsExport(t *testing.T) { - expectedBytes, err := ioutil.ReadAll(createDocumentStream()) + expectedBytes, err := io.ReadAll(createDocumentStream()) assert.Nil(t, err) ctrl := gomock.NewController(t) @@ -298,10 +297,10 @@ func TestDocumentsExport(t *testing.T) { Times(1) client := NewClient(WithAPIClient(mockAPIClient)) - result, err := client.Collection("companies").Documents().Export(context.Background()) + result, err := client.Collection("companies").Documents().Export(context.Background(), &api.ExportDocumentsParams{}) assert.Nil(t, err) - resultBytes, err := ioutil.ReadAll(result) + resultBytes, err := io.ReadAll(result) assert.Nil(t, err) assert.Equal(t, string(expectedBytes), string(resultBytes)) } @@ -317,7 +316,7 @@ func TestDocumentsExportOnApiClientErrorReturnsError(t *testing.T) { Times(1) client := NewClient(WithAPIClient(mockAPIClient)) - _, err := client.Collection("companies").Documents().Export(context.Background()) + _, err := client.Collection("companies").Documents().Export(context.Background(), &api.ExportDocumentsParams{}) assert.NotNil(t, err) } @@ -330,11 +329,59 @@ func 
TestDocumentsExportOnHttpStatusErrorCodeReturnsError(t *testing.T) { ExportDocuments(gomock.Not(gomock.Nil()), "companies", &api.ExportDocumentsParams{}). Return(&http.Response{ StatusCode: http.StatusInternalServerError, - Body: ioutil.NopCloser(strings.NewReader("Internal server error")), + Body: io.NopCloser(strings.NewReader("Internal server error")), }, nil). Times(1) client := NewClient(WithAPIClient(mockAPIClient)) - _, err := client.Collection("companies").Documents().Export(context.Background()) + _, err := client.Collection("companies").Documents().Export(context.Background(), &api.ExportDocumentsParams{}) assert.NotNil(t, err) } + +func TestSingleCollectionSearchRAG(t *testing.T) { + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/collections/test/documents/search?conversation=true&conversation_id=123&conversation_model_id=conv-1&q=can+you+suggest", http.MethodGet) + w.Header().Set("Content-Type", "application/json") + w.Write([]byte(` + { + "conversation": { + "answer": "Based on the context provided,...", + "conversation_history": [ + { + "user": "can you suggest an action series" + }, + { + "assistant": "Based on the context provided,..." 
+ } + ], + "conversation_id": "abc", + "query": "can you suggest" + } + }`)) + }) + defer server.Close() + + res, err := client.Collection("test").Documents().Search(context.Background(), &api.SearchCollectionParams{ + Q: pointer.String("can you suggest"), + Conversation: pointer.True(), + ConversationModelId: pointer.String("conv-1"), + ConversationId: pointer.String("123"), + }) + + assert.NoError(t, err) + assert.Equal(t, &api.SearchResult{ + Conversation: &api.SearchResultConversation{ + Answer: "Based on the context provided,...", + ConversationHistory: []map[string]interface{}{ + { + "user": "can you suggest an action series", + }, + { + "assistant": "Based on the context provided,...", + }, + }, + ConversationId: "abc", + Query: "can you suggest", + }, + }, res) +} diff --git a/typesense/import_test.go b/typesense/import_test.go index 0aaf30a..4459f2a 100644 --- a/typesense/import_test.go +++ b/typesense/import_test.go @@ -5,7 +5,6 @@ import ( "context" "errors" "io" - "io/ioutil" "net/http" "reflect" "strings" @@ -23,7 +22,7 @@ type eqReaderMatcher struct { } func eqReader(r io.Reader) gomock.Matcher { - allBytes, err := ioutil.ReadAll(r) + allBytes, err := io.ReadAll(r) if err != nil { panic(err) } @@ -35,7 +34,7 @@ func (m *eqReaderMatcher) Matches(x interface{}) bool { return false } r := x.(io.Reader) - allBytes, err := ioutil.ReadAll(r) + allBytes, err := io.ReadAll(r) if err != nil { panic(err) } @@ -48,7 +47,7 @@ func (m *eqReaderMatcher) String() string { func TestDocumentsImportWithOneDocument(t *testing.T) { expectedParams := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), BatchSize: pointer.Int(40), } expectedBody := strings.NewReader(`{"id":"123","companyName":"Stark Industries","numEmployees":5215,"country":"USA"}` + "\n") @@ -66,7 +65,7 @@ func TestDocumentsImportWithOneDocument(t *testing.T) { "companies", expectedParams, "application/octet-stream", eqReader(expectedBody)). 
Return(&http.Response{ StatusCode: http.StatusOK, - Body: ioutil.NopCloser(strings.NewReader(expectedResultString)), + Body: io.NopCloser(strings.NewReader(expectedResultString)), }, nil). Times(1) @@ -75,7 +74,7 @@ func TestDocumentsImportWithOneDocument(t *testing.T) { createNewDocument(), } params := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), BatchSize: pointer.Int(40), } result, err := client.Collection("companies").Documents().Import(context.Background(), documents, params) @@ -91,7 +90,7 @@ func TestDocumentsImportWithEmptyListReturnsError(t *testing.T) { client := NewClient(WithAPIClient(mockAPIClient)) params := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), BatchSize: pointer.Int(40), } documents := []interface{}{} @@ -101,7 +100,7 @@ func TestDocumentsImportWithEmptyListReturnsError(t *testing.T) { func TestDocumentsImportWithOneDocumentAndInvalidResultJsonReturnsError(t *testing.T) { expectedParams := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), BatchSize: pointer.Int(40), } expectedBody := strings.NewReader(`{"id":"123","companyName":"Stark Industries","numEmployees":5215,"country":"USA"}` + "\n") @@ -116,7 +115,7 @@ func TestDocumentsImportWithOneDocumentAndInvalidResultJsonReturnsError(t *testi "companies", expectedParams, "application/octet-stream", eqReader(expectedBody)). Return(&http.Response{ StatusCode: http.StatusOK, - Body: ioutil.NopCloser(strings.NewReader(expectedResultString)), + Body: io.NopCloser(strings.NewReader(expectedResultString)), }, nil). 
Times(1) @@ -125,7 +124,7 @@ func TestDocumentsImportWithOneDocumentAndInvalidResultJsonReturnsError(t *testi createNewDocument(), } params := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), BatchSize: pointer.Int(40), } _, err := client.Collection("companies").Documents().Import(context.Background(), documents, params) @@ -139,7 +138,7 @@ func TestDocumentsImportWithInvalidInputDataReturnsError(t *testing.T) { client := NewClient(WithAPIClient(mockAPIClient)) params := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), BatchSize: pointer.Int(40), } documents := []interface{}{ @@ -162,7 +161,7 @@ func TestDocumentsImportOnApiClientErrorReturnsError(t *testing.T) { client := NewClient(WithAPIClient(mockAPIClient)) params := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), BatchSize: pointer.Int(40), } documents := []interface{}{ @@ -182,13 +181,13 @@ func TestDocumentsImportOnHttpStatusErrorCodeReturnsError(t *testing.T) { "companies", gomock.Any(), "application/octet-stream", gomock.Any()). Return(&http.Response{ StatusCode: http.StatusInternalServerError, - Body: ioutil.NopCloser(strings.NewReader("Internal server error")), + Body: io.NopCloser(strings.NewReader("Internal server error")), }, nil). 
Times(1) client := NewClient(WithAPIClient(mockAPIClient)) params := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), BatchSize: pointer.Int(40), } documents := []interface{}{ @@ -200,7 +199,7 @@ func TestDocumentsImportOnHttpStatusErrorCodeReturnsError(t *testing.T) { func TestDocumentsImportWithTwoDocuments(t *testing.T) { expectedParams := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), BatchSize: pointer.Int(40), } expectedBody := strings.NewReader(`{"id":"123","companyName":"Stark Industries","numEmployees":5215,"country":"USA"}` + @@ -220,7 +219,7 @@ func TestDocumentsImportWithTwoDocuments(t *testing.T) { "companies", expectedParams, "application/octet-stream", eqReader(expectedBody)). Return(&http.Response{ StatusCode: http.StatusOK, - Body: ioutil.NopCloser(strings.NewReader(expectedResultString)), + Body: io.NopCloser(strings.NewReader(expectedResultString)), }, nil). Times(1) @@ -230,7 +229,7 @@ func TestDocumentsImportWithTwoDocuments(t *testing.T) { createNewDocument("125"), } params := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), BatchSize: pointer.Int(40), } result, err := client.Collection("companies").Documents().Import(context.Background(), documents, params) @@ -241,7 +240,7 @@ func TestDocumentsImportWithTwoDocuments(t *testing.T) { func TestDocumentsImportWithActionOnly(t *testing.T) { expectedParams := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), BatchSize: pointer.Int(defaultImportBatchSize), } @@ -254,7 +253,7 @@ func TestDocumentsImportWithActionOnly(t *testing.T) { "companies", expectedParams, "application/octet-stream", gomock.Any()). Return(&http.Response{ StatusCode: http.StatusOK, - Body: ioutil.NopCloser(strings.NewReader(`{"success": true}`)), + Body: io.NopCloser(strings.NewReader(`{"success": true}`)), }, nil). 
Times(1) @@ -263,7 +262,7 @@ func TestDocumentsImportWithActionOnly(t *testing.T) { createNewDocument(), } params := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), } _, err := client.Collection("companies").Documents().Import(context.Background(), documents, params) assert.Nil(t, err) @@ -271,7 +270,7 @@ func TestDocumentsImportWithActionOnly(t *testing.T) { func TestDocumentsImportWithBatchSizeOnly(t *testing.T) { expectedParams := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), BatchSize: pointer.Int(10), } @@ -284,7 +283,7 @@ func TestDocumentsImportWithBatchSizeOnly(t *testing.T) { "companies", expectedParams, "application/octet-stream", gomock.Any()). Return(&http.Response{ StatusCode: http.StatusOK, - Body: ioutil.NopCloser(strings.NewReader(`{"success": true}`)), + Body: io.NopCloser(strings.NewReader(`{"success": true}`)), }, nil). Times(1) @@ -302,7 +301,7 @@ func TestDocumentsImportWithBatchSizeOnly(t *testing.T) { func TestDocumentsImportJsonl(t *testing.T) { expectedBytes := []byte(`{"success": true}`) expectedParams := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), BatchSize: pointer.Int(40), } expectedBody := createDocumentStream() @@ -316,20 +315,20 @@ func TestDocumentsImportJsonl(t *testing.T) { "companies", expectedParams, "application/octet-stream", eqReader(expectedBody)). Return(&http.Response{ StatusCode: http.StatusOK, - Body: ioutil.NopCloser(bytes.NewBuffer(expectedBytes)), + Body: io.NopCloser(bytes.NewBuffer(expectedBytes)), }, nil). 
Times(1) client := NewClient(WithAPIClient(mockAPIClient)) params := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), BatchSize: pointer.Int(40), } importBody := createDocumentStream() result, err := client.Collection("companies").Documents().ImportJsonl(context.Background(), importBody, params) assert.Nil(t, err) - resultBytes, err := ioutil.ReadAll(result) + resultBytes, err := io.ReadAll(result) assert.Nil(t, err) assert.Equal(t, string(expectedBytes), string(resultBytes)) } @@ -347,7 +346,7 @@ func TestDocumentsImportJsonlOnApiClientErrorReturnsError(t *testing.T) { client := NewClient(WithAPIClient(mockAPIClient)) params := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), BatchSize: pointer.Int(40), } importBody := createDocumentStream() @@ -365,13 +364,13 @@ func TestDocumentsImportJsonlOnHttpStatusErrorCodeReturnsError(t *testing.T) { gomock.Any(), gomock.Any(), "application/octet-stream", gomock.Any()). Return(&http.Response{ StatusCode: http.StatusInternalServerError, - Body: ioutil.NopCloser(strings.NewReader("Internal server error")), + Body: io.NopCloser(strings.NewReader("Internal server error")), }, nil). Times(1) client := NewClient(WithAPIClient(mockAPIClient)) params := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), BatchSize: pointer.Int(40), } importBody := createDocumentStream() @@ -381,7 +380,7 @@ func TestDocumentsImportJsonlOnHttpStatusErrorCodeReturnsError(t *testing.T) { func TestDocumentsImportJsonlWithActionOnly(t *testing.T) { expectedParams := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), BatchSize: pointer.Int(defaultImportBatchSize), } @@ -394,13 +393,13 @@ func TestDocumentsImportJsonlWithActionOnly(t *testing.T) { "companies", expectedParams, "application/octet-stream", gomock.Any()). 
Return(&http.Response{ StatusCode: http.StatusOK, - Body: ioutil.NopCloser(strings.NewReader(`{"success": true}`)), + Body: io.NopCloser(strings.NewReader(`{"success": true}`)), }, nil). Times(1) client := NewClient(WithAPIClient(mockAPIClient)) params := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), } importBody := createDocumentStream() _, err := client.Collection("companies").Documents().ImportJsonl(context.Background(), importBody, params) @@ -409,7 +408,7 @@ func TestDocumentsImportJsonlWithActionOnly(t *testing.T) { func TestDocumentsImportJsonlWithBatchSizeOnly(t *testing.T) { expectedParams := &api.ImportDocumentsParams{ - Action: pointer.String("create"), + Action: pointer.Any(api.Create), BatchSize: pointer.Int(10), } @@ -422,7 +421,7 @@ func TestDocumentsImportJsonlWithBatchSizeOnly(t *testing.T) { "companies", expectedParams, "application/octet-stream", gomock.Any()). Return(&http.Response{ StatusCode: http.StatusOK, - Body: ioutil.NopCloser(strings.NewReader(`{"success": true}`)), + Body: io.NopCloser(strings.NewReader(`{"success": true}`)), }, nil). Times(1) diff --git a/typesense/mocks/mock_client.go b/typesense/mocks/mock_client.go index 5d66b65..d24b223 100644 --- a/typesense/mocks/mock_client.go +++ b/typesense/mocks/mock_client.go @@ -42,6 +42,86 @@ func (m *MockAPIClientInterface) EXPECT() *MockAPIClientInterfaceMockRecorder { return m.recorder } +// CreateAnalyticsEvent mocks base method. +func (m *MockAPIClientInterface) CreateAnalyticsEvent(ctx context.Context, body api.CreateAnalyticsEventJSONRequestBody, reqEditors ...api.RequestEditorFn) (*http.Response, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "CreateAnalyticsEvent", varargs...) 
+ ret0, _ := ret[0].(*http.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateAnalyticsEvent indicates an expected call of CreateAnalyticsEvent. +func (mr *MockAPIClientInterfaceMockRecorder) CreateAnalyticsEvent(ctx, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateAnalyticsEvent", reflect.TypeOf((*MockAPIClientInterface)(nil).CreateAnalyticsEvent), varargs...) +} + +// CreateAnalyticsEventWithBody mocks base method. +func (m *MockAPIClientInterface) CreateAnalyticsEventWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...api.RequestEditorFn) (*http.Response, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, contentType, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "CreateAnalyticsEventWithBody", varargs...) + ret0, _ := ret[0].(*http.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateAnalyticsEventWithBody indicates an expected call of CreateAnalyticsEventWithBody. +func (mr *MockAPIClientInterfaceMockRecorder) CreateAnalyticsEventWithBody(ctx, contentType, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, contentType, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateAnalyticsEventWithBody", reflect.TypeOf((*MockAPIClientInterface)(nil).CreateAnalyticsEventWithBody), varargs...) +} + +// CreateAnalyticsEventWithBodyWithResponse mocks base method. 
+func (m *MockAPIClientInterface) CreateAnalyticsEventWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...api.RequestEditorFn) (*api.CreateAnalyticsEventResponse, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, contentType, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "CreateAnalyticsEventWithBodyWithResponse", varargs...) + ret0, _ := ret[0].(*api.CreateAnalyticsEventResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateAnalyticsEventWithBodyWithResponse indicates an expected call of CreateAnalyticsEventWithBodyWithResponse. +func (mr *MockAPIClientInterfaceMockRecorder) CreateAnalyticsEventWithBodyWithResponse(ctx, contentType, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, contentType, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateAnalyticsEventWithBodyWithResponse", reflect.TypeOf((*MockAPIClientInterface)(nil).CreateAnalyticsEventWithBodyWithResponse), varargs...) +} + +// CreateAnalyticsEventWithResponse mocks base method. +func (m *MockAPIClientInterface) CreateAnalyticsEventWithResponse(ctx context.Context, body api.CreateAnalyticsEventJSONRequestBody, reqEditors ...api.RequestEditorFn) (*api.CreateAnalyticsEventResponse, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "CreateAnalyticsEventWithResponse", varargs...) + ret0, _ := ret[0].(*api.CreateAnalyticsEventResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateAnalyticsEventWithResponse indicates an expected call of CreateAnalyticsEventWithResponse. +func (mr *MockAPIClientInterfaceMockRecorder) CreateAnalyticsEventWithResponse(ctx, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, body}, reqEditors...) 
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateAnalyticsEventWithResponse", reflect.TypeOf((*MockAPIClientInterface)(nil).CreateAnalyticsEventWithResponse), varargs...) +} + // CreateAnalyticsRule mocks base method. func (m *MockAPIClientInterface) CreateAnalyticsRule(ctx context.Context, body api.CreateAnalyticsRuleJSONRequestBody, reqEditors ...api.RequestEditorFn) (*http.Response, error) { m.ctrl.T.Helper() @@ -202,6 +282,86 @@ func (mr *MockAPIClientInterfaceMockRecorder) CreateCollectionWithResponse(ctx, return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateCollectionWithResponse", reflect.TypeOf((*MockAPIClientInterface)(nil).CreateCollectionWithResponse), varargs...) } +// CreateConversationModel mocks base method. +func (m *MockAPIClientInterface) CreateConversationModel(ctx context.Context, body api.CreateConversationModelJSONRequestBody, reqEditors ...api.RequestEditorFn) (*http.Response, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "CreateConversationModel", varargs...) + ret0, _ := ret[0].(*http.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateConversationModel indicates an expected call of CreateConversationModel. +func (mr *MockAPIClientInterfaceMockRecorder) CreateConversationModel(ctx, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateConversationModel", reflect.TypeOf((*MockAPIClientInterface)(nil).CreateConversationModel), varargs...) +} + +// CreateConversationModelWithBody mocks base method. 
+func (m *MockAPIClientInterface) CreateConversationModelWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...api.RequestEditorFn) (*http.Response, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, contentType, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "CreateConversationModelWithBody", varargs...) + ret0, _ := ret[0].(*http.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateConversationModelWithBody indicates an expected call of CreateConversationModelWithBody. +func (mr *MockAPIClientInterfaceMockRecorder) CreateConversationModelWithBody(ctx, contentType, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, contentType, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateConversationModelWithBody", reflect.TypeOf((*MockAPIClientInterface)(nil).CreateConversationModelWithBody), varargs...) +} + +// CreateConversationModelWithBodyWithResponse mocks base method. +func (m *MockAPIClientInterface) CreateConversationModelWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...api.RequestEditorFn) (*api.CreateConversationModelResponse, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, contentType, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "CreateConversationModelWithBodyWithResponse", varargs...) + ret0, _ := ret[0].(*api.CreateConversationModelResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateConversationModelWithBodyWithResponse indicates an expected call of CreateConversationModelWithBodyWithResponse. +func (mr *MockAPIClientInterfaceMockRecorder) CreateConversationModelWithBodyWithResponse(ctx, contentType, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, contentType, body}, reqEditors...) 
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateConversationModelWithBodyWithResponse", reflect.TypeOf((*MockAPIClientInterface)(nil).CreateConversationModelWithBodyWithResponse), varargs...) +} + +// CreateConversationModelWithResponse mocks base method. +func (m *MockAPIClientInterface) CreateConversationModelWithResponse(ctx context.Context, body api.CreateConversationModelJSONRequestBody, reqEditors ...api.RequestEditorFn) (*api.CreateConversationModelResponse, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "CreateConversationModelWithResponse", varargs...) + ret0, _ := ret[0].(*api.CreateConversationModelResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateConversationModelWithResponse indicates an expected call of CreateConversationModelWithResponse. +func (mr *MockAPIClientInterfaceMockRecorder) CreateConversationModelWithResponse(ctx, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateConversationModelWithResponse", reflect.TypeOf((*MockAPIClientInterface)(nil).CreateConversationModelWithResponse), varargs...) +} + // CreateKey mocks base method. func (m *MockAPIClientInterface) CreateKey(ctx context.Context, body api.CreateKeyJSONRequestBody, reqEditors ...api.RequestEditorFn) (*http.Response, error) { m.ctrl.T.Helper() @@ -442,6 +602,46 @@ func (mr *MockAPIClientInterfaceMockRecorder) DeleteCollectionWithResponse(ctx, return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteCollectionWithResponse", reflect.TypeOf((*MockAPIClientInterface)(nil).DeleteCollectionWithResponse), varargs...) } +// DeleteConversationModel mocks base method. 
+func (m *MockAPIClientInterface) DeleteConversationModel(ctx context.Context, modelId string, reqEditors ...api.RequestEditorFn) (*http.Response, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, modelId} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "DeleteConversationModel", varargs...) + ret0, _ := ret[0].(*http.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// DeleteConversationModel indicates an expected call of DeleteConversationModel. +func (mr *MockAPIClientInterfaceMockRecorder) DeleteConversationModel(ctx, modelId any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, modelId}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteConversationModel", reflect.TypeOf((*MockAPIClientInterface)(nil).DeleteConversationModel), varargs...) +} + +// DeleteConversationModelWithResponse mocks base method. +func (m *MockAPIClientInterface) DeleteConversationModelWithResponse(ctx context.Context, modelId string, reqEditors ...api.RequestEditorFn) (*api.DeleteConversationModelResponse, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, modelId} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "DeleteConversationModelWithResponse", varargs...) + ret0, _ := ret[0].(*api.DeleteConversationModelResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// DeleteConversationModelWithResponse indicates an expected call of DeleteConversationModelWithResponse. +func (mr *MockAPIClientInterfaceMockRecorder) DeleteConversationModelWithResponse(ctx, modelId any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, modelId}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteConversationModelWithResponse", reflect.TypeOf((*MockAPIClientInterface)(nil).DeleteConversationModelWithResponse), varargs...) 
+} + // DeleteDocument mocks base method. func (m *MockAPIClientInterface) DeleteDocument(ctx context.Context, collectionName, documentId string, reqEditors ...api.RequestEditorFn) (*http.Response, error) { m.ctrl.T.Helper() @@ -1482,6 +1682,46 @@ func (mr *MockAPIClientInterfaceMockRecorder) RetrieveAPIStatsWithResponse(ctx a return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RetrieveAPIStatsWithResponse", reflect.TypeOf((*MockAPIClientInterface)(nil).RetrieveAPIStatsWithResponse), varargs...) } +// RetrieveAllConversationModels mocks base method. +func (m *MockAPIClientInterface) RetrieveAllConversationModels(ctx context.Context, reqEditors ...api.RequestEditorFn) (*http.Response, error) { + m.ctrl.T.Helper() + varargs := []any{ctx} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "RetrieveAllConversationModels", varargs...) + ret0, _ := ret[0].(*http.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// RetrieveAllConversationModels indicates an expected call of RetrieveAllConversationModels. +func (mr *MockAPIClientInterfaceMockRecorder) RetrieveAllConversationModels(ctx any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RetrieveAllConversationModels", reflect.TypeOf((*MockAPIClientInterface)(nil).RetrieveAllConversationModels), varargs...) +} + +// RetrieveAllConversationModelsWithResponse mocks base method. +func (m *MockAPIClientInterface) RetrieveAllConversationModelsWithResponse(ctx context.Context, reqEditors ...api.RequestEditorFn) (*api.RetrieveAllConversationModelsResponse, error) { + m.ctrl.T.Helper() + varargs := []any{ctx} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "RetrieveAllConversationModelsWithResponse", varargs...) 
+ ret0, _ := ret[0].(*api.RetrieveAllConversationModelsResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// RetrieveAllConversationModelsWithResponse indicates an expected call of RetrieveAllConversationModelsWithResponse. +func (mr *MockAPIClientInterfaceMockRecorder) RetrieveAllConversationModelsWithResponse(ctx any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RetrieveAllConversationModelsWithResponse", reflect.TypeOf((*MockAPIClientInterface)(nil).RetrieveAllConversationModelsWithResponse), varargs...) +} + // RetrieveAllPresets mocks base method. func (m *MockAPIClientInterface) RetrieveAllPresets(ctx context.Context, reqEditors ...api.RequestEditorFn) (*http.Response, error) { m.ctrl.T.Helper() @@ -1602,6 +1842,46 @@ func (mr *MockAPIClientInterfaceMockRecorder) RetrieveAnalyticsRulesWithResponse return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RetrieveAnalyticsRulesWithResponse", reflect.TypeOf((*MockAPIClientInterface)(nil).RetrieveAnalyticsRulesWithResponse), varargs...) } +// RetrieveConversationModel mocks base method. +func (m *MockAPIClientInterface) RetrieveConversationModel(ctx context.Context, modelId string, reqEditors ...api.RequestEditorFn) (*http.Response, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, modelId} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "RetrieveConversationModel", varargs...) + ret0, _ := ret[0].(*http.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// RetrieveConversationModel indicates an expected call of RetrieveConversationModel. +func (mr *MockAPIClientInterfaceMockRecorder) RetrieveConversationModel(ctx, modelId any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, modelId}, reqEditors...) 
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RetrieveConversationModel", reflect.TypeOf((*MockAPIClientInterface)(nil).RetrieveConversationModel), varargs...) +} + +// RetrieveConversationModelWithResponse mocks base method. +func (m *MockAPIClientInterface) RetrieveConversationModelWithResponse(ctx context.Context, modelId string, reqEditors ...api.RequestEditorFn) (*api.RetrieveConversationModelResponse, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, modelId} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "RetrieveConversationModelWithResponse", varargs...) + ret0, _ := ret[0].(*api.RetrieveConversationModelResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// RetrieveConversationModelWithResponse indicates an expected call of RetrieveConversationModelWithResponse. +func (mr *MockAPIClientInterfaceMockRecorder) RetrieveConversationModelWithResponse(ctx, modelId any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, modelId}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RetrieveConversationModelWithResponse", reflect.TypeOf((*MockAPIClientInterface)(nil).RetrieveConversationModelWithResponse), varargs...) +} + // RetrieveMetrics mocks base method. func (m *MockAPIClientInterface) RetrieveMetrics(ctx context.Context, reqEditors ...api.RequestEditorFn) (*http.Response, error) { m.ctrl.T.Helper() @@ -1922,10 +2202,90 @@ func (mr *MockAPIClientInterfaceMockRecorder) UpdateCollectionWithResponse(ctx, return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateCollectionWithResponse", reflect.TypeOf((*MockAPIClientInterface)(nil).UpdateCollectionWithResponse), varargs...) } +// UpdateConversationModel mocks base method. 
+func (m *MockAPIClientInterface) UpdateConversationModel(ctx context.Context, modelId string, body api.UpdateConversationModelJSONRequestBody, reqEditors ...api.RequestEditorFn) (*http.Response, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, modelId, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "UpdateConversationModel", varargs...) + ret0, _ := ret[0].(*http.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpdateConversationModel indicates an expected call of UpdateConversationModel. +func (mr *MockAPIClientInterfaceMockRecorder) UpdateConversationModel(ctx, modelId, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, modelId, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateConversationModel", reflect.TypeOf((*MockAPIClientInterface)(nil).UpdateConversationModel), varargs...) +} + +// UpdateConversationModelWithBody mocks base method. +func (m *MockAPIClientInterface) UpdateConversationModelWithBody(ctx context.Context, modelId, contentType string, body io.Reader, reqEditors ...api.RequestEditorFn) (*http.Response, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, modelId, contentType, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "UpdateConversationModelWithBody", varargs...) + ret0, _ := ret[0].(*http.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpdateConversationModelWithBody indicates an expected call of UpdateConversationModelWithBody. +func (mr *MockAPIClientInterfaceMockRecorder) UpdateConversationModelWithBody(ctx, modelId, contentType, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, modelId, contentType, body}, reqEditors...) 
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateConversationModelWithBody", reflect.TypeOf((*MockAPIClientInterface)(nil).UpdateConversationModelWithBody), varargs...) +} + +// UpdateConversationModelWithBodyWithResponse mocks base method. +func (m *MockAPIClientInterface) UpdateConversationModelWithBodyWithResponse(ctx context.Context, modelId, contentType string, body io.Reader, reqEditors ...api.RequestEditorFn) (*api.UpdateConversationModelResponse, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, modelId, contentType, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "UpdateConversationModelWithBodyWithResponse", varargs...) + ret0, _ := ret[0].(*api.UpdateConversationModelResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpdateConversationModelWithBodyWithResponse indicates an expected call of UpdateConversationModelWithBodyWithResponse. +func (mr *MockAPIClientInterfaceMockRecorder) UpdateConversationModelWithBodyWithResponse(ctx, modelId, contentType, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, modelId, contentType, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateConversationModelWithBodyWithResponse", reflect.TypeOf((*MockAPIClientInterface)(nil).UpdateConversationModelWithBodyWithResponse), varargs...) +} + +// UpdateConversationModelWithResponse mocks base method. +func (m *MockAPIClientInterface) UpdateConversationModelWithResponse(ctx context.Context, modelId string, body api.UpdateConversationModelJSONRequestBody, reqEditors ...api.RequestEditorFn) (*api.UpdateConversationModelResponse, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, modelId, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "UpdateConversationModelWithResponse", varargs...) 
+ ret0, _ := ret[0].(*api.UpdateConversationModelResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpdateConversationModelWithResponse indicates an expected call of UpdateConversationModelWithResponse. +func (mr *MockAPIClientInterfaceMockRecorder) UpdateConversationModelWithResponse(ctx, modelId, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, modelId, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateConversationModelWithResponse", reflect.TypeOf((*MockAPIClientInterface)(nil).UpdateConversationModelWithResponse), varargs...) +} + // UpdateDocument mocks base method. -func (m *MockAPIClientInterface) UpdateDocument(ctx context.Context, collectionName, documentId string, body api.UpdateDocumentJSONRequestBody, reqEditors ...api.RequestEditorFn) (*http.Response, error) { +func (m *MockAPIClientInterface) UpdateDocument(ctx context.Context, collectionName, documentId string, params *api.UpdateDocumentParams, body api.UpdateDocumentJSONRequestBody, reqEditors ...api.RequestEditorFn) (*http.Response, error) { m.ctrl.T.Helper() - varargs := []any{ctx, collectionName, documentId, body} + varargs := []any{ctx, collectionName, documentId, params, body} for _, a := range reqEditors { varargs = append(varargs, a) } @@ -1936,16 +2296,16 @@ func (m *MockAPIClientInterface) UpdateDocument(ctx context.Context, collectionN } // UpdateDocument indicates an expected call of UpdateDocument. -func (mr *MockAPIClientInterfaceMockRecorder) UpdateDocument(ctx, collectionName, documentId, body any, reqEditors ...any) *gomock.Call { +func (mr *MockAPIClientInterfaceMockRecorder) UpdateDocument(ctx, collectionName, documentId, params, body any, reqEditors ...any) *gomock.Call { mr.mock.ctrl.T.Helper() - varargs := append([]any{ctx, collectionName, documentId, body}, reqEditors...) + varargs := append([]any{ctx, collectionName, documentId, params, body}, reqEditors...) 
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateDocument", reflect.TypeOf((*MockAPIClientInterface)(nil).UpdateDocument), varargs...) } // UpdateDocumentWithBody mocks base method. -func (m *MockAPIClientInterface) UpdateDocumentWithBody(ctx context.Context, collectionName, documentId, contentType string, body io.Reader, reqEditors ...api.RequestEditorFn) (*http.Response, error) { +func (m *MockAPIClientInterface) UpdateDocumentWithBody(ctx context.Context, collectionName, documentId string, params *api.UpdateDocumentParams, contentType string, body io.Reader, reqEditors ...api.RequestEditorFn) (*http.Response, error) { m.ctrl.T.Helper() - varargs := []any{ctx, collectionName, documentId, contentType, body} + varargs := []any{ctx, collectionName, documentId, params, contentType, body} for _, a := range reqEditors { varargs = append(varargs, a) } @@ -1956,16 +2316,16 @@ func (m *MockAPIClientInterface) UpdateDocumentWithBody(ctx context.Context, col } // UpdateDocumentWithBody indicates an expected call of UpdateDocumentWithBody. -func (mr *MockAPIClientInterfaceMockRecorder) UpdateDocumentWithBody(ctx, collectionName, documentId, contentType, body any, reqEditors ...any) *gomock.Call { +func (mr *MockAPIClientInterfaceMockRecorder) UpdateDocumentWithBody(ctx, collectionName, documentId, params, contentType, body any, reqEditors ...any) *gomock.Call { mr.mock.ctrl.T.Helper() - varargs := append([]any{ctx, collectionName, documentId, contentType, body}, reqEditors...) + varargs := append([]any{ctx, collectionName, documentId, params, contentType, body}, reqEditors...) return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateDocumentWithBody", reflect.TypeOf((*MockAPIClientInterface)(nil).UpdateDocumentWithBody), varargs...) } // UpdateDocumentWithBodyWithResponse mocks base method. 
-func (m *MockAPIClientInterface) UpdateDocumentWithBodyWithResponse(ctx context.Context, collectionName, documentId, contentType string, body io.Reader, reqEditors ...api.RequestEditorFn) (*api.UpdateDocumentResponse, error) { +func (m *MockAPIClientInterface) UpdateDocumentWithBodyWithResponse(ctx context.Context, collectionName, documentId string, params *api.UpdateDocumentParams, contentType string, body io.Reader, reqEditors ...api.RequestEditorFn) (*api.UpdateDocumentResponse, error) { m.ctrl.T.Helper() - varargs := []any{ctx, collectionName, documentId, contentType, body} + varargs := []any{ctx, collectionName, documentId, params, contentType, body} for _, a := range reqEditors { varargs = append(varargs, a) } @@ -1976,16 +2336,16 @@ func (m *MockAPIClientInterface) UpdateDocumentWithBodyWithResponse(ctx context. } // UpdateDocumentWithBodyWithResponse indicates an expected call of UpdateDocumentWithBodyWithResponse. -func (mr *MockAPIClientInterfaceMockRecorder) UpdateDocumentWithBodyWithResponse(ctx, collectionName, documentId, contentType, body any, reqEditors ...any) *gomock.Call { +func (mr *MockAPIClientInterfaceMockRecorder) UpdateDocumentWithBodyWithResponse(ctx, collectionName, documentId, params, contentType, body any, reqEditors ...any) *gomock.Call { mr.mock.ctrl.T.Helper() - varargs := append([]any{ctx, collectionName, documentId, contentType, body}, reqEditors...) + varargs := append([]any{ctx, collectionName, documentId, params, contentType, body}, reqEditors...) return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateDocumentWithBodyWithResponse", reflect.TypeOf((*MockAPIClientInterface)(nil).UpdateDocumentWithBodyWithResponse), varargs...) } // UpdateDocumentWithResponse mocks base method. 
-func (m *MockAPIClientInterface) UpdateDocumentWithResponse(ctx context.Context, collectionName, documentId string, body api.UpdateDocumentJSONRequestBody, reqEditors ...api.RequestEditorFn) (*api.UpdateDocumentResponse, error) { +func (m *MockAPIClientInterface) UpdateDocumentWithResponse(ctx context.Context, collectionName, documentId string, params *api.UpdateDocumentParams, body api.UpdateDocumentJSONRequestBody, reqEditors ...api.RequestEditorFn) (*api.UpdateDocumentResponse, error) { m.ctrl.T.Helper() - varargs := []any{ctx, collectionName, documentId, body} + varargs := []any{ctx, collectionName, documentId, params, body} for _, a := range reqEditors { varargs = append(varargs, a) } @@ -1996,9 +2356,9 @@ func (m *MockAPIClientInterface) UpdateDocumentWithResponse(ctx context.Context, } // UpdateDocumentWithResponse indicates an expected call of UpdateDocumentWithResponse. -func (mr *MockAPIClientInterfaceMockRecorder) UpdateDocumentWithResponse(ctx, collectionName, documentId, body any, reqEditors ...any) *gomock.Call { +func (mr *MockAPIClientInterfaceMockRecorder) UpdateDocumentWithResponse(ctx, collectionName, documentId, params, body any, reqEditors ...any) *gomock.Call { mr.mock.ctrl.T.Helper() - varargs := append([]any{ctx, collectionName, documentId, body}, reqEditors...) + varargs := append([]any{ctx, collectionName, documentId, params, body}, reqEditors...) return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateDocumentWithResponse", reflect.TypeOf((*MockAPIClientInterface)(nil).UpdateDocumentWithResponse), varargs...) 
} diff --git a/typesense/multi_search_test.go b/typesense/multi_search_test.go index da019e6..08a7afc 100644 --- a/typesense/multi_search_test.go +++ b/typesense/multi_search_test.go @@ -293,3 +293,53 @@ func TestMultiSearchOnApiClientError(t *testing.T) { _, err := client.MultiSearch.Perform(context.Background(), params, newMultiSearchBodyParams()) assert.NotNil(t, err) } + +func TestMultiSearchRAG(t *testing.T) { + server, client := newTestServerAndClient(func(w http.ResponseWriter, r *http.Request) { + validateRequestMetadata(t, r, "/multi_search?conversation=true&conversation_id=123&conversation_model_id=conv-1&q=can+you+suggest", http.MethodPost) + w.Header().Set("Content-Type", "application/json") + w.Write([]byte(` + { + "conversation": { + "answer": "Based on the context provided,...", + "conversation_history": [ + { + "user": "can you suggest an action series" + }, + { + "assistant": "Based on the context provided,..." + } + ], + "conversation_id": "abc", + "query": "can you suggest" + } + }`)) + }) + defer server.Close() + + res, err := client.MultiSearch.Perform(context.Background(), &api.MultiSearchParams{ + Q: pointer.String("can you suggest"), + Conversation: pointer.True(), + ConversationModelId: pointer.String("conv-1"), + ConversationId: pointer.String("123"), + }, api.MultiSearchSearchesParameter{ + Searches: newMultiSearchBodyParams().Searches, + }) + + assert.NoError(t, err) + assert.Equal(t, &api.MultiSearchResult{ + Conversation: &api.SearchResultConversation{ + Answer: "Based on the context provided,...", + ConversationHistory: []map[string]interface{}{ + { + "user": "can you suggest an action series", + }, + { + "assistant": "Based on the context provided,...", + }, + }, + ConversationId: "abc", + Query: "can you suggest", + }, + }, res) +} diff --git a/typesense/overrides_test.go b/typesense/overrides_test.go index 3278fb1..14318a0 100644 --- a/typesense/overrides_test.go +++ b/typesense/overrides_test.go @@ -18,8 +18,8 @@ import ( func 
createNewSearchOverrideSchema() *api.SearchOverrideSchema { return &api.SearchOverrideSchema{ Rule: api.SearchOverrideRule{ - Query: "apple", - Match: "exact", + Query: pointer.String("apple"), + Match: pointer.Any(api.Exact), }, Includes: &[]api.SearchOverrideInclude{ { diff --git a/typesense/test/analytics_events_test.go b/typesense/test/analytics_events_test.go new file mode 100644 index 0000000..2757338 --- /dev/null +++ b/typesense/test/analytics_events_test.go @@ -0,0 +1,31 @@ +//go:build integration +// +build integration + +package test + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + "github.com/typesense/typesense-go/v2/typesense/api" +) + +func TestAnalyticsEventsCreate(t *testing.T) { + eventName := newUUIDName("event") + collectionName := createNewCollection(t, "analytics-rules-collection") + createNewAnalyticsRule(t, collectionName, eventName) + + result, err := typesenseClient.Analytics().Events().Create(context.Background(), &api.AnalyticsEventCreateSchema{ + Type: "click", + Name: eventName, + Data: map[string]interface{}{ + "q": "nike shoes", + "doc_id": "1024", + "user_id": "111112", + }, + }) + + require.NoError(t, err) + require.True(t, result.Ok) +} diff --git a/typesense/test/analytics_rule_test.go b/typesense/test/analytics_rule_test.go new file mode 100644 index 0000000..b948012 --- /dev/null +++ b/typesense/test/analytics_rule_test.go @@ -0,0 +1,33 @@ +//go:build integration +// +build integration + +package test + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestAnalyticsRuleRetrieve(t *testing.T) { + eventName := newUUIDName("event") + collectionName := createNewCollection(t, "analytics-rules-collection") + expectedRule := createNewAnalyticsRule(t, collectionName, eventName) + + result, err := typesenseClient.Analytics().Rule(expectedRule.Name).Retrieve(context.Background()) + + require.NoError(t, err) + require.Equal(t, expectedRule, result) +} + +func 
TestAnalyticsRuleDelete(t *testing.T) { + eventName := newUUIDName("event") + collectionName := createNewCollection(t, "analytics-rules-collection") + expectedRule := createNewAnalyticsRule(t, collectionName, eventName) + + result, err := typesenseClient.Analytics().Rule(expectedRule.Name).Delete(context.Background()) + + require.NoError(t, err) + require.Equal(t, expectedRule.Name, result.Name) +} diff --git a/typesense/test/analytics_rules_test.go b/typesense/test/analytics_rules_test.go new file mode 100644 index 0000000..6606b8d --- /dev/null +++ b/typesense/test/analytics_rules_test.go @@ -0,0 +1,47 @@ +//go:build integration +// +build integration + +package test + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + "github.com/typesense/typesense-go/v2/typesense/api" +) + +func TestAnalyticsRulesUpsert(t *testing.T) { + collectionName := createNewCollection(t, "analytics-rules-collection") + eventName := newUUIDName("event") + ruleSchema := newAnalyticsRuleUpsertSchema(collectionName, eventName) + ruleName := newUUIDName("test-rule") + expectedData := newAnalyticsRule(ruleName, collectionName, eventName) + + result, err := typesenseClient.Analytics().Rules().Upsert(context.Background(), ruleName, ruleSchema) + + require.NoError(t, err) + require.Equal(t, expectedData, result) +} + +func TestAnalyticsRulesRetrieve(t *testing.T) { + collectionName := createNewCollection(t, "analytics-rules-collection") + eventName := newUUIDName("event") + expectedRule := createNewAnalyticsRule(t, collectionName, eventName) + + results, err := typesenseClient.Analytics().Rules().Retrieve(context.Background()) + + require.NoError(t, err) + require.True(t, len(results) >= 1, "number of rules is invalid") + + var result *api.AnalyticsRuleSchema + for _, rule := range results { + if rule.Name == expectedRule.Name { + result = rule + break + } + } + + require.NotNil(t, result, "rule not found") + require.Equal(t, expectedRule, result) +} diff --git 
a/typesense/test/dbhelpers_test.go b/typesense/test/dbhelpers_test.go index 8fea813..dde658b 100644 --- a/typesense/test/dbhelpers_test.go +++ b/typesense/test/dbhelpers_test.go @@ -56,6 +56,8 @@ func expectedNewCollection(name string) *api.CollectionResponse { Locale: pointer.String(""), Sort: pointer.False(), Drop: nil, + Store: pointer.True(), + Stem: pointer.False(), }, { Name: "num_employees", @@ -67,6 +69,8 @@ func expectedNewCollection(name string) *api.CollectionResponse { Locale: pointer.String(""), Sort: pointer.True(), Drop: nil, + Store: pointer.True(), + Stem: pointer.False(), }, { Name: "country", @@ -78,6 +82,8 @@ func expectedNewCollection(name string) *api.CollectionResponse { Locale: pointer.String(""), Sort: pointer.False(), Drop: nil, + Store: pointer.True(), + Stem: pointer.False(), }, }, EnableNestedFields: pointer.False(), @@ -187,17 +193,11 @@ func newKey() *api.ApiKey { type newSearchOverrideSchemaOption func(*api.SearchOverrideSchema) -func withOverrideRuleMatch(match api.SearchOverrideRuleMatch) newSearchOverrideSchemaOption { - return func(o *api.SearchOverrideSchema) { - o.Rule.Match = match - } -} - func newSearchOverrideSchema() *api.SearchOverrideSchema { schema := &api.SearchOverrideSchema{ Rule: api.SearchOverrideRule{ - Query: "apple", - Match: "exact", + Query: pointer.String("apple"), + Match: pointer.Any(api.Exact), }, Includes: &[]api.SearchOverrideInclude{ { @@ -215,6 +215,8 @@ func newSearchOverrideSchema() *api.SearchOverrideSchema { }, }, RemoveMatchedTokens: pointer.True(), + FilterCuratedHits: pointer.False(), + StopProcessing: pointer.True(), } return schema @@ -224,8 +226,8 @@ func newSearchOverride(overrideID string) *api.SearchOverride { return &api.SearchOverride{ Id: pointer.String(overrideID), Rule: api.SearchOverrideRule{ - Query: "apple", - Match: "exact", + Query: pointer.String("apple"), + Match: pointer.Any(api.Exact), }, Includes: &[]api.SearchOverrideInclude{ { @@ -243,6 +245,8 @@ func 
newSearchOverride(overrideID string) *api.SearchOverride { }, }, RemoveMatchedTokens: pointer.True(), + FilterCuratedHits: pointer.False(), + StopProcessing: pointer.True(), } } @@ -322,6 +326,53 @@ func newPresetFromMultiSearchSearchesParameter(presetName string) *api.PresetSch return preset } +func newAnalyticsRuleUpsertSchema(collectionName string, eventName string) *api.AnalyticsRuleUpsertSchema { + return &api.AnalyticsRuleUpsertSchema{ + Type: "counter", + Params: api.AnalyticsRuleParameters{ + Source: api.AnalyticsRuleParametersSource{ + Collections: []string{"products"}, + Events: &[]struct { + Name string "json:\"name\"" + Type string "json:\"type\"" + Weight float32 "json:\"weight\"" + }{ + {Type: "click", Weight: 1, Name: eventName}, + }, + }, + Destination: api.AnalyticsRuleParametersDestination{ + Collection: collectionName, + CounterField: pointer.String("num_employees"), + }, + Limit: pointer.Int(9999), + }, + } +} + +func newAnalyticsRule(ruleName string, collectionName string, eventName string) *api.AnalyticsRuleSchema { + return &api.AnalyticsRuleSchema{ + Name: ruleName, + Type: "counter", + Params: api.AnalyticsRuleParameters{ + Source: api.AnalyticsRuleParametersSource{ + Collections: []string{"products"}, + Events: &[]struct { + Name string "json:\"name\"" + Type string "json:\"type\"" + Weight float32 "json:\"weight\"" + }{ + {Type: "click", Weight: 1, Name: eventName}, + }, + }, + Destination: api.AnalyticsRuleParametersDestination{ + Collection: collectionName, + CounterField: pointer.String("num_employees"), + }, + Limit: pointer.Int(9999), + }, + } +} + func createNewCollection(t *testing.T, namePrefix string) string { t.Helper() collectionName := newUUIDName(namePrefix) @@ -334,7 +385,7 @@ func createNewCollection(t *testing.T, namePrefix string) string { func createDocument(t *testing.T, collectionName string, document *testDocument) { t.Helper() - _, err := 
typesenseClient.Collection(collectionName).Documents().Create(context.Background(), document) + _, err := typesenseClient.Collection(collectionName).Documents().Create(context.Background(), document, &api.DocumentIndexParameters{}) require.NoError(t, err) } @@ -363,6 +414,17 @@ func createNewPreset(t *testing.T, presetValueIsFromSearchParameters ...bool) (s return presetName, result } +func createNewAnalyticsRule(t *testing.T, collectionName string, eventName string) *api.AnalyticsRuleSchema { + t.Helper() + ruleSchema := newAnalyticsRuleUpsertSchema(collectionName, eventName) + ruleName := newUUIDName("test-rule") + + result, err := typesenseClient.Analytics().Rules().Upsert(context.Background(), ruleName, ruleSchema) + + require.NoError(t, err) + return result +} + func retrieveDocuments(t *testing.T, collectionName string, docIDs ...string) []map[string]interface{} { results := make([]map[string]interface{}, len(docIDs)) for i, docID := range docIDs { diff --git a/typesense/test/document_test.go b/typesense/test/document_test.go index ce21af1..77516b9 100644 --- a/typesense/test/document_test.go +++ b/typesense/test/document_test.go @@ -9,6 +9,7 @@ import ( "github.com/stretchr/testify/require" "github.com/typesense/typesense-go/v2/typesense" + "github.com/typesense/typesense-go/v2/typesense/api" ) func TestDocumentRetrieveGeneric(t *testing.T) { @@ -41,7 +42,7 @@ func TestDocumentUpdate(t *testing.T) { createDocument(t, collectionName, document) document.CompanyName = newCompanyName - typesenseClient.Collection(collectionName).Document("123").Update(context.Background(), document) + typesenseClient.Collection(collectionName).Document("123").Update(context.Background(), document, &api.DocumentIndexParameters{}) result, err := typesenseClient.Collection(collectionName).Document("123").Retrieve(context.Background()) diff --git a/typesense/test/documents_test.go b/typesense/test/documents_test.go index f13769a..53cb3e2 100644 --- a/typesense/test/documents_test.go 
+++ b/typesense/test/documents_test.go @@ -19,7 +19,7 @@ func TestDocumentCreate(t *testing.T) { expectedResult := newDocumentResponse("123") document := newDocument("123") - result, err := typesenseClient.Collection(collectionName).Documents().Create(context.Background(), document) + result, err := typesenseClient.Collection(collectionName).Documents().Create(context.Background(), document, &api.DocumentIndexParameters{}) require.NoError(t, err) require.Equal(t, expectedResult, result) @@ -35,7 +35,7 @@ func TestDocumentUpsertNewDocument(t *testing.T) { expectedResult := newDocumentResponse("123") document := newDocument("123") - result, err := typesenseClient.Collection(collectionName).Documents().Upsert(context.Background(), document) + result, err := typesenseClient.Collection(collectionName).Documents().Upsert(context.Background(), document, &api.DocumentIndexParameters{}) require.NoError(t, err) require.Equal(t, expectedResult, result) @@ -52,12 +52,12 @@ func TestDocumentUpsertExistingDocument(t *testing.T) { expectedResult := newDocumentResponse("123", withResponseCompanyName(newCompanyName)) document := newDocument("123") - _, err := typesenseClient.Collection(collectionName).Documents().Create(context.Background(), document) + _, err := typesenseClient.Collection(collectionName).Documents().Create(context.Background(), document, &api.DocumentIndexParameters{}) require.NoError(t, err) document.CompanyName = newCompanyName - result, err := typesenseClient.Collection(collectionName).Documents().Upsert(context.Background(), document) + result, err := typesenseClient.Collection(collectionName).Documents().Upsert(context.Background(), document, &api.DocumentIndexParameters{}) require.NoError(t, err) require.Equal(t, expectedResult, result) @@ -73,12 +73,12 @@ func TestDocumentsDelete(t *testing.T) { document := newDocument("123") document.NumEmployees = 5000 - _, err := typesenseClient.Collection(collectionName).Documents().Create(context.Background(), 
document) + _, err := typesenseClient.Collection(collectionName).Documents().Create(context.Background(), document, &api.DocumentIndexParameters{}) require.NoError(t, err) document = newDocument("124") document.NumEmployees = 7000 - _, err = typesenseClient.Collection(collectionName).Documents().Create(context.Background(), document) + _, err = typesenseClient.Collection(collectionName).Documents().Create(context.Background(), document, &api.DocumentIndexParameters{}) require.NoError(t, err) filter := &api.DeleteDocumentsParams{FilterBy: pointer.String("num_employees:>6500"), BatchSize: pointer.Int(100)} @@ -106,7 +106,7 @@ func TestDocumentsExport(t *testing.T) { createDocument(t, collectionName, newDocument("125", withCompanyName("Company2"))) createDocument(t, collectionName, newDocument("127", withCompanyName("Company3"))) - body, err := typesenseClient.Collection(collectionName).Documents().Export(context.Background()) + body, err := typesenseClient.Collection(collectionName).Documents().Export(context.Background(), &api.ExportDocumentsParams{}) require.NoError(t, err) defer body.Close() diff --git a/typesense/test/import_test.go b/typesense/test/import_test.go index 6ba8fe5..c30a21a 100644 --- a/typesense/test/import_test.go +++ b/typesense/test/import_test.go @@ -29,7 +29,7 @@ func TestDocumentsImport(t *testing.T) { newDocument("127", withCompanyName("Company3")), } - params := &api.ImportDocumentsParams{Action: pointer.String("create")} + params := &api.ImportDocumentsParams{Action: pointer.Any(api.Create)} responses, err := typesenseClient.Collection(collectionName).Documents().Import(context.Background(), documents, params) require.NoError(t, err) @@ -58,7 +58,7 @@ func TestDocumentsImportJsonl(t *testing.T) { require.NoError(t, buffer.WriteByte('\n')) require.NoError(t, je.Encode(newDocument("127", withCompanyName("Company3")))) - params := &api.ImportDocumentsParams{Action: pointer.String("create")} + params := &api.ImportDocumentsParams{Action: 
pointer.Any(api.Create)} responses, err := typesenseClient.Collection(collectionName).Documents().ImportJsonl(context.Background(), &buffer, params) require.NoError(t, err) diff --git a/typesense/test/multi_search_test.go b/typesense/test/multi_search_test.go index bfda00f..de17e96 100644 --- a/typesense/test/multi_search_test.go +++ b/typesense/test/multi_search_test.go @@ -23,7 +23,7 @@ func TestMultiSearch(t *testing.T) { newDocument("131", withCompanyName("Stark Industries 5"), withNumEmployees(1000)), } - params := &api.ImportDocumentsParams{Action: pointer.String("create")} + params := &api.ImportDocumentsParams{Action: pointer.Any(api.Create)} _, err := typesenseClient.Collection(collectionName1).Documents().Import(context.Background(), documents, params) require.NoError(t, err) @@ -95,7 +95,7 @@ func TestMultiSearchGroupBy(t *testing.T) { newDocument("3", withCompanyName("Company 4"), withNumEmployees(500), withCountry("England")), } - params := &api.ImportDocumentsParams{Action: pointer.String("create")} + params := &api.ImportDocumentsParams{Action: pointer.Any(api.Create)} _, err := typesenseClient.Collection(collectionName1).Documents().Import(context.Background(), documents, params) require.NoError(t, err) @@ -174,7 +174,7 @@ func TestMultiSearchVectorQuery(t *testing.T) { Vec: []float32{0.45, 0.222, 0.021, 0.1323}, } - _, err = typesenseClient.Collection("embeddings").Documents().Create(context.Background(), vecDoc) + _, err = typesenseClient.Collection("embeddings").Documents().Create(context.Background(), vecDoc, &api.DocumentIndexParameters{}) require.NoError(t, err) searchParams := &api.MultiSearchParams{} @@ -207,7 +207,7 @@ func TestMultiSearchWithPreset(t *testing.T) { newDocument("131", withCompanyName("Stark Industries 5"), withNumEmployees(1000)), } - params := &api.ImportDocumentsParams{Action: pointer.String("create")} + params := &api.ImportDocumentsParams{Action: pointer.Any(api.Create)} _, err := 
typesenseClient.Collection(collectionName1).Documents().Import(context.Background(), documents, params) require.NoError(t, err) @@ -288,7 +288,7 @@ func TestMultiSearchWithStopwords(t *testing.T) { newDocument("129", withCompanyName("Stark Industries 4"), withNumEmployees(1500)), } - params := &api.ImportDocumentsParams{Action: pointer.String("create")} + params := &api.ImportDocumentsParams{Action: pointer.Any(api.Create)} _, err := typesenseClient.Collection(collectionName1).Documents().Import(context.Background(), documents, params) require.NoError(t, err) diff --git a/typesense/test/overrides_test.go b/typesense/test/overrides_test.go index 14e6abf..54cb7a0 100644 --- a/typesense/test/overrides_test.go +++ b/typesense/test/overrides_test.go @@ -10,6 +10,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/typesense/typesense-go/v2/typesense/api" + "github.com/typesense/typesense-go/v2/typesense/api/pointer" ) func TestSearchOverrideUpsertNewOverride(t *testing.T) { @@ -33,14 +34,14 @@ func TestSearchOverrideUpsertExistingOverride(t *testing.T) { collectionName := createNewCollection(t, "companies") overrideID := newUUIDName("customize-apple") expectedResult := newSearchOverride(overrideID) - expectedResult.Rule.Match = "contains" + expectedResult.Rule.Match = pointer.Any(api.Contains) body := newSearchOverrideSchema() - body.Rule.Match = "exact" + body.Rule.Match = pointer.Any(api.Exact) _, err := typesenseClient.Collection(collectionName).Overrides().Upsert(context.Background(), overrideID, body) require.NoError(t, err) - body.Rule.Match = "contains" + body.Rule.Match = pointer.Any(api.Contains) result, err := typesenseClient.Collection(collectionName).Overrides().Upsert(context.Background(), overrideID, body) diff --git a/typesense/test/search_test.go b/typesense/test/search_test.go index 4578105..1c13d33 100644 --- a/typesense/test/search_test.go +++ b/typesense/test/search_test.go @@ -22,7 +22,7 @@ func 
TestCollectionSearch(t *testing.T) { newDocument("131", withCompanyName("Stark Industries 5"), withNumEmployees(1000)), } - params := &api.ImportDocumentsParams{Action: pointer.String("create")} + params := &api.ImportDocumentsParams{Action: pointer.Any(api.Create)} _, err := typesenseClient.Collection(collectionName).Documents().Import(context.Background(), documents, params) require.NoError(t, err) @@ -67,7 +67,7 @@ func TestCollectionSearchRange(t *testing.T) { newDocument("129", withCompanyName("Stark Industries 4"), withNumEmployees(500)), } - params := &api.ImportDocumentsParams{Action: pointer.String("create")} + params := &api.ImportDocumentsParams{Action: pointer.Any(api.Create)} _, err := typesenseClient.Collection(collectionName).Documents().Import(context.Background(), documents, params) require.NoError(t, err) @@ -131,7 +131,7 @@ func TestCollectionGroupByStringArray(t *testing.T) { }, } - params := &api.ImportDocumentsParams{Action: pointer.String("create")} + params := &api.ImportDocumentsParams{Action: pointer.Any(api.Create)} _, err = typesenseClient.Collection(collectionName).Documents().Import(context.Background(), documents, params) require.NoError(t, err) @@ -160,7 +160,7 @@ func TestCollectionSearchWithPreset(t *testing.T) { newDocument("131", withCompanyName("Stark Industries 5"), withNumEmployees(1000)), } - params := &api.ImportDocumentsParams{Action: pointer.String("create")} + params := &api.ImportDocumentsParams{Action: pointer.Any(api.Create)} _, err := typesenseClient.Collection(collectionName).Documents().Import(context.Background(), documents, params) require.NoError(t, err) @@ -214,7 +214,7 @@ func TestCollectionSearchWithStopwords(t *testing.T) { newDocument("129", withCompanyName("Stark Industries 4"), withNumEmployees(2000)), } - params := &api.ImportDocumentsParams{Action: pointer.String("create")} + params := &api.ImportDocumentsParams{Action: pointer.Any(api.Create)} _, err := 
typesenseClient.Collection(collectionName).Documents().Import(context.Background(), documents, params) require.NoError(t, err)