From a0b9b62e17feb8ffe4ea0677bfc669e0446656ac Mon Sep 17 00:00:00 2001 From: jaeyson Date: Sun, 12 Apr 2026 09:15:38 +0800 Subject: [PATCH 01/13] fix test case not passing on Typesense v30.0 --- .github/workflows/ci_v26.0.yml | 8 +- .github/workflows/ci_v27.0.yml | 8 +- .github/workflows/ci_v27.1.yml | 8 +- .github/workflows/ci_v28.0.yml | 8 +- .github/workflows/ci_v29.0.yml | 10 +- .github/workflows/ci_v30.0.yml | 172 ++++ .github/workflows/llm.yml | 23 +- .iex.exs | 1 + CHANGELOG.md | 9 + README.md | 7 +- docker-compose.yml | 2 +- lib/open_api_typesense/client.ex | 3 - .../operations/analytics.ex | 13 +- .../operations/curation_sets.ex | 250 +++++ .../operations/documents.ex | 14 +- lib/open_api_typesense/operations/synonyms.ex | 246 +++++ .../schemas/curation_exclude.ex | 18 + .../schemas/curation_include.ex | 18 + .../schemas/curation_item_create_schema.ex | 96 ++ .../schemas/curation_item_delete_schema.ex | 18 + .../schemas/curation_item_schema.ex | 96 ++ .../schemas/curation_rule.ex | 23 + .../schemas/curation_set_create_schema.ex | 57 ++ .../schemas/curation_set_delete_schema.ex | 18 + .../schemas/curation_set_schema.ex | 62 ++ .../schemas/facet_counts.ex | 4 +- lib/open_api_typesense/schemas/field.ex | 3 + .../multi_search_collection_parameters.ex | 19 +- .../schemas/search_parameters.ex | 19 +- .../schemas/search_request_params.ex | 4 +- .../schemas/synonym_item_delete_schema.ex | 18 + .../schemas/synonym_item_schema.ex | 30 + .../schemas/synonym_item_upsert_schema.ex | 23 + .../schemas/synonym_set_create_schema.ex | 54 + .../schemas/synonym_set_delete_schema.ex | 18 + .../schemas/synonym_set_schema.ex | 54 + mix.exs | 2 +- priv/open_api.yml | 965 ++++++++++++------ test/connection_test.exs | 18 +- test/custom_client_test.exs | 6 +- test/default_client_test.exs | 6 +- test/operations/analytics_test.exs | 249 ++++- test/operations/collections_test.exs | 37 +- test/operations/conversations_test.exs | 10 +- test/operations/curation_sets_test.exs | 387 +++++++ 
test/operations/curation_test.exs | 43 + test/operations/debug_test.exs | 4 +- test/operations/documents_test.exs | 67 +- test/operations/health_test.exs | 8 +- test/operations/keys_test.exs | 10 +- test/operations/operations_test.exs | 16 +- test/operations/override_test.exs | 24 +- test/operations/presets_test.exs | 8 +- test/operations/stemming_test.exs | 16 +- test/operations/stopwords_test.exs | 10 +- test/operations/synonyms_test.exs | 328 +++++- 56 files changed, 3125 insertions(+), 523 deletions(-) create mode 100644 .github/workflows/ci_v30.0.yml create mode 100644 lib/open_api_typesense/operations/curation_sets.ex create mode 100644 lib/open_api_typesense/schemas/curation_exclude.ex create mode 100644 lib/open_api_typesense/schemas/curation_include.ex create mode 100644 lib/open_api_typesense/schemas/curation_item_create_schema.ex create mode 100644 lib/open_api_typesense/schemas/curation_item_delete_schema.ex create mode 100644 lib/open_api_typesense/schemas/curation_item_schema.ex create mode 100644 lib/open_api_typesense/schemas/curation_rule.ex create mode 100644 lib/open_api_typesense/schemas/curation_set_create_schema.ex create mode 100644 lib/open_api_typesense/schemas/curation_set_delete_schema.ex create mode 100644 lib/open_api_typesense/schemas/curation_set_schema.ex create mode 100644 lib/open_api_typesense/schemas/synonym_item_delete_schema.ex create mode 100644 lib/open_api_typesense/schemas/synonym_item_schema.ex create mode 100644 lib/open_api_typesense/schemas/synonym_item_upsert_schema.ex create mode 100644 lib/open_api_typesense/schemas/synonym_set_create_schema.ex create mode 100644 lib/open_api_typesense/schemas/synonym_set_delete_schema.ex create mode 100644 lib/open_api_typesense/schemas/synonym_set_schema.ex create mode 100644 test/operations/curation_sets_test.exs diff --git a/.github/workflows/ci_v26.0.yml b/.github/workflows/ci_v26.0.yml index 4d97112..1657997 100644 --- a/.github/workflows/ci_v26.0.yml +++ 
b/.github/workflows/ci_v26.0.yml @@ -52,10 +52,10 @@ jobs: steps: - name: Checkout repo - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd - name: Check for misspellings - uses: codespell-project/actions-codespell@v2 + uses: codespell-project/actions-codespell@8f01853be192eb0f849a5c7d721450e7a467c579 - name: Start Typesense run: | @@ -93,13 +93,13 @@ jobs: echo "Typesense healthcheck elapsed: ${elapsed}s" - name: Setup Elixir/OTP - uses: erlef/setup-beam@v1 + uses: erlef/setup-beam@fc68ffb90438ef2936bbb3251622353b3dcb2f93 with: otp-version: ${{matrix.otp}} elixir-version: ${{matrix.elixir}} - name: Cache dependencies/builds - uses: actions/cache@v5 + uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 with: path: | deps diff --git a/.github/workflows/ci_v27.0.yml b/.github/workflows/ci_v27.0.yml index 8aac74d..6af956b 100644 --- a/.github/workflows/ci_v27.0.yml +++ b/.github/workflows/ci_v27.0.yml @@ -52,10 +52,10 @@ jobs: steps: - name: Checkout repo - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd - name: Check for misspellings - uses: codespell-project/actions-codespell@v2 + uses: codespell-project/actions-codespell@8f01853be192eb0f849a5c7d721450e7a467c579 - name: Start Typesense run: | @@ -93,13 +93,13 @@ jobs: echo "Typesense healthcheck elapsed: ${elapsed}s" - name: Setup Elixir/OTP - uses: erlef/setup-beam@v1 + uses: erlef/setup-beam@fc68ffb90438ef2936bbb3251622353b3dcb2f93 with: otp-version: ${{matrix.otp}} elixir-version: ${{matrix.elixir}} - name: Cache dependencies/builds - uses: actions/cache@v5 + uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 with: path: | deps diff --git a/.github/workflows/ci_v27.1.yml b/.github/workflows/ci_v27.1.yml index c710fc5..735cac1 100644 --- a/.github/workflows/ci_v27.1.yml +++ b/.github/workflows/ci_v27.1.yml @@ -52,10 +52,10 @@ jobs: steps: - name: Checkout repo - uses: actions/checkout@v6 + uses: 
actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd - name: Check for misspellings - uses: codespell-project/actions-codespell@v2 + uses: codespell-project/actions-codespell@8f01853be192eb0f849a5c7d721450e7a467c579 - name: Start Typesense run: | @@ -93,13 +93,13 @@ jobs: echo "Typesense healthcheck elapsed: ${elapsed}s" - name: Setup Elixir/OTP - uses: erlef/setup-beam@v1 + uses: erlef/setup-beam@fc68ffb90438ef2936bbb3251622353b3dcb2f93 with: otp-version: ${{matrix.otp}} elixir-version: ${{matrix.elixir}} - name: Cache dependencies/builds - uses: actions/cache@v5 + uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 with: path: | deps diff --git a/.github/workflows/ci_v28.0.yml b/.github/workflows/ci_v28.0.yml index 6074bca..6f90fcb 100644 --- a/.github/workflows/ci_v28.0.yml +++ b/.github/workflows/ci_v28.0.yml @@ -48,10 +48,10 @@ jobs: steps: - name: Checkout repo - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd - name: Check for misspellings - uses: codespell-project/actions-codespell@v2 + uses: codespell-project/actions-codespell@8f01853be192eb0f849a5c7d721450e7a467c579 - name: Start Typesense run: | @@ -89,13 +89,13 @@ jobs: echo "Typesense healthcheck elapsed: ${elapsed}s" - name: Setup Elixir/OTP - uses: erlef/setup-beam@v1 + uses: erlef/setup-beam@fc68ffb90438ef2936bbb3251622353b3dcb2f93 with: otp-version: ${{matrix.otp}} elixir-version: ${{matrix.elixir}} - name: Cache dependencies/builds - uses: actions/cache@v5 + uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 with: path: | deps diff --git a/.github/workflows/ci_v29.0.yml b/.github/workflows/ci_v29.0.yml index e97f285..b0a91ff 100644 --- a/.github/workflows/ci_v29.0.yml +++ b/.github/workflows/ci_v29.0.yml @@ -41,7 +41,7 @@ jobs: - typesense: '29.0' otp: '28' elixir: '1.18' - lint: true + lint: false services: typesense: @@ -49,10 +49,10 @@ jobs: steps: - name: Checkout repo - uses: actions/checkout@v6 + uses: 
actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd - name: Check for misspellings - uses: codespell-project/actions-codespell@v2 + uses: codespell-project/actions-codespell@8f01853be192eb0f849a5c7d721450e7a467c579 - name: Start Typesense run: | @@ -90,13 +90,13 @@ jobs: echo "Typesense healthcheck elapsed: ${elapsed}s" - name: Setup Elixir/OTP - uses: erlef/setup-beam@v1 + uses: erlef/setup-beam@fc68ffb90438ef2936bbb3251622353b3dcb2f93 with: otp-version: ${{matrix.otp}} elixir-version: ${{matrix.elixir}} - name: Cache dependencies/builds - uses: actions/cache@v5 + uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 with: path: | deps diff --git a/.github/workflows/ci_v30.0.yml b/.github/workflows/ci_v30.0.yml new file mode 100644 index 0000000..1a1bd97 --- /dev/null +++ b/.github/workflows/ci_v30.0.yml @@ -0,0 +1,172 @@ +name: CI v30.0 + +on: + workflow_call: + pull_request: + branches: ["main"] + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + # https://docs.github.com/en/actions/managing-workflow-runs/skipping-workflow-runs + # Workflows that would otherwise be triggered using `on: push` or + # `on: pull_request` won't be triggered if you add any of the + # following strings to the commit message in a push, or the HEAD + # commit of a pull request: + # - [skip ci] + # - [ci skip] + # - [no ci] + # - [skip actions] + # - [actions skip] + + test: + if: ${{ (github.event_name == 'push' || github.event_name == 'pull_request') && github.repository == 'jaeyson/open_api_typesense' }} + runs-on: ubuntu-latest + environment: review + + env: + MIX_ENV: test + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + LATEST_TYPESENSE: '30.0' + + strategy: + matrix: + include: + - typesense: '30.0' + otp: '25' + elixir: '1.14' + lint: false + - typesense: '30.0' + otp: '28' + elixir: '1.18' + lint: true + + services: + typesense: + image: typesense/typesense:${{ matrix.typesense }} + + steps: + - name: Checkout repo + 
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd + + - name: Check for misspellings + uses: codespell-project/actions-codespell@8f01853be192eb0f849a5c7d721450e7a467c579 + + - name: Start Typesense + run: | + docker run -id \ + -p 8108:8108 \ + --name typesense \ + -v /tmp/typesense-data:/data \ + -v /tmp/typesense-analytics-data:/analytics-data \ + typesense/typesense:${{ matrix.typesense}} \ + --api-key xyz \ + --data-dir /data \ + --enable-search-analytics=true \ + --analytics-dir=/analytics-data \ + --analytics-flush-interval=60 \ + --analytics-minute-rate-limit=100 \ + --enable-cors + + - name: Wait for Typesense to be healthy + shell: bash + run: | + start_time=$(date +%s) + timeout=30 + counter=0 + until curl -s http://localhost:8108/health | grep -q '"ok":true'; do + if [ $counter -eq $timeout ]; then + echo "Timed out waiting for Typesense to be healthy" + exit 1 + fi + echo "Waiting for Typesense to be healthy..." + sleep 1 + counter=$((counter + 1)) + done + end_time=$(date +%s) + elapsed=$((end_time - start_time)) + echo "Typesense healthcheck elapsed: ${elapsed}s" + + - name: Setup Elixir/OTP + uses: erlef/setup-beam@fc68ffb90438ef2936bbb3251622353b3dcb2f93 + with: + otp-version: ${{matrix.otp}} + elixir-version: ${{matrix.elixir}} + + - name: Cache dependencies/builds + uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 + with: + path: | + deps + _build + key: ${{ runner.os }}-typesense-${{ matrix.typesense}}-${{ matrix.otp}}-${{ matrix.elixir}}-mix-${{ hashFiles('**/mix.lock') }} + restore-keys: | + ${{ runner.os }}-typesense-${{ matrix.typesense}}-${{ matrix.otp }}-${{ matrix.elixir }}-mix- + + - name: Install Dependencies + run: | + mix local.rebar --if-missing + mix local.hex --if-missing + mix deps.get + + - name: Find unused dependencies + run: mix deps.unlock --check-unused + if: ${{ matrix.lint }} + + - name: Check retired dependencies + run: mix hex.audit + if: ${{ matrix.lint }} + + - name: Security audit of 
dependencies + run: mix deps.audit + if: ${{ matrix.lint }} + + - name: Compile project + run: mix compile --all-warnings + + - name: Run static analysis + run: mix credo --all --strict + if: ${{ matrix.lint }} + + - name: Check format files + run: mix format --check-formatted + if: ${{ matrix.lint }} + + - name: Restore PLT cache + id: plt_cache + uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 + with: + key: | + plt-${{ runner.os }}-${{ matrix.otp }}-${{ matrix.elixir }}-${{ hashFiles('**/mix.lock') }} + restore-keys: | + plt-${{ runner.os }}-${{ matrix.otp }}-${{ matrix.elixir }}- + path: | + priv/plts + if: ${{ matrix.lint }} + + - name: Create PLTs + if: ${{ steps.plt_cache.outputs.cache-hit != 'true' && matrix.lint }} + run: mix dialyzer --plt + + - name: Save PLT cache + id: plt_cache_save + uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 + if: ${{ steps.plt_cache.outputs.cache-hit != 'true' && matrix.lint }} + with: + key: | + plt-${{ runner.os }}-${{ matrix.otp }}-${{ matrix.elixir }}-${{ hashFiles('**/mix.lock') }} + path: | + priv/plts + + - name: Dialyzer + run: mix dialyzer --format github --format dialyxir + if: ${{ matrix.lint }} + + - name: Run tests + run: mix test --only ${{ matrix.typesense }}:true --only nls:true --trace + + - name: Post test coverage to Coveralls + run: mix coveralls.github + if: ${{ matrix.lint && github.event_name == 'push' && github.ref == 'refs/heads/main' }} diff --git a/.github/workflows/llm.yml b/.github/workflows/llm.yml index a055683..7578a56 100644 --- a/.github/workflows/llm.yml +++ b/.github/workflows/llm.yml @@ -9,7 +9,7 @@ on: jobs: ci_workflow: - uses: ./.github/workflows/ci_v29.0.yml + uses: ./.github/workflows/ci_v30.0.yml secrets: inherit llm: @@ -24,6 +24,21 @@ jobs: strategy: matrix: include: + - typesense: '30.1' + otp: '25' + elixir: '1.14' + - typesense: '30.0' + otp: '25' + elixir: '1.14' + - typesense: '29.0' + otp: '25'
+ elixir: '1.14' + - typesense: '30.1' + otp: '28' + elixir: '1.18' + - typesense: '30.0' + otp: '28' + elixir: '1.18' - typesense: '29.0' otp: '28' elixir: '1.18' @@ -69,16 +84,16 @@ jobs: echo "Typesense healthcheck elapsed: ${elapsed}s" - name: Checkout repo - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd - name: Setup Elixir/OTP - uses: erlef/setup-beam@v1 + uses: erlef/setup-beam@fc68ffb90438ef2936bbb3251622353b3dcb2f93 with: otp-version: ${{matrix.otp}} elixir-version: ${{matrix.elixir}} - name: Cache typesense-data - uses: actions/cache@v5 + uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 with: path: | typesense-data diff --git a/.iex.exs b/.iex.exs index 1179fba..1cdeaa4 100644 --- a/.iex.exs +++ b/.iex.exs @@ -5,6 +5,7 @@ alias OpenApiTypesense.Analytics alias OpenApiTypesense.Collections alias OpenApiTypesense.Conversations alias OpenApiTypesense.Curation +alias OpenApiTypesense.CurationSets alias OpenApiTypesense.Debug alias OpenApiTypesense.Documents alias OpenApiTypesense.Health diff --git a/CHANGELOG.md b/CHANGELOG.md index 7d27806..6dab098 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## major.minor.patch (yyyy.mm.dd) +## 1.2.0 ??? + +### Deprecated + +* `Synonyms.upsert_search_synonym/4` in favor of `upsert_synonym_set/3` or `upsert_synonym_set_item/4` when using Typesense v30.0+. +* `Synonyms.get_search_synonyms/2` in favor of `retrieve_synonym_set_items/2` or `retrieve_synonym_sets/1` when using Typesense v30.0+. +* `Synonyms.get_search_synonym/3` in favor of `retrieve_synonym_set/2` or `retrieve_synonym_set_item/3` when using Typesense v30.0+. +* `Synonyms.delete_search_synonym/3` in favor of `delete_synonym_set/2` or `delete_synonym_set_item/3` when using Typesense v30.0+. 
+ ## 1.1.0 (2026.04.06) ### Added diff --git a/README.md b/README.md index de46fe9..0fa050a 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,7 @@ Restful client for Typesense with adherence to Open API spec 3 (formerly Swagger [![Codacy Badge](https://app.codacy.com/project/badge/Grade/965dd3f8866d49c3b3e82edd0f6270cb)](https://app.codacy.com/gh/jaeyson/open_api_typesense/dashboard?utm_source=gh&utm_medium=referral&utm_content=&utm_campaign=Badge_grade) [![codescenene Average Code Health](https://codescene.io/projects/63240/status-badges/average-code-health)](https://codescene.io/projects/63240) +[![CI v30.0](https://github.com/jaeyson/open_api_typesense/actions/workflows/ci_v30.0.yml/badge.svg)](https://github.com/jaeyson/open_api_typesense/actions/workflows/ci_v30.0.yml) [![CI v29.0](https://github.com/jaeyson/open_api_typesense/actions/workflows/ci_v29.0.yml/badge.svg)](https://github.com/jaeyson/open_api_typesense/actions/workflows/ci_v29.0.yml) [![CI v28.0](https://github.com/jaeyson/open_api_typesense/actions/workflows/ci_v28.0.yml/badge.svg)](https://github.com/jaeyson/open_api_typesense/actions/workflows/ci_v28.0.yml) [![CI v27.1](https://github.com/jaeyson/open_api_typesense/actions/workflows/ci_v27.1.yml/badge.svg)](https://github.com/jaeyson/open_api_typesense/actions/workflows/ci_v27.1.yml) @@ -16,9 +17,9 @@ Restful client for Typesense with adherence to Open API spec 3 (formerly Swagger [![Dependabot](https://img.shields.io/badge/Dependabot-enabled-green)](https://github.com/jaeyson/open_api_typesense/pulls/app%2Fdependabot) [![Hex.pm](https://img.shields.io/hexpm/l/open_api_typesense)](https://hexdocs.pm/open_api_typesense/license.html) -[![Latest Typesense compatible](https://img.shields.io/badge/Latest%20Typesense%20compatible-v28.0-%230F35BC)](https://typesense.org/docs/28.0/api) +[![Latest Typesense compatible](https://img.shields.io/badge/Latest%20Typesense%20compatible-v30.0-%230F35BC)](https://typesense.org/docs/30.0/api) -**Note**: the only 
place where ai is used/integrated is in PR reviews. I am NOT interested in adding/integrating ai generated code in my codebase, as this little library can be fit in my mental model. ai has it’s own great use case, it’s just that I wanted to be hands-on with these projects. +**Note**: the only place where ai is used/integrated is in PR reviews. I am NOT interested in adding/integrating ai generated code in this codebase, as this little library can be fit in my mental model. ai has its own great use case, it’s just that I wanted to be hands-on with these projects. > #### Upgrading to v1 {: .warning} > @@ -47,7 +48,7 @@ by adding `open_api_typesense` to your list of dependencies in `mix.exs`: ```elixir def deps do [ - {:open_api_typesense, "~> 1.1"} + {:open_api_typesense, "~> 1.2"} # Or from GitHub repository, if you want the latest greatest from main branch {:open_api_typesense, git: "https://github.com/jaeyson/open_api_typesense.git"} diff --git a/docker-compose.yml b/docker-compose.yml index b922d20..5eb9fff 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: typesense: - image: docker.io/typesense/typesense:29.0 + image: docker.io/typesense/typesense:30.0 container_name: typesense restart: on-failure ports: diff --git a/lib/open_api_typesense/client.ex b/lib/open_api_typesense/client.ex index 6b2d3d7..4599dd7 100644 --- a/lib/open_api_typesense/client.ex +++ b/lib/open_api_typesense/client.ex @@ -138,9 +138,6 @@ defmodule OpenApiTypesense.Client do end defp parse_resp(%Req.Response{status: code, body: body}, %{response: resp}) do - # dbg(code) - # dbg(body) - # dbg(resp) {_status, mod} = Enum.find(resp, fn {status, _} -> status === code end) parse_body(code, mod, body) end diff --git a/lib/open_api_typesense/operations/analytics.ex b/lib/open_api_typesense/operations/analytics.ex index cdc97ed..18e1b5c 100644 --- a/lib/open_api_typesense/operations/analytics.ex +++ b/lib/open_api_typesense/operations/analytics.ex @@ -33,6 +33,7 @@
defmodule OpenApiTypesense.Analytics do method: :post, request: [{"application/json", {OpenApiTypesense.AnalyticsEvent, :t}}], response: [ + {200, {OpenApiTypesense.AnalyticsEventCreateResponse, :t}}, {201, {OpenApiTypesense.AnalyticsEventCreateResponse, :t}}, {400, {OpenApiTypesense.ApiResponse, :t}}, {401, {OpenApiTypesense.ApiResponse, :t}} @@ -78,6 +79,12 @@ defmodule OpenApiTypesense.Analytics do ]}} ], response: [ + {200, + {:union, + [ + {OpenApiTypesense.AnalyticsRule, :t}, + [union: [:map, {OpenApiTypesense.AnalyticsRule, :t}]] + ]}}, {201, {:union, [ @@ -110,6 +117,7 @@ defmodule OpenApiTypesense.Analytics do method: :delete, response: [ {200, {OpenApiTypesense.AnalyticsRule, :t}}, + {400, {OpenApiTypesense.ApiResponse, :t}}, {401, {OpenApiTypesense.ApiResponse, :t}}, {404, {OpenApiTypesense.ApiResponse, :t}} ], @@ -124,7 +132,8 @@ defmodule OpenApiTypesense.Analytics do """ @doc since: "1.1.0" @spec flush_analytics(opts :: keyword) :: - {:ok, OpenApiTypesense.AnalyticsEventCreateResponse.t()} | :error + {:ok, OpenApiTypesense.AnalyticsEventCreateResponse.t()} + | {:error, OpenApiTypesense.ApiResponse.t()} def flush_analytics(opts \\ []) do client = opts[:client] || @default_client @@ -185,7 +194,7 @@ defmodule OpenApiTypesense.Analytics do """ @doc since: "1.1.0" @spec get_analytics_status(opts :: keyword) :: - {:ok, OpenApiTypesense.AnalyticsStatus.t()} | :error + {:ok, OpenApiTypesense.AnalyticsStatus.t()} | {:error, OpenApiTypesense.ApiResponse.t()} def get_analytics_status(opts \\ []) do client = opts[:client] || @default_client diff --git a/lib/open_api_typesense/operations/curation_sets.ex b/lib/open_api_typesense/operations/curation_sets.ex new file mode 100644 index 0000000..51f4772 --- /dev/null +++ b/lib/open_api_typesense/operations/curation_sets.ex @@ -0,0 +1,250 @@ +defmodule OpenApiTypesense.CurationSets do + @moduledoc since: "1.2.0" + + @moduledoc """ + Provides API endpoints related to curation sets + """ + + @default_client 
OpenApiTypesense.Client + + @doc """ + Delete a curation set + + Delete a specific curation set by its name + """ + @doc since: "1.2.0" + @spec delete_curation_set(curation_set_name :: String.t(), opts :: keyword) :: + {:ok, OpenApiTypesense.CurationSetDeleteSchema.t()} + | {:error, OpenApiTypesense.ApiResponse.t()} + def delete_curation_set(curation_set_name, opts \\ []) do + client = opts[:client] || @default_client + + client.request(%{ + args: [curation_set_name: curation_set_name], + call: {OpenApiTypesense.CurationSets, :delete_curation_set}, + url: "/curation_sets/#{curation_set_name}", + method: :delete, + response: [ + {200, {OpenApiTypesense.CurationSetDeleteSchema, :t}}, + {401, {OpenApiTypesense.ApiResponse, :t}}, + {404, {OpenApiTypesense.ApiResponse, :t}} + ], + opts: opts + }) + end + + @doc """ + Delete a curation set item + + Delete a specific curation item by its id + """ + @doc since: "1.2.0" + @spec delete_curation_set_item( + curation_set_name :: String.t(), + item_id :: String.t(), + opts :: keyword + ) :: + {:ok, OpenApiTypesense.CurationItemDeleteSchema.t()} + | {:error, OpenApiTypesense.ApiResponse.t()} + def delete_curation_set_item(curation_set_name, item_id, opts \\ []) do + client = opts[:client] || @default_client + + client.request(%{ + args: [curation_set_name: curation_set_name, item_id: item_id], + call: {OpenApiTypesense.CurationSets, :delete_curation_set_item}, + url: "/curation_sets/#{curation_set_name}/items/#{item_id}", + method: :delete, + response: [ + {200, {OpenApiTypesense.CurationItemDeleteSchema, :t}}, + {401, {OpenApiTypesense.ApiResponse, :t}}, + {404, {OpenApiTypesense.ApiResponse, :t}} + ], + opts: opts + }) + end + + @doc """ + Retrieve a curation set + + Retrieve a specific curation set by its name + """ + @doc since: "1.2.0" + @spec retrieve_curation_set(curation_set_name :: String.t(), opts :: keyword) :: + {:ok, OpenApiTypesense.CurationSetSchema.t()} + | {:error, OpenApiTypesense.ApiResponse.t()} + def 
retrieve_curation_set(curation_set_name, opts \\ []) do + client = opts[:client] || @default_client + + client.request(%{ + args: [curation_set_name: curation_set_name], + call: {OpenApiTypesense.CurationSets, :retrieve_curation_set}, + url: "/curation_sets/#{curation_set_name}", + method: :get, + response: [ + {200, {OpenApiTypesense.CurationSetSchema, :t}}, + {401, {OpenApiTypesense.ApiResponse, :t}}, + {404, {OpenApiTypesense.ApiResponse, :t}} + ], + opts: opts + }) + end + + @doc """ + Retrieve a curation set item + + Retrieve a specific curation item by its id + """ + @doc since: "1.2.0" + @spec retrieve_curation_set_item( + curation_set_name :: String.t(), + item_id :: String.t(), + opts :: keyword + ) :: + {:ok, OpenApiTypesense.CurationItemSchema.t()} + | {:error, OpenApiTypesense.ApiResponse.t()} + def retrieve_curation_set_item(curation_set_name, item_id, opts \\ []) do + client = opts[:client] || @default_client + + client.request(%{ + args: [curation_set_name: curation_set_name, item_id: item_id], + call: {OpenApiTypesense.CurationSets, :retrieve_curation_set_item}, + url: "/curation_sets/#{curation_set_name}/items/#{item_id}", + method: :get, + response: [ + {200, {OpenApiTypesense.CurationItemSchema, :t}}, + {401, {OpenApiTypesense.ApiResponse, :t}}, + {404, {OpenApiTypesense.ApiResponse, :t}} + ], + opts: opts + }) + end + + @doc """ + List items in a curation set + + Retrieve all curation items in a set + """ + @doc since: "1.2.0" + @spec retrieve_curation_set_items(curation_set_name :: String.t(), opts :: keyword) :: + {:ok, [OpenApiTypesense.CurationItemSchema.t()]} + | {:error, OpenApiTypesense.ApiResponse.t()} + def retrieve_curation_set_items(curation_set_name, opts \\ []) do + client = opts[:client] || @default_client + + client.request(%{ + args: [curation_set_name: curation_set_name], + call: {OpenApiTypesense.CurationSets, :retrieve_curation_set_items}, + url: "/curation_sets/#{curation_set_name}/items", + method: :get, + response: [ + 
{200, [{OpenApiTypesense.CurationItemSchema, :t}]}, + {401, {OpenApiTypesense.ApiResponse, :t}}, + {404, {OpenApiTypesense.ApiResponse, :t}} + ], + opts: opts + }) + end + + @doc """ + List all curation sets + + Retrieve all curation sets + """ + @doc since: "1.2.0" + @spec retrieve_curation_sets(opts :: keyword) :: + {:ok, [OpenApiTypesense.CurationSetSchema.t()]} | :error + def retrieve_curation_sets(opts \\ []) do + client = opts[:client] || @default_client + + client.request(%{ + args: [], + call: {OpenApiTypesense.CurationSets, :retrieve_curation_sets}, + url: "/curation_sets", + method: :get, + response: [ + {200, [{OpenApiTypesense.CurationSetSchema, :t}]}, + {401, {OpenApiTypesense.ApiResponse, :t}}, + {404, {OpenApiTypesense.ApiResponse, :t}} + ], + opts: opts + }) + end + + @doc """ + Create or update a curation set + + Create or update a curation set with the given name + + ## Request Body + + **Content Types**: `application/json` + + The curation set to be created/updated + """ + @doc since: "1.2.0" + @spec upsert_curation_set( + curation_set_name :: String.t(), + body :: OpenApiTypesense.CurationSetCreateSchema.t(), + opts :: keyword + ) :: + {:ok, OpenApiTypesense.CurationSetSchema.t()} + | {:error, OpenApiTypesense.ApiResponse.t()} + def upsert_curation_set(curation_set_name, body, opts \\ []) do + client = opts[:client] || @default_client + + client.request(%{ + args: [curation_set_name: curation_set_name, body: body], + call: {OpenApiTypesense.CurationSets, :upsert_curation_set}, + url: "/curation_sets/#{curation_set_name}", + body: body, + method: :put, + request: [{"application/json", {OpenApiTypesense.CurationSetCreateSchema, :t}}], + response: [ + {200, {OpenApiTypesense.CurationSetSchema, :t}}, + {400, {OpenApiTypesense.ApiResponse, :t}}, + {401, {OpenApiTypesense.ApiResponse, :t}} + ], + opts: opts + }) + end + + @doc """ + Create or update a curation set item + + Create or update a curation set item with the given id + + ## Request Body + + 
**Content Types**: `application/json` + + The curation item to be created/updated + """ + @doc since: "1.2.0" + @spec upsert_curation_set_item( + curation_set_name :: String.t(), + item_id :: String.t(), + body :: OpenApiTypesense.CurationItemCreateSchema.t(), + opts :: keyword + ) :: + {:ok, OpenApiTypesense.CurationItemSchema.t()} + | {:error, OpenApiTypesense.ApiResponse.t()} + def upsert_curation_set_item(curation_set_name, item_id, body, opts \\ []) do + client = opts[:client] || @default_client + + client.request(%{ + args: [curation_set_name: curation_set_name, item_id: item_id, body: body], + call: {OpenApiTypesense.CurationSets, :upsert_curation_set_item}, + url: "/curation_sets/#{curation_set_name}/items/#{item_id}", + body: body, + method: :put, + request: [{"application/json", {OpenApiTypesense.CurationItemCreateSchema, :t}}], + response: [ + {200, {OpenApiTypesense.CurationItemSchema, :t}}, + {400, {OpenApiTypesense.ApiResponse, :t}}, + {401, {OpenApiTypesense.ApiResponse, :t}}, + {404, {OpenApiTypesense.ApiResponse, :t}} + ], + opts: opts + }) + end +end diff --git a/lib/open_api_typesense/operations/documents.ex b/lib/open_api_typesense/operations/documents.ex index 5940c69..9d6f6a9 100644 --- a/lib/open_api_typesense/operations/documents.ex +++ b/lib/open_api_typesense/operations/documents.ex @@ -81,6 +81,10 @@ defmodule OpenApiTypesense.Documents do end @doc """ + > #### Warning on using Typesense v30.0 and above {: .warning} + > + > v30.0 and above doesn't use this function `delete_search_override/3`. Try [CurationSets](`OpenApiTypesense.CurationSets`) module. See https://typesense.org/docs/30.0/api/#deprecations-behavior-changes + Delete an override associated with a collection """ @doc since: "0.4.0" @@ -205,6 +209,10 @@ defmodule OpenApiTypesense.Documents do end @doc """ + > #### Warning on using Typesense v30.0 and above {: .warning} + > + > v30.0 and above doesn't use this function `get_search_overrides/2`. 
Try [CurationSets](`OpenApiTypesense.CurationSets`) module. See https://typesense.org/docs/30.0/api/#deprecations-behavior-changes + List all collection overrides ## Options @@ -283,7 +291,7 @@ defmodule OpenApiTypesense.Documents do body: body, method: :post, query: query, - request: [{"application/octet-stream", {:string, :generic}}], + request: [{"application/octet-stream", :string}], response: [ {200, :string}, {400, {OpenApiTypesense.ApiResponse, :t}}, @@ -516,6 +524,10 @@ defmodule OpenApiTypesense.Documents do end @doc """ + > #### Warning on using Typesense v30.0 and above {: .warning} + > + > v30.0 and above doesn't use this function `upsert_search_override/4`. Try [CurationSets](`OpenApiTypesense.CurationSets`) module. See https://typesense.org/docs/30.0/api/#deprecations-behavior-changes + Create or update an override to promote certain documents over others Create or update an override to promote certain documents over others. Using overrides, you can include or exclude specific documents for a given query. 
diff --git a/lib/open_api_typesense/operations/synonyms.ex b/lib/open_api_typesense/operations/synonyms.ex index 55e38e7..2d9ad12 100644 --- a/lib/open_api_typesense/operations/synonyms.ex +++ b/lib/open_api_typesense/operations/synonyms.ex @@ -7,10 +7,67 @@ defmodule OpenApiTypesense.Synonyms do @default_client OpenApiTypesense.Client + @doc """ + Delete a synonym set + + Delete a specific synonym set by its name + """ + @doc since: "1.2.0" + @spec delete_synonym_set(synonym_set_name :: String.t(), opts :: keyword) :: + {:ok, OpenApiTypesense.SynonymSetDeleteSchema.t()} + | {:error, OpenApiTypesense.ApiResponse.t()} + def delete_synonym_set(synonym_set_name, opts \\ []) do + client = opts[:client] || @default_client + + client.request(%{ + args: [synonym_set_name: synonym_set_name], + call: {OpenApiTypesense.Synonyms, :delete_synonym_set}, + url: "/synonym_sets/#{synonym_set_name}", + method: :delete, + response: [ + {200, {OpenApiTypesense.SynonymSetDeleteSchema, :t}}, + {401, {OpenApiTypesense.ApiResponse, :t}}, + {404, {OpenApiTypesense.ApiResponse, :t}} + ], + opts: opts + }) + end + + @doc """ + Delete a synonym set item + + Delete a specific synonym item by its id + """ + @doc since: "1.2.0" + @spec delete_synonym_set_item( + synonym_set_name :: String.t(), + item_id :: String.t(), + opts :: keyword + ) :: + {:ok, OpenApiTypesense.SynonymItemDeleteSchema.t()} + | {:error, OpenApiTypesense.ApiResponse.t()} + def delete_synonym_set_item(synonym_set_name, item_id, opts \\ []) do + client = opts[:client] || @default_client + + client.request(%{ + args: [synonym_set_name: synonym_set_name, item_id: item_id], + call: {OpenApiTypesense.Synonyms, :delete_synonym_set_item}, + url: "/synonym_sets/#{synonym_set_name}/items/#{item_id}", + method: :delete, + response: [ + {200, {OpenApiTypesense.SynonymItemDeleteSchema, :t}}, + {401, {OpenApiTypesense.ApiResponse, :t}}, + {404, {OpenApiTypesense.ApiResponse, :t}} + ], + opts: opts + }) + end + @doc """ Delete a synonym 
associated with a collection """ @doc since: "0.4.0" + @deprecated "Please use delete_synonym_set/2 or delete_synonym_set_item/3 when using Typesense v30.0+" @spec delete_search_synonym( collection_name :: String.t(), synonym_id :: String.t(), @@ -35,12 +92,120 @@ defmodule OpenApiTypesense.Synonyms do }) end + @doc """ + List all synonym sets + + Retrieve all synonym sets + """ + @doc since: "1.2.0" + @spec retrieve_synonym_sets(opts :: keyword) :: + {:ok, [OpenApiTypesense.SynonymSetSchema.t()]} | :error + def retrieve_synonym_sets(opts \\ []) do + client = opts[:client] || @default_client + + client.request(%{ + args: [], + call: {OpenApiTypesense.Synonyms, :retrieve_synonym_sets}, + url: "/synonym_sets", + method: :get, + response: [ + {200, [{OpenApiTypesense.SynonymSetSchema, :t}]}, + {401, {OpenApiTypesense.ApiResponse, :t}}, + {404, {OpenApiTypesense.ApiResponse, :t}} + ], + opts: opts + }) + end + + @doc """ + Retrieve a synonym set + + Retrieve a specific synonym set by its name + """ + @doc since: "1.2.0" + @spec retrieve_synonym_set(synonym_set_name :: String.t(), opts :: keyword) :: + {:ok, OpenApiTypesense.SynonymSetSchema.t()} + | {:error, OpenApiTypesense.ApiResponse.t()} + def retrieve_synonym_set(synonym_set_name, opts \\ []) do + client = opts[:client] || @default_client + + client.request(%{ + args: [synonym_set_name: synonym_set_name], + call: {OpenApiTypesense.Synonyms, :retrieve_synonym_set}, + url: "/synonym_sets/#{synonym_set_name}", + method: :get, + response: [ + {200, {OpenApiTypesense.SynonymSetSchema, :t}}, + {401, {OpenApiTypesense.ApiResponse, :t}}, + {404, {OpenApiTypesense.ApiResponse, :t}} + ], + opts: opts + }) + end + + @doc """ + Retrieve a synonym set item + + Retrieve a specific synonym item by its id + """ + @doc since: "1.2.0" + @spec retrieve_synonym_set_item( + synonym_set_name :: String.t(), + item_id :: String.t(), + opts :: keyword + ) :: + {:ok, OpenApiTypesense.SynonymItemSchema.t()} + | {:error, 
OpenApiTypesense.ApiResponse.t()} + def retrieve_synonym_set_item(synonym_set_name, item_id, opts \\ []) do + client = opts[:client] || @default_client + + client.request(%{ + args: [synonym_set_name: synonym_set_name, item_id: item_id], + call: {OpenApiTypesense.Synonyms, :retrieve_synonym_set_item}, + url: "/synonym_sets/#{synonym_set_name}/items/#{item_id}", + method: :get, + response: [ + {200, {OpenApiTypesense.SynonymItemSchema, :t}}, + {401, {OpenApiTypesense.ApiResponse, :t}}, + {404, {OpenApiTypesense.ApiResponse, :t}} + ], + opts: opts + }) + end + + @doc """ + List items in a synonym set + + Retrieve all synonym items in a set + """ + @doc since: "1.2.0" + @spec retrieve_synonym_set_items(synonym_set_name :: String.t(), opts :: keyword) :: + {:ok, [OpenApiTypesense.SynonymItemSchema.t()]} + | {:error, OpenApiTypesense.ApiResponse.t()} + def retrieve_synonym_set_items(synonym_set_name, opts \\ []) do + client = opts[:client] || @default_client + + client.request(%{ + args: [synonym_set_name: synonym_set_name], + call: {OpenApiTypesense.Synonyms, :retrieve_synonym_set_items}, + url: "/synonym_sets/#{synonym_set_name}/items", + method: :get, + response: [ + {200, [{OpenApiTypesense.SynonymItemSchema, :t}]}, + {401, {OpenApiTypesense.ApiResponse, :t}}, + {404, {OpenApiTypesense.ApiResponse, :t}} + ], + opts: opts + }) + end + @doc """ Retrieve a single search synonym Retrieve the details of a search synonym, given its id. 
""" @doc since: "0.4.0" + @deprecated "Please use retrieve_synonym_set/2 or retrieve_synonym_set_item/3 when using Typesense v30.0+" @spec get_search_synonym( collection_name :: String.t(), synonym_id :: String.t(), @@ -74,6 +239,7 @@ defmodule OpenApiTypesense.Synonyms do """ @doc since: "0.4.0" + @deprecated "Please use retrieve_synonym_set_items/2 or retrieve_synonym_sets/1 when using Typesense v30.0+" @spec get_search_synonyms(collection_name :: String.t(), opts :: keyword) :: {:ok, OpenApiTypesense.SearchSynonymsResponse.t()} | {:error, OpenApiTypesense.ApiResponse.t()} @@ -96,6 +262,85 @@ defmodule OpenApiTypesense.Synonyms do }) end + @doc """ + Create or update a synonym set + + Create or update a synonym set with the given name + + ## Request Body + + **Content Types**: `application/json` + + The synonym set to be created/updated + """ + @doc since: "1.2.0" + @spec upsert_synonym_set( + synonym_set_name :: String.t(), + body :: OpenApiTypesense.SynonymSetCreateSchema.t(), + opts :: keyword + ) :: + {:ok, OpenApiTypesense.SynonymSetSchema.t()} + | {:error, OpenApiTypesense.ApiResponse.t()} + def upsert_synonym_set(synonym_set_name, body, opts \\ []) do + client = opts[:client] || @default_client + + client.request(%{ + args: [synonym_set_name: synonym_set_name, body: body], + call: {OpenApiTypesense.Synonyms, :upsert_synonym_set}, + url: "/synonym_sets/#{synonym_set_name}", + body: body, + method: :put, + request: [{"application/json", {OpenApiTypesense.SynonymSetCreateSchema, :t}}], + response: [ + {200, {OpenApiTypesense.SynonymSetSchema, :t}}, + {400, {OpenApiTypesense.ApiResponse, :t}}, + {401, {OpenApiTypesense.ApiResponse, :t}}, + {404, {OpenApiTypesense.ApiResponse, :t}} + ], + opts: opts + }) + end + + @doc """ + Create or update a synonym set item + + Create or update a synonym set item with the given id + + ## Request Body + + **Content Types**: `application/json` + + The synonym item to be created/updated + """ + @doc since: "1.2.0" + @spec 
upsert_synonym_set_item( + synonym_set_name :: String.t(), + item_id :: String.t(), + body :: OpenApiTypesense.SynonymItemUpsertSchema.t(), + opts :: keyword + ) :: + {:ok, OpenApiTypesense.SynonymItemSchema.t()} + | {:error, OpenApiTypesense.ApiResponse.t()} + def upsert_synonym_set_item(synonym_set_name, item_id, body, opts \\ []) do + client = opts[:client] || @default_client + + client.request(%{ + args: [synonym_set_name: synonym_set_name, item_id: item_id, body: body], + call: {OpenApiTypesense.Synonyms, :upsert_synonym_set_item}, + url: "/synonym_sets/#{synonym_set_name}/items/#{item_id}", + body: body, + method: :put, + request: [{"application/json", {OpenApiTypesense.SynonymItemUpsertSchema, :t}}], + response: [ + {200, {OpenApiTypesense.SynonymItemSchema, :t}}, + {400, {OpenApiTypesense.ApiResponse, :t}}, + {401, {OpenApiTypesense.ApiResponse, :t}}, + {404, {OpenApiTypesense.ApiResponse, :t}} + ], + opts: opts + }) + end + @doc """ Create or update a synonym @@ -108,6 +353,7 @@ defmodule OpenApiTypesense.Synonyms do The search synonym object to be created/updated """ @doc since: "0.4.0" + @deprecated "Please use upsert_synonym_set/3 or upsert_synonym_set_item/4 when using Typesense v30.0+" @spec upsert_search_synonym( collection_name :: String.t(), synonym_id :: String.t(), diff --git a/lib/open_api_typesense/schemas/curation_exclude.ex b/lib/open_api_typesense/schemas/curation_exclude.ex new file mode 100644 index 0000000..4b15a35 --- /dev/null +++ b/lib/open_api_typesense/schemas/curation_exclude.ex @@ -0,0 +1,18 @@ +defmodule OpenApiTypesense.CurationExclude do + @moduledoc """ + Provides struct and type for a CurationExclude + """ + use OpenApiTypesense.Encoder + + @type t :: %__MODULE__{id: String.t()} + + defstruct [:id] + + @doc false + @spec __fields__(atom) :: keyword + def __fields__(type \\ :t) + + def __fields__(:t) do + [id: :string] + end +end diff --git a/lib/open_api_typesense/schemas/curation_include.ex 
b/lib/open_api_typesense/schemas/curation_include.ex new file mode 100644 index 0000000..14d9334 --- /dev/null +++ b/lib/open_api_typesense/schemas/curation_include.ex @@ -0,0 +1,18 @@ +defmodule OpenApiTypesense.CurationInclude do + @moduledoc """ + Provides struct and type for a CurationInclude + """ + use OpenApiTypesense.Encoder + + @type t :: %__MODULE__{id: String.t(), position: integer} + + defstruct [:id, :position] + + @doc false + @spec __fields__(atom) :: keyword + def __fields__(type \\ :t) + + def __fields__(:t) do + [id: :string, position: :integer] + end +end diff --git a/lib/open_api_typesense/schemas/curation_item_create_schema.ex b/lib/open_api_typesense/schemas/curation_item_create_schema.ex new file mode 100644 index 0000000..aef8b57 --- /dev/null +++ b/lib/open_api_typesense/schemas/curation_item_create_schema.ex @@ -0,0 +1,96 @@ +defmodule OpenApiTypesense.CurationItemCreateSchema do + @moduledoc """ + Provides struct and type for a CurationItemCreateSchema + """ + use OpenApiTypesense.Encoder + + @type t :: %__MODULE__{ + effective_from_ts: integer, + effective_to_ts: integer, + excludes: [OpenApiTypesense.CurationExclude.t()], + filter_by: String.t(), + filter_curated_hits: boolean, + id: String.t(), + includes: [OpenApiTypesense.CurationInclude.t()], + metadata: map, + remove_matched_tokens: boolean, + replace_query: String.t(), + rule: OpenApiTypesense.CurationRule.t(), + sort_by: String.t(), + stop_processing: boolean + } + + defstruct [ + :effective_from_ts, + :effective_to_ts, + :excludes, + :filter_by, + :filter_curated_hits, + :id, + :includes, + :metadata, + :remove_matched_tokens, + :replace_query, + :rule, + :sort_by, + :stop_processing + ] + + defimpl(Poison.Decoder, for: OpenApiTypesense.CurationItemCreateSchema) do + def decode(value, %{as: struct}) do + mod = + case struct do + [m] -> m + m -> m + end + + filtered_type = + mod.__struct__.__fields__() + |> Enum.filter(fn {_field, v} -> + case v do + [{mod, :t}] when is_atom(mod) 
-> true + _ -> false + end + end) + + case filtered_type do + [{_key, [{module, :t}]} | _rest] = list when is_list(list) and is_atom(module) -> + Enum.reduce(list, value, fn {key, [{mod, :t}]}, acc -> + Map.update!(acc, key, fn data -> + body = OpenApiTypesense.Converter.to_atom_keys(data || [], safe: false) + + case body do + [] -> [] + _ -> Enum.map(body, &struct(mod, &1)) + end + end) + end) + + [] -> + value + end + end + end + + @doc false + @spec __fields__(atom) :: keyword + def __fields__(type \\ :t) + + def __fields__(:t) do + [ + effective_from_ts: :integer, + effective_to_ts: :integer, + excludes: [{OpenApiTypesense.CurationExclude, :t}], + filter_by: :string, + filter_curated_hits: :boolean, + id: :string, + includes: [{OpenApiTypesense.CurationInclude, :t}], + metadata: :map, + remove_matched_tokens: :boolean, + replace_query: :string, + rule: {OpenApiTypesense.CurationRule, :t}, + sort_by: :string, + stop_processing: :boolean + ] + end +end diff --git a/lib/open_api_typesense/schemas/curation_item_delete_schema.ex b/lib/open_api_typesense/schemas/curation_item_delete_schema.ex new file mode 100644 index 0000000..5769819 --- /dev/null +++ b/lib/open_api_typesense/schemas/curation_item_delete_schema.ex @@ -0,0 +1,18 @@ +defmodule OpenApiTypesense.CurationItemDeleteSchema do + @moduledoc """ + Provides struct and type for a CurationItemDeleteSchema + """ + use OpenApiTypesense.Encoder + + @type t :: %__MODULE__{id: String.t()} + + defstruct [:id] + + @doc false + @spec __fields__(atom) :: keyword + def __fields__(type \\ :t) + + def __fields__(:t) do + [id: :string] + end +end diff --git a/lib/open_api_typesense/schemas/curation_item_schema.ex b/lib/open_api_typesense/schemas/curation_item_schema.ex new file mode 100644 index 0000000..a769e92 --- /dev/null +++ b/lib/open_api_typesense/schemas/curation_item_schema.ex @@ -0,0 +1,96 @@ +defmodule OpenApiTypesense.CurationItemSchema do + @moduledoc """ + Provides struct and type for a CurationItemSchema + 
""" + use OpenApiTypesense.Encoder + + @type t :: %__MODULE__{ + effective_from_ts: integer, + effective_to_ts: integer, + excludes: [OpenApiTypesense.CurationExclude.t()], + filter_by: String.t(), + filter_curated_hits: boolean, + id: String.t(), + includes: [OpenApiTypesense.CurationInclude.t()], + metadata: map, + remove_matched_tokens: boolean, + replace_query: String.t(), + rule: OpenApiTypesense.CurationRule.t(), + sort_by: String.t(), + stop_processing: boolean + } + + defstruct [ + :effective_from_ts, + :effective_to_ts, + :excludes, + :filter_by, + :filter_curated_hits, + :id, + :includes, + :metadata, + :remove_matched_tokens, + :replace_query, + :rule, + :sort_by, + :stop_processing + ] + + defimpl(Poison.Decoder, for: OpenApiTypesense.CurationItemSchema) do + def decode(value, %{as: struct}) do + mod = + case struct do + [m] -> m + m -> m + end + + filtered_type = + mod.__struct__.__fields__() + |> Enum.filter(fn {_field, v} -> + case v do + [{mod, :t}] when is_atom(mod) -> true + _ -> false + end + end) + + case filtered_type do + [{_key, [{module, :t}]} | _rest] = list when is_list(list) and is_atom(module) -> + Enum.reduce(list, value, fn {key, [{mod, :t}]}, acc -> + Map.update!(acc, key, fn data -> + body = OpenApiTypesense.Converter.to_atom_keys(data || [], safe: false) + + case body do + [] -> [] + _ -> Enum.map(body, &struct(mod, &1)) + end + end) + end) + + [] -> + value + end + end + end + + @doc false + @spec __fields__(atom) :: keyword + def __fields__(type \\ :t) + + def __fields__(:t) do + [ + effective_from_ts: :integer, + effective_to_ts: :integer, + excludes: [{OpenApiTypesense.CurationExclude, :t}], + filter_by: :string, + filter_curated_hits: :boolean, + id: :string, + includes: [{OpenApiTypesense.CurationInclude, :t}], + metadata: :map, + remove_matched_tokens: :boolean, + replace_query: :string, + rule: {OpenApiTypesense.CurationRule, :t}, + sort_by: :string, + stop_processing: :boolean + ] + end +end diff --git 
a/lib/open_api_typesense/schemas/curation_rule.ex b/lib/open_api_typesense/schemas/curation_rule.ex new file mode 100644 index 0000000..0a4591a --- /dev/null +++ b/lib/open_api_typesense/schemas/curation_rule.ex @@ -0,0 +1,23 @@ +defmodule OpenApiTypesense.CurationRule do + @moduledoc """ + Provides struct and type for a CurationRule + """ + use OpenApiTypesense.Encoder + + @type t :: %__MODULE__{ + filter_by: String.t(), + match: String.t(), + query: String.t(), + tags: [String.t()] + } + + defstruct [:filter_by, :match, :query, :tags] + + @doc false + @spec __fields__(atom) :: keyword + def __fields__(type \\ :t) + + def __fields__(:t) do + [filter_by: :string, match: {:enum, ["exact", "contains"]}, query: :string, tags: [:string]] + end +end diff --git a/lib/open_api_typesense/schemas/curation_set_create_schema.ex b/lib/open_api_typesense/schemas/curation_set_create_schema.ex new file mode 100644 index 0000000..f0b45ff --- /dev/null +++ b/lib/open_api_typesense/schemas/curation_set_create_schema.ex @@ -0,0 +1,57 @@ +defmodule OpenApiTypesense.CurationSetCreateSchema do + @moduledoc """ + Provides struct and type for a CurationSetCreateSchema + """ + use OpenApiTypesense.Encoder + + @type t :: %__MODULE__{ + description: String.t(), + items: [OpenApiTypesense.CurationItemCreateSchema.t()] + } + + defstruct [:description, :items] + + defimpl(Poison.Decoder, for: OpenApiTypesense.CurationSetCreateSchema) do + def decode(value, %{as: struct}) do + mod = + case struct do + [m] -> m + m -> m + end + + filtered_type = + mod.__struct__.__fields__() + |> Enum.filter(fn {_field, v} -> + case v do + [{mod, :t}] when is_atom(mod) -> true + _ -> false + end + end) + + case filtered_type do + [{_key, [{module, :t}]} | _rest] = list when is_list(list) and is_atom(module) -> + Enum.reduce(list, value, fn {key, [{mod, :t}]}, acc -> + Map.update!(acc, key, fn data -> + body = OpenApiTypesense.Converter.to_atom_keys(data || [], safe: false) + + case body do + [] -> [] + _ -> 
Enum.map(body, &struct(mod, &1)) + end + end) + end) + + [] -> + value + end + end + end + + @doc false + @spec __fields__(atom) :: keyword + def __fields__(type \\ :t) + + def __fields__(:t) do + [description: :string, items: [{OpenApiTypesense.CurationItemCreateSchema, :t}]] + end +end diff --git a/lib/open_api_typesense/schemas/curation_set_delete_schema.ex b/lib/open_api_typesense/schemas/curation_set_delete_schema.ex new file mode 100644 index 0000000..ccd4a3f --- /dev/null +++ b/lib/open_api_typesense/schemas/curation_set_delete_schema.ex @@ -0,0 +1,18 @@ +defmodule OpenApiTypesense.CurationSetDeleteSchema do + @moduledoc """ + Provides struct and type for a CurationSetDeleteSchema + """ + use OpenApiTypesense.Encoder + + @type t :: %__MODULE__{name: String.t()} + + defstruct [:name] + + @doc false + @spec __fields__(atom) :: keyword + def __fields__(type \\ :t) + + def __fields__(:t) do + [name: :string] + end +end diff --git a/lib/open_api_typesense/schemas/curation_set_schema.ex b/lib/open_api_typesense/schemas/curation_set_schema.ex new file mode 100644 index 0000000..c8c7b36 --- /dev/null +++ b/lib/open_api_typesense/schemas/curation_set_schema.ex @@ -0,0 +1,62 @@ +defmodule OpenApiTypesense.CurationSetSchema do + @moduledoc """ + Provides struct and type for a CurationSetSchema + """ + use OpenApiTypesense.Encoder + + @type t :: %__MODULE__{ + description: String.t(), + items: [OpenApiTypesense.CurationItemCreateSchema.t()], + name: String.t() + } + + defstruct [:description, :items, :name] + + defimpl(Poison.Decoder, for: OpenApiTypesense.CurationSetSchema) do + def decode(value, %{as: struct}) do + mod = + case struct do + [m] -> m + m -> m + end + + filtered_type = + mod.__struct__.__fields__() + |> Enum.filter(fn {_field, v} -> + case v do + [{mod, :t}] when is_atom(mod) -> true + _ -> false + end + end) + + case filtered_type do + [{_key, [{module, :t}]} | _rest] = list when is_list(list) and is_atom(module) -> + Enum.reduce(list, value, fn {key, 
[{mod, :t}]}, acc -> + Map.update!(acc, key, fn data -> + body = OpenApiTypesense.Converter.to_atom_keys(data || [], safe: false) + + case body do + [] -> [] + _ -> Enum.map(body, &struct(mod, &1)) + end + end) + end) + + [] -> + value + end + end + end + + @doc false + @spec __fields__(atom) :: keyword + def __fields__(type \\ :t) + + def __fields__(:t) do + [ + description: :string, + items: [{OpenApiTypesense.CurationItemCreateSchema, :t}], + name: :string + ] + end +end diff --git a/lib/open_api_typesense/schemas/facet_counts.ex b/lib/open_api_typesense/schemas/facet_counts.ex index 1ffba14..052fdd9 100644 --- a/lib/open_api_typesense/schemas/facet_counts.ex +++ b/lib/open_api_typesense/schemas/facet_counts.ex @@ -7,10 +7,11 @@ defmodule OpenApiTypesense.FacetCounts do @type t :: %__MODULE__{ counts: [OpenApiTypesense.FacetCountsCounts.t()], field_name: String.t(), + sampled: boolean, stats: OpenApiTypesense.FacetCountsStats.t() } - defstruct [:counts, :field_name, :stats] + defstruct [:counts, :field_name, :sampled, :stats] defimpl(Poison.Decoder, for: OpenApiTypesense.FacetCounts) do def decode(value, %{as: struct}) do @@ -56,6 +57,7 @@ defmodule OpenApiTypesense.FacetCounts do [ counts: [{OpenApiTypesense.FacetCountsCounts, :t}], field_name: :string, + sampled: :boolean, stats: {OpenApiTypesense.FacetCountsStats, :t} ] end diff --git a/lib/open_api_typesense/schemas/field.ex b/lib/open_api_typesense/schemas/field.ex index 05a6bce..e71fc7e 100644 --- a/lib/open_api_typesense/schemas/field.ex +++ b/lib/open_api_typesense/schemas/field.ex @@ -5,6 +5,7 @@ defmodule OpenApiTypesense.Field do use OpenApiTypesense.Encoder @type t :: %__MODULE__{ + async_reference: boolean, drop: boolean, embed: OpenApiTypesense.FieldEmbed.t(), facet: boolean, @@ -27,6 +28,7 @@ defmodule OpenApiTypesense.Field do } defstruct [ + :async_reference, :drop, :embed, :facet, @@ -54,6 +56,7 @@ defmodule OpenApiTypesense.Field do def __fields__(:t) do [ + async_reference: :boolean, drop: 
:boolean, embed: {OpenApiTypesense.FieldEmbed, :t}, facet: :boolean, diff --git a/lib/open_api_typesense/schemas/multi_search_collection_parameters.ex b/lib/open_api_typesense/schemas/multi_search_collection_parameters.ex index 5809d6a..931af71 100644 --- a/lib/open_api_typesense/schemas/multi_search_collection_parameters.ex +++ b/lib/open_api_typesense/schemas/multi_search_collection_parameters.ex @@ -10,10 +10,11 @@ defmodule OpenApiTypesense.MultiSearchCollectionParameters do conversation: boolean, conversation_id: String.t(), conversation_model_id: String.t(), - drop_tokens_mode: OpenApiTypesense.DropTokensMode.t(), + curation_tags: String.t(), + drop_tokens_mode: String.t(), drop_tokens_threshold: integer, enable_analytics: boolean, - enable_overrides: boolean, + enable_curations: boolean, enable_synonyms: boolean, enable_typos_for_alpha_numerical_tokens: boolean, enable_typos_for_numerical_tokens: boolean, @@ -44,7 +45,6 @@ defmodule OpenApiTypesense.MultiSearchCollectionParameters do min_len_2typo: integer, num_typos: String.t(), offset: integer, - override_tags: String.t(), page: integer, per_page: integer, pinned_hits: String.t(), @@ -69,6 +69,7 @@ defmodule OpenApiTypesense.MultiSearchCollectionParameters do text_match_type: String.t(), typo_tokens_threshold: integer, use_cache: boolean, + validate_field_names: boolean, vector_query: String.t(), voice_query: String.t(), "x-typesense-api-key": String.t() @@ -80,6 +81,7 @@ defmodule OpenApiTypesense.MultiSearchCollectionParameters do :conversation, :conversation_id, :conversation_model_id, + :curation_tags, :drop_tokens_mode, :drop_tokens_threshold, :enable_synonyms, @@ -111,7 +113,6 @@ defmodule OpenApiTypesense.MultiSearchCollectionParameters do :min_len_2typo, :num_typos, :offset, - :override_tags, :page, :per_page, :pinned_hits, @@ -131,11 +132,12 @@ defmodule OpenApiTypesense.MultiSearchCollectionParameters do :text_match_type, :typo_tokens_threshold, :use_cache, + :validate_field_names, :vector_query, 
:voice_query, :"x-typesense-api-key", enable_analytics: true, - enable_overrides: false, + enable_curations: false, enable_typos_for_numerical_tokens: true, pre_segmented_query: false, prioritize_exact_match: true, @@ -155,10 +157,11 @@ defmodule OpenApiTypesense.MultiSearchCollectionParameters do conversation: :boolean, conversation_id: :string, conversation_model_id: :string, - drop_tokens_mode: {OpenApiTypesense.DropTokensMode, :t}, + curation_tags: :string, + drop_tokens_mode: {:enum, ["right_to_left", "left_to_right", "both_sides:3"]}, drop_tokens_threshold: :integer, enable_analytics: :boolean, - enable_overrides: :boolean, + enable_curations: :boolean, enable_synonyms: :boolean, enable_typos_for_alpha_numerical_tokens: :boolean, enable_typos_for_numerical_tokens: :boolean, @@ -189,7 +192,6 @@ defmodule OpenApiTypesense.MultiSearchCollectionParameters do min_len_2typo: :integer, num_typos: :string, offset: :integer, - override_tags: :string, page: :integer, per_page: :integer, pinned_hits: :string, @@ -214,6 +216,7 @@ defmodule OpenApiTypesense.MultiSearchCollectionParameters do text_match_type: :string, typo_tokens_threshold: :integer, use_cache: :boolean, + validate_field_names: :boolean, vector_query: :string, voice_query: :string, "x-typesense-api-key": :string diff --git a/lib/open_api_typesense/schemas/search_parameters.ex b/lib/open_api_typesense/schemas/search_parameters.ex index d8136ed..fc14efe 100644 --- a/lib/open_api_typesense/schemas/search_parameters.ex +++ b/lib/open_api_typesense/schemas/search_parameters.ex @@ -9,11 +9,12 @@ defmodule OpenApiTypesense.SearchParameters do conversation: boolean, conversation_id: String.t(), conversation_model_id: String.t(), - drop_tokens_mode: OpenApiTypesense.DropTokensMode.t(), + curation_tags: String.t(), + drop_tokens_mode: String.t(), drop_tokens_threshold: integer, enable_analytics: boolean, + enable_curations: boolean, enable_highlight_v1: boolean, - enable_overrides: boolean, enable_synonyms: boolean, 
enable_typos_for_alpha_numerical_tokens: boolean, enable_typos_for_numerical_tokens: boolean, @@ -48,7 +49,6 @@ defmodule OpenApiTypesense.SearchParameters do nl_query: boolean, num_typos: String.t(), offset: integer, - override_tags: String.t(), page: integer, per_page: integer, pinned_hits: String.t(), @@ -74,6 +74,7 @@ defmodule OpenApiTypesense.SearchParameters do text_match_type: String.t(), typo_tokens_threshold: integer, use_cache: boolean, + validate_field_names: boolean, vector_query: String.t(), voice_query: String.t() } @@ -83,6 +84,7 @@ defmodule OpenApiTypesense.SearchParameters do :conversation, :conversation_id, :conversation_model_id, + :curation_tags, :drop_tokens_mode, :drop_tokens_threshold, :enable_synonyms, @@ -118,7 +120,6 @@ defmodule OpenApiTypesense.SearchParameters do :nl_query, :num_typos, :offset, - :override_tags, :page, :per_page, :pinned_hits, @@ -141,11 +142,12 @@ defmodule OpenApiTypesense.SearchParameters do :text_match_type, :typo_tokens_threshold, :use_cache, + :validate_field_names, :vector_query, :voice_query, enable_analytics: true, + enable_curations: false, enable_highlight_v1: true, - enable_overrides: false, enable_typos_for_numerical_tokens: true, prioritize_exact_match: true, prioritize_num_matching_fields: true, @@ -162,11 +164,12 @@ defmodule OpenApiTypesense.SearchParameters do conversation: :boolean, conversation_id: :string, conversation_model_id: :string, - drop_tokens_mode: {OpenApiTypesense.DropTokensMode, :t}, + curation_tags: :string, + drop_tokens_mode: {:enum, ["right_to_left", "left_to_right", "both_sides:3"]}, drop_tokens_threshold: :integer, enable_analytics: :boolean, + enable_curations: :boolean, enable_highlight_v1: :boolean, - enable_overrides: :boolean, enable_synonyms: :boolean, enable_typos_for_alpha_numerical_tokens: :boolean, enable_typos_for_numerical_tokens: :boolean, @@ -201,7 +204,6 @@ defmodule OpenApiTypesense.SearchParameters do nl_query: :boolean, num_typos: :string, offset: :integer, - 
override_tags: :string, page: :integer, per_page: :integer, pinned_hits: :string, @@ -227,6 +229,7 @@ defmodule OpenApiTypesense.SearchParameters do text_match_type: :string, typo_tokens_threshold: :integer, use_cache: :boolean, + validate_field_names: :boolean, vector_query: :string, voice_query: :string ] diff --git a/lib/open_api_typesense/schemas/search_request_params.ex b/lib/open_api_typesense/schemas/search_request_params.ex index 7f4f854..fc5cb70 100644 --- a/lib/open_api_typesense/schemas/search_request_params.ex +++ b/lib/open_api_typesense/schemas/search_request_params.ex @@ -6,12 +6,13 @@ defmodule OpenApiTypesense.SearchRequestParams do @type t :: %__MODULE__{ collection_name: String.t(), + first_q: String.t(), per_page: integer, q: String.t(), voice_query: OpenApiTypesense.SearchRequestParamsVoiceQuery.t() } - defstruct [:collection_name, :per_page, :q, :voice_query] + defstruct [:collection_name, :first_q, :per_page, :q, :voice_query] @doc false @spec __fields__(atom) :: keyword @@ -20,6 +21,7 @@ defmodule OpenApiTypesense.SearchRequestParams do def __fields__(:t) do [ collection_name: :string, + first_q: :string, per_page: :integer, q: :string, voice_query: {OpenApiTypesense.SearchRequestParamsVoiceQuery, :t} diff --git a/lib/open_api_typesense/schemas/synonym_item_delete_schema.ex b/lib/open_api_typesense/schemas/synonym_item_delete_schema.ex new file mode 100644 index 0000000..4468eb3 --- /dev/null +++ b/lib/open_api_typesense/schemas/synonym_item_delete_schema.ex @@ -0,0 +1,18 @@ +defmodule OpenApiTypesense.SynonymItemDeleteSchema do + @moduledoc """ + Provides struct and type for a SynonymItemDeleteSchema + """ + use OpenApiTypesense.Encoder + + @type t :: %__MODULE__{id: String.t()} + + defstruct [:id] + + @doc false + @spec __fields__(atom) :: keyword + def __fields__(type \\ :t) + + def __fields__(:t) do + [id: :string] + end +end diff --git a/lib/open_api_typesense/schemas/synonym_item_schema.ex 
b/lib/open_api_typesense/schemas/synonym_item_schema.ex new file mode 100644 index 0000000..a9df28b --- /dev/null +++ b/lib/open_api_typesense/schemas/synonym_item_schema.ex @@ -0,0 +1,30 @@ +defmodule OpenApiTypesense.SynonymItemSchema do + @moduledoc """ + Provides struct and type for a SynonymItemSchema + """ + use OpenApiTypesense.Encoder + + @type t :: %__MODULE__{ + id: String.t(), + locale: String.t(), + root: String.t(), + symbols_to_index: [String.t()], + synonyms: [String.t()] + } + + defstruct [:id, :locale, :root, :symbols_to_index, :synonyms] + + @doc false + @spec __fields__(atom) :: keyword + def __fields__(type \\ :t) + + def __fields__(:t) do + [ + id: :string, + locale: :string, + root: :string, + symbols_to_index: [:string], + synonyms: [:string] + ] + end +end diff --git a/lib/open_api_typesense/schemas/synonym_item_upsert_schema.ex b/lib/open_api_typesense/schemas/synonym_item_upsert_schema.ex new file mode 100644 index 0000000..d19607d --- /dev/null +++ b/lib/open_api_typesense/schemas/synonym_item_upsert_schema.ex @@ -0,0 +1,23 @@ +defmodule OpenApiTypesense.SynonymItemUpsertSchema do + @moduledoc """ + Provides struct and type for a SynonymItemUpsertSchema + """ + use OpenApiTypesense.Encoder + + @type t :: %__MODULE__{ + locale: String.t(), + root: String.t(), + symbols_to_index: [String.t()], + synonyms: [String.t()] + } + + defstruct [:locale, :root, :symbols_to_index, :synonyms] + + @doc false + @spec __fields__(atom) :: keyword + def __fields__(type \\ :t) + + def __fields__(:t) do + [locale: :string, root: :string, symbols_to_index: [:string], synonyms: [:string]] + end +end diff --git a/lib/open_api_typesense/schemas/synonym_set_create_schema.ex b/lib/open_api_typesense/schemas/synonym_set_create_schema.ex new file mode 100644 index 0000000..a7313b2 --- /dev/null +++ b/lib/open_api_typesense/schemas/synonym_set_create_schema.ex @@ -0,0 +1,54 @@ +defmodule OpenApiTypesense.SynonymSetCreateSchema do + @moduledoc """ + Provides struct 
and type for a SynonymSetCreateSchema + """ + use OpenApiTypesense.Encoder + + @type t :: %__MODULE__{items: [OpenApiTypesense.SynonymItemSchema.t()]} + + defstruct [:items] + + defimpl(Poison.Decoder, for: OpenApiTypesense.SynonymSetCreateSchema) do + def decode(value, %{as: struct}) do + mod = + case struct do + [m] -> m + m -> m + end + + filtered_type = + mod.__struct__.__fields__() + |> Enum.filter(fn {_field, v} -> + case v do + [{mod, :t}] when is_atom(mod) -> true + _ -> false + end + end) + + case filtered_type do + [{_key, [{module, :t}]} | _rest] = list when is_list(list) and is_atom(module) -> + Enum.reduce(list, value, fn {key, [{mod, :t}]}, acc -> + Map.update!(acc, key, fn data -> + body = OpenApiTypesense.Converter.to_atom_keys(data || [], safe: false) + + case body do + [] -> [] + _ -> Enum.map(body, &struct(mod, &1)) + end + end) + end) + + [] -> + value + end + end + end + + @doc false + @spec __fields__(atom) :: keyword + def __fields__(type \\ :t) + + def __fields__(:t) do + [items: [{OpenApiTypesense.SynonymItemSchema, :t}]] + end +end diff --git a/lib/open_api_typesense/schemas/synonym_set_delete_schema.ex b/lib/open_api_typesense/schemas/synonym_set_delete_schema.ex new file mode 100644 index 0000000..201607d --- /dev/null +++ b/lib/open_api_typesense/schemas/synonym_set_delete_schema.ex @@ -0,0 +1,18 @@ +defmodule OpenApiTypesense.SynonymSetDeleteSchema do + @moduledoc """ + Provides struct and type for a SynonymSetDeleteSchema + """ + use OpenApiTypesense.Encoder + + @type t :: %__MODULE__{name: String.t()} + + defstruct [:name] + + @doc false + @spec __fields__(atom) :: keyword + def __fields__(type \\ :t) + + def __fields__(:t) do + [name: :string] + end +end diff --git a/lib/open_api_typesense/schemas/synonym_set_schema.ex b/lib/open_api_typesense/schemas/synonym_set_schema.ex new file mode 100644 index 0000000..8ccb309 --- /dev/null +++ b/lib/open_api_typesense/schemas/synonym_set_schema.ex @@ -0,0 +1,54 @@ +defmodule 
OpenApiTypesense.SynonymSetSchema do + @moduledoc """ + Provides struct and type for a SynonymSetSchema + """ + use OpenApiTypesense.Encoder + + @type t :: %__MODULE__{items: [OpenApiTypesense.SynonymItemSchema.t()], name: String.t()} + + defstruct [:items, :name] + + defimpl(Poison.Decoder, for: OpenApiTypesense.SynonymSetSchema) do + def decode(value, %{as: struct}) do + mod = + case struct do + [m] -> m + m -> m + end + + filtered_type = + mod.__struct__.__fields__() + |> Enum.filter(fn {_field, v} -> + case v do + [{mod, :t}] when is_atom(mod) -> true + _ -> false + end + end) + + case filtered_type do + [{_key, [{module, :t}]} | _rest] = list when is_list(list) and is_atom(module) -> + Enum.reduce(list, value, fn {key, [{mod, :t}]}, acc -> + Map.update!(acc, key, fn data -> + body = OpenApiTypesense.Converter.to_atom_keys(data || [], safe: false) + + case body do + [] -> [] + _ -> Enum.map(body, &struct(mod, &1)) + end + end) + end) + + [] -> + value + end + end + end + + @doc false + @spec __fields__(atom) :: keyword + def __fields__(type \\ :t) + + def __fields__(:t) do + [items: [{OpenApiTypesense.SynonymItemSchema, :t}], name: :string] + end +end diff --git a/mix.exs b/mix.exs index 1c58500..a43c328 100644 --- a/mix.exs +++ b/mix.exs @@ -3,7 +3,7 @@ defmodule OpenApiTypesense.MixProject do @source_url "https://github.com/jaeyson/open_api_typesense" @hex_url "https://hexdocs.pm/open_api_typesense" - @version "1.1.0" + @version "1.2.0" def project do [ diff --git a/priv/open_api.yml b/priv/open_api.yml index 2ca3885..e0a8b5b 100644 --- a/priv/open_api.yml +++ b/priv/open_api.yml @@ -35,11 +35,6 @@ tags: externalDocs: description: Find out more url: https://typesense.org/api/#index-document - - name: curation - description: Hand-curate search results based on conditional business rules - externalDocs: - description: Find out more - url: https://typesense.org/docs/0.23.0/api/#curation - name: analytics description: Typesense can aggregate search queries for 
both analytics purposes and for query suggestions. externalDocs: @@ -77,6 +72,8 @@ tags: externalDocs: description: Find out more url: https://typesense.org/docs/28.0/api/synonyms.html + - name: curation_sets + description: Manage curation sets - name: stemming description: Manage stemming dictionaries externalDocs: @@ -557,296 +554,556 @@ paths: application/json: schema: $ref: "#/components/schemas/ApiResponse" - /collections/{collectionName}/overrides: + + /synonym_sets: get: tags: - - documents - - curation - summary: List all collection overrides - operationId: getSearchOverrides + - synonyms + summary: List all synonym sets + description: Retrieve all synonym sets + operationId: retrieveSynonymSets + responses: + "200": + description: List of all synonym sets + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/SynonymSetSchema" + "401": + description: Missing API key + content: + application/json: + schema: + $ref: "#/components/schemas/ApiResponse" + "404": + description: Method not found + content: + application/json: + schema: + $ref: "#/components/schemas/ApiResponse" + + /synonym_sets/{synonymSetName}: + get: + tags: + - synonyms + summary: Retrieve a synonym set + description: Retrieve a specific synonym set by its name + operationId: retrieveSynonymSet parameters: - - name: collectionName + - name: synonymSetName in: path - description: The name of the collection + description: The name of the synonym set to retrieve required: true schema: type: string - - name: limit - in: query - description: Limit results in paginating on collection listing. 
+ responses: + "200": + description: Synonym set fetched + content: + application/json: + schema: + $ref: "#/components/schemas/SynonymSetSchema" + "401": + description: Missing API key + content: + application/json: + schema: + $ref: "#/components/schemas/ApiResponse" + "404": + description: Method not found + content: + application/json: + schema: + $ref: "#/components/schemas/ApiResponse" + + put: + tags: + - synonyms + summary: Create or update a synonym set + description: Create or update a synonym set with the given name + operationId: upsertSynonymSet + parameters: + - name: synonymSetName + in: path + description: The name of the synonym set to create/update + required: true schema: type: string - - name: offset - in: query - description: Skip a certain number of results and start after that. + requestBody: + description: The synonym set to be created/updated + content: + application/json: + schema: + $ref: "#/components/schemas/SynonymSetCreateSchema" + required: true + responses: + "200": + description: Synonym set successfully created/updated + content: + application/json: + schema: + $ref: "#/components/schemas/SynonymSetSchema" + "400": + description: Bad request, see error message for details + content: + application/json: + schema: + $ref: "#/components/schemas/ApiResponse" + "401": + description: Missing API key + content: + application/json: + schema: + $ref: "#/components/schemas/ApiResponse" + "404": + description: Method not found + content: + application/json: + schema: + $ref: "#/components/schemas/ApiResponse" + delete: + tags: + - synonyms + summary: Delete a synonym set + description: Delete a specific synonym set by its name + operationId: deleteSynonymSet + parameters: + - name: synonymSetName + in: path + description: The name of the synonym set to delete + required: true schema: type: string responses: - '200': - description: List of all search overrides + "200": + description: Synonym set successfully deleted content: application/json: 
schema: - $ref: "#/components/schemas/SearchOverridesResponse" - '401': + $ref: "#/components/schemas/SynonymSetDeleteSchema" + "401": description: Missing API key content: application/json: schema: $ref: "#/components/schemas/ApiResponse" - '404': - description: List of search overrides not found + "404": + description: Method not found content: application/json: schema: $ref: "#/components/schemas/ApiResponse" - /collections/{collectionName}/overrides/{overrideId}: + + /synonym_sets/{synonymSetName}/items: get: tags: - - documents - - override - summary: Retrieve a single search override - description: Retrieve the details of a search override, given its id. - operationId: getSearchOverride + - synonyms + summary: List items in a synonym set + description: Retrieve all synonym items in a set + operationId: retrieveSynonymSetItems parameters: - - name: collectionName + - name: synonymSetName in: path - description: The name of the collection + description: The name of the synonym set to retrieve items for + required: true + schema: + type: string + responses: + "200": + description: List of synonym items + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/SynonymItemSchema" + "401": + description: Missing API key + content: + application/json: + schema: + $ref: "#/components/schemas/ApiResponse" + "404": + description: Synonym set not found + content: + application/json: + schema: + $ref: "#/components/schemas/ApiResponse" + + /synonym_sets/{synonymSetName}/items/{itemId}: + get: + tags: + - synonyms + summary: Retrieve a synonym set item + description: Retrieve a specific synonym item by its id + operationId: retrieveSynonymSetItem + parameters: + - name: synonymSetName + in: path + description: The name of the synonym set required: true schema: type: string - - name: overrideId + - name: itemId in: path - description: The id of the search override + description: The id of the synonym item to retrieve required: true 
schema: type: string responses: - '200': - description: Search override fetched + "200": + description: Synonym item fetched content: application/json: schema: - $ref: "#/components/schemas/SearchOverride" - '401': + $ref: "#/components/schemas/SynonymItemSchema" + "401": description: Missing API key content: application/json: schema: $ref: "#/components/schemas/ApiResponse" - '404': - description: Search override not found + "404": + description: Synonym set item not found content: application/json: schema: $ref: "#/components/schemas/ApiResponse" put: tags: - - documents - - curation - summary: Create or update an override to promote certain documents over others - description: - Create or update an override to promote certain documents over others. - Using overrides, you can include or exclude specific documents for a given query. - operationId: upsertSearchOverride + - synonyms + summary: Create or update a synonym set item + description: Create or update a synonym set item with the given id + operationId: upsertSynonymSetItem parameters: - - name: collectionName + - name: synonymSetName in: path - description: The name of the collection + description: The name of the synonym set required: true schema: type: string - - name: overrideId + - name: itemId in: path - description: The ID of the search override to create/update + description: The id of the synonym item to upsert required: true schema: type: string requestBody: - description: The search override object to be created/updated + description: The synonym item to be created/updated content: application/json: schema: - $ref: "#/components/schemas/SearchOverrideSchema" + $ref: "#/components/schemas/SynonymItemUpsertSchema" required: true responses: - '200': - description: Created/updated search override + "200": + description: Synonym item successfully created/updated content: application/json: schema: - $ref: "#/components/schemas/SearchOverride" - '400': + $ref: "#/components/schemas/SynonymItemSchema" + 
"400": description: Bad request, see error message for details content: application/json: schema: $ref: "#/components/schemas/ApiResponse" - '401': + "401": description: Missing API key content: application/json: schema: $ref: "#/components/schemas/ApiResponse" - '404': - description: Search override not found + "404": + description: Method not found content: application/json: schema: $ref: "#/components/schemas/ApiResponse" delete: tags: - - documents - - curation - summary: Delete an override associated with a collection - operationId: deleteSearchOverride + - synonyms + summary: Delete a synonym set item + description: Delete a specific synonym item by its id + operationId: deleteSynonymSetItem parameters: - - name: collectionName + - name: synonymSetName in: path - description: The name of the collection + description: The name of the synonym set required: true schema: type: string - - name: overrideId + - name: itemId in: path - description: The ID of the search override to delete + description: The id of the synonym item to delete required: true schema: type: string responses: - '200': - description: The ID of the deleted search override + "200": + description: Synonym item successfully deleted content: application/json: schema: - $ref: "#/components/schemas/SearchOverrideDeleteResponse" - '401': + $ref: "#/components/schemas/SynonymItemDeleteSchema" + "401": description: Missing API key content: application/json: schema: $ref: "#/components/schemas/ApiResponse" - '404': - description: Search override not found + "404": + description: Synonym item not found content: application/json: schema: $ref: "#/components/schemas/ApiResponse" - /collections/{collectionName}/synonyms: + + /curation_sets: get: tags: - - synonyms - summary: List all collection synonyms - operationId: getSearchSynonyms + - curation_sets + summary: List all curation sets + description: Retrieve all curation sets + operationId: retrieveCurationSets + responses: + "200": + description: List of 
all curation sets + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/CurationSetSchema" + "401": + description: Missing API key + content: + application/json: + schema: + $ref: "#/components/schemas/ApiResponse" + "404": + description: Curation sets not found + content: + application/json: + schema: + $ref: "#/components/schemas/ApiResponse" + + /curation_sets/{curationSetName}: + get: + tags: + - curation_sets + summary: Retrieve a curation set + description: Retrieve a specific curation set by its name + operationId: retrieveCurationSet parameters: - - name: collectionName + - name: curationSetName in: path - description: The name of the collection + description: The name of the curation set to retrieve required: true schema: type: string - - name: limit - in: query - description: Limit results in paginating on collection listing. + responses: + "200": + description: Curation set fetched + content: + application/json: + schema: + $ref: "#/components/schemas/CurationSetSchema" + "401": + description: Missing API key + content: + application/json: + schema: + $ref: "#/components/schemas/ApiResponse" + "404": + description: Curation set item not found + content: + application/json: + schema: + $ref: "#/components/schemas/ApiResponse" + put: + tags: + - curation_sets + summary: Create or update a curation set + description: Create or update a curation set with the given name + operationId: upsertCurationSet + parameters: + - name: curationSetName + in: path + description: The name of the curation set to create/update + required: true schema: type: string - - name: offset - in: query - description: Skip a certain number of results and start after that. 
+ requestBody: + description: The curation set to be created/updated + content: + application/json: + schema: + $ref: "#/components/schemas/CurationSetCreateSchema" + required: true + responses: + "200": + description: Curation set successfully created/updated + content: + application/json: + schema: + $ref: "#/components/schemas/CurationSetSchema" + "400": + description: Bad request, see error message for details + content: + application/json: + schema: + $ref: "#/components/schemas/ApiResponse" + "401": + description: Missing API key + content: + application/json: + schema: + $ref: "#/components/schemas/ApiResponse" + "404": + description: Curation set not found + content: + application/json: + schema: + $ref: "#/components/schemas/ApiResponse" + delete: + tags: + - curation_sets + summary: Delete a curation set + description: Delete a specific curation set by its name + operationId: deleteCurationSet + parameters: + - name: curationSetName + in: path + description: The name of the curation set to delete + required: true schema: type: string responses: - '200': - description: List of all search synonyms + "200": + description: Curation set successfully deleted content: application/json: schema: - $ref: "#/components/schemas/SearchSynonymsResponse" - '401': + $ref: "#/components/schemas/CurationSetDeleteSchema" + "401": description: Missing API key content: application/json: schema: $ref: "#/components/schemas/ApiResponse" - '404': - description: Search synonyms was not found + "404": + description: Curation set not found content: application/json: schema: $ref: "#/components/schemas/ApiResponse" - /collections/{collectionName}/synonyms/{synonymId}: + + /curation_sets/{curationSetName}/items: get: tags: - - synonyms - summary: Retrieve a single search synonym - description: Retrieve the details of a search synonym, given its id. 
- operationId: getSearchSynonym + - curation_sets + summary: List items in a curation set + description: Retrieve all curation items in a set + operationId: retrieveCurationSetItems parameters: - - name: collectionName + - name: curationSetName in: path - description: The name of the collection + description: The name of the curation set to retrieve items for + required: true + schema: + type: string + responses: + "200": + description: List of curation items + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/CurationItemSchema" + "404": + description: Curation set not found + content: + application/json: + schema: + $ref: "#/components/schemas/ApiResponse" + + /curation_sets/{curationSetName}/items/{itemId}: + get: + tags: + - curation_sets + summary: Retrieve a curation set item + description: Retrieve a specific curation item by its id + operationId: retrieveCurationSetItem + parameters: + - name: curationSetName + in: path + description: The name of the curation set required: true schema: type: string - - name: synonymId + - name: itemId in: path - description: The id of the search synonym + description: The id of the curation item to retrieve required: true schema: type: string responses: - '200': - description: Search synonym fetched + "200": + description: Curation item fetched content: application/json: schema: - $ref: "#/components/schemas/SearchSynonym" - '401': + $ref: "#/components/schemas/CurationItemSchema" + "401": description: Missing API key content: application/json: schema: $ref: "#/components/schemas/ApiResponse" - '404': - description: Search synonym was not found + "404": + description: Curation item not found content: application/json: schema: $ref: "#/components/schemas/ApiResponse" put: tags: - - synonyms - summary: Create or update a synonym - description: Create or update a synonym to define search terms that should be considered equivalent. 
- operationId: upsertSearchSynonym + - curation_sets + summary: Create or update a curation set item + description: Create or update a curation set item with the given id + operationId: upsertCurationSetItem parameters: - - name: collectionName + - name: curationSetName in: path - description: The name of the collection + description: The name of the curation set required: true schema: type: string - - name: synonymId + - name: itemId in: path - description: The ID of the search synonym to create/update + description: The id of the curation item to upsert required: true schema: type: string requestBody: - description: The search synonym object to be created/updated + description: The curation item to be created/updated content: application/json: schema: - $ref: "#/components/schemas/SearchSynonymSchema" + $ref: "#/components/schemas/CurationItemCreateSchema" required: true responses: - '200': - description: Created/updated search synonym + "200": + description: Curation item successfully created/updated content: application/json: schema: - $ref: "#/components/schemas/SearchSynonym" - '400': + $ref: "#/components/schemas/CurationItemSchema" + "400": description: Bad request, see error message for details content: application/json: @@ -866,37 +1123,38 @@ paths: $ref: "#/components/schemas/ApiResponse" delete: tags: - - synonyms - summary: Delete a synonym associated with a collection - operationId: deleteSearchSynonym + - curation_sets + summary: Delete a curation set item + description: Delete a specific curation item by its id + operationId: deleteCurationSetItem parameters: - - name: collectionName + - name: curationSetName in: path - description: The name of the collection + description: The name of the curation set required: true schema: type: string - - name: synonymId + - name: itemId in: path - description: The ID of the search synonym to delete + description: The id of the curation item to delete required: true schema: type: string responses: - '200': - 
description: The ID of the deleted search synonym + "200": + description: Curation item successfully deleted content: application/json: schema: - $ref: "#/components/schemas/SearchSynonymDeleteResponse" - '401': + $ref: "#/components/schemas/CurationItemDeleteSchema" + "401": description: Missing API key content: application/json: schema: $ref: "#/components/schemas/ApiResponse" - '404': - description: Search synonym not found + "404": + description: Curation item not found content: application/json: schema: @@ -1854,8 +2112,8 @@ paths: example: | {"log-slow-requests-time-ms": 2000} responses: - '201': - description: Updated slow request log + '200': + description: Toggle Slow Request Log database succeeded. content: application/json: schema: @@ -1938,6 +2196,12 @@ paths: $ref: '#/components/schemas/AnalyticsEvent' required: true responses: + '200': + description: Analytics event successfully created + content: + application/json: + schema: + $ref: '#/components/schemas/AnalyticsEventCreateResponse' '201': description: Analytics event successfully created content: @@ -2076,7 +2340,7 @@ paths: $ref: "#/components/schemas/AnalyticsRuleCreate" required: true responses: - '201': + '200': description: Analytics rule(s) successfully created content: application/json: @@ -2085,7 +2349,22 @@ paths: - $ref: "#/components/schemas/AnalyticsRule" - type: array items: - anyOf: + oneOf: + - $ref: "#/components/schemas/AnalyticsRule" + - type: object + properties: + error: + type: string + '201': + description: Analytics rule(s) successfully created + content: + application/json: + schema: + oneOf: + - $ref: "#/components/schemas/AnalyticsRule" + - type: array + items: + oneOf: - $ref: "#/components/schemas/AnalyticsRule" - type: object properties: @@ -2232,6 +2511,12 @@ paths: application/json: schema: $ref: "#/components/schemas/AnalyticsRule" + '400': + description: Bad request, see error message for details. 
+ content: + application/json: + schema: + $ref: "#/components/schemas/ApiResponse" '401': description: "Missing API key" content: @@ -2992,6 +3277,10 @@ components: type: string description: > Name of a field in another collection that should be linked to this collection so that it can be joined during query. + async_reference: + type: boolean + description: > + Allow documents to be indexed successfully even when the referenced document doesn't exist yet. num_dim: type: integer example: 256 @@ -3150,7 +3439,7 @@ components: items: $ref: "#/components/schemas/SearchResultHit" request_params: - $ref: "#/components/schemas/SearchRequestParams" + $ref: "#/components/schemas/SearchRequestParams" conversation: $ref: "#/components/schemas/SearchResultConversation" union_request_params: @@ -3171,6 +3460,8 @@ components: properties: collection_name: type: string + first_q: + type: string q: type: string per_page: @@ -3327,136 +3618,6 @@ components: items: type: object x-go-type: "interface{}" - SearchOverrideSchema: - type: object - required: - - rule - properties: - rule: - $ref: "#/components/schemas/SearchOverrideRule" - includes: - type: array - description: - List of document `id`s that should be included in the search results with their - corresponding `position`s. - items: - $ref: "#/components/schemas/SearchOverrideInclude" - excludes: - type: array - description: List of document `id`s that should be excluded from the search results. - items: - $ref: "#/components/schemas/SearchOverrideExclude" - filter_by: - type: string - description: > - A filter by clause that is applied to any search query that matches the override rule. - remove_matched_tokens: - type: boolean - description: > - Indicates whether search query tokens that exist in the override's rule should be removed from the search query. - metadata: - type: object - description: > - Return a custom JSON object in the Search API response, when this rule is triggered. 
This can can be used to display a pre-defined message (eg: a promotion banner) on the front-end when a particular rule is triggered. - sort_by: - type: string - description: > - A sort by clause that is applied to any search query that matches the override rule. - replace_query: - type: string - description: > - Replaces the current search query with this value, when the search query matches the override rule. - filter_curated_hits: - type: boolean - description: > - When set to true, the filter conditions of the query is applied to the curated records as well. - Default: false. - effective_from_ts: - type: integer - description: > - A Unix timestamp that indicates the date/time from which the override will be active. You can use this to create override rules that start applying from a future point in time. - effective_to_ts: - type: integer - description: > - A Unix timestamp that indicates the date/time until which the override will be active. You can use this to create override rules that stop applying after a period of time. - stop_processing: - type: boolean - description: > - When set to true, override processing will stop at the first matching rule. When set to false override processing will continue and multiple override actions will be triggered in sequence. - Overrides are processed in the lexical sort order of their id field. - Default: true. - SearchOverride: - allOf: - - $ref: "#/components/schemas/SearchOverrideSchema" - - type: object - required: - - id - properties: - id: - type: string - readOnly: true - SearchOverrideDeleteResponse: - type: object - required: - - id - properties: - id: - type: string - description: The id of the override that was deleted - SearchOverrideRule: - type: object - properties: - tags: - type: array - description: List of tag values to associate with this override rule. 
- items: - type: string - query: - type: string - description: Indicates what search queries should be overridden - match: - type: string - description: > - Indicates whether the match on the query term should be `exact` or `contains`. - If we want to match all queries that contained - the word `apple`, we will use the `contains` match instead. - enum: - - exact - - contains - filter_by: - type: string - description: > - Indicates that the override should apply when the filter_by parameter in a search query exactly matches the string specified here (including backticks, spaces, brackets, etc). - SearchOverrideInclude: - type: object - required: - - id - - position - properties: - id: - type: string - description: document id that should be included - position: - type: integer - description: position number where document should be included in the search results - SearchOverrideExclude: - type: object - required: - - id - properties: - id: - type: string - description: document id that should be excluded from the search results. - SearchOverridesResponse: - type: object - required: - - overrides - properties: - overrides: - type: array - x-go-type: "[]*SearchOverride" - items: - $ref: "#/components/schemas/SearchOverride" SearchSynonymSchema: type: object required: @@ -3629,6 +3790,13 @@ components: against. Multiple fields are separated with a comma. type: string + validate_field_names: + description: > + Controls whether Typesense should validate if the fields exist in the schema. + When set to false, Typesense will not throw an error if a field is missing. + This is useful for programmatic grouping where not all fields may exist. + type: boolean + nl_query: description: Whether to use natural language processing to parse the query. 
type: boolean @@ -3847,7 +4015,7 @@ components: filter_curated_hits: type: boolean description: > - Whether the filter_by condition of the search query should be applicable to curated results (override definitions, pinned hits, hidden hits, etc.). Default: false + Whether the filter_by condition of the search query should be applicable to curated results (curation definitions, pinned hits, hidden hits, etc.). Default: false enable_synonyms: type: boolean description: > @@ -3870,8 +4038,8 @@ components: at Position 1 and another record with ID 456 at Position 5, you'd specify `123:1,456:5`. - You could also use the Overrides feature to override search results based - on rules. Overrides are applied first, followed by `pinned_hits` and + You could also use the Curation feature to override search results based + on rules. Curations are applied first, followed by `pinned_hits` and finally `hidden_hits`. type: string @@ -3881,12 +4049,12 @@ components: A list of `record_id`s to hide. Eg: to hide records with IDs 123 and 456, you'd specify `123,456`. - You could also use the Overrides feature to override search results based - on rules. Overrides are applied first, followed by `pinned_hits` and + You could also use the Curation feature to override search results based + on rules. Curations are applied first, followed by `pinned_hits` and finally `hidden_hits`. type: string - override_tags: + curation_tags: description: Comma separated list of tags to trigger the curations rules that match the tags. type: string @@ -3919,9 +4087,9 @@ components: the name of the existing Preset. 
type: string - enable_overrides: + enable_curations: description: > - If you have some overrides defined but want to disable all of them during + If you have some curation sets defined but want to disable all of them during query time, you can do that by setting this parameter to false type: boolean default: false @@ -4224,7 +4392,7 @@ components: filter_curated_hits: type: boolean description: > - Whether the filter_by condition of the search query should be applicable to curated results (override definitions, pinned hits, hidden hits, etc.). Default: false + Whether the filter_by condition of the search query should be applicable to curated results (curation definitions, pinned hits, hidden hits, etc.). Default: false enable_synonyms: type: boolean description: > @@ -4255,8 +4423,8 @@ components: at Position 1 and another record with ID 456 at Position 5, you'd specify `123:1,456:5`. - You could also use the Overrides feature to override search results based - on rules. Overrides are applied first, followed by `pinned_hits` and + You could also use the Curation feature to override search results based + on rules. Curations are applied first, followed by `pinned_hits` and finally `hidden_hits`. type: string @@ -4266,12 +4434,12 @@ components: A list of `record_id`s to hide. Eg: to hide records with IDs 123 and 456, you'd specify `123,456`. - You could also use the Overrides feature to override search results based - on rules. Overrides are applied first, followed by `pinned_hits` and + You could also use the Curation feature to override search results based + on rules. Curations are applied first, followed by `pinned_hits` and finally `hidden_hits`. type: string - override_tags: + curation_tags: description: Comma separated list of tags to trigger the curations rules that match the tags. type: string @@ -4297,9 +4465,9 @@ components: the name of the existing Preset. 
type: string - enable_overrides: + enable_curations: description: > - If you have some overrides defined but want to disable all of them during + If you have some curation sets defined but want to disable all of them during query time, you can do that by setting this parameter to false type: boolean default: false @@ -4401,6 +4569,13 @@ components: description: > The Id of a previous conversation to continue, this tells Typesense to include prior context when communicating with the LLM. type: string + validate_field_names: + description: > + Controls whether Typesense should validate if the fields exist in the schema. + When set to false, Typesense will not throw an error if a field is missing. + This is useful for programmatic grouping where not all fields may exist. + type: boolean + MultiSearchSearchesParameter: type: object required: @@ -4451,6 +4626,8 @@ components: type: object field_name: type: string + sampled: + type: boolean stats: type: object properties: @@ -4535,8 +4712,7 @@ components: name: type: string type: - type: string - enum: [popular_queries, nohits_queries, counter, log] + $ref: "#/components/schemas/AnalyticsRuleType" collection: type: string event_type: @@ -4561,6 +4737,9 @@ components: type: string weight: type: integer + AnalyticsRuleType: + type: string + enum: [popular_queries, nohits_queries, counter, log] AnalyticsRuleUpdate: type: object description: Fields allowed to update on an analytics rule @@ -4601,7 +4780,7 @@ components: query_counter_events: { type: integer } doc_log_events: { type: integer } doc_counter_events: { type: integer } - + APIStatsResponse: type: object properties: @@ -4734,19 +4913,13 @@ components: type: string enum: [create, update, upsert, emplace] DropTokensMode: - type: object - properties: - match: - type: string - description: > - Dictates the direction in which the words in the query must be dropped when the original words in the query do not appear in any document. 
- Values: right_to_left (default), left_to_right, both_sides:3 - A note on both_sides:3 - for queries up to 3 tokens (words) in length, this mode will drop tokens from both sides and exhaustively rank all matching results. - If query length is greater than 3 words, Typesense will just fallback to default behavior of right_to_left - enum: - - right_to_left - - left_to_right - - both_sides:3 + type: string + enum: [right_to_left, left_to_right, both_sides:3] + description: > + Dictates the direction in which the words in the query must be dropped when the original words in the query do not appear in any document. + Values: right_to_left (default), left_to_right, both_sides:3 + A note on both_sides:3 - for queries up to 3 tokens (words) in length, this mode will drop tokens from both sides and exhaustively rank all matching results. + If query length is greater than 3 words, Typesense will just fallback to default behavior of right_to_left ConversationModelCreateSchema: required: - model_name @@ -4930,15 +5103,11 @@ components: type: string description: ID of the deleted NL search model - SynonymItemSchema: + SynonymItemUpsertSchema: type: object required: - - id - synonyms properties: - id: - type: string - description: Unique identifier for the synonym item synonyms: type: array description: Array of words that should be considered as synonyms @@ -4956,6 +5125,17 @@ components: items: type: string + SynonymItemSchema: + allOf: + - type: object + required: + - id + properties: + id: + type: string + description: Unique identifier for the synonym item + - $ref: "#/components/schemas/SynonymItemUpsertSchema" + SynonymSetCreateSchema: type: object required: @@ -4989,9 +5169,6 @@ components: items: $ref: "#/components/schemas/SynonymSetSchema" - SynonymSetRetrieveSchema: - $ref: "#/components/schemas/SynonymSetCreateSchema" - SynonymSetDeleteSchema: type: object required: @@ -5001,8 +5178,176 @@ components: type: string description: Name of the deleted synonym set + 
SynonymItemDeleteSchema: + type: object + required: + - id + properties: + id: + type: string + description: ID of the deleted synonym item + + CurationItemCreateSchema: + type: object + required: + - rule + properties: + rule: + $ref: '#/components/schemas/CurationRule' + includes: + type: array + description: + List of document `id`s that should be included in the search results with their + corresponding `position`s. + items: + $ref: '#/components/schemas/CurationInclude' + excludes: + type: array + description: List of document `id`s that should be excluded from the search results. + items: + $ref: '#/components/schemas/CurationExclude' + filter_by: + type: string + description: > + A filter by clause that is applied to any search query that matches the curation rule. + remove_matched_tokens: + type: boolean + description: > + Indicates whether search query tokens that exist in the curation's rule should be removed from the search query. + metadata: + type: object + description: > + Return a custom JSON object in the Search API response, when this rule is triggered. This can be used to display a pre-defined message (eg: a promotion banner) on the front-end when a particular rule is triggered. + sort_by: + type: string + description: > + A sort by clause that is applied to any search query that matches the curation rule. + replace_query: + type: string + description: > + Replaces the current search query with this value, when the search query matches the curation rule. + filter_curated_hits: + type: boolean + description: > + When set to true, the filter conditions of the query is applied to the curated records as well. + Default: false. + effective_from_ts: + type: integer + description: > + A Unix timestamp that indicates the date/time from which the curation will be active. You can use this to create rules that start applying from a future point in time. 
+ effective_to_ts: + type: integer + description: > + A Unix timestamp that indicates the date/time until which the curation will be active. You can use this to create rules that stop applying after a period of time. + stop_processing: + type: boolean + description: > + When set to true, curation processing will stop at the first matching rule. When set to false curation processing will continue and multiple curation actions will be triggered in sequence. + Curations are processed in the lexical sort order of their id field. + id: + type: string + description: ID of the curation item + + + CurationItemSchema: + allOf: + - $ref: '#/components/schemas/CurationItemCreateSchema' + - type: object + required: + - id + properties: + id: + type: string + + CurationSetCreateSchema: + type: object + required: + - items + properties: + items: + type: array + description: Array of curation items + items: + $ref: '#/components/schemas/CurationItemCreateSchema' + description: + type: string + description: Optional description for the curation set + + CurationSetSchema: + allOf: + - $ref: '#/components/schemas/CurationSetCreateSchema' + - type: object + required: + - name + properties: + name: + type: string + + CurationRule: + type: object + properties: + tags: + type: array + description: List of tag values to associate with this curation rule. + items: + type: string + query: + type: string + description: Indicates what search queries should be curated + match: + type: string + description: > + Indicates whether the match on the query term should be `exact` or `contains`. + If we want to match all queries that contained the word `apple`, we will use the `contains` match instead. + enum: + - exact + - contains + filter_by: + type: string + description: > + Indicates that the curation should apply when the filter_by parameter in a search query exactly matches the string specified here (including backticks, spaces, brackets, etc). 
+ + CurationInclude: + type: object + required: + - id + - position + properties: + id: + type: string + description: document id that should be included + position: + type: integer + description: position number where document should be included in the search results + + CurationExclude: + type: object + required: + - id + properties: + id: + type: string + description: document id that should be excluded from the search results. + + CurationSetDeleteSchema: + type: object + required: + - name + properties: + name: + type: string + description: Name of the deleted curation set + CurationItemDeleteSchema: + type: object + required: + - id + properties: + id: + type: string + description: ID of the deleted curation item + securitySchemes: api_key_header: type: apiKey name: X-TYPESENSE-API-KEY - in: header \ No newline at end of file + in: header diff --git a/test/connection_test.exs b/test/connection_test.exs index 5255f6e..b109fae 100644 --- a/test/connection_test.exs +++ b/test/connection_test.exs @@ -11,7 +11,7 @@ defmodule ConnectionTest do message: "Forbidden - a valid `x-typesense-api-key` header must be sent." 
} - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "new/0 using the default config to creates a connection struct" do assert Connection.new() === %Connection{ api_key: "xyz", @@ -25,7 +25,7 @@ defmodule ConnectionTest do } end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "new/1 with custom fields creates a connection struct" do conn = Connection.new(%{ @@ -47,7 +47,7 @@ defmodule ConnectionTest do } end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: wrong api key was configured" do conn = %{ host: "localhost", @@ -59,7 +59,7 @@ defmodule ConnectionTest do assert {:error, @forbidden} == Collections.get_collections(conn: conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: overriding config with a wrong API key" do conn = %{ host: "localhost", @@ -71,34 +71,34 @@ defmodule ConnectionTest do assert {:error, @forbidden} = Collections.get_collections(conn: conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: health check, with incorrect port number" do conn = %{api_key: "xyz", host: "localhost", port: 8100, scheme: "http"} assert {:error, "connection refused"} = Health.health(conn: conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: health check, with incorrect host" do 
conn = %{api_key: "xyz", host: "my_test_host", port: 8108, scheme: "http"} assert {:error, "non-existing domain"} = Health.health(conn: conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "new/1 with Connection struct" do conn = Connection.new() assert %Connection{} = Connection.new(conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "new/1 with empty map raises ArgumentError" do error = assert_raise ArgumentError, fn -> Connection.new(%{}) end assert error.message === "Missing required fields: [:api_key, :host, :port, :scheme]" end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "new/1 with invalid data type raises ArgumentError" do invalid_inputs = [ nil, diff --git a/test/custom_client_test.exs b/test/custom_client_test.exs index 2c7ce83..02c5fdb 100644 --- a/test/custom_client_test.exs +++ b/test/custom_client_test.exs @@ -74,7 +74,7 @@ defmodule CustomClientTest do end) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "returns the configured options" do Application.put_env(:open_api_typesense, :options, finch: MyApp.CustomFinch, @@ -86,7 +86,7 @@ defmodule CustomClientTest do assert options === [finch: MyApp.CustomFinch, receive_timeout: 5_000] end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "returns an empty map if options is not configured" do Application.delete_env(:open_api_typesense, :options) @@ -95,7 +95,7 @@ 
defmodule CustomClientTest do assert options === nil end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "use another HTTP client" do map_conn = %{ api_key: "xyz", diff --git a/test/default_client_test.exs b/test/default_client_test.exs index d125da8..6633456 100644 --- a/test/default_client_test.exs +++ b/test/default_client_test.exs @@ -7,7 +7,7 @@ defmodule DefaultClientTest do require Logger describe "request/2" do - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "default to req http client if no custom client set" do conn = Connection.new() @@ -27,7 +27,7 @@ defmodule DefaultClientTest do end describe "build_req_client/2" do - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "override req options through req field" do req = Client.build_req_client(Connection.new(), @@ -43,7 +43,7 @@ defmodule DefaultClientTest do end end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "get api key" do assert "xyz" = Client.api_key() end diff --git a/test/operations/analytics_test.exs b/test/operations/analytics_test.exs index c8847f1..3f04aa0 100644 --- a/test/operations/analytics_test.exs +++ b/test/operations/analytics_test.exs @@ -9,6 +9,7 @@ defmodule AnalyticsTest do alias OpenApiTypesense.Connection alias OpenApiTypesense.AnalyticsEventCreateResponse alias OpenApiTypesense.AnalyticsEventsResponse + alias OpenApiTypesense.AnalyticsStatus setup_all do conn = Connection.new()
%{conn: conn, map_conn: map_conn} end + @tag ["30.0": true] + test "error (v30.0): create analytics rule with non-existent collection", %{ + conn: conn, + map_conn: map_conn + } do + name = "products_missing_query" + collection_name = "non_existent_collection" + + body = + %{ + "name" => name, + "collection" => "products", + "type" => "popular_queries", + "event_type" => "search", + "params" => %{ + "destination_collection" => collection_name, + "expand_query" => false, + "limit" => 1_000, + "capture_search_requests" => true + } + } + + error = {:error, %ApiResponse{message: "Destination collection does not exist"}} + assert ^error = Analytics.create_analytics_rule(body) + assert ^error = Analytics.create_analytics_rule(body, []) + assert ^error = Analytics.create_analytics_rule(body, conn: conn) + assert ^error = Analytics.create_analytics_rule(body, conn: map_conn) + end + @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: create analytics rule with non-existent collection", %{ conn: conn, @@ -99,12 +130,39 @@ defmodule AnalyticsTest do } } - assert {:error, %ApiResponse{message: _}} = Analytics.create_analytics_rule(body) - assert {:error, %ApiResponse{message: _}} = Analytics.create_analytics_rule(body, []) - assert {:error, %ApiResponse{message: _}} = Analytics.create_analytics_rule(body, conn: conn) + error = {:error, %OpenApiTypesense.ApiResponse{message: "Bad or missing events."}} + assert ^error = Analytics.create_analytics_rule(body) + assert ^error = Analytics.create_analytics_rule(body, []) + assert ^error = Analytics.create_analytics_rule(body, conn: conn) + assert ^error = Analytics.create_analytics_rule(body, conn: map_conn) + end + + @tag ["30.0": true] + test "success (v30.0): upsert analytics rule", %{conn: conn, map_conn: map_conn} do + name = "product_no_hits" + + body = + %{ + "name" => name, + "type" => "nohits_queries", + "collection" => "products", + "event_type" => "search", + "params" => %{ + 
"destination_collection" => "product_queries", + "expand_query" => false, + "limit" => 1_000, + "capture_search_requests" => true + } + } + + assert {:ok, %AnalyticsRule{name: ^name}} = Analytics.upsert_analytics_rule(name, body) + assert {:ok, %AnalyticsRule{name: ^name}} = Analytics.upsert_analytics_rule(name, body, []) - assert {:error, %ApiResponse{message: _}} = - Analytics.create_analytics_rule(body, conn: map_conn) + assert {:ok, %AnalyticsRule{name: ^name}} = + Analytics.upsert_analytics_rule(name, body, conn: conn) + + assert {:ok, %AnalyticsRule{name: ^name}} = + Analytics.upsert_analytics_rule(name, body, conn: map_conn) end @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] @@ -125,11 +183,8 @@ defmodule AnalyticsTest do } } - assert {:ok, %AnalyticsRule{name: ^name}} = - Analytics.upsert_analytics_rule(name, body) - - assert {:ok, %AnalyticsRule{name: ^name}} = - Analytics.upsert_analytics_rule(name, body, []) + assert {:ok, %AnalyticsRule{name: ^name}} = Analytics.upsert_analytics_rule(name, body) + assert {:ok, %AnalyticsRule{name: ^name}} = Analytics.upsert_analytics_rule(name, body, []) assert {:ok, %AnalyticsRule{name: ^name}} = Analytics.upsert_analytics_rule(name, body, conn: conn) @@ -164,7 +219,7 @@ defmodule AnalyticsTest do assert {:error, %ApiResponse{message: _}} = Analytics.create_analytics_rule(body) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: list analytics rules", %{conn: conn, map_conn: map_conn} do assert {:ok, rules} = Analytics.retrieve_analytics_rules() @@ -173,44 +228,140 @@ defmodule AnalyticsTest do assert {:ok, ^rules} = Analytics.retrieve_analytics_rules(conn: map_conn) end + @tag ["30.0": true] + test "success (v30.0): flush analytics", %{conn: conn, map_conn: map_conn} do + response = {:ok, %AnalyticsEventCreateResponse{ok: true}} + + assert ^response = 
Analytics.flush_analytics() + assert ^response = Analytics.flush_analytics([]) + assert ^response = Analytics.flush_analytics(conn: conn) + assert ^response = Analytics.flush_analytics(map_conn: map_conn) + end + @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": false] test "error: flush analytics", %{conn: conn, map_conn: map_conn} do - reason = %ApiResponse{message: "Not Found"} + reason = {:error, %ApiResponse{message: "Not Found"}} - assert {:error, ^reason} = Analytics.flush_analytics() - assert {:error, ^reason} = Analytics.flush_analytics([]) - assert {:error, ^reason} = Analytics.flush_analytics(conn: conn) - assert {:error, ^reason} = Analytics.flush_analytics(map_conn: map_conn) + assert ^reason = Analytics.flush_analytics() + assert ^reason = Analytics.flush_analytics([]) + assert ^reason = Analytics.flush_analytics(conn: conn) + assert ^reason = Analytics.flush_analytics(map_conn: map_conn) + end + + @tag ["30.0": true] + test "error (v30.0): get analytics events", %{conn: conn, map_conn: map_conn} do + reason = {:error, %ApiResponse{message: "Rule not found"}} + + opts = [user_id: "9903", name: "product_popularity", n: 1] + assert ^reason = Analytics.get_analytics_events(opts) + assert ^reason = Analytics.get_analytics_events(opts ++ [conn: conn]) + assert ^reason = Analytics.get_analytics_events(opts ++ [map_conn: map_conn]) end @tag ["29.0": true, "28.0": true, "27.1": false, "27.0": false, "26.0": false] - test "success: (v28.0+) get analytics events", %{conn: conn, map_conn: map_conn} do - events_response = %AnalyticsEventsResponse{events: []} + test "success (v28.0) : get analytics events", %{conn: conn, map_conn: map_conn} do + events_response = {:ok, %AnalyticsEventsResponse{events: []}} - assert {:ok, ^events_response} = Analytics.get_analytics_events() - assert {:ok, ^events_response} = Analytics.get_analytics_events([]) - assert {:ok, ^events_response} = Analytics.get_analytics_events(conn: conn) - assert {:ok, 
^events_response} = Analytics.get_analytics_events(map_conn: map_conn) + assert ^events_response = Analytics.get_analytics_events() + assert ^events_response = Analytics.get_analytics_events([]) + assert ^events_response = Analytics.get_analytics_events(conn: conn) + assert ^events_response = Analytics.get_analytics_events(map_conn: map_conn) end @tag ["29.0": false, "28.0": false, "27.1": true, "27.0": true, "26.0": false] - test "success: get analytics events", %{conn: conn, map_conn: map_conn} do - reason = %ApiResponse{message: "Not Found"} + test "error: get analytics events", %{conn: conn, map_conn: map_conn} do + reason = {:error, %ApiResponse{message: "Not Found"}} + + assert ^reason = Analytics.get_analytics_events() + assert ^reason = Analytics.get_analytics_events([]) + assert ^reason = Analytics.get_analytics_events(conn: conn) + assert ^reason = Analytics.get_analytics_events(map_conn: map_conn) + end - assert {:error, ^reason} = Analytics.get_analytics_events() - assert {:error, ^reason} = Analytics.get_analytics_events([]) - assert {:error, ^reason} = Analytics.get_analytics_events(conn: conn) - assert {:error, ^reason} = Analytics.get_analytics_events(map_conn: map_conn) + @tag ["30.0": true] + test "error (v30.0): get analytics status", %{conn: conn, map_conn: map_conn} do + status = %AnalyticsStatus{ + doc_counter_events: 0, + doc_log_events: 0, + log_prefix_queries: 0, + nohits_prefix_queries: 0, + popular_prefix_queries: 0, + query_counter_events: 0, + query_log_events: 0 + } + + assert {:ok, ^status} = Analytics.get_analytics_status() + assert {:ok, ^status} = Analytics.get_analytics_status([]) + assert {:ok, ^status} = Analytics.get_analytics_status(conn: conn) + assert {:ok, ^status} = Analytics.get_analytics_status(map_conn: map_conn) end @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": false] test "error: get analytics status", %{conn: conn, map_conn: map_conn} do - reason = %ApiResponse{message: "Not Found"} + reason = 
{:error, %ApiResponse{message: "Not Found"}} - assert {:error, ^reason} = Analytics.get_analytics_status() - assert {:error, ^reason} = Analytics.get_analytics_status([]) - assert {:error, ^reason} = Analytics.get_analytics_status(conn: conn) - assert {:error, ^reason} = Analytics.get_analytics_status(map_conn: map_conn) + assert ^reason = Analytics.get_analytics_status() + assert ^reason = Analytics.get_analytics_status([]) + assert ^reason = Analytics.get_analytics_status(conn: conn) + assert ^reason = Analytics.get_analytics_status(map_conn: map_conn) + end + + @tag ["30.0": true] + test "success (v30.0): send click events", %{conn: conn, map_conn: map_conn} do + name = "products_clicks" + + body = + %{ + "name" => name, + "type" => "counter", + "collection" => "products", + "event_type" => "click", + "params" => %{ + "destination_collection" => "products", + "counter_field" => "popularity", + "weight" => 1 + } + } + + assert {:ok, %AnalyticsRule{name: ^name}} = Analytics.create_analytics_rule(body) + assert {:ok, %AnalyticsRule{name: ^name}} = Analytics.retrieve_analytics_rule(name) + assert {:ok, %AnalyticsRule{name: ^name}} = Analytics.retrieve_analytics_rule(name, []) + + assert {:ok, %AnalyticsRule{name: ^name}} = + Analytics.retrieve_analytics_rule(name, conn: conn) + + assert {:ok, %AnalyticsRule{name: ^name}} = Analytics.retrieve_analytics_rule(name, []) + + assert {:ok, %AnalyticsRule{name: ^name}} = + Analytics.retrieve_analytics_rule(name, conn: map_conn) + + body = + %{ + "name" => name, + "event_type" => "click", + "data" => %{ + "doc_id" => "1024", + "user_id" => "111112" + } + } + + # Here's the reason why v26.0 is not tested + # Docs v26.0: https://typesense.org/docs/26.0/api/analytics-query-suggestions.html#sending-click-events + # Problem: the response JSON body is actually {"ok": true + # where it is missing a closing curly bracket "}" + response = {:ok, %AnalyticsEventCreateResponse{ok: true}} + assert ^response = 
Analytics.create_analytics_event(body) + assert ^response = Analytics.create_analytics_event(body, []) + assert ^response = Analytics.create_analytics_event(body, conn: conn) + assert ^response = Analytics.create_analytics_event(body, conn: map_conn) + + assert {:ok, %AnalyticsRule{name: ^name}} = + Analytics.delete_analytics_rule(name) + + error = {:error, %ApiResponse{message: "Rule not found"}} + assert ^error = Analytics.delete_analytics_rule(name, []) + assert ^error = Analytics.delete_analytics_rule(name, conn: conn) + assert ^error = Analytics.delete_analytics_rule(name, conn: map_conn) end @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": false] @@ -244,8 +395,7 @@ defmodule AnalyticsTest do assert {:ok, %AnalyticsRule{name: ^name}} = Analytics.retrieve_analytics_rule(name, conn: conn) - assert {:ok, %AnalyticsRule{name: ^name}} = - Analytics.retrieve_analytics_rule(name, []) + assert {:ok, %AnalyticsRule{name: ^name}} = Analytics.retrieve_analytics_rule(name, []) assert {:ok, %AnalyticsRule{name: ^name}} = Analytics.retrieve_analytics_rule(name, conn: map_conn) @@ -265,29 +415,18 @@ defmodule AnalyticsTest do # Docs v26.0: https://typesense.org/docs/26.0/api/analytics-query-suggestions.html#sending-click-events # Problem: the response JSON body is actually {"ok": true # where it is missing a closing curly bracket "}" - assert {:ok, %AnalyticsEventCreateResponse{ok: true}} = - Analytics.create_analytics_event(body) - - assert {:ok, %AnalyticsEventCreateResponse{ok: true}} = - Analytics.create_analytics_event(body, []) - - assert {:ok, %AnalyticsEventCreateResponse{ok: true}} = - Analytics.create_analytics_event(body, conn: conn) - - assert {:ok, %AnalyticsEventCreateResponse{ok: true}} = - Analytics.create_analytics_event(body, conn: map_conn) + response = {:ok, %AnalyticsEventCreateResponse{ok: true}} + assert ^response = Analytics.create_analytics_event(body) + assert ^response = Analytics.create_analytics_event(body, []) + assert 
^response = Analytics.create_analytics_event(body, conn: conn) + assert ^response = Analytics.create_analytics_event(body, conn: map_conn) assert {:ok, %AnalyticsRule{name: ^name}} = Analytics.delete_analytics_rule(name) - reason = "Rule not found." - - assert {:error, %ApiResponse{message: ^reason}} = Analytics.delete_analytics_rule(name, []) - - assert {:error, %ApiResponse{message: ^reason}} = - Analytics.delete_analytics_rule(name, conn: conn) - - assert {:error, %ApiResponse{message: ^reason}} = - Analytics.delete_analytics_rule(name, conn: map_conn) + error = {:error, %ApiResponse{message: "Rule not found."}} + assert ^error = Analytics.delete_analytics_rule(name, []) + assert ^error = Analytics.delete_analytics_rule(name, conn: conn) + assert ^error = Analytics.delete_analytics_rule(name, conn: map_conn) end end diff --git a/test/operations/collections_test.exs b/test/operations/collections_test.exs index 2e036a7..bedeaf7 100644 --- a/test/operations/collections_test.exs +++ b/test/operations/collections_test.exs @@ -32,7 +32,7 @@ defmodule CollectionsTest do %{schema: schema, alias_name: "foo_bar", conn: conn, map_conn: map_conn} end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: clone a collection schema" do schema = %{ "name" => "vehicles", @@ -56,7 +56,7 @@ defmodule CollectionsTest do assert {:ok, _} = Collections.delete_collection(payload["name"]) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: create a collection", %{schema: schema, conn: conn, map_conn: map_conn} do name = schema["name"] @@ -72,7 +72,7 @@ defmodule CollectionsTest do Collections.get_collections() end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": 
true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: list collections", %{conn: conn, map_conn: map_conn} do assert {:ok, collections} = Collections.get_collections() @@ -86,7 +86,7 @@ defmodule CollectionsTest do assert {:ok, _} = Collections.get_collections(conn: map_conn, limit: 1) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: update an existing collection", %{conn: conn, map_conn: map_conn} do name = "burgers" @@ -119,7 +119,7 @@ defmodule CollectionsTest do Collections.delete_collection(name) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: list empty aliases", %{conn: conn, map_conn: map_conn} do assert {:ok, %CollectionAliasesResponse{aliases: aliases}} = Collections.get_aliases() @@ -132,22 +132,17 @@ defmodule CollectionsTest do Collections.get_aliases(conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: delete a missing collection", %{conn: conn, map_conn: map_conn} do - assert Collections.delete_collection("non-existing-collection") == - {:error, - %ApiResponse{ - message: "No collection with name `non-existing-collection` found." 
- }} + error = {:error, %ApiResponse{message: "No collection with name `xyz` found."}} - assert {:error, %ApiResponse{message: _}} = Collections.delete_collection("xyz", []) - assert {:error, %ApiResponse{message: _}} = Collections.delete_collection("xyz", conn: conn) - - assert {:error, %ApiResponse{message: _}} = - Collections.delete_collection("xyz", conn: map_conn) + assert ^error = Collections.delete_collection("xyz") + assert ^error = Collections.delete_collection("xyz", []) + assert ^error = Collections.delete_collection("xyz", conn: conn) + assert ^error = Collections.delete_collection("xyz", conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: upsert an alias", %{ schema: schema, alias_name: alias_name, @@ -183,8 +178,8 @@ defmodule CollectionsTest do assert {:error, %ApiResponse{message: _}} = Collections.get_alias("xyz", conn: map_conn) end - @tag ["29.0": true] - test "error: get a non-existing alias (> v28.0)", %{conn: conn, map_conn: map_conn} do + @tag ["30.0": true, "29.0": true] + test "error (v28.0): get a non-existing alias", %{conn: conn, map_conn: map_conn} do assert Collections.get_alias("non-existing-alias") == {:error, %ApiResponse{message: "Collection not found"}} @@ -204,8 +199,8 @@ defmodule CollectionsTest do assert {:error, %ApiResponse{message: _}} = Collections.get_collection("xyz", conn: map_conn) end - @tag ["29.0": true] - test "error: get a non-existing collection (> v28.0)", %{conn: conn, map_conn: map_conn} do + @tag ["30.0": true, "29.0": true] + test "error (v28.0): get a non-existing collection", %{conn: conn, map_conn: map_conn} do assert Collections.get_collection("non-existing-collection") == {:error, %ApiResponse{message: "Collection not found"}} diff --git a/test/operations/conversations_test.exs b/test/operations/conversations_test.exs index 60a5bf3..9171890 100644 --- 
a/test/operations/conversations_test.exs +++ b/test/operations/conversations_test.exs @@ -34,7 +34,7 @@ defmodule ConversationsTest do %{conn: conn, map_conn: map_conn} end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: list conversation models", %{conn: conn, map_conn: map_conn} do assert {:ok, models} = Conversations.retrieve_all_conversation_models() @@ -43,7 +43,7 @@ defmodule ConversationsTest do assert {:ok, ^models} = Conversations.retrieve_all_conversation_models(conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: get a non-existent conversation model", %{conn: conn, map_conn: map_conn} do assert {:error, %ApiResponse{message: "Model not found"}} = Conversations.retrieve_conversation_model("non-existent") @@ -53,7 +53,7 @@ defmodule ConversationsTest do assert {:error, _} = Conversations.retrieve_conversation_model("xyz", conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: delete a conversation model", %{conn: conn, map_conn: map_conn} do assert {:error, %ApiResponse{message: "Model not found"}} = Conversations.delete_conversation_model("non-existent") @@ -63,7 +63,7 @@ defmodule ConversationsTest do assert {:error, _} = Conversations.delete_conversation_model("xyz", conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: create a conversation model with incorrect API key", %{ conn: conn, map_conn: map_conn @@ -95,7 +95,7 @@ defmodule ConversationsTest do assert {:error, _} = 
Conversations.create_conversation_model(body, conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: update a conversation model with incorrect API key", %{ conn: conn, map_conn: map_conn diff --git a/test/operations/curation_sets_test.exs b/test/operations/curation_sets_test.exs new file mode 100644 index 0000000..ba03aa8 --- /dev/null +++ b/test/operations/curation_sets_test.exs @@ -0,0 +1,387 @@ +defmodule OpenApiTypesense.CurationSetsTest do + use ExUnit.Case, async: true + + alias OpenApiTypesense.ApiResponse + alias OpenApiTypesense.Connection + alias OpenApiTypesense.CurationItemDeleteSchema + alias OpenApiTypesense.CurationItemSchema + alias OpenApiTypesense.CurationSetDeleteSchema + alias OpenApiTypesense.CurationSets + alias OpenApiTypesense.CurationSetSchema + + setup_all do + conn = Connection.new() + map_conn = %{api_key: "xyz", host: "localhost", port: 8108, scheme: "http"} + + on_exit(fn -> + curation_sets = + case CurationSets.retrieve_curation_sets() do + {:ok, sets} -> + sets + + {:error, _reason} -> + [] + end + + if Enum.any?(curation_sets) do + Enum.each(curation_sets, fn set -> + {:ok, _set} = CurationSets.delete_curation_set(set.name) + end) + end + end) + + %{conn: conn, map_conn: map_conn} + end + + @tag ["30.0": true] + test "success: retrieve a curation set", %{conn: conn, map_conn: map_conn} do + name = "curate_catalog" + + curation_set = %{ + "items" => [ + %{ + "id" => "customize-magazine", + "rule" => %{ + "query" => "Fixie weekly", + "match" => "exact" + }, + "includes" => [ + %{"id" => "602", "position" => 1}, + %{"id" => "12", "position" => 2} + ], + "excludes" => [ + %{"id" => "999"} + ] + } + ] + } + + assert {:ok, %CurationSetSchema{name: ^name}} = + CurationSets.upsert_curation_set(name, curation_set) + + assert {:ok, %CurationSetSchema{name: ^name}} = 
CurationSets.retrieve_curation_set(name) + assert {:ok, %CurationSetSchema{name: ^name}} = CurationSets.retrieve_curation_set(name, []) + + assert {:ok, %CurationSetSchema{name: ^name}} = + CurationSets.retrieve_curation_set(name, conn: conn) + + assert {:ok, %CurationSetSchema{name: ^name}} = + CurationSets.retrieve_curation_set(name, map_conn: map_conn) + end + + @tag ["30.0": true] + test "error: retrieve a non-existing curation set", %{conn: conn, map_conn: map_conn} do + set_name = "unknown-set" + error = {:error, %ApiResponse{message: "Curation index not found"}} + assert ^error = CurationSets.retrieve_curation_set(set_name) + assert ^error = CurationSets.retrieve_curation_set(set_name, []) + assert ^error = CurationSets.retrieve_curation_set(set_name, conn: conn) + assert ^error = CurationSets.retrieve_curation_set(set_name, map_conn: map_conn) + end + + @tag ["30.0": true] + test "error: retrieve a non-existing curation set item", %{conn: conn, map_conn: map_conn} do + name = "curate_products_error" + + curation_set = %{ + "items" => [ + %{ + "id" => "customize-apple", + "rule" => %{ + "query" => "apple", + "match" => "exact" + }, + "includes" => [ + %{"id" => "422", "position" => 1}, + %{"id" => "54", "position" => 2} + ], + "excludes" => [ + %{"id" => "287"} + ] + } + ] + } + + assert {:ok, %CurationSetSchema{name: ^name}} = + CurationSets.upsert_curation_set(name, curation_set) + + item_id = "unknown-item" + error = {:error, %ApiResponse{message: "Not Found"}} + assert ^error = CurationSets.retrieve_curation_set_item(name, item_id) + assert ^error = CurationSets.retrieve_curation_set_item(name, item_id, []) + assert ^error = CurationSets.retrieve_curation_set_item(name, item_id, conn: conn) + assert ^error = CurationSets.retrieve_curation_set_item(name, item_id, map_conn: map_conn) + end + + @tag ["30.0": true] + test "error: retrieving an item_id with whitespace in its name", %{ + conn: conn, + map_conn: map_conn + } do + set_name = "curate_products" + 
item_id = "white space" + + error = + {:error, ~s(invalid request target: "/curation_sets/curate_products/items/white space")} + + assert ^error = CurationSets.retrieve_curation_set_item(set_name, item_id) + assert ^error = CurationSets.retrieve_curation_set_item(set_name, item_id, []) + assert ^error = CurationSets.retrieve_curation_set_item(set_name, item_id, conn: conn) + assert ^error = CurationSets.retrieve_curation_set_item(set_name, item_id, map_conn: map_conn) + end + + @tag ["30.0": true] + test "success: list all curation sets", %{conn: conn, map_conn: map_conn} do + name = "curate_products" + + curation_set = %{ + "items" => [ + %{ + "id" => "customize-apple", + "rule" => %{ + "query" => "apple", + "match" => "exact" + }, + "includes" => [ + %{"id" => "422", "position" => 1}, + %{"id" => "54", "position" => 2} + ], + "excludes" => [ + %{"id" => "287"} + ] + } + ] + } + + assert {:ok, %CurationSetSchema{name: ^name}} = + CurationSets.upsert_curation_set(name, curation_set) + + assert {:ok, curation_sets} = CurationSets.retrieve_curation_sets() + assert Enum.any?(curation_sets) + assert {:ok, _} = CurationSets.retrieve_curation_sets([]) + assert {:ok, _} = CurationSets.retrieve_curation_sets(conn: conn) + assert {:ok, _} = CurationSets.retrieve_curation_sets(map_conn: map_conn) + end + + @tag ["30.0": true] + test "success: list all curation set items", %{conn: conn, map_conn: map_conn} do + set_name = "curate_products" + + curation_set = %{ + "items" => [ + %{ + "id" => "customize-apple", + "rule" => %{ + "query" => "apple", + "match" => "exact" + }, + "includes" => [ + %{"id" => "422", "position" => 1}, + %{"id" => "54", "position" => 2} + ], + "excludes" => [ + %{"id" => "287"} + ] + } + ] + } + + assert {:ok, %CurationSetSchema{name: ^set_name}} = + CurationSets.upsert_curation_set(set_name, curation_set) + + item_id = "dynamic-sort-filter-demo" + + body = %{ + "rule" => %{ + "filter_by" => "store:={store}", + "query" => "apple", + "match" => "exact" + 
}, + "remove_matched_tokens" => true, + "sort_by" => "sales.{store}:desc, inventory.{store}:desc" + } + + assert {:ok, %CurationItemSchema{id: ^item_id}} = + CurationSets.upsert_curation_set_item(set_name, item_id, body) + + assert {:ok, set_items} = CurationSets.retrieve_curation_set_items(set_name) + assert Enum.any?(set_items) + assert {:ok, _set_items} = CurationSets.retrieve_curation_set_items(set_name, []) + assert {:ok, _set_items} = CurationSets.retrieve_curation_set_items(set_name, conn: conn) + + assert {:ok, _set_items} = + CurationSets.retrieve_curation_set_items(set_name, map_conn: map_conn) + end + + @tag ["30.0": true] + test "success: create or update a curation set", %{conn: conn, map_conn: map_conn} do + name = "curate_products" + + curation_set = %{ + "items" => [ + %{ + "id" => "customize-apple", + "rule" => %{ + "query" => "apple", + "match" => "exact" + }, + "includes" => [ + %{"id" => "422", "position" => 1}, + %{"id" => "54", "position" => 2} + ], + "excludes" => [ + %{"id" => "287"} + ] + } + ] + } + + assert {:ok, %CurationSetSchema{name: ^name}} = + CurationSets.upsert_curation_set(name, curation_set) + + assert {:ok, %CurationSetSchema{name: ^name}} = + CurationSets.upsert_curation_set(name, curation_set, []) + + assert {:ok, %CurationSetSchema{name: ^name}} = + CurationSets.upsert_curation_set(name, curation_set, conn: conn) + + assert {:ok, %CurationSetSchema{name: ^name}} = + CurationSets.upsert_curation_set(name, curation_set, map_conn: map_conn) + end + + @tag ["30.0": true] + test "success: upsert a curation set item", %{conn: conn, map_conn: map_conn} do + set_name = "curate_products" + + curation_set = %{ + "items" => [ + %{ + "id" => "customize-apple", + "rule" => %{ + "query" => "apple", + "match" => "exact" + }, + "includes" => [ + %{"id" => "422", "position" => 1}, + %{"id" => "54", "position" => 2} + ], + "excludes" => [ + %{"id" => "287"} + ] + } + ] + } + + assert {:ok, %CurationSetSchema{name: ^set_name}} = + 
CurationSets.upsert_curation_set(set_name, curation_set) + + item_id = "dynamic-sort-filter" + + body = %{ + "rule" => %{ + "filter_by" => "store:={store}", + "query" => "apple", + "match" => "exact" + }, + "remove_matched_tokens" => true, + "sort_by" => "sales.{store}:desc, inventory.{store}:desc" + } + + assert {:ok, %CurationItemSchema{id: ^item_id}} = + CurationSets.upsert_curation_set_item(set_name, item_id, body) + + assert {:ok, %CurationItemSchema{id: ^item_id}} = + CurationSets.upsert_curation_set_item(set_name, item_id, body, []) + + assert {:ok, %CurationItemSchema{id: ^item_id}} = + CurationSets.upsert_curation_set_item(set_name, item_id, body, conn: conn) + + assert {:ok, %CurationItemSchema{id: ^item_id}} = + CurationSets.upsert_curation_set_item(set_name, item_id, body, map_conn: map_conn) + end + + @tag ["30.0": true] + test "success: delete a curation set", %{conn: conn, map_conn: map_conn} do + name = "curate_products_delete" + + curation_set = %{ + "items" => [ + %{ + "id" => "customize-apple", + "rule" => %{ + "query" => "apple", + "match" => "exact" + }, + "includes" => [ + %{"id" => "422", "position" => 1}, + %{"id" => "54", "position" => 2} + ], + "excludes" => [ + %{"id" => "287"} + ] + } + ] + } + + assert {:ok, %CurationSetSchema{name: ^name}} = + CurationSets.upsert_curation_set(name, curation_set) + + assert {:ok, %CurationSetDeleteSchema{name: ^name}} = CurationSets.delete_curation_set(name) + + error = {:error, %ApiResponse{message: "Curation index not found"}} + assert ^error = CurationSets.delete_curation_set(name, []) + assert ^error = CurationSets.delete_curation_set(name, conn: conn) + assert ^error = CurationSets.delete_curation_set(name, map_conn: map_conn) + end + + @tag ["30.0": true] + test "success: delete a curation set item", %{conn: conn, map_conn: map_conn} do + name = "curate_products_item" + + curation_set = %{ + "items" => [ + %{ + "id" => "customize-apple", + "rule" => %{ + "query" => "apple", + "match" => "exact" + 
}, + "includes" => [ + %{"id" => "422", "position" => 1}, + %{"id" => "54", "position" => 2} + ], + "excludes" => [ + %{"id" => "287"} + ] + } + ] + } + + assert {:ok, %CurationSetSchema{name: ^name}} = + CurationSets.upsert_curation_set(name, curation_set) + + item_id = "dynamic-sort-filter-item" + + body = %{ + "rule" => %{ + "filter_by" => "store:={store}", + "query" => "apple", + "match" => "exact" + }, + "remove_matched_tokens" => true, + "sort_by" => "sales.{store}:desc, inventory.{store}:desc" + } + + assert {:ok, %CurationItemSchema{id: ^item_id}} = + CurationSets.upsert_curation_set_item(name, item_id, body) + + assert {:ok, %CurationItemDeleteSchema{id: ^item_id}} = + CurationSets.delete_curation_set_item(name, item_id) + + error = {:error, %ApiResponse{message: "Could not find that `id`."}} + + assert ^error = CurationSets.delete_curation_set_item(name, item_id, []) + assert ^error = CurationSets.delete_curation_set_item(name, item_id, conn: conn) + assert ^error = CurationSets.delete_curation_set_item(name, item_id, map_conn: map_conn) + end +end diff --git a/test/operations/curation_test.exs b/test/operations/curation_test.exs index 2fd4d59..029ab6e 100644 --- a/test/operations/curation_test.exs +++ b/test/operations/curation_test.exs @@ -34,6 +34,31 @@ defmodule CurationTest do %{schema_name: name, conn: conn, map_conn: map_conn} end + @tag ["30.0": true] + test "error (v30.0): deprecated function for upsert search override", %{ + schema_name: schema_name + } do + override_id = "customize-loca-cola" + + body = + %{ + "rule" => %{ + "query" => "Loca Cola", + "match" => "exact" + }, + "includes" => [ + %{"id" => "422", "position" => 1}, + %{"id" => "54", "position" => 2} + ], + "excludes" => [ + %{"id" => "287"} + ] + } + + error = {:error, %ApiResponse{message: "Not Found"}} + assert ^error = Curation.upsert_search_override(schema_name, override_id, body) + end + @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: 
upsert search override", %{ schema_name: schema_name, @@ -67,6 +92,16 @@ defmodule CurationTest do Curation.upsert_search_override(schema_name, override_id, body, conn: map_conn) end + @tag ["30.0": true] + test "error (v30.0): deprecated function for delete search override", %{ + schema_name: schema_name + } do + message = "Not Found" + + assert {:error, %ApiResponse{message: ^message}} = + Curation.delete_search_override(schema_name, "test") + end + @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: delete search override", %{ schema_name: schema_name, @@ -83,6 +118,14 @@ defmodule CurationTest do assert {:error, _} = Curation.delete_search_override(schema_name, "test", conn: map_conn) end + @tag ["30.0": true] + test "error (v30.0): deprecated function for list collection overrides", %{ + schema_name: schema_name + } do + error = {:error, %ApiResponse{message: "Not Found"}} + assert ^error = Curation.get_search_overrides(schema_name) + end + @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: list collection overrides", %{ schema_name: schema_name, diff --git a/test/operations/debug_test.exs b/test/operations/debug_test.exs index 78d316d..03b8fbd 100644 --- a/test/operations/debug_test.exs +++ b/test/operations/debug_test.exs @@ -12,7 +12,7 @@ defmodule DebugTest do %{conn: conn, map_conn: map_conn} end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: list stopwords sets", %{conn: conn, map_conn: map_conn} do assert {:ok, %{"state" => 1, "version" => _}} = Debug.debug() assert {:ok, %{"state" => 1, "version" => _}} = Debug.debug([]) @@ -20,7 +20,7 @@ defmodule DebugTest do assert {:ok, %{"state" => 1, "version" => _}} = Debug.debug(conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, 
"29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "field" do assert [version: :string] = Debug.__fields__(:debug_200_json_resp) end diff --git a/test/operations/documents_test.exs b/test/operations/documents_test.exs index 2d2df03..9e7f58e 100644 --- a/test/operations/documents_test.exs +++ b/test/operations/documents_test.exs @@ -38,7 +38,7 @@ defmodule DocumentsTest do %{coll_name: name, conn: conn, map_conn: map_conn} end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: update a document", %{coll_name: coll_name} do body = %{ "shoes_id" => 12_299, @@ -57,7 +57,7 @@ defmodule DocumentsTest do Documents.update_document(coll_name, document_id, body) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: update a non-existent document", %{ coll_name: coll_name, conn: conn, @@ -81,7 +81,7 @@ defmodule DocumentsTest do assert {:error, _} = Documents.update_document(coll_name, document_id, body, conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: search a document", %{coll_name: coll_name, conn: conn, map_conn: map_conn} do body = [ @@ -123,7 +123,7 @@ defmodule DocumentsTest do assert {:ok, _} = Documents.search_collection(coll_name, List.flatten([conn: map_conn], opts)) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: update non-existent documents", %{ coll_name: coll_name, conn: conn, @@ -145,7 +145,7 @@ defmodule DocumentsTest do assert {:ok, _} = Documents.import_documents(coll_name, body, 
conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: multi-search with no documents", %{conn: conn, map_conn: map_conn} do body = %{ @@ -170,7 +170,7 @@ defmodule DocumentsTest do assert {:ok, _} = Documents.multi_search(body, List.flatten([conn: map_conn], params)) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: update documents by query", %{ coll_name: coll_name, conn: conn, @@ -229,7 +229,7 @@ defmodule DocumentsTest do ) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: importing empty documents", %{coll_name: coll_name} do assert {:ok, ""} = Documents.import_documents(coll_name, []) @@ -240,7 +240,7 @@ defmodule DocumentsTest do Documents.import_documents(coll_name, [%{}]) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: import documents where payload is JSONL", %{coll_name: coll_name} do body = [ @@ -276,7 +276,7 @@ defmodule DocumentsTest do assert {:ok, _} = Documents.import_documents(coll_name, body) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: import documents", %{coll_name: coll_name} do body = [ @@ -311,7 +311,7 @@ defmodule DocumentsTest do assert {:ok, _} = Documents.import_documents(coll_name, body, action: "create") end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, 
"27.1": true, "27.0": true, "26.0": true] test "success: index a document", %{coll_name: coll_name, conn: conn, map_conn: map_conn} do shoes_id = 220 @@ -335,6 +335,12 @@ defmodule DocumentsTest do assert {:ok, _} = Documents.index_document(coll_name, body, conn: map_conn) end + @tag ["30.0": true] + test "error (v30.0): deprecated function for list collection overrides" do + assert {:error, %ApiResponse{message: "Not Found"}} = + Documents.get_search_overrides("wrong_collection") + end + @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: list collection overrides", %{ coll_name: coll_name, @@ -354,7 +360,7 @@ defmodule DocumentsTest do assert {:error, _} = Documents.get_search_overrides("xyz", conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: get a non-existent override", %{ coll_name: coll_name, conn: conn, @@ -368,6 +374,14 @@ defmodule DocumentsTest do assert {:error, _} = Documents.get_search_override(coll_name, "xyz", conn: map_conn) end + @tag ["30.0": true] + test "error (v30.0): deprecated function for delete a non-existent override", %{ + coll_name: coll_name + } do + assert {:error, %ApiResponse{message: "Not Found"}} = + Documents.delete_search_override(coll_name, "non-existent") + end + @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: delete a non-existent override", %{ coll_name: coll_name, @@ -382,7 +396,7 @@ defmodule DocumentsTest do assert {:error, _} = Documents.delete_search_override(coll_name, "xyz", conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: delete a document", %{coll_name: coll_name, conn: conn, map_conn: map_conn} do shoes_id = 420 @@ -414,7 +428,7 @@ 
defmodule DocumentsTest do assert {:error, _} = Documents.delete_document(coll_name, id, conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: delete all documents", %{coll_name: coll_name, conn: conn, map_conn: map_conn} do body = [ @@ -463,7 +477,7 @@ defmodule DocumentsTest do Documents.delete_documents(coll_name, List.flatten([conn: map_conn], opts)) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: get a non-existent document", %{ coll_name: coll_name, conn: conn, @@ -480,7 +494,7 @@ defmodule DocumentsTest do assert {:error, _} = Documents.get_document(coll_name, document_id, conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: export document from a non-existent collection", %{conn: conn, map_conn: map_conn} do opts = [exclude_fields: "fields"] @@ -494,6 +508,27 @@ defmodule DocumentsTest do assert {:error, _} = Documents.export_documents("xyz", List.flatten([conn: map_conn], opts)) end + @tag ["30.0": true] + test "error (v30.0): deprecated function for upsert a search override", %{coll_name: coll_name} do + body = + %{ + "rule" => %{ + "query" => "apple", + "match" => "exact" + }, + "includes" => [ + %{"id" => "422", "position" => 1}, + %{"id" => "54", "position" => 2} + ], + "excludes" => [ + %{"id" => "287"} + ] + } + + assert {:error, %ApiResponse{message: "Not Found"}} = + Documents.upsert_search_override(coll_name, "customize-apple", body) + end + @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: upsert a search override", %{ coll_name: coll_name, @@ -527,7 +562,7 @@ defmodule 
DocumentsTest do Documents.upsert_search_override(coll_name, "customize-apple", body, conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "field" do assert [num_deleted: :integer] = Documents.__fields__(:delete_documents_200_json_resp) assert [num_updated: :integer] = Documents.__fields__(:update_documents_200_json_resp) diff --git a/test/operations/health_test.exs b/test/operations/health_test.exs index 7f7c04d..9b388ba 100644 --- a/test/operations/health_test.exs +++ b/test/operations/health_test.exs @@ -12,7 +12,7 @@ defmodule HealthTest do %{conn: conn, map_conn: map_conn} end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: health check", %{conn: conn, map_conn: map_conn} do assert {:ok, %HealthStatus{ok: true}} = Health.health() assert {:ok, %HealthStatus{ok: true}} = Health.health([]) @@ -20,7 +20,7 @@ defmodule HealthTest do assert {:ok, %HealthStatus{ok: true}} = Health.health(conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: health check error" do conn = Connection.new(%{ @@ -38,7 +38,7 @@ defmodule HealthTest do ]) === true end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: health check connection refused" do conn = Connection.new(%{ @@ -51,7 +51,7 @@ defmodule HealthTest do assert {:error, "connection refused"} = Health.health(conn: conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, 
"26.0": true] test "error: health check non-existing domain" do conn = %{ api_key: "wrong_key", diff --git a/test/operations/keys_test.exs b/test/operations/keys_test.exs index ca9db2a..376425b 100644 --- a/test/operations/keys_test.exs +++ b/test/operations/keys_test.exs @@ -29,7 +29,7 @@ defmodule KeysTest do %{api_key_schema: api_key_schema, conn: conn, map_conn: map_conn} end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: get a specific key", %{ api_key_schema: api_key_schema, conn: conn, @@ -45,7 +45,7 @@ defmodule KeysTest do assert {:ok, %ApiKey{id: ^key_id}} = Keys.get_key(key_id, conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: list API keys", %{conn: conn, map_conn: map_conn} do assert {:ok, %ApiKeysResponse{keys: keys}} = Keys.get_keys() @@ -54,7 +54,7 @@ defmodule KeysTest do assert {:ok, %ApiKeysResponse{keys: ^keys}} = Keys.get_keys(conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: delete an API key", %{ api_key_schema: api_key_schema, conn: conn, @@ -70,7 +70,7 @@ defmodule KeysTest do assert {:error, _} = Keys.delete_key(key_id, conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: create an search-only API key", %{ api_key_schema: api_key_schema, conn: conn, @@ -82,7 +82,7 @@ defmodule KeysTest do assert {:ok, %ApiKey{}} = Keys.create_key(api_key_schema, conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag 
["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: create an admin API key", %{api_key_schema: api_key_schema} do body = api_key_schema diff --git a/test/operations/operations_test.exs b/test/operations/operations_test.exs index 4b0154d..25c5523 100644 --- a/test/operations/operations_test.exs +++ b/test/operations/operations_test.exs @@ -16,7 +16,7 @@ defmodule OperationsTest do %{conn: conn, map_conn: map_conn} end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: retrieve api stats", %{conn: conn, map_conn: map_conn} do assert {:ok, %APIStatsResponse{}} = Operations.retrieve_api_stats() assert {:ok, %APIStatsResponse{}} = Operations.retrieve_api_stats([]) @@ -24,7 +24,7 @@ defmodule OperationsTest do assert {:ok, %APIStatsResponse{}} = Operations.retrieve_api_stats(conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: retrieve metrics", %{conn: conn, map_conn: map_conn} do assert {:ok, %{system_cpu_active_percentage: _}} = Operations.retrieve_metrics() assert {:ok, %{system_cpu_active_percentage: _}} = Operations.retrieve_metrics([]) @@ -32,7 +32,7 @@ defmodule OperationsTest do assert {:ok, %{system_cpu_active_percentage: _}} = Operations.retrieve_metrics(conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: toggle threshold time for request log", %{conn: conn, map_conn: map_conn} do assert {:ok, %SuccessStatus{success: true}} = Operations.toggle_slow_request_log(%{"log-slow-requests-time-ms" => 2_000}) @@ -47,7 +47,7 @@ defmodule OperationsTest do 
Operations.toggle_slow_request_log(body, conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: clear cache", %{conn: conn, map_conn: map_conn} do assert {:ok, %SuccessStatus{success: true}} = Operations.clear_cache() assert {:ok, %SuccessStatus{success: true}} = Operations.clear_cache([]) @@ -55,7 +55,7 @@ defmodule OperationsTest do assert {:ok, %SuccessStatus{success: true}} = Operations.clear_cache(conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: compact database", %{conn: conn, map_conn: map_conn} do assert {:ok, %SuccessStatus{success: true}} = Operations.compact_db() assert {:ok, %SuccessStatus{success: true}} = Operations.compact_db([]) @@ -63,7 +63,7 @@ defmodule OperationsTest do assert {:ok, %SuccessStatus{success: true}} = Operations.compact_db(conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: take snapshot", %{conn: conn, map_conn: map_conn} do # we have to add sleep timer for github actions # otherwise it will return like: @@ -87,7 +87,7 @@ defmodule OperationsTest do Operations.take_snapshot(List.flatten([conn: map_conn], params)) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: re-elect leader", %{conn: conn, map_conn: map_conn} do assert {:ok, %SuccessStatus{success: false}} = Operations.vote() assert {:ok, %SuccessStatus{success: false}} = Operations.vote([]) @@ -95,7 +95,7 @@ defmodule OperationsTest do assert {:ok, %SuccessStatus{success: false}} = 
Operations.vote(conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": false, "27.0": false, "26.0": false] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": false, "27.0": false, "26.0": false] test "success: (v28.0) get schema changes", %{conn: conn, map_conn: map_conn} do assert {:ok, schemas} = Operations.get_schema_changes() diff --git a/test/operations/override_test.exs b/test/operations/override_test.exs index 3b61b4e..36774ab 100644 --- a/test/operations/override_test.exs +++ b/test/operations/override_test.exs @@ -12,23 +12,23 @@ defmodule OverrideTest do %{conn: conn, map_conn: map_conn} end - @tag ["28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "error: retrieve an override", %{conn: conn, map_conn: map_conn} do - assert {:error, %ApiResponse{message: "Not Found"}} = - Override.get_search_override("helmets", "custom-helmet") + error = {:error, %ApiResponse{message: "Not Found"}} - assert {:error, _} = Override.get_search_override("helmets", "custom-helmet", []) - assert {:error, _} = Override.get_search_override("helmets", "custom-helmet", conn: conn) - assert {:error, _} = Override.get_search_override("helmets", "custom-helmet", conn: map_conn) + assert ^error = Override.get_search_override("helmets", "custom-helmet") + assert ^error = Override.get_search_override("helmets", "custom-helmet", []) + assert ^error = Override.get_search_override("helmets", "custom-helmet", conn: conn) + assert ^error = Override.get_search_override("helmets", "custom-helmet", conn: map_conn) end @tag ["29.0": true] - test "error: retrieve an override (> v28.0)", %{conn: conn, map_conn: map_conn} do - assert {:error, %ApiResponse{message: "Collection not found"}} = - Override.get_search_override("helmets", "custom-helmet") + test "error (v29.0): retrieve an override", %{conn: conn, map_conn: map_conn} do + error = {:error, %ApiResponse{message: "Collection not found"}} 
- assert {:error, _} = Override.get_search_override("helmets", "custom-helmet", []) - assert {:error, _} = Override.get_search_override("helmets", "custom-helmet", conn: conn) - assert {:error, _} = Override.get_search_override("helmets", "custom-helmet", conn: map_conn) + assert ^error = Override.get_search_override("helmets", "custom-helmet") + assert ^error = Override.get_search_override("helmets", "custom-helmet", []) + assert ^error = Override.get_search_override("helmets", "custom-helmet", conn: conn) + assert ^error = Override.get_search_override("helmets", "custom-helmet", conn: map_conn) end end diff --git a/test/operations/presets_test.exs b/test/operations/presets_test.exs index 5ee9036..f4c093b 100644 --- a/test/operations/presets_test.exs +++ b/test/operations/presets_test.exs @@ -33,7 +33,7 @@ defmodule PresetsTest do %{conn: conn, map_conn: map_conn} end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: list presets", %{conn: conn, map_conn: map_conn} do assert {:ok, %PresetsRetrieveSchema{presets: presets}} = Presets.retrieve_all_presets() refute Enum.empty?(presets) @@ -43,7 +43,7 @@ defmodule PresetsTest do assert {:ok, _} = Presets.retrieve_all_presets(conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: get a preset", %{conn: conn, map_conn: map_conn} do assert {:error, %ApiResponse{message: "Not found."}} = Presets.retrieve_preset("listing_view") assert {:error, _} = Presets.retrieve_preset("listing_view", []) @@ -51,7 +51,7 @@ defmodule PresetsTest do assert {:error, _} = Presets.retrieve_preset("listing_view", conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, 
"27.1": true, "27.0": true, "26.0": true] test "success: upsert a preset", %{conn: conn, map_conn: map_conn} do body = %{ @@ -70,7 +70,7 @@ defmodule PresetsTest do assert {:ok, _} = Presets.upsert_preset("restaurant_view", body, conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: delete a preset", %{conn: conn, map_conn: map_conn} do body = %{ diff --git a/test/operations/stemming_test.exs b/test/operations/stemming_test.exs index 235bd4c..8c4cbf4 100644 --- a/test/operations/stemming_test.exs +++ b/test/operations/stemming_test.exs @@ -32,7 +32,7 @@ defmodule StemmingTest do %{id: id, conn: conn, map_conn: map_conn} end - @tag ["29.0": true, "28.0": true, "27.1": false, "27.0": false, "26.0": false] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": false, "27.0": false, "26.0": false] test "success: create stemming dictionaries", %{conn: conn, map_conn: map_conn} do id = "example-stemming" @@ -64,7 +64,7 @@ defmodule StemmingTest do ]} = Stemming.import_stemming_dictionary(body, id: id, conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": false, "27.0": false, "26.0": false] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": false, "27.0": false, "26.0": false] test "success: list stemming dictionaries", %{conn: conn, map_conn: map_conn} do assert {:ok, %{"dictionaries" => _}} = Stemming.list_stemming_dictionaries() assert {:ok, %{"dictionaries" => _}} = Stemming.list_stemming_dictionaries([]) @@ -72,19 +72,25 @@ defmodule StemmingTest do assert {:ok, %{"dictionaries" => _}} = Stemming.list_stemming_dictionaries(conn: map_conn) end - @tag ["29.0": true, "28.0": false, "27.1": false, "27.0": false, "26.0": false] + @tag ["30.0": true, "29.0": true, "28.0": false, "27.1": false, "27.0": false, "26.0": false] test "error: (v29.0) non-existent stemming dictionary" do assert {:error, 
%ApiResponse{message: "Collection not found"}} = Stemming.get_stemming_dictionary("non-existent") end + @tag ["30.0": true] + test "error: (v30.0) non-existent stemming dictionary" do + assert {:error, %ApiResponse{message: "Collection not found"}} = + Stemming.get_stemming_dictionary("non-existent") + end + @tag ["29.0": false, "28.0": true, "27.1": false, "27.0": false, "26.0": false] test "error: (v28.0) non-existent stemming dictionary" do assert {:error, %ApiResponse{message: "Not Found"}} = Stemming.get_stemming_dictionary("non-existent") end - @tag ["29.0": true, "28.0": true, "27.1": false, "27.0": false, "26.0": false] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": false, "27.0": false, "26.0": false] test "success: get specific stemming dictionary", %{id: id, conn: conn, map_conn: map_conn} do body = [ %{"word" => "mice", "root" => "mouse"}, @@ -107,7 +113,7 @@ defmodule StemmingTest do Stemming.get_stemming_dictionary(id, conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": false, "27.0": false, "26.0": false] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": false, "27.0": false, "26.0": false] test "field" do assert [dictionaries: [:string]] = Stemming.__fields__(:list_stemming_dictionaries_200_json_resp) diff --git a/test/operations/stopwords_test.exs b/test/operations/stopwords_test.exs index 046b5ca..df89203 100644 --- a/test/operations/stopwords_test.exs +++ b/test/operations/stopwords_test.exs @@ -25,7 +25,7 @@ defmodule StopwordsTest do %{conn: conn, map_conn: map_conn} end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: list stopwords sets", %{conn: conn, map_conn: map_conn} do assert {:ok, %StopwordsSetsRetrieveAllSchema{stopwords: _stopwords}} = Stopwords.retrieve_stopwords_sets() @@ -35,7 +35,7 @@ defmodule StopwordsTest do assert {:ok, _} = 
Stopwords.retrieve_stopwords_sets(conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: add stopwords", %{conn: conn, map_conn: map_conn} do set_id = "stopword_set_countries" @@ -51,7 +51,7 @@ defmodule StopwordsTest do assert {:ok, _} = Stopwords.upsert_stopwords_set(set_id, body, conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: retrieve specific stopwords set", %{conn: conn, map_conn: map_conn} do set_id = "stopword_set_names" @@ -76,7 +76,7 @@ defmodule StopwordsTest do Stopwords.retrieve_stopwords_set(set_id, conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: delete specific stopwords set", %{conn: conn, map_conn: map_conn} do set_id = "stopword_set_companies" @@ -95,7 +95,7 @@ defmodule StopwordsTest do assert {:error, ^reason} = Stopwords.delete_stopwords_set(set_id, conn: map_conn) end - @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] + @tag ["30.0": true, "29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "field" do assert [id: :string] = Stopwords.__fields__(:delete_stopwords_set_200_json_resp) end diff --git a/test/operations/synonyms_test.exs b/test/operations/synonyms_test.exs index a050aa2..3d82388 100644 --- a/test/operations/synonyms_test.exs +++ b/test/operations/synonyms_test.exs @@ -1,12 +1,17 @@ defmodule SynonymsTest do use ExUnit.Case + alias OpenApiTypesense.ApiResponse alias OpenApiTypesense.Synonyms alias OpenApiTypesense.Collections alias OpenApiTypesense.CollectionResponse alias OpenApiTypesense.Connection alias 
OpenApiTypesense.SearchSynonym alias OpenApiTypesense.SearchSynonymsResponse + alias OpenApiTypesense.SynonymItemDeleteSchema + alias OpenApiTypesense.SynonymItemSchema + alias OpenApiTypesense.SynonymSetDeleteSchema + alias OpenApiTypesense.SynonymSetSchema setup_all do conn = Connection.new() @@ -27,11 +32,32 @@ defmodule SynonymsTest do on_exit(fn -> {:ok, _} = Collections.delete_collection(collection_name) + + synonym_sets = + case Synonyms.retrieve_synonym_sets() do + {:ok, sets} -> + sets + + {:error, _reason} -> + [] + end + + if Enum.any?(synonym_sets) do + Enum.each(synonym_sets, fn set -> + {:ok, _set} = Synonyms.delete_synonym_set(set.name) + end) + end end) %{coll_name: collection_name, conn: conn, map_conn: map_conn} end + @tag ["30.0": true] + test "error (v30.0): deprecated function for list collection synonyms", %{coll_name: coll_name} do + error = {:error, %ApiResponse{message: "Not Found"}} + assert ^error = Synonyms.get_search_synonyms(coll_name) + end + @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: list collection synonyms", %{ coll_name: coll_name, @@ -51,6 +77,22 @@ defmodule SynonymsTest do Synonyms.get_search_synonyms(coll_name, conn: map_conn) end + @tag ["30.0": true] + test "error (v30.0): deprecated function for upsert a collection synonym", %{ + coll_name: coll_name + } do + body = + %{ + "root" => "hat", + "synonyms" => ["fedora", "cap", "visor"] + } + + synonym_id = "hat-synonyms" + + error = {:error, %ApiResponse{message: "Not Found"}} + assert ^error = Synonyms.upsert_search_synonym(coll_name, synonym_id, body) + end + @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: upsert a collection synonym", %{ coll_name: coll_name, @@ -73,6 +115,22 @@ defmodule SynonymsTest do assert {:ok, _} = Synonyms.upsert_search_synonym(coll_name, synonym_id, body, conn: map_conn) end + @tag ["30.0": true] + test "error (v30.0): deprecated function for delete a 
collection synonym", %{ + coll_name: coll_name + } do + body = + %{ + "root" => "sweater", + "synonyms" => ["ribbed", "turtleneck", "v-neck", "half-zip"] + } + + synonym_id = "sweater-synonyms" + + error = {:error, %ApiResponse{message: "Not Found"}} + assert ^error = Synonyms.upsert_search_synonym(coll_name, synonym_id, body) + end + @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: delete a collection synonym", %{ coll_name: coll_name, @@ -96,6 +154,160 @@ defmodule SynonymsTest do assert {:error, _} = Synonyms.delete_search_synonym(coll_name, synonym_id, conn: map_conn) end + @tag ["30.0": true] + test "success: delete a synonym set item", %{conn: conn, map_conn: map_conn} do + name = "tech-synonyms" + + body = %{ + "items" => [ + %{ + "id" => "smart-phone-synonyms", + "root" => "smart phone", + "synonyms" => ["iphone", "android"] + } + ] + } + + assert {:ok, %SynonymSetSchema{name: ^name}} = Synonyms.upsert_synonym_set(name, body) + + item_id = "smart-phone-synonyms" + + body = %{ + "root" => "smart phone", + "synonyms" => ["iphone", "android"] + } + + assert {:ok, %SynonymItemSchema{id: ^item_id}} = + Synonyms.upsert_synonym_set_item(name, item_id, body) + + assert {:ok, %SynonymItemDeleteSchema{id: ^item_id}} = + Synonyms.delete_synonym_set_item(name, item_id) + + error = {:error, %ApiResponse{message: "Could not find that `id`."}} + assert ^error = Synonyms.delete_synonym_set_item(name, item_id, []) + assert ^error = Synonyms.delete_synonym_set_item(name, item_id, conn: conn) + assert ^error = Synonyms.delete_synonym_set_item(name, item_id, map_conn: map_conn) + end + + @tag ["30.0": true] + test "error (v30.0): deprecate function delete a synonym associated with a collection", %{ + coll_name: coll_name + } do + synonym_id = "t-shirt-synonyms" + error = {:error, %ApiResponse{message: "Not Found"}} + assert ^error = Synonyms.delete_search_synonym(coll_name, synonym_id) + end + + @tag ["30.0": true] + test "success: 
list all synonym sets", %{conn: conn, map_conn: map_conn} do + assert {:ok, synonym_sets} = Synonyms.retrieve_synonym_sets() + assert Enum.any?(synonym_sets) + assert {:ok, _} = Synonyms.retrieve_synonym_sets([]) + assert {:ok, _} = Synonyms.retrieve_synonym_sets(conn: conn) + assert {:ok, _} = Synonyms.retrieve_synonym_sets(map_conn: map_conn) + end + + @tag ["30.0": true] + test "success: retrieve a synonym set", %{conn: conn, map_conn: map_conn} do + name = "sample" + + body = %{ + "items" => [ + %{ + "id" => "coat-synonyms", + "synonyms" => ["blazer", "coat", "jacket"] + } + ] + } + + assert {:ok, %SynonymSetSchema{name: ^name}} = Synonyms.upsert_synonym_set(name, body) + assert {:ok, %SynonymSetSchema{name: ^name}} = Synonyms.retrieve_synonym_set(name) + assert {:ok, %SynonymSetSchema{name: ^name}} = Synonyms.retrieve_synonym_set(name, []) + assert {:ok, %SynonymSetSchema{name: ^name}} = Synonyms.retrieve_synonym_set(name, conn: conn) + + assert {:ok, %SynonymSetSchema{name: ^name}} = + Synonyms.retrieve_synonym_set(name, map_conn: map_conn) + end + + @tag ["30.0": true] + test "success: retrieve a synonym set item", %{conn: conn, map_conn: map_conn} do + name = "tech-synonyms" + + body = %{ + "items" => [ + %{ + "id" => "smart-phone-synonyms", + "root" => "smart phone", + "synonyms" => ["iphone", "android"] + } + ] + } + + assert {:ok, %SynonymSetSchema{name: ^name}} = Synonyms.upsert_synonym_set(name, body) + + item_id = "smart-phone-synonyms" + + body = %{ + "root" => "smart phone", + "synonyms" => ["iphone", "android"] + } + + assert {:ok, %SynonymItemSchema{id: ^item_id}} = + Synonyms.upsert_synonym_set_item(name, item_id, body) + + assert {:ok, %SynonymItemSchema{id: ^item_id}} = + Synonyms.retrieve_synonym_set_item(name, item_id) + + assert {:ok, %SynonymItemSchema{id: ^item_id}} = + Synonyms.retrieve_synonym_set_item(name, item_id, []) + + assert {:ok, %SynonymItemSchema{id: ^item_id}} = + Synonyms.retrieve_synonym_set_item(name, item_id, conn: conn) + 
+ assert {:ok, %SynonymItemSchema{id: ^item_id}} = + Synonyms.retrieve_synonym_set_item(name, item_id, map_conn: map_conn) + end + + @tag ["30.0": true] + test "success: retrieve a synonym set items", %{conn: conn, map_conn: map_conn} do + name = "tech-synonyms" + + body = %{ + "items" => [ + %{ + "id" => "smart-phone-synonyms", + "root" => "smart phone", + "synonyms" => ["iphone", "android"] + } + ] + } + + assert {:ok, %SynonymSetSchema{name: ^name}} = Synonyms.upsert_synonym_set(name, body) + + item_id = "smart-phone-synonyms" + + body = %{ + "root" => "smart phone", + "synonyms" => ["iphone", "android"] + } + + assert {:ok, %SynonymItemSchema{id: ^item_id}} = + Synonyms.upsert_synonym_set_item(name, item_id, body) + + assert {:ok, set_items} = Synonyms.retrieve_synonym_set_items(name) + assert Enum.any?(set_items) + assert {:ok, _} = Synonyms.retrieve_synonym_set_items(name, []) + assert {:ok, _} = Synonyms.retrieve_synonym_set_items(name, conn: conn) + assert {:ok, _} = Synonyms.retrieve_synonym_set_items(name, map_conn: map_conn) + end + + @tag ["30.0": true] + test "error (v30.0): deprecated function for get a collection synonym", %{coll_name: coll_name} do + synonym_id = "t-shirt-synonyms" + error = {:error, %ApiResponse{message: "Not Found"}} + assert ^error = Synonyms.get_search_synonym(coll_name, synonym_id) + end + @tag ["29.0": true, "28.0": true, "27.1": true, "27.0": true, "26.0": true] test "success: get a collection synonym", %{ coll_name: coll_name, @@ -110,8 +322,8 @@ defmodule SynonymsTest do synonym_id = "t-shirt-synonyms" - {:ok, %SearchSynonym{}} = - Synonyms.upsert_search_synonym(coll_name, synonym_id, body) + assert {:ok, %SearchSynonym{}} = + Synonyms.upsert_search_synonym(coll_name, synonym_id, body) assert {:ok, %SearchSynonym{id: ^synonym_id}} = Synonyms.get_search_synonym(coll_name, synonym_id) @@ -120,4 +332,116 @@ defmodule SynonymsTest do assert {:ok, _} = Synonyms.get_search_synonym(coll_name, synonym_id, conn: conn) assert {:ok, 
_} = Synonyms.get_search_synonym(coll_name, synonym_id, conn: map_conn) end + + @tag ["30.0": true] + test "success: create or update a synonym set (multi-way synonym)", %{ + conn: conn, + map_conn: map_conn + } do + name = "sample" + + body = %{ + "items" => [ + %{ + "id" => "coat-synonyms", + "synonyms" => ["blazer", "coat", "jacket"] + } + ] + } + + assert {:ok, %SynonymSetSchema{name: ^name}} = Synonyms.upsert_synonym_set(name, body) + assert {:ok, %SynonymSetSchema{name: ^name}} = Synonyms.upsert_synonym_set(name, body, []) + + assert {:ok, %SynonymSetSchema{name: ^name}} = + Synonyms.upsert_synonym_set(name, body, conn: conn) + + assert {:ok, %SynonymSetSchema{name: ^name}} = + Synonyms.upsert_synonym_set(name, body, map_conn: map_conn) + end + + @tag ["30.0": true] + test "success: create or update a synonym set (one-way synonym)", %{ + conn: conn, + map_conn: map_conn + } do + name = "tech-synonyms" + + body = %{ + "items" => [ + %{ + "id" => "smart-phone-synonyms", + "root" => "smart phone", + "synonyms" => ["iphone", "android"] + } + ] + } + + assert {:ok, %SynonymSetSchema{name: ^name}} = Synonyms.upsert_synonym_set(name, body) + assert {:ok, %SynonymSetSchema{name: ^name}} = Synonyms.upsert_synonym_set(name, body, []) + + assert {:ok, %SynonymSetSchema{name: ^name}} = + Synonyms.upsert_synonym_set(name, body, conn: conn) + + assert {:ok, %SynonymSetSchema{name: ^name}} = + Synonyms.upsert_synonym_set(name, body, map_conn: map_conn) + end + + @tag ["30.0": true] + test "success: upsert a synonym set item", %{conn: conn, map_conn: map_conn} do + name = "tech-synonyms" + + body = %{ + "items" => [ + %{ + "id" => "smart-phone-synonyms", + "root" => "smart phone", + "synonyms" => ["iphone", "android"] + } + ] + } + + assert {:ok, %SynonymSetSchema{name: ^name}} = Synonyms.upsert_synonym_set(name, body) + + item_id = "smart-phone-synonyms" + + body = %{ + "root" => "smart phone", + "synonyms" => ["iphone", "android"] + } + + assert {:ok, 
%SynonymItemSchema{id: ^item_id}} = + Synonyms.upsert_synonym_set_item(name, item_id, body) + + assert {:ok, %SynonymItemSchema{id: ^item_id}} = + Synonyms.upsert_synonym_set_item(name, item_id, body, []) + + assert {:ok, %SynonymItemSchema{id: ^item_id}} = + Synonyms.upsert_synonym_set_item(name, item_id, body, conn: conn) + + assert {:ok, %SynonymItemSchema{id: ^item_id}} = + Synonyms.upsert_synonym_set_item(name, item_id, body, map_conn: map_conn) + end + + @tag ["30.0": true] + test "success: delete a synonym set", %{conn: conn, map_conn: map_conn} do + name = "tech-synonyms" + + body = %{ + "items" => [ + %{ + "id" => "smart-phone-synonyms", + "root" => "smart phone", + "synonyms" => ["iphone", "android"] + } + ] + } + + assert {:ok, %SynonymSetSchema{name: ^name}} = Synonyms.upsert_synonym_set(name, body) + assert {:ok, %SynonymSetDeleteSchema{name: ^name}} = Synonyms.delete_synonym_set(name) + + error = {:error, %ApiResponse{message: "Synonym index not found"}} + assert ^error = Synonyms.delete_synonym_set(name, []) + assert ^error = Synonyms.delete_synonym_set(name, conn: conn) + assert ^error = Synonyms.delete_synonym_set(name, map_conn: map_conn) + end end From 3ac506c466457087dbad52d14bd1249cb373826e Mon Sep 17 00:00:00 2001 From: jaeyson Date: Sun, 12 Apr 2026 09:37:41 +0800 Subject: [PATCH 02/13] consolidate caches for plt and mix dependencies --- .github/workflows/ci_v28.0.yml | 30 ------------------------- .github/workflows/ci_v29.0.yml | 30 ------------------------- .github/workflows/ci_v30.0.yml | 41 ++++++++++++++-------------------- 3 files changed, 17 insertions(+), 84 deletions(-) diff --git a/.github/workflows/ci_v28.0.yml b/.github/workflows/ci_v28.0.yml index 6f90fcb..660effa 100644 --- a/.github/workflows/ci_v28.0.yml +++ b/.github/workflows/ci_v28.0.yml @@ -133,36 +133,6 @@ jobs: run: mix format --check-formatted if: ${{ matrix.lint }} - - name: Restore PLT cache - id: plt_cache - uses: actions/cache/restore@v5 - with: - key: | - plt-${{ 
runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}-${{ hashFiles('**/mix.lock') }} - restore-keys: | - plt-${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}- - path: | - priv/plts - if: ${{ matrix.lint }} - - - name: Create PLTs - if: ${{ steps.plt_cache.outputs.cache-hit != 'true' && matrix.lint }} - run: mix dialyzer --plt - - - name: Save PLT cache - id: plt_cache_save - uses: actions/cache/save@v5 - if: ${{ steps.plt_cache.outputs.cache-hit != 'true' && matrix.lint }} - with: - key: | - plt-${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}-${{ hashFiles('**/mix.lock') }} - path: | - priv/plts - - - name: Dialyzer - run: mix dialyzer --format github --format dialyxir - if: ${{ matrix.lint }} - - name: Run tests run: mix test --only ${{ matrix.typesense }}:true --trace diff --git a/.github/workflows/ci_v29.0.yml b/.github/workflows/ci_v29.0.yml index b0a91ff..b5bcb47 100644 --- a/.github/workflows/ci_v29.0.yml +++ b/.github/workflows/ci_v29.0.yml @@ -134,36 +134,6 @@ jobs: run: mix format --check-formatted if: ${{ matrix.lint }} - - name: Restore PLT cache - id: plt_cache - uses: actions/cache/restore@v5 - with: - key: | - plt-${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}-${{ hashFiles('**/mix.lock') }} - restore-keys: | - plt-${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}- - path: | - priv/plts - if: ${{ matrix.lint }} - - - name: Create PLTs - if: ${{ steps.plt_cache.outputs.cache-hit != 'true' && matrix.lint }} - run: mix dialyzer --plt - - - name: Save PLT cache - id: plt_cache_save - uses: actions/cache/save@v5 - if: ${{ steps.plt_cache.outputs.cache-hit != 'true' && matrix.lint }} - with: - key: | - plt-${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}-${{ hashFiles('**/mix.lock') }} 
- path: | - priv/plts - - - name: Dialyzer - run: mix dialyzer --format github --format dialyxir - if: ${{ matrix.lint }} - - name: Run tests run: mix test --only ${{ matrix.typesense }}:true --only nls:true --trace diff --git a/.github/workflows/ci_v30.0.yml b/.github/workflows/ci_v30.0.yml index 1a1bd97..67a6212 100644 --- a/.github/workflows/ci_v30.0.yml +++ b/.github/workflows/ci_v30.0.yml @@ -95,12 +95,15 @@ jobs: otp-version: ${{matrix.otp}} elixir-version: ${{matrix.elixir}} - - name: Cache dependencies/builds - uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 + - name: Restore cache + id: cache_restore + uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 + if: ${{ matrix.lint }} with: path: | deps _build + priv/plts key: ${{ runner.os }}-typesense-${{ matrix.typesense}}-${{ matrix.otp}}-${{ matrix.elixir}}-mix-${{ hashFiles('**/mix.lock') }} restore-keys: | ${{ runner.os }}-typesense-${{ matrix.typesense}}-${{ matrix.otp }}-${{ matrix.elixir }}-mix- @@ -134,32 +137,10 @@ jobs: run: mix format --check-formatted if: ${{ matrix.lint }} - - name: Restore PLT cache - id: plt_cache - uses: actions/cache/restore@v4 - with: - key: | - plt-${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}-${{ hashFiles('**/mix.lock') }} - restore-keys: | - plt-${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}- - path: | - priv/plts - if: ${{ matrix.lint }} - - name: Create PLTs if: ${{ steps.plt_cache.outputs.cache-hit != 'true' && matrix.lint }} run: mix dialyzer --plt - - name: Save PLT cache - id: plt_cache_save - uses: actions/cache/save@v4 - if: ${{ steps.plt_cache.outputs.cache-hit != 'true' && matrix.lint }} - with: - key: | - plt-${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}-${{ hashFiles('**/mix.lock') }} - path: | - priv/plts - - name: Dialyzer run: mix dialyzer --format github --format dialyxir if: ${{ 
matrix.lint }} @@ -170,3 +151,15 @@ jobs: - name: Post test coverage to Coveralls run: mix coveralls.github if: ${{ matrix.lint && github.event_name == 'push' && github.ref == 'refs/heads/main' }} + + - name: Save cache + id: cache_save + uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 + if: ${{ steps.plt_cache.outputs.cache-hit != 'true' && matrix.lint }} + with: + path: | + deps + _build + priv/plts + key: | + plt-${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}-${{ hashFiles('**/mix.lock') }} From 90318131f14a01cd0be6ddf71154d7112d12c915 Mon Sep 17 00:00:00 2001 From: jaeyson Date: Sun, 12 Apr 2026 10:18:31 +0800 Subject: [PATCH 03/13] fix test failing on empty list --- test/operations/synonyms_test.exs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/test/operations/synonyms_test.exs b/test/operations/synonyms_test.exs index 3d82388..699a774 100644 --- a/test/operations/synonyms_test.exs +++ b/test/operations/synonyms_test.exs @@ -200,6 +200,19 @@ defmodule SynonymsTest do @tag ["30.0": true] test "success: list all synonym sets", %{conn: conn, map_conn: map_conn} do + name = "sample" + + body = %{ + "items" => [ + %{ + "id" => "coat-synonyms", + "synonyms" => ["blazer", "coat", "jacket"] + } + ] + } + + assert {:ok, %SynonymSetSchema{name: ^name}} = Synonyms.upsert_synonym_set(name, body) + assert {:ok, synonym_sets} = Synonyms.retrieve_synonym_sets() assert Enum.any?(synonym_sets) assert {:ok, _} = Synonyms.retrieve_synonym_sets([]) From e7572d4ad706c28f934c2e58550bb7261b46a3cc Mon Sep 17 00:00:00 2001 From: jaeyson Date: Sun, 12 Apr 2026 10:46:16 +0800 Subject: [PATCH 04/13] update CHANGELOG.md --- CHANGELOG.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6dab098..1d2b601 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,7 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## 
major.minor.patch (yyyy.mm.dd) -## 1.2.0 ??? +## 1.2.0 (2026.04.12) ### Deprecated @@ -16,6 +16,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 * `Synonyms.get_search_synonym/3` in favor of `retrieve_synonym_set/2` or `retrieve_synonym_set_item/3` when using Typesense v30.0+. * `Synonyms.delete_search_synonym/3` in favor of `delete_synonym_set/2` or `delete_synonym_set_item/3` when using Typesense v30.0+. +### Chore + +* Updated `priv/open_api.yml` for structure changes (v30.0). See https://typesense.org/docs/30.0/api/#deprecations-behavior-changes + ## 1.1.0 (2026.04.06) ### Added From 8061dc6eee6f43441deaa77ecc3b2f4e5824d10c Mon Sep 17 00:00:00 2001 From: jaeyson Date: Sun, 12 Apr 2026 11:02:03 +0800 Subject: [PATCH 05/13] update ci_v30.0.yml --- .github/workflows/ci_v30.0.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci_v30.0.yml b/.github/workflows/ci_v30.0.yml index 67a6212..cda4da4 100644 --- a/.github/workflows/ci_v30.0.yml +++ b/.github/workflows/ci_v30.0.yml @@ -138,7 +138,7 @@ jobs: if: ${{ matrix.lint }} - name: Create PLTs - if: ${{ steps.plt_cache.outputs.cache-hit != 'true' && matrix.lint }} + if: ${{ steps.cache_restore.outputs.cache-hit != 'true' && matrix.lint }} run: mix dialyzer --plt - name: Dialyzer @@ -155,11 +155,11 @@ jobs: - name: Save cache id: cache_save uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 - if: ${{ steps.plt_cache.outputs.cache-hit != 'true' && matrix.lint }} + if: ${{ matrix.lint }} with: path: | deps _build priv/plts key: | - plt-${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}-${{ hashFiles('**/mix.lock') }} + ${{ runner.os }}-${{ steps.beam.outputs.otp-version }}-${{ steps.beam.outputs.elixir-version }}-${{ hashFiles('**/mix.lock') }} From 4b7564f0ef86ab0b939c919a12ae95e856463c14 Mon Sep 17 00:00:00 2001 From: jaeyson Date: Sun, 12 Apr 2026 11:34:40 +0800 Subject: [PATCH 
06/13] add accessibility in url in markdown in CHANGELOG.md --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1d2b601..1efa4b1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,7 +18,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Chore -* Updated `priv/open_api.yml` for structure changes (v30.0). See https://typesense.org/docs/30.0/api/#deprecations-behavior-changes +* Updated `priv/open_api.yml` for structure changes (v30.0). See <https://typesense.org/docs/30.0/api/#deprecations-behavior-changes> ## 1.1.0 (2026.04.06) From 8af78cbbc7e4d7d3231c19fcd22ee2a35c31faa6 Mon Sep 17 00:00:00 2001 From: jaeyson Date: Sun, 12 Apr 2026 13:06:25 +0800 Subject: [PATCH 07/13] update analytics --- lib/open_api_typesense/operations/analytics.ex | 9 +++++++-- test/operations/analytics_test.exs | 2 +- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/lib/open_api_typesense/operations/analytics.ex b/lib/open_api_typesense/operations/analytics.ex index 18e1b5c..f0223f2 100644 --- a/lib/open_api_typesense/operations/analytics.ex +++ b/lib/open_api_typesense/operations/analytics.ex @@ -59,7 +59,10 @@ defmodule OpenApiTypesense.Analytics do OpenApiTypesense.AnalyticsRuleCreate.t() | [OpenApiTypesense.AnalyticsRuleCreate.t()], opts :: keyword ) :: - {:ok, OpenApiTypesense.AnalyticsRule.t() | [map | OpenApiTypesense.AnalyticsRule.t()]} + {:ok, OpenApiTypesense.AnalyticsRuleSchema.t()} + | {:ok, + OpenApiTypesense.AnalyticsRule.t() + | [map | OpenApiTypesense.AnalyticsRule.t()]} + | {:error, OpenApiTypesense.ApiResponse.t()} def create_analytics_rule(body, opts \\ []) do client = opts[:client] || @default_client @@ -106,7 +109,9 @@ defmodule OpenApiTypesense.Analytics do """ @doc since: "0.4.0" @spec delete_analytics_rule(rule_name :: String.t(), opts :: keyword) :: - {:ok, OpenApiTypesense.AnalyticsRule.t()} | {:error, OpenApiTypesense.ApiResponse.t()} + {:ok, OpenApiTypesense.AnalyticsRuleDeleteResponse.t()} + | {:ok,
OpenApiTypesense.AnalyticsRule.t()} + | {:error, OpenApiTypesense.ApiResponse.t()} def delete_analytics_rule(rule_name, opts \\ []) do client = opts[:client] || @default_client diff --git a/test/operations/analytics_test.exs b/test/operations/analytics_test.exs index 3f04aa0..5d4e9a6 100644 --- a/test/operations/analytics_test.exs +++ b/test/operations/analytics_test.exs @@ -279,7 +279,7 @@ defmodule AnalyticsTest do end @tag ["30.0": true] - test "error (v30.0): get analytics status", %{conn: conn, map_conn: map_conn} do + test "success (v30.0): get analytics status", %{conn: conn, map_conn: map_conn} do status = %AnalyticsStatus{ doc_counter_events: 0, doc_log_events: 0, From 07579762b7685a562aefe76004b5671f311b55d7 Mon Sep 17 00:00:00 2001 From: jaeyson Date: Mon, 13 Apr 2026 11:41:36 +0800 Subject: [PATCH 08/13] update natural language search model test --- .dev.env.example | 1 + .github/workflows/llm.yml | 7 +- docker-compose.yml | 2 +- lib/open_api_typesense/client.ex | 3 + .../operations/nl_search_models.ex | 1 + priv/open_api.yml | 6 + test/operations/nl_search_models_test.exs | 147 +++++++++++++----- 7 files changed, 124 insertions(+), 43 deletions(-) create mode 100644 .dev.env.example diff --git a/.dev.env.example b/.dev.env.example new file mode 100644 index 0000000..9ca6087 --- /dev/null +++ b/.dev.env.example @@ -0,0 +1 @@ +export GOOGLE_GEMINI_API="YOUR_GOOGLE_AI_STUDIO_API_KEY" diff --git a/.github/workflows/llm.yml b/.github/workflows/llm.yml index 7578a56..8a497a8 100644 --- a/.github/workflows/llm.yml +++ b/.github/workflows/llm.yml @@ -3,9 +3,9 @@ on: pull_request: branches: ["main"] -# concurrency: -# group: ${{ github.workflow }}-${{ github.ref }} -# cancel-in-progress: true +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true jobs: ci_workflow: @@ -20,6 +20,7 @@ jobs: env: MIX_ENV: test + GOOGLE_GEMINI_API: ${{ secrets.GOOGLE_GEMINI_API }} strategy: matrix: diff --git a/docker-compose.yml 
b/docker-compose.yml index 5eb9fff..b922d20 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: typesense: - image: docker.io/typesense/typesense:30.0 + image: docker.io/typesense/typesense:29.0 container_name: typesense restart: on-failure ports: diff --git a/lib/open_api_typesense/client.ex b/lib/open_api_typesense/client.ex index 4599dd7..37e02b8 100644 --- a/lib/open_api_typesense/client.ex +++ b/lib/open_api_typesense/client.ex @@ -138,6 +138,9 @@ defmodule OpenApiTypesense.Client do end defp parse_resp(%Req.Response{status: code, body: body}, %{response: resp}) do + dbg(code) + dbg(body) + dbg(resp) {_status, mod} = Enum.find(resp, fn {status, _} -> status === code end) parse_body(code, mod, body) end diff --git a/lib/open_api_typesense/operations/nl_search_models.ex b/lib/open_api_typesense/operations/nl_search_models.ex index 93ed133..719f9f7 100644 --- a/lib/open_api_typesense/operations/nl_search_models.ex +++ b/lib/open_api_typesense/operations/nl_search_models.ex @@ -36,6 +36,7 @@ defmodule OpenApiTypesense.NlSearchModels do method: :post, request: [{"application/json", {OpenApiTypesense.NLSearchModelCreateSchema, :t}}], response: [ + {200, {OpenApiTypesense.NLSearchModelSchema, :t}}, {201, {OpenApiTypesense.NLSearchModelSchema, :t}}, {400, {OpenApiTypesense.ApiResponse, :t}}, {401, {OpenApiTypesense.ApiResponse, :t}}, diff --git a/priv/open_api.yml b/priv/open_api.yml index e0a8b5b..90c20b4 100644 --- a/priv/open_api.yml +++ b/priv/open_api.yml @@ -2981,6 +2981,12 @@ paths: $ref: '#/components/schemas/NLSearchModelCreateSchema' required: true responses: + '200': + description: NL search model successfully created + content: + application/json: + schema: + $ref: '#/components/schemas/NLSearchModelSchema' '201': description: NL search model successfully created content: diff --git a/test/operations/nl_search_models_test.exs b/test/operations/nl_search_models_test.exs index 784ea20..cf2a4ca 100644 --- 
a/test/operations/nl_search_models_test.exs +++ b/test/operations/nl_search_models_test.exs @@ -4,6 +4,7 @@ defmodule NlSearchModelsTest do alias OpenApiTypesense.ApiResponse alias OpenApiTypesense.Connection alias OpenApiTypesense.NlSearchModels + alias OpenApiTypesense.NLSearchModelSchema setup_all do conn = Connection.new() @@ -11,53 +12,88 @@ defmodule NlSearchModelsTest do model = %{ "id" => "gemini-model", - "model_name" => "google/gemini-2.5-flash", - "api_key" => "YOUR_GOOGLE_AI_STUDIO_API_KEY", + "model_name" => "google/gemini-2.5-flash-lite", + "api_key" => System.get_env("GOOGLE_GEMINI_API"), "max_bytes" => 16_000, "temperature" => 0.0 } + # TOO EXPENSIVE TO INVOKE FREE TIER REQUESTS!!! + # on_exit(fn -> + # {:ok, models} = NlSearchModels.retrieve_all_nl_search_models() + + # models + # |> Enum.each(fn model -> + # model_id = model.id + # {:ok, %{id: ^model_id}} = NlSearchModels.delete_nl_search_model(model.id) + # end) + # end) + %{conn: conn, map_conn: map_conn, model: model} end @tag [nls: true] - test "error: create natural language search model", %{ + test "success: create natural language search model", %{ conn: conn, map_conn: map_conn, model: model } do - reason = %ApiResponse{ - message: "Google Gemini API error: API key not valid. Please pass a valid API key." 
- } - - assert {:error, ^reason} = NlSearchModels.create_nl_search_model(model) - assert {:error, ^reason} = NlSearchModels.create_nl_search_model(model, []) - assert {:error, ^reason} = NlSearchModels.create_nl_search_model(model, conn: conn) - assert {:error, ^reason} = NlSearchModels.create_nl_search_model(model, map_conn: map_conn) + case NlSearchModels.create_nl_search_model(model) do + {:error, error} -> + assert String.contains?(String.downcase(error.message), [ + "already exists", + "please pass a valid api key", + "not found" + ]) === true + + assert {:error, ^error} = NlSearchModels.create_nl_search_model(model) + assert {:error, ^error} = NlSearchModels.create_nl_search_model(model, []) + assert {:error, ^error} = NlSearchModels.create_nl_search_model(model, conn: conn) + assert {:error, ^error} = NlSearchModels.create_nl_search_model(model, map_conn: map_conn) + + {:ok, %NLSearchModelSchema{id: id}} -> + assert "gemini-model" === model["id"] + assert {:error, error} = NlSearchModels.create_nl_search_model(model) + + assert String.contains?(String.downcase(error.message), [ + "already exists", + "please pass a valid api key", + "not found" + ]) === true + + assert {:error, ^error} = NlSearchModels.create_nl_search_model(model, []) + assert {:error, ^error} = NlSearchModels.create_nl_search_model(model, conn: conn) + assert {:error, ^error} = NlSearchModels.create_nl_search_model(model, map_conn: map_conn) + end end @tag [nls: true] - test "error: delete natural language search model", %{ - conn: conn, - map_conn: map_conn, - model: model - } do + test "error: delete unknown search model", %{conn: conn, map_conn: map_conn} do reason = %ApiResponse{message: "Model not found"} - assert {:error, ^reason} = NlSearchModels.delete_nl_search_model(model["id"]) - assert {:error, ^reason} = NlSearchModels.delete_nl_search_model(model["id"], []) - assert {:error, ^reason} = NlSearchModels.delete_nl_search_model(model["id"], conn: conn) + model_id = "unknown" - 
assert {:error, ^reason} = - NlSearchModels.delete_nl_search_model(model["id"], map_conn: map_conn) + assert {:error, ^reason} = NlSearchModels.delete_nl_search_model(model_id) + assert {:error, ^reason} = NlSearchModels.delete_nl_search_model(model_id, []) + assert {:error, ^reason} = NlSearchModels.delete_nl_search_model(model_id, conn: conn) + assert {:error, ^reason} = NlSearchModels.delete_nl_search_model(model_id, map_conn: map_conn) end @tag [nls: true] test "success: retrieve all natural language search models", %{conn: conn, map_conn: map_conn} do - assert {:ok, []} = NlSearchModels.retrieve_all_nl_search_models() - assert {:ok, []} = NlSearchModels.retrieve_all_nl_search_models([]) - assert {:ok, []} = NlSearchModels.retrieve_all_nl_search_models(conn: conn) - assert {:ok, []} = NlSearchModels.retrieve_all_nl_search_models(map_conn: map_conn) + case NlSearchModels.retrieve_all_nl_search_models() do + {:ok, []} -> + assert {:ok, []} = NlSearchModels.retrieve_all_nl_search_models() + assert {:ok, []} = NlSearchModels.retrieve_all_nl_search_models([]) + assert {:ok, []} = NlSearchModels.retrieve_all_nl_search_models(conn: conn) + assert {:ok, []} = NlSearchModels.retrieve_all_nl_search_models(map_conn: map_conn) + + {:ok, [first | _]} when is_struct(first, NLSearchModelSchema) -> + assert {:ok, _} = NlSearchModels.retrieve_all_nl_search_models() + assert {:ok, _} = NlSearchModels.retrieve_all_nl_search_models([]) + assert {:ok, _} = NlSearchModels.retrieve_all_nl_search_models(conn: conn) + assert {:ok, _} = NlSearchModels.retrieve_all_nl_search_models(map_conn: map_conn) + end end @tag [nls: true] @@ -66,18 +102,37 @@ defmodule NlSearchModelsTest do map_conn: map_conn, model: model } do - reason = %ApiResponse{message: "Model not found"} + case NlSearchModels.retrieve_nl_search_model(model["id"]) do + {:error, reason} -> + assert %ApiResponse{message: "Model not found"} === reason + assert {:error, ^reason} = 
NlSearchModels.retrieve_nl_search_model(model["id"]) + assert {:error, ^reason} = NlSearchModels.retrieve_nl_search_model(model["id"], []) - assert {:error, ^reason} = NlSearchModels.retrieve_nl_search_model(model["id"]) - assert {:error, ^reason} = NlSearchModels.retrieve_nl_search_model(model["id"], []) - assert {:error, ^reason} = NlSearchModels.retrieve_nl_search_model(model["id"], conn: conn) + assert {:error, ^reason} = + NlSearchModels.retrieve_nl_search_model(model["id"], conn: conn) - assert {:error, ^reason} = - NlSearchModels.retrieve_nl_search_model(model["id"], map_conn: map_conn) + assert {:error, ^reason} = + NlSearchModels.retrieve_nl_search_model(model["id"], map_conn: map_conn) + + {:ok, %NLSearchModelSchema{id: id}} -> + assert "gemini-model" === id + + assert {:ok, %NLSearchModelSchema{id: ^id}} = + NlSearchModels.retrieve_nl_search_model(model["id"]) + + assert {:ok, %NLSearchModelSchema{id: ^id}} = + NlSearchModels.retrieve_nl_search_model(model["id"], []) + + assert {:ok, %NLSearchModelSchema{id: ^id}} = + NlSearchModels.retrieve_nl_search_model(model["id"], conn: conn) + + assert {:ok, %NLSearchModelSchema{id: ^id}} = + NlSearchModels.retrieve_nl_search_model(model["id"], map_conn: map_conn) + end end @tag [nls: true] - test "error: update a natural language search model", %{ + test "success: update a natural language search model", %{ conn: conn, map_conn: map_conn, model: model @@ -87,15 +142,29 @@ defmodule NlSearchModelsTest do "system_prompt" => "New system prompt instructions" } - reason = %ApiResponse{message: "Model not found"} + NlSearchModels.create_nl_search_model(model) + + case NlSearchModels.update_nl_search_model(model["id"], body) do + {:error, error} -> + assert String.contains?(String.downcase(error.message), [ + "please pass a valid api key", + "not found" + ]) === true + + assert {:error, ^error} = NlSearchModels.update_nl_search_model(model["id"], body) + assert {:error, ^error} = 
NlSearchModels.update_nl_search_model(model["id"], body, []) + + assert {:error, ^error} = + NlSearchModels.update_nl_search_model(model["id"], body, conn: conn) - assert {:error, ^reason} = NlSearchModels.update_nl_search_model(model["id"], body) - assert {:error, ^reason} = NlSearchModels.update_nl_search_model(model["id"], body, []) + assert {:error, ^error} = + NlSearchModels.update_nl_search_model(model["id"], body, map_conn: map_conn) - assert {:error, ^reason} = - NlSearchModels.update_nl_search_model(model["id"], body, conn: conn) + {:ok, resp} -> + model_id = resp.id - assert {:error, ^reason} = - NlSearchModels.update_nl_search_model(model["id"], body, map_conn: map_conn) + assert {:ok, %NLSearchModelSchema{id: ^model_id}} = + NlSearchModels.update_nl_search_model(model["id"], body) + end end end From 472c873fbbb302090828a7cb0d33ea8cc5202c4a Mon Sep 17 00:00:00 2001 From: jaeyson Date: Mon, 13 Apr 2026 13:28:26 +0800 Subject: [PATCH 09/13] fix line break or newlines in json decoding --- lib/open_api_typesense/client.ex | 4 +--- test/operations/analytics_test.exs | 20 ++++++++++---------- 2 files changed, 11 insertions(+), 13 deletions(-) diff --git a/lib/open_api_typesense/client.ex b/lib/open_api_typesense/client.ex index 37e02b8..458eaf8 100644 --- a/lib/open_api_typesense/client.ex +++ b/lib/open_api_typesense/client.ex @@ -138,9 +138,6 @@ defmodule OpenApiTypesense.Client do end defp parse_resp(%Req.Response{status: code, body: body}, %{response: resp}) do - dbg(code) - dbg(body) - dbg(resp) {_status, mod} = Enum.find(resp, fn {status, _} -> status === code end) parse_body(code, mod, body) end @@ -152,6 +149,7 @@ defmodule OpenApiTypesense.Client do defp parse_body(code, {mod, :t}, body) when code in 400..499 do payload = body + |> String.replace("\n", "\\n") |> Jason.decode!() |> OpenApiTypesense.Converter.to_atom_keys() diff --git a/test/operations/analytics_test.exs b/test/operations/analytics_test.exs index 5d4e9a6..28f03d6 100644 --- 
a/test/operations/analytics_test.exs +++ b/test/operations/analytics_test.exs @@ -15,7 +15,7 @@ defmodule AnalyticsTest do setup_all do conn = Connection.new() map_conn = %{api_key: "xyz", host: "localhost", port: 8108, scheme: "http"} - product_name = "products" + product_name = "analytics_products" product_schema = %{ @@ -90,7 +90,7 @@ defmodule AnalyticsTest do body = %{ "name" => name, - "collection" => "products", + "collection" => "analytics_products", "type" => "popular_queries", "event_type" => "search", "params" => %{ @@ -122,7 +122,7 @@ defmodule AnalyticsTest do "type" => "counter", "params" => %{ "source" => %{ - "collections" => ["products"] + "collections" => ["analytics_products"] }, "destination" => %{ "collection" => collection_name @@ -145,7 +145,7 @@ defmodule AnalyticsTest do %{ "name" => name, "type" => "nohits_queries", - "collection" => "products", + "collection" => "analytics_products", "event_type" => "search", "params" => %{ "destination_collection" => "product_queries", @@ -174,7 +174,7 @@ defmodule AnalyticsTest do "type" => "nohits_queries", "params" => %{ "source" => %{ - "collections" => ["products"] + "collections" => ["analytics_products"] }, "destination" => %{ "collection" => "no_hits_queries" @@ -204,7 +204,7 @@ defmodule AnalyticsTest do "type" => "counter", "params" => %{ "source" => %{ - "collections" => ["products"], + "collections" => ["analytics_products"], "events" => [ %{"type" => "click", "weight" => 1, "name" => "products_downloads_event"} ] @@ -314,10 +314,10 @@ defmodule AnalyticsTest do %{ "name" => name, "type" => "counter", - "collection" => "products", + "collection" => "analytics_products", "event_type" => "click", "params" => %{ - "destination_collection" => "products", + "destination_collection" => "analytics_products", "counter_field" => "popularity", "weight" => 1 } @@ -376,13 +376,13 @@ defmodule AnalyticsTest do "type" => "counter", "params" => %{ "source" => %{ - "collections" => ["products"], + 
"collections" => ["analytics_products"], "events" => [ %{"type" => "click", "weight" => 1, "name" => event_name} ] }, "destination" => %{ - "collection" => "products", + "collection" => "analytics_products", "counter_field" => "popularity" } } From a284cadb53fca29805600fb62e617d70937e0ba9 Mon Sep 17 00:00:00 2001 From: jaeyson Date: Mon, 13 Apr 2026 14:54:18 +0800 Subject: [PATCH 10/13] Add assertion for exceeding quota for using models --- test/operations/nl_search_models_test.exs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/test/operations/nl_search_models_test.exs b/test/operations/nl_search_models_test.exs index cf2a4ca..d694e31 100644 --- a/test/operations/nl_search_models_test.exs +++ b/test/operations/nl_search_models_test.exs @@ -41,6 +41,8 @@ defmodule NlSearchModelsTest do case NlSearchModels.create_nl_search_model(model) do {:error, error} -> assert String.contains?(String.downcase(error.message), [ + "You exceeded your current quota", + "`api_key` is missing or is not a non-empty string.", "already exists", "please pass a valid api key", "not found" @@ -56,6 +58,8 @@ defmodule NlSearchModelsTest do assert {:error, error} = NlSearchModels.create_nl_search_model(model) assert String.contains?(String.downcase(error.message), [ + "You exceeded your current quota", + "`api_key` is missing or is not a non-empty string.", "already exists", "please pass a valid api key", "not found" @@ -146,7 +150,11 @@ defmodule NlSearchModelsTest do case NlSearchModels.update_nl_search_model(model["id"], body) do {:error, error} -> + assert 1 = error + assert String.contains?(String.downcase(error.message), [ + "You exceeded your current quota", + "`api_key` is missing or is not a non-empty string.", "please pass a valid api key", "not found" ]) === true From d6d1c1d39728dd4771e1f40a4d63c0db24c84fb4 Mon Sep 17 00:00:00 2001 From: jaeyson Date: Mon, 13 Apr 2026 14:58:54 +0800 Subject: [PATCH 11/13] remove failing assertion --- 
test/operations/nl_search_models_test.exs | 2 -- 1 file changed, 2 deletions(-) diff --git a/test/operations/nl_search_models_test.exs b/test/operations/nl_search_models_test.exs index d694e31..943435c 100644 --- a/test/operations/nl_search_models_test.exs +++ b/test/operations/nl_search_models_test.exs @@ -150,8 +150,6 @@ defmodule NlSearchModelsTest do case NlSearchModels.update_nl_search_model(model["id"], body) do {:error, error} -> - assert 1 = error - assert String.contains?(String.downcase(error.message), [ "You exceeded your current quota", "`api_key` is missing or is not a non-empty string.", From d108a31450162385467f1449b9bcbf5ba0632219 Mon Sep 17 00:00:00 2001 From: jaeyson Date: Mon, 13 Apr 2026 15:14:50 +0800 Subject: [PATCH 12/13] update llm.yml --- .github/workflows/llm.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/llm.yml b/.github/workflows/llm.yml index 8a497a8..51c3d63 100644 --- a/.github/workflows/llm.yml +++ b/.github/workflows/llm.yml @@ -4,7 +4,7 @@ on: branches: ["main"] concurrency: - group: ${{ github.workflow }}-${{ github.ref }} + group: ${{ github.workflow }}-${{ github.ref }}-${{ github.event_name }} cancel-in-progress: true jobs: From 1b3605a31d3c78bb679765a41c42d2b96777a98f Mon Sep 17 00:00:00 2001 From: jaeyson Date: Mon, 13 Apr 2026 15:29:28 +0800 Subject: [PATCH 13/13] update CHANGELOG.md [skip ci] --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1efa4b1..e08d279 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,7 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## major.minor.patch (yyyy.mm.dd) -## 1.2.0 (2026.04.12) +## 1.2.0 (2026.04.13) ### Deprecated