diff --git a/clients/algoliasearch-client-python/algoliasearch/http/api_response.py b/clients/algoliasearch-client-python/algoliasearch/http/api_response.py index 8c5e0748ec..aa7c7de7bb 100644 --- a/clients/algoliasearch-client-python/algoliasearch/http/api_response.py +++ b/clients/algoliasearch-client-python/algoliasearch/http/api_response.py @@ -21,7 +21,7 @@ class ApiResponse(Generic[T]): def __init__( self, verb: Verb, - data: T = None, + data: Optional[T] = None, error_message: str = "", headers: Optional[Dict[str, str]] = None, host: str = "", @@ -94,6 +94,6 @@ def deserialize(klass: Any = None, data: Any = None) -> Any: return data if isinstance(data, str): - return klass.from_json(data) + return klass.from_json(data) # pyright: ignore - return klass.from_dict(data) + return klass.from_dict(data) # pyright: ignore diff --git a/clients/algoliasearch-client-python/algoliasearch/http/base_config.py b/clients/algoliasearch-client-python/algoliasearch/http/base_config.py index 572bb27c21..4191a0bd55 100644 --- a/clients/algoliasearch-client-python/algoliasearch/http/base_config.py +++ b/clients/algoliasearch-client-python/algoliasearch/http/base_config.py @@ -5,20 +5,6 @@ class BaseConfig: - app_id: Optional[str] - api_key: Optional[str] - - read_timeout: int - write_timeout: int - connect_timeout: int - - wait_task_time_before_retry: Optional[int] - - headers: Dict[str, str] - proxies: Dict[str, str] - - hosts: HostsCollection - def __init__(self, app_id: Optional[str] = None, api_key: Optional[str] = None): app_id = environ.get("ALGOLIA_APP_ID") if app_id is None else app_id @@ -36,12 +22,14 @@ def __init__(self, app_id: Optional[str] = None, api_key: Optional[str] = None): self.write_timeout = 30000 self.connect_timeout = 2000 - self.wait_task_time_before_retry = None - self.headers = None - self.proxies = None - self.hosts = None + self.wait_task_time_before_retry: Optional[int] = None + self.headers: Optional[Dict[str, str]] = None + self.proxies: Optional[Dict[str, str]] = None + self.hosts: Optional[HostsCollection] = None def set_client_api_key(self, api_key: str) -> None: """Sets a new API key to authenticate requests.""" self.api_key = api_key + if self.headers is None: + self.headers = {} self.headers["x-algolia-api-key"] = api_key diff --git a/clients/algoliasearch-client-python/algoliasearch/http/base_transporter.py b/clients/algoliasearch-client-python/algoliasearch/http/base_transporter.py index 047663ccae..b9404b1d67 100644 --- a/clients/algoliasearch-client-python/algoliasearch/http/base_transporter.py +++ b/clients/algoliasearch-client-python/algoliasearch/http/base_transporter.py @@ -7,14 +7,15 @@ class BaseTransporter: - _config: BaseConfig - _retry_strategy: RetryStrategy - _hosts: List[Host] - def __init__(self, config: BaseConfig) -> None: self._config = config self._retry_strategy = RetryStrategy() - self._hosts = [] + self._hosts: List[Host] = [] + self._timeout = 5000 + + @property + def config(self) -> BaseConfig: + return self._config def prepare( self, @@ -25,13 +26,18 @@ def prepare( if use_read_transporter: self._timeout = request_options.timeouts["read"] - self._hosts = self._config.hosts.read() + self._hosts = ( + self._config.hosts.read() if self._config.hosts is not None else [] + ) if isinstance(request_options.data, dict): query_parameters.update(request_options.data) - return query_parameters + else: + self._timeout = request_options.timeouts["write"] + self._hosts = ( + self._config.hosts.write() if self._config.hosts is not None else [] + ) - 
self._timeout = request_options.timeouts["write"] - self._hosts = self._config.hosts.write() + return query_parameters def build_path(self, path, query_parameters): if query_parameters is not None and len(query_parameters) > 0: @@ -54,9 +60,21 @@ def build_url(self, host, path): ) def get_proxy(self, url): + if self._config.proxies is None: + return None + if url.startswith("https"): return self._config.proxies.get("https") elif url.startswith("http"): return self._config.proxies.get("http") else: return None + + def get_proxies(self, url): + if self._config.proxies is None: + return None + + if url.startswith("http"): + return self._config.proxies + else: + return None diff --git a/clients/algoliasearch-client-python/algoliasearch/http/exceptions.py b/clients/algoliasearch-client-python/algoliasearch/http/exceptions.py index 2e7e514066..0fa29f5a37 100644 --- a/clients/algoliasearch-client-python/algoliasearch/http/exceptions.py +++ b/clients/algoliasearch-client-python/algoliasearch/http/exceptions.py @@ -127,7 +127,12 @@ def __init__(self, msg, path_to_item=None) -> None: class ApiException(AlgoliaException): - def __init__(self, status_code=None, error_message=None, raw_data=None) -> None: + def __init__( + self, + status_code: int = -1, + error_message: str = "Unknown error", + raw_data: bytes = b"", + ) -> None: self.status_code = status_code self.error_message = error_message self.body = raw_data.decode("utf-8") diff --git a/clients/algoliasearch-client-python/algoliasearch/http/helpers.py b/clients/algoliasearch-client-python/algoliasearch/http/helpers.py index 196694d1cc..a56994203c 100644 --- a/clients/algoliasearch-client-python/algoliasearch/http/helpers.py +++ b/clients/algoliasearch-client-python/algoliasearch/http/helpers.py @@ -23,8 +23,8 @@ def __call__(self, retry_count: int = 0) -> int: async def create_iterable( func: Callable[[Optional[T]], Awaitable[T]], validate: Callable[[T], bool], - aggregator: Callable[[T], None], - timeout: Timeout = Timeout(), + aggregator: Optional[Callable[[T], None]], + timeout: Callable[[], int] = Timeout(), error_validate: Optional[Callable[[T], bool]] = None, error_message: Optional[Callable[[T], str]] = None, ) -> T: @@ -55,8 +55,8 @@ async def retry(prev: Optional[T] = None) -> T: def create_iterable_sync( func: Callable[[Optional[T]], T], validate: Callable[[T], bool], - aggregator: Callable[[T], None], - timeout: Timeout = Timeout(), + aggregator: Optional[Callable[[T], None]], + timeout: Callable[[], int] = Timeout(), error_validate: Optional[Callable[[T], bool]] = None, error_message: Optional[Callable[[T], str]] = None, ) -> T: diff --git a/clients/algoliasearch-client-python/algoliasearch/http/hosts.py b/clients/algoliasearch-client-python/algoliasearch/http/hosts.py index 83544b4ccc..c94351f11b 100644 --- a/clients/algoliasearch-client-python/algoliasearch/http/hosts.py +++ b/clients/algoliasearch-client-python/algoliasearch/http/hosts.py @@ -18,8 +18,9 @@ def __init__( self.port = port self.priority = cast(int, priority) self.accept = (CallType.WRITE | CallType.READ) if accept is None else accept - - self.reset() + self.last_use = 0.0 + self.retry_count = 0 + self.up = True def reset(self) -> None: self.last_use = 0.0 diff --git a/clients/algoliasearch-client-python/algoliasearch/http/request_options.py b/clients/algoliasearch-client-python/algoliasearch/http/request_options.py index 41104a0929..e220831628 100644 --- a/clients/algoliasearch-client-python/algoliasearch/http/request_options.py +++ 
b/clients/algoliasearch-client-python/algoliasearch/http/request_options.py @@ -1,6 +1,6 @@ from copy import deepcopy from sys import version_info -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Dict, Optional, Union from urllib.parse import quote from algoliasearch.http.base_config import BaseConfig @@ -13,20 +13,20 @@ class RequestOptions: - _config: BaseConfig - headers: Dict[str, str] - query_parameters: Dict[str, Any] - timeouts: Dict[str, int] - data: Dict[str, Any] - def __init__( self, config: BaseConfig, - headers: Dict[str, str] = {}, - query_parameters: Dict[str, Any] = {}, - timeouts: Dict[str, int] = {}, - data: Dict[str, Any] = {}, + headers: Optional[Dict[str, str]] = None, + query_parameters: Optional[Dict[str, Any]] = None, + timeouts: Optional[Dict[str, int]] = None, + data: Optional[Dict[str, Any]] = None, ) -> None: + if headers is None: + headers = {} + if query_parameters is None: + query_parameters = {} + if timeouts is None: + timeouts = {} self._config = config self.headers = headers self.query_parameters = { @@ -51,13 +51,12 @@ def from_dict(self, data: Dict[str, Dict[str, Any]]) -> Self: query_parameters=data.get("query_parameters", {}), timeouts=data.get("timeouts", {}), data=data.get("data", {}), - ) + ) # pyright: ignore def merge( self, - query_parameters: List[Tuple[str, str]] = [], - headers: Dict[str, Optional[str]] = {}, - _: Dict[str, int] = {}, + query_parameters: Optional[Dict[str, Any]] = None, + headers: Optional[Dict[str, str]] = None, data: Optional[str] = None, user_request_options: Optional[Union[Self, Dict[str, Any]]] = None, ) -> Self: @@ -65,11 +64,15 @@ def merge( Merges the default config values with the user given request options if it exists. """ - headers.update(self._config.headers) + if query_parameters is None: + query_parameters = {} + if headers is None: + headers = {} + headers.update(self._config.headers or {}) request_options = { "headers": headers, - "query_parameters": {k: v for k, v in query_parameters}, + "query_parameters": query_parameters, "timeouts": { "read": self._config.read_timeout, "write": self._config.write_timeout, diff --git a/clients/algoliasearch-client-python/algoliasearch/http/retry.py b/clients/algoliasearch-client-python/algoliasearch/http/retry.py index e42670de6d..8a5a48b34a 100644 --- a/clients/algoliasearch-client-python/algoliasearch/http/retry.py +++ b/clients/algoliasearch-client-python/algoliasearch/http/retry.py @@ -14,7 +14,7 @@ class RetryOutcome: class RetryStrategy: def valid_hosts(self, hosts: List[Host]) -> List[Host]: for host in hosts: - if not host.up and self._now() - host.last_use > Host.TTL: + if not host.up and time.time() - host.last_use > Host.TTL: host.up = True return [host for host in hosts if host.up] diff --git a/clients/algoliasearch-client-python/algoliasearch/http/serializer.py b/clients/algoliasearch-client-python/algoliasearch/http/serializer.py index 52bba424c5..3a71105cf7 100644 --- a/clients/algoliasearch-client-python/algoliasearch/http/serializer.py +++ b/clients/algoliasearch-client-python/algoliasearch/http/serializer.py @@ -10,7 +10,16 @@ class QueryParametersSerializer: Parses the given 'query_parameters' values of each keys into their string value. 
""" - query_parameters: Dict[str, Any] = {} + def __init__(self, query_parameters: Optional[Dict[str, Any]]) -> None: + self.query_parameters: Dict[str, Any] = {} + if query_parameters is None: + return + for key, value in query_parameters.items(): + if isinstance(value, dict): + for dkey, dvalue in value.items(): + self.query_parameters[dkey] = self.parse(dvalue) + else: + self.query_parameters[key] = self.parse(value) def parse(self, value) -> Any: if isinstance(value, list): @@ -27,19 +36,8 @@ def encoded(self) -> str: dict(sorted(self.query_parameters.items(), key=lambda val: val[0])) ).replace("+", "%20") - def __init__(self, query_parameters: Optional[Dict[str, Any]]) -> None: - self.query_parameters = {} - if query_parameters is None: - return - for key, value in query_parameters.items(): - if isinstance(value, dict): - for dkey, dvalue in value.items(): - self.query_parameters[dkey] = self.parse(dvalue) - else: - self.query_parameters[key] = self.parse(value) - -def bodySerializer(obj: Any) -> Any: +def body_serializer(obj: Any) -> Any: """Builds a JSON POST object. If obj is None, return None. @@ -57,9 +55,9 @@ def bodySerializer(obj: Any) -> Any: elif isinstance(obj, PRIMITIVE_TYPES): return obj elif isinstance(obj, list): - return [bodySerializer(sub_obj) for sub_obj in obj] + return [body_serializer(sub_obj) for sub_obj in obj] elif isinstance(obj, tuple): - return tuple(bodySerializer(sub_obj) for sub_obj in obj) + return tuple(body_serializer(sub_obj) for sub_obj in obj) elif isinstance(obj, dict): obj_dict = obj else: @@ -67,4 +65,4 @@ def bodySerializer(obj: Any) -> Any: if obj_dict is None: return None - return {key: bodySerializer(val) for key, val in obj_dict.items()} + return {key: body_serializer(val) for key, val in obj_dict.items()} diff --git a/clients/algoliasearch-client-python/algoliasearch/http/transporter.py b/clients/algoliasearch-client-python/algoliasearch/http/transporter.py index a52e31276c..b82d7aac55 100644 --- a/clients/algoliasearch-client-python/algoliasearch/http/transporter.py +++ b/clients/algoliasearch-client-python/algoliasearch/http/transporter.py @@ -1,5 +1,6 @@ from asyncio import TimeoutError from json import loads +from typing import List, Optional from aiohttp import ClientSession, TCPConnector from async_timeout import timeout @@ -11,19 +12,19 @@ AlgoliaUnreachableHostException, RequestException, ) +from algoliasearch.http.hosts import Host from algoliasearch.http.request_options import RequestOptions from algoliasearch.http.retry import RetryOutcome, RetryStrategy from algoliasearch.http.verb import Verb class Transporter(BaseTransporter): - _session: ClientSession - def __init__(self, config: BaseConfig) -> None: - self._session = None + super().__init__(config) + self._session: Optional[ClientSession] = None self._config = config self._retry_strategy = RetryStrategy() - self._hosts = [] + self._hosts: List[Host] = [] async def close(self) -> None: if self._session is not None: @@ -71,7 +72,7 @@ async def request( url=url, host=host.url, status_code=resp.status, - headers=resp.headers, + headers=resp.headers, # pyright: ignore # insensitive dict is still a dict data=_raw_data, raw_data=_raw_data, error_message=str(resp.reason), @@ -103,6 +104,7 @@ async def request( class EchoTransporter(Transporter): def __init__(self, config: BaseConfig) -> None: + super().__init__(config) self._config = config self._retry_strategy = RetryStrategy() diff --git a/clients/algoliasearch-client-python/algoliasearch/http/transporter_sync.py 
b/clients/algoliasearch-client-python/algoliasearch/http/transporter_sync.py index cba92ba1aa..4d11a55e48 100644 --- a/clients/algoliasearch-client-python/algoliasearch/http/transporter_sync.py +++ b/clients/algoliasearch-client-python/algoliasearch/http/transporter_sync.py @@ -4,9 +4,9 @@ from requests import Request, Session, Timeout if version_info >= (3, 11): - from typing import Self + from typing import List, Optional, Self else: - from typing_extensions import Self + from typing_extensions import List, Self from requests.adapters import HTTPAdapter from urllib3.util import Retry @@ -18,19 +18,19 @@ AlgoliaUnreachableHostException, RequestException, ) +from algoliasearch.http.hosts import Host from algoliasearch.http.request_options import RequestOptions from algoliasearch.http.retry import RetryOutcome, RetryStrategy from algoliasearch.http.verb import Verb class TransporterSync(BaseTransporter): - _session: Session - def __init__(self, config: BaseConfig) -> None: - self._session = None + super().__init__(config) + self._session: Optional[Session] = None self._config = config self._retry_strategy = RetryStrategy() - self._hosts = [] + self._hosts: List[Host] = [] def __enter__(self) -> Self: return self @@ -64,7 +64,7 @@ def request( for host in self._retry_strategy.valid_hosts(self._hosts): url = self.build_url(host, path) - proxy = self.get_proxy(url) + proxies = self.get_proxies(url) req = Request( method=verb, @@ -77,7 +77,7 @@ def request( resp = self._session.send( req, timeout=self._timeout / 1000, - proxies=proxy, + proxies=proxies, ) response = ApiResponse( @@ -86,7 +86,7 @@ def request( url=url, host=host.url, status_code=resp.status_code, - headers=resp.headers, # type: ignore -- insensitive dict is still a dict + headers=resp.headers, # type: ignore # insensitive dict is still a dict data=resp.text, raw_data=resp.text, error_message=str(resp.reason), @@ -117,6 +117,7 @@ def request( class EchoTransporterSync(TransporterSync): def __init__(self, config: BaseConfig) -> None: + super().__init__(config) self._config = config self._retry_strategy = RetryStrategy() diff --git a/clients/algoliasearch-client-python/algoliasearch/http/user_agent.py b/clients/algoliasearch-client-python/algoliasearch/http/user_agent.py index 63545e0e06..edaf03f034 100644 --- a/clients/algoliasearch-client-python/algoliasearch/http/user_agent.py +++ b/clients/algoliasearch-client-python/algoliasearch/http/user_agent.py @@ -11,14 +11,14 @@ class UserAgent: + def __init__(self) -> None: + self.value = "Algolia for Python ({}); Python ({})".format( + __version__, str(python_version()) + ) + def get(self) -> str: return self.value def add(self, segment: str, version: Optional[str] = __version__) -> Self: self.value += "; {} ({})".format(segment, version) return self - - def __init__(self) -> None: - self.value = "Algolia for Python ({}); Python ({})".format( - __version__, str(python_version()) - ) diff --git a/clients/algoliasearch-client-python/algoliasearch/http/verb.py b/clients/algoliasearch-client-python/algoliasearch/http/verb.py index ebd7e5b940..3e8af8129e 100644 --- a/clients/algoliasearch-client-python/algoliasearch/http/verb.py +++ b/clients/algoliasearch-client-python/algoliasearch/http/verb.py @@ -1,4 +1,7 @@ -class Verb: +from enum import Enum + + +class Verb(str, Enum): GET = "GET" POST = "POST" PUT = "PUT" diff --git a/clients/algoliasearch-client-swift/Sources/Search/Extra/SearchClientExtension.swift 
b/clients/algoliasearch-client-swift/Sources/Search/Extra/SearchClientExtension.swift index 2cea7870c9..e52ad5bd32 100644 --- a/clients/algoliasearch-client-swift/Sources/Search/Extra/SearchClientExtension.swift +++ b/clients/algoliasearch-client-swift/Sources/Search/Extra/SearchClientExtension.swift @@ -319,7 +319,7 @@ public extension SearchClient { aggregator: @escaping (SearchSynonymsResponse) -> Void, requestOptions: RequestOptions? = nil ) async throws -> SearchSynonymsResponse { - let hitsPerPage = searchSynonymsParams.hitsPerPage ?? 1000 + let hitsPerPage = 1000 var updatedSearchSynonymsParams = searchSynonymsParams if updatedSearchSynonymsParams.page == nil { diff --git a/config/generation.config.mjs b/config/generation.config.mjs index d8aef75ddf..da859858e4 100644 --- a/config/generation.config.mjs +++ b/config/generation.config.mjs @@ -110,6 +110,7 @@ export const patterns = [ 'tests/output/python/poetry.lock', '!tests/output/python/**/__init__.py', + 'tests/output/python/requirements.txt', // Ruby '!clients/algoliasearch-client-ruby/**', @@ -151,10 +152,12 @@ export const patterns = [ '!clients/algoliasearch-client-swift/Sources/zlib/**', 'tests/output/swift/Package.swift', - '!tests/output/swift/handwritten/**', + '!tests/output/swift/manual/**', '!tests/output/swift/Utils/**', 'guides/.gitignore', 'guides/swift/.swiftformat', 'clients/**/LICENSE', + + 'yarn.lock', ]; diff --git a/playground/python/pyproject.toml b/playground/python/pyproject.toml index 4678659be0..ff86b49c7f 100644 --- a/playground/python/pyproject.toml +++ b/playground/python/pyproject.toml @@ -11,7 +11,8 @@ repository = "https://github.com/algolia/api-clients-automation" [tool.poetry.dependencies] python = ">= 3.8.1" algoliasearch = { path = "../../clients/algoliasearch-client-python", develop = true } -ruff = "== 0.6.4" +ruff = "== 0.6.8" +pyright = "== 1.1.383" python-dotenv = "== 1.0.1" [tool.poetry.scripts] diff --git a/scripts/docker/Dockerfile.base b/scripts/docker/Dockerfile.base index 2b6bcd904c..480591faef 100644 --- a/scripts/docker/Dockerfile.base +++ b/scripts/docker/Dockerfile.base @@ -42,7 +42,9 @@ RUN echo "export PATH=$PATH:/usr/local/bin/python" >> ~/.profile \ && echo "export PATH=$PATH:/usr/local/bin/pip" >> ~/.profile \ && pip install --upgrade pip pipx && pipx ensurepath \ && pipx install poetry - +ENV VIRTUAL_ENV=/opt/env +RUN python -m venv $VIRTUAL_ENV +ENV PATH="$VIRTUAL_ENV/bin:$PATH" # Go COPY --from=go-builder /usr/local/go/ /usr/local/go/ diff --git a/scripts/formatter.ts b/scripts/formatter.ts index 0803c05dab..2b290554d4 100644 --- a/scripts/formatter.ts +++ b/scripts/formatter.ts @@ -54,6 +54,9 @@ export async function formatter(language: string, cwd: string): Promise { 'poetry lock --no-update && poetry install --sync && pip freeze > requirements.txt && poetry run ruff check --fix --unsafe-fixes && poetry run ruff format', { cwd, language }, ); + if (!cwd.includes('tests')) { + await run('poetry run pyright', { cwd, language }); + } break; case 'ruby': await run('bundle install', { cwd, language }); diff --git a/templates/python/api.mustache b/templates/python/api.mustache index b3c73a4789..521e87ade8 100644 --- a/templates/python/api.mustache +++ b/templates/python/api.mustache @@ -2,12 +2,13 @@ {{> imports}} {{#isSearchClient}} +from algoliasearch.search.models.action import Action from algoliasearch.search.models.batch_request import BatchRequest +from algoliasearch.search.models.browse_params_object import BrowseParamsObject +from 
algoliasearch.search.models.operation_type import OperationType +from algoliasearch.search.models.replace_all_objects_response import ReplaceAllObjectsResponse from algoliasearch.search.models.scope_type import ScopeType -from algoliasearch.search.models.action import Action from algoliasearch.search.models.secured_api_key_restrictions import SecuredApiKeyRestrictions -from algoliasearch.search.models.replace_all_objects_response import ReplaceAllObjectsResponse -from algoliasearch.search.models.browse_params_object import BrowseParamsObject {{/isSearchClient}} {{#operations}}{{#operation}}{{#imports}} @@ -36,12 +37,12 @@ class {{classname}}{{#isSyncClient}}Sync{{/isSyncClient}}: """ _transporter: Transporter{{#isSyncClient}}Sync{{/isSyncClient}} - _config: {{#lambda.pascalcase}}{{client}}Config{{/lambda.pascalcase}} + _config: BaseConfig _request_options: RequestOptions - def __init__(self, app_id: Optional[str] = None, api_key: Optional[str] = None, {{#hasRegionalHost}}region: {{#fallbackToAliasHost}}Optional[str]{{/fallbackToAliasHost}}{{^fallbackToAliasHost}}str{{/fallbackToAliasHost}} = None, {{/hasRegionalHost}}transporter: Optional[Transporter{{#isSyncClient}}Sync{{/isSyncClient}}] = None, config: Optional[{{#lambda.pascalcase}}{{client}}Config{{/lambda.pascalcase}}] = None) -> None: + def __init__(self, app_id: Optional[str] = None, api_key: Optional[str] = None, {{#hasRegionalHost}}region: {{#fallbackToAliasHost}}Optional[str] = None{{/fallbackToAliasHost}}{{^fallbackToAliasHost}}str = ""{{/fallbackToAliasHost}}, {{/hasRegionalHost}}transporter: Optional[Transporter{{#isSyncClient}}Sync{{/isSyncClient}}] = None, config: Optional[{{#lambda.pascalcase}}{{client}}Config{{/lambda.pascalcase}}] = None) -> None: if transporter is not None and config is None: - config = transporter._config + config = {{#lambda.pascalcase}}{{client}}Config{{/lambda.pascalcase}}(transporter.config.app_id, transporter.config.api_key{{#hasRegionalHost}}, region{{/hasRegionalHost}}) if config is None: config = {{#lambda.pascalcase}}{{client}}Config{{/lambda.pascalcase}}(app_id, api_key{{#hasRegionalHost}}, region{{/hasRegionalHost}}) @@ -99,7 +100,7 @@ class {{classname}}{{#isSyncClient}}Sync{{/isSyncClient}}: {{^isSyncClient}}async {{/isSyncClient}}def set_client_api_key(self, api_key: str) -> None: """Sets a new API key to authenticate requests.""" - self._transporter._config.set_client_api_key(api_key) + self._transporter.config.set_client_api_key(api_key) {{#isSearchClient}} {{> search_helpers}} @@ -138,21 +139,21 @@ class {{classname}}{{#isSyncClient}}Sync{{/isSyncClient}}: {{/allParams}} {{#queryParams.0}} - _query_parameters: List[Tuple[str, str]] = [] + _query_parameters: Dict[str, Any] = {} {{/queryParams.0}} {{#headerParams.0}} - _headers: Dict[str, Optional[str]] = {} + _headers: Dict[str, str] = {} {{/headerParams.0}} {{#vendorExtensions}} {{#queryParams}} if {{paramName}} is not None: {{^x-is-custom-request}} - _query_parameters.append(('{{baseName}}', {{paramName}})) + _query_parameters["{{baseName}}"] = {{paramName}} {{/x-is-custom-request}} {{#x-is-custom-request}} for _qpkey, _qpvalue in {{paramName}}.items(): - _query_parameters.append((_qpkey, _qpvalue)) + _query_parameters[_qpkey] = _qpvalue {{/x-is-custom-request}} {{/queryParams}} {{/vendorExtensions}} @@ -174,7 +175,7 @@ class {{classname}}{{#isSyncClient}}Sync{{/isSyncClient}}: request_options=self._request_options.merge( {{#queryParams.0}}query_parameters=_query_parameters,{{/queryParams.0}} 
{{#headerParams.0}}headers=_headers,{{/headerParams.0}} - {{#bodyParam}}data=dumps(bodySerializer(_data)),{{/bodyParam}} + {{#bodyParam}}data=dumps(body_serializer(_data)),{{/bodyParam}} user_request_options=request_options, ), {{#vendorExtensions}} diff --git a/templates/python/config.mustache b/templates/python/config.mustache index 60520ce175..58c155e02c 100644 --- a/templates/python/config.mustache +++ b/templates/python/config.mustache @@ -11,11 +11,14 @@ from algoliasearch.http.base_config import BaseConfig class {{#lambda.pascalcase}}{{client}}{{/lambda.pascalcase}}Config(BaseConfig): - def __init__(self, app_id: str, api_key: str{{#hasRegionalHost}}, region: {{#fallbackToAliasHost}}Optional[str]{{/fallbackToAliasHost}}{{^fallbackToAliasHost}}str{{/fallbackToAliasHost}} = None{{/hasRegionalHost}}) -> None: + def __init__(self, app_id: Optional[str], api_key: Optional[str]{{#hasRegionalHost}}, region: {{#fallbackToAliasHost}}Optional[str] = None{{/fallbackToAliasHost}}{{^fallbackToAliasHost}}str = ""{{/fallbackToAliasHost}}{{/hasRegionalHost}}) -> None: super().__init__(app_id, api_key) user_agent = UserAgent().add("{{#lambda.pascalcase}}{{client}}{{/lambda.pascalcase}}") + assert app_id, "`app_id` is missing." + assert api_key, "`api_key` is missing." + self.headers = { "x-algolia-application-id": app_id, "x-algolia-api-key": api_key, @@ -40,7 +43,7 @@ class {{#lambda.pascalcase}}{{client}}{{/lambda.pascalcase}}Config(BaseConfig): if {{^fallbackToAliasHost}}not region or {{/fallbackToAliasHost}}(region is not None and region not in _regions): raise ValueError(f"`region` {{^fallbackToAliasHost}}is required and {{/fallbackToAliasHost}}must be one of the following: {', '.join(_regions)}") - self.hosts = HostsCollection([Host({{#fallbackToAliasHost}}"{{{hostWithFallback}}}" if region is None else {{/fallbackToAliasHost}} "{{{regionalHost}}}".replace("{region}", region))]) + self.hosts = HostsCollection([Host({{#fallbackToAliasHost}}"{{{hostWithFallback}}}" if region is None else {{/fallbackToAliasHost}} "{{{regionalHost}}}".replace("{region}", region or ""))]) {{/hasRegionalHost}} {{^hasRegionalHost}} diff --git a/templates/python/imports.mustache b/templates/python/imports.mustache index 868d2eeb58..ee8d083ef2 100644 --- a/templates/python/imports.mustache +++ b/templates/python/imports.mustache @@ -46,13 +46,14 @@ if version_info >= (3, 11): else: from typing_extensions import Self -from algoliasearch.http.helpers import create_iterable, create_iterable_sync, RetryTimeout -from algoliasearch.http.serializer import bodySerializer, QueryParametersSerializer from algoliasearch.http.api_response import ApiResponse +from algoliasearch.http.base_config import BaseConfig +from algoliasearch.http.exceptions import RequestException, ValidUntilNotFoundException +from algoliasearch.http.helpers import create_iterable, create_iterable_sync, RetryTimeout from algoliasearch.http.request_options import RequestOptions +from algoliasearch.http.serializer import body_serializer, QueryParametersSerializer from algoliasearch.http.transporter import Transporter from algoliasearch.http.transporter_sync import TransporterSync from algoliasearch.http.verb import Verb -from algoliasearch.http.exceptions import RequestException, ValidUntilNotFoundException from algoliasearch.{{packageName}}.config import {{#lambda.pascalcase}}{{client}}Config{{/lambda.pascalcase}} \ No newline at end of file diff --git a/templates/python/model_generic.mustache b/templates/python/model_generic.mustache index 
a76d4c2e93..fb2a11958d 100644 --- a/templates/python/model_generic.mustache +++ b/templates/python/model_generic.mustache @@ -1,8 +1,18 @@ +_ALIASES = { + {{#vars}} + "{{name}}": "{{{baseName}}}", + {{/vars}} +} + + +def _alias_generator(name: str) -> str: + return _ALIASES.get(name, name) + class {{classname}}({{#parent}}{{{.}}}{{/parent}}{{^parent}}BaseModel{{/parent}}): {{> model_description}} {{#vars}} - {{name}}: {{^required}}Optional[{{/required}}{{{dataType}}}{{^required}}]{{/required}} = Field({{^required}}default=None, {{/required}}alias="{{{baseName}}}") + {{name}}: {{^required}}Optional[{{/required}}{{{dataType}}}{{^required}}]{{/required}}{{^required}} = None{{/required}} {{#description}} """ {{{.}}} """ {{/description}} @@ -64,6 +74,7 @@ class {{classname}}({{#parent}}{{{.}}}{{/parent}}{{^parent}}BaseModel{{/parent}} populate_by_name=True, validate_assignment=True, protected_namespaces=(), + alias_generator=_alias_generator, {{#isAdditionalPropertiesTrue}} extra='allow', {{/isAdditionalPropertiesTrue}} @@ -129,7 +140,9 @@ class {{classname}}({{#parent}}{{{.}}}{{/parent}}{{^parent}}BaseModel{{/parent}} {{/items.isContainer}} {{/items.isEnumOrRef}} {{#items.isEnumOrRef}} - obj["{{{baseName}}}"] = dict((_k, _v) for _k, _v in obj.get("{{{baseName}}}").items()) + {{{baseName}}} = obj.get("{{{baseName}}}") + if {{{baseName}}} is not None: + obj["{{{baseName}}}"] = dict((_k, _v) for _k, _v in {{{baseName}}}.items()) {{/items.isEnumOrRef}} {{/items.isPrimitiveType}} {{/isMap}} diff --git a/templates/python/model_oneof.mustache b/templates/python/model_oneof.mustache index f6ac0563a6..cb39264b30 100644 --- a/templates/python/model_oneof.mustache +++ b/templates/python/model_oneof.mustache @@ -5,7 +5,7 @@ class {{classname}}({{#parent}}{{{.}}}{{/parent}}{{^parent}}BaseModel{{/parent}} {{vendorExtensions.x-py-name}}: {{^required}}Optional[{{/required}}{{{dataType}}}{{^required}}]{{/required}} = Field({{^required}}default=None{{/required}}) {{#description}}""" {{{.}}} """{{/description}} {{/composedSchemas.oneOf}} - actual_instance: Optional[Union[{{#oneOf}}{{{.}}}{{^-last}}, {{/-last}}{{/oneOf}}]] = None + actual_instance: Union[{{#oneOf}}{{{.}}}, {{/oneOf}}None] = None one_of_schemas: Set[str] = { {{#oneOf}}"{{.}}"{{^-last}}, {{/-last}}{{/oneOf}} } def __init__(self, *args, **kwargs) -> None: @@ -14,12 +14,12 @@ class {{classname}}({{#parent}}{{{.}}}{{/parent}}{{^parent}}BaseModel{{/parent}} raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") if kwargs: raise ValueError("If a position argument is used, keyword arguments cannot be used.") - super().__init__(actual_instance=args[0]) + super().__init__(actual_instance=args[0]) # pyright: ignore else: super().__init__(**kwargs) @model_serializer - def unwrap_actual_instance(self) -> Optional[Union[{{#oneOf}}{{{.}}}{{^-last}}, {{/-last}}{{/oneOf}}]]: + def unwrap_actual_instance(self) -> Union[{{#oneOf}}{{{.}}}, {{/oneOf}}Self, None]: """ Unwraps the `actual_instance` when calling the `to_json` method. 
""" @@ -84,8 +84,10 @@ class {{classname}}({{#parent}}{{{.}}}{{/parent}}{{^parent}}BaseModel{{/parent}} if self.actual_instance is None: return "null" - if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): - return self.actual_instance.to_json() + if hasattr(self.actual_instance, "to_json") and callable( + self.actual_instance.to_json # pyright: ignore + ): + return self.actual_instance.to_json() # pyright: ignore else: return dumps(self.actual_instance) @@ -94,7 +96,9 @@ class {{classname}}({{#parent}}{{{.}}}{{/parent}}{{^parent}}BaseModel{{/parent}} if self.actual_instance is None: return None - if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): - return self.actual_instance.to_dict() + if hasattr(self.actual_instance, "to_dict") and callable( + self.actual_instance.to_dict # pyright: ignore + ): + return self.actual_instance.to_dict() # pyright: ignore else: - return self.actual_instance \ No newline at end of file + return self.actual_instance # pyright: ignore \ No newline at end of file diff --git a/templates/python/partial_api_args.mustache b/templates/python/partial_api_args.mustache index 667aeec65d..5c63dc0cd3 100644 --- a/templates/python/partial_api_args.mustache +++ b/templates/python/partial_api_args.mustache @@ -1,7 +1,7 @@ ( self, {{#allParams}} - {{paramName}}: {{{vendorExtensions.x-py-typing}}}{{^required}} = None{{/required}}, + {{paramName}}: {{#isModel}}Union[{{/isModel}}{{#isArray}}{{#items.isModel}}Union[{{/items.isModel}}{{/isArray}}{{#isEnumRef}}Union[{{/isEnumRef}}{{{vendorExtensions.x-py-typing}}}{{#isModel}}, dict[str, Any]]{{/isModel}}{{#isEnumRef}}, str]{{/isEnumRef}}{{#isArray}}{{#items.isModel}}, list[dict[str, Any]]]{{/items.isModel}}{{/isArray}}{{^required}} = None{{/required}}, {{/allParams}} request_options: Optional[Union[dict, RequestOptions]] = None ) \ No newline at end of file diff --git a/templates/python/pyproject.mustache b/templates/python/pyproject.mustache index 59b1068b2e..82de151eb1 100644 --- a/templates/python/pyproject.mustache +++ b/templates/python/pyproject.mustache @@ -23,7 +23,8 @@ async-timeout = ">= 4.0.3" pydantic = ">= 2" [tool.poetry.group.dev.dependencies] -ruff = "== 0.6.4" +ruff = "== 0.6.8" +pyright = "== 1.1.383" [tool.ruff] line-length = 88 @@ -40,4 +41,4 @@ quote-style = "double" [build-system] requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" \ No newline at end of file +build-backend = "poetry.core.masonry.api" diff --git a/templates/python/search_helpers.mustache b/templates/python/search_helpers.mustache index 4d9aeb9f8e..7accf339c1 100644 --- a/templates/python/search_helpers.mustache +++ b/templates/python/search_helpers.mustache @@ -9,21 +9,22 @@ """ Helper: Wait for a task to be published (completed) for a given `indexName` and `taskID`. 
""" - self._retry_count = 0 + _retry_count = 0 - {{^isSyncClient}}async {{/isSyncClient}}def _func(_: GetTaskResponse) -> GetTaskResponse: + {{^isSyncClient}}async {{/isSyncClient}}def _func(_: Optional[GetTaskResponse]) -> GetTaskResponse: return {{^isSyncClient}}await {{/isSyncClient}}self.get_task(index_name, task_id, request_options) def _aggregator(_: GetTaskResponse) -> None: - self._retry_count += 1 + nonlocal _retry_count + _retry_count += 1 return {{^isSyncClient}}await {{/isSyncClient}}create_iterable{{#isSyncClient}}_sync{{/isSyncClient}}( func=_func, aggregator=_aggregator, validate=lambda _resp: _resp.status == "published", - timeout=lambda: timeout(self._retry_count), - error_validate=lambda _: self._retry_count >= max_retries, - error_message=lambda: f"The maximum number of retries exceeded. (${self._retry_count}/${max_retries})", + timeout=lambda: timeout(_retry_count), + error_validate=lambda _: _retry_count >= max_retries, + error_message=lambda _: f"The maximum number of retries exceeded. (${_retry_count}/${max_retries})", ) {{^isSyncClient}}async {{/isSyncClient}}def wait_for_app_task( @@ -36,28 +37,29 @@ """ Helper: Wait for an application-level task to complete for a given `taskID`. """ - self._retry_count = 0 + _retry_count = 0 - {{^isSyncClient}}async {{/isSyncClient}}def _func(_: GetTaskResponse) -> GetTaskResponse: + {{^isSyncClient}}async {{/isSyncClient}}def _func(_: Optional[GetTaskResponse]) -> GetTaskResponse: return {{^isSyncClient}}await {{/isSyncClient}}self.get_app_task(task_id, request_options) def _aggregator(_: GetTaskResponse) -> None: - self._retry_count += 1 + nonlocal _retry_count + _retry_count += 1 return {{^isSyncClient}}await {{/isSyncClient}}create_iterable{{#isSyncClient}}_sync{{/isSyncClient}}( func=_func, aggregator=_aggregator, validate=lambda _resp: _resp.status == "published", - timeout=lambda: timeout(self._retry_count), - error_validate=lambda _: self._retry_count >= max_retries, - error_message=lambda: f"The maximum number of retries exceeded. (${self._retry_count}/${max_retries})", + timeout=lambda: timeout(_retry_count), + error_validate=lambda _: _retry_count >= max_retries, + error_message=lambda _: f"The maximum number of retries exceeded. (${_retry_count}/${max_retries})", ) {{^isSyncClient}}async {{/isSyncClient}}def wait_for_api_key( self, key: str, operation: str, - api_key: Optional[ApiKey] = None, + api_key: Optional[Union[ApiKey, dict[str, Any]]] = None, max_retries: int = 50, timeout: RetryTimeout = RetryTimeout(), request_options: Optional[Union[dict, RequestOptions]] = None, @@ -65,28 +67,33 @@ """ Helper: Wait for an API key to be added, updated or deleted based on a given `operation`. """ - self._retry_count = 0 + _retry_count = 0 if operation == "update" and api_key is None: raise ValueError( "`apiKey` is required when waiting for an `update` operation." 
) - {{^isSyncClient}}async {{/isSyncClient}}def _func(_prev: GetApiKeyResponse | None) -> GetApiKeyResponse | None: + {{^isSyncClient}}async {{/isSyncClient}}def _func(_prev: Optional[GetApiKeyResponse]) -> GetApiKeyResponse: try: return {{^isSyncClient}}await {{/isSyncClient}}self.get_api_key(key=key, request_options=request_options) except RequestException as e: if e.status_code == 404 and (operation == "delete" or operation == "add"): - return None + return None # pyright: ignore raise e def _aggregator(_: GetApiKeyResponse | None) -> None: - self._retry_count += 1 + nonlocal _retry_count + _retry_count += 1 def _validate(_resp: GetApiKeyResponse | None) -> bool: if operation == "update": + if _resp is None: + return False resp_dict = _resp.to_dict() api_key_dict = api_key.to_dict() if isinstance(api_key, ApiKey) else api_key + if api_key_dict is None: + return False for field in api_key_dict: if isinstance(api_key_dict[field], list) and isinstance( resp_dict[field], list @@ -106,27 +113,27 @@ func=_func, validate=_validate, aggregator=_aggregator, - timeout=lambda: timeout(self._retry_count), - error_validate=lambda _: self._retry_count >= max_retries, - error_message=lambda _: f"The maximum number of retries exceeded. (${self._retry_count}/${max_retries})", + timeout=lambda: timeout(_retry_count), + error_validate=lambda _: _retry_count >= max_retries, + error_message=lambda _: f"The maximum number of retries exceeded. (${_retry_count}/${max_retries})", ) {{^isSyncClient}}async {{/isSyncClient}}def browse_objects( self, index_name: str, aggregator: Optional[Callable[[BrowseResponse], None]], - browse_params: Optional[BrowseParamsObject] = BrowseParamsObject(), + browse_params: BrowseParamsObject = BrowseParamsObject(), request_options: Optional[Union[dict, RequestOptions]] = None, ) -> BrowseResponse: """ Helper: Iterate on the `browse` method of the client to allow aggregating objects of an index. """ - {{^isSyncClient}}async {{/isSyncClient}}def _func(_prev: BrowseResponse) -> BrowseResponse: + {{^isSyncClient}}async {{/isSyncClient}}def _func(_prev: Optional[BrowseResponse]) -> BrowseResponse: if _prev is not None and _prev.cursor is not None: browse_params.cursor = _prev.cursor return {{^isSyncClient}}await {{/isSyncClient}}self.browse( index_name=index_name, - browse_params=browse_params, + browse_params=BrowseParams(browse_params), request_options=request_options, ) @@ -140,16 +147,17 @@ self, index_name: str, aggregator: Optional[Callable[[SearchRulesResponse], None]], - search_rules_params: Optional[SearchRulesParams] = SearchRulesParams(hits_per_page=1000), + search_rules_params: SearchRulesParams = SearchRulesParams(hits_per_page=1000), request_options: Optional[Union[dict, RequestOptions]] = None, ) -> SearchRulesResponse: """ Helper: Iterate on the `search_rules` method of the client to allow aggregating rules of an index. 
""" - if search_rules_params is not None: + if search_rules_params.hits_per_page is None: search_rules_params.hits_per_page = 1000 + hits_per_page = search_rules_params.hits_per_page - {{^isSyncClient}}async {{/isSyncClient}}def _func(_prev: SearchRulesResponse) -> SearchRulesResponse: + {{^isSyncClient}}async {{/isSyncClient}}def _func(_prev: Optional[SearchRulesResponse]) -> SearchRulesResponse: if _prev is not None: search_rules_params.page = _prev.page + 1 return {{^isSyncClient}}await {{/isSyncClient}}self.search_rules( @@ -159,7 +167,7 @@ ) return {{^isSyncClient}}await {{/isSyncClient}}create_iterable{{#isSyncClient}}_sync{{/isSyncClient}}( func=_func, - validate=lambda _resp: _resp.nb_hits < search_rules_params.hits_per_page, + validate=lambda _resp: _resp.nb_hits < hits_per_page, aggregator=aggregator, ) @@ -167,27 +175,29 @@ self, index_name: str, aggregator: Callable[[SearchSynonymsResponse], None], - search_synonyms_params: Optional[SearchSynonymsParams] = SearchSynonymsParams(), + search_synonyms_params: SearchSynonymsParams = SearchSynonymsParams(hits_per_page=1000), request_options: Optional[Union[dict, RequestOptions]] = None, ) -> SearchSynonymsResponse: """ Helper: Iterate on the `search_synonyms` method of the client to allow aggregating synonyms of an index. """ - if search_synonyms_params.page is None: - search_synonyms_params.page = 0 - search_synonyms_params.hits_per_page = 1000 + hits_per_page = 1000 + page = search_synonyms_params.page or 0 + search_synonyms_params.hits_per_page = hits_per_page - {{^isSyncClient}}async {{/isSyncClient}}def _func(_prev: SearchRulesResponse) -> SearchRulesResponse: + {{^isSyncClient}}async {{/isSyncClient}}def _func(_prev: Optional[SearchSynonymsResponse]) -> SearchSynonymsResponse: + nonlocal page resp = {{^isSyncClient}}await {{/isSyncClient}}self.search_synonyms( index_name=index_name, search_synonyms_params=search_synonyms_params, request_options=request_options, ) - search_synonyms_params.page += 1 + page += 1 + search_synonyms_params.page = page return resp return {{^isSyncClient}}await {{/isSyncClient}}create_iterable{{#isSyncClient}}_sync{{/isSyncClient}}( func=_func, - validate=lambda _resp: _resp.nb_hits < search_synonyms_params.hits_per_page, + validate=lambda _resp: _resp.nb_hits < hits_per_page, aggregator=aggregator, ) @@ -199,15 +209,17 @@ """ Helper: Generates a secured API key based on the given `parent_api_key` and given `restrictions`. 
""" - if not isinstance(restrictions, SecuredApiKeyRestrictions): - restrictions = SecuredApiKeyRestrictions.from_dict(restrictions) + restrictions_dict = {} + if isinstance(restrictions, SecuredApiKeyRestrictions): + restrictions_dict = restrictions.to_dict() + elif isinstance(restrictions, dict): + restrictions_dict = restrictions - restrictions = restrictions.to_dict() - if "searchParams" in restrictions: - restrictions = {**restrictions, **restrictions["searchParams"]} - del restrictions["searchParams"] + if "searchParams" in restrictions_dict: + restrictions_dict = {**restrictions_dict, **restrictions_dict["searchParams"]} + del restrictions_dict["searchParams"] - query_parameters = QueryParametersSerializer(dict(sorted(restrictions.items()))).encoded() + query_parameters = QueryParametersSerializer(dict(sorted(restrictions_dict.items()))).encoded() secured_key = hmac.new( parent_api_key.encode(encoding="utf-8"), @@ -266,7 +278,7 @@ self, index_name: str, objects: List[Dict[str, Any]], - create_if_not_exists: Optional[bool] = False, + create_if_not_exists: bool = False, request_options: Optional[Union[dict, RequestOptions]] = None, ) -> List[BatchResponse]: """ @@ -312,7 +324,7 @@ objects: List[Dict[str, Any]], batch_size: int = 1000, request_options: Optional[Union[dict, RequestOptions]] = None, - ) -> List[ApiResponse[str]]: + ) -> ReplaceAllObjectsResponse: """ Helper: Replaces all objects (records) in the given `index_name` with the given `objects`. A temporary index is created during this process in order to backup your data. @@ -324,7 +336,7 @@ return {{^isSyncClient}}await {{/isSyncClient}}self.operation_index( index_name=index_name, operation_index_params=OperationIndexParams( - operation="copy", + operation=OperationType.COPY, destination=tmp_index_name, scope=[ ScopeType("settings"), @@ -357,7 +369,7 @@ move_operation_response = {{^isSyncClient}}await {{/isSyncClient}}self.operation_index( index_name=tmp_index_name, operation_index_params=OperationIndexParams( - operation="move", + operation=OperationType.MOVE, destination=index_name, ), request_options=request_options, diff --git a/templates/python/snippets/method.mustache b/templates/python/snippets/method.mustache index c4170cf473..974672250d 100644 --- a/templates/python/snippets/method.mustache +++ b/templates/python/snippets/method.mustache @@ -19,20 +19,10 @@ def snippet_for_{{#lambda.snakecase}}{{method}}{{/lambda.snakecase}}{{testIndex} # Call the API {{#hasResponse}}response = {{/hasResponse}}client.{{#lambda.snakecase}}{{method}}{{/lambda.snakecase}}({{#parametersWithDataType}}{{> tests/generateParams}}{{/parametersWithDataType}}{{#hasRequestOptions}} request_options={ {{#requestOptions.headers.parameters}}"headers":loads("""{{{.}}}"""),{{/requestOptions.headers.parameters}}{{#requestOptions.queryParameters.parameters}}"query_parameters":loads("""{{{.}}}"""),{{/requestOptions.queryParameters.parameters}} }{{/hasRequestOptions}}) - {{#hasResponse}}print(response){{/hasResponse}} - - # Skip deserialization - {{#hasResponse}}raw_response = {{/hasResponse}}client.{{#lambda.snakecase}}{{method}}{{/lambda.snakecase}}_with_http_info({{#parametersWithDataType}}{{> tests/generateParams}}{{/parametersWithDataType}}{{#hasRequestOptions}} request_options={ {{#requestOptions.headers.parameters}}"headers":loads("""{{{.}}}"""),{{/requestOptions.headers.parameters}}{{#requestOptions.queryParameters.parameters}}"query_parameters":loads("""{{{.}}}"""),{{/requestOptions.queryParameters.parameters}} }{{/hasRequestOptions}}) - - 
{{#hasResponse}}print(raw_response.raw_data){{/hasResponse}} - # >LOG {{#hasResponse}} # use the class directly print(response) - - # print the JSON response - print(response.to_json()) {{/hasResponse}} # SEPARATOR< diff --git a/templates/python/snippets/pyproject.mustache b/templates/python/snippets/pyproject.mustache index 57bdb75b0f..c19c79a82c 100644 --- a/templates/python/snippets/pyproject.mustache +++ b/templates/python/snippets/pyproject.mustache @@ -10,4 +10,5 @@ repository = "https://github.com/algolia/api-clients-automation" [tool.poetry.dependencies] python = "^3.8.1" algoliasearch = { path = "../../clients/algoliasearch-client-python", develop = true } -ruff = "== 0.6.4" +ruff = "== 0.6.8" +pyright = "== 1.1.383" diff --git a/tests/output/python/pyproject.toml b/tests/output/python/pyproject.toml index 0257c0a8d1..2102838797 100644 --- a/tests/output/python/pyproject.toml +++ b/tests/output/python/pyproject.toml @@ -10,7 +10,8 @@ repository = "https://github.com/algolia/api-clients-automation" [tool.poetry.dependencies] python = "^3.8.1" algoliasearch = { path = "../../../clients/algoliasearch-client-python", develop = true } -ruff = "== 0.6.4" +ruff = "== 0.6.8" +pyright = "== 1.1.383" pytest = "=8.3.3" python-dotenv = "=1.0.1" pytest-aiohttp = "=1.0.5" diff --git a/tests/output/python/tests/helpers.py b/tests/output/python/tests/helpers.py index 79601632c4..de2d1cd09a 100644 --- a/tests/output/python/tests/helpers.py +++ b/tests/output/python/tests/helpers.py @@ -1,6 +1,6 @@ class Helpers: def unwrap(self, resp): - """ converts the object response to its dictionary form, if it's a list, every items are iterated on and we call to_dict on it, otherwise we just call to_dict """ + """converts the object response to its dictionary form, if it's a list, every items are iterated on and we call to_dict on it, otherwise we just call to_dict""" if isinstance(resp, list): _res = [] for _, r in enumerate(resp):
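
The `base_config.py` and `request_options.py` hunks replace class-level attribute annotations and mutable `{}` defaults with `Optional[...] = None` plus an explicit check in the body. A minimal, self-contained sketch of the pitfall this avoids; the function names below are illustrative, not part of the client:

```python
from typing import Dict, Optional


def merge_shared(headers: Dict[str, str] = {}) -> Dict[str, str]:
    # The single default dict is created once and then reused by every call.
    headers["x-algolia-api-key"] = "key"
    return headers


def merge_isolated(headers: Optional[Dict[str, str]] = None) -> Dict[str, str]:
    # Pattern used by the patched code: None default, fresh dict per call.
    if headers is None:
        headers = {}
    headers["x-algolia-api-key"] = "key"
    return headers


assert merge_shared() is merge_shared()          # same shared object every call
assert merge_isolated() is not merge_isolated()  # independent objects
```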
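`base_transporter.py` gains `get_proxies` alongside `get_proxy`, and the sync transporter switches to it, presumably because `requests.Session.send` expects a `proxies` mapping of scheme to proxy URL while an aiohttp-style call takes a single proxy URL. A rough standalone sketch of the two shapes (proxy addresses are placeholders, the module-level `PROXIES` dict is an assumption for illustration):

```python
from typing import Dict, Optional

PROXIES: Dict[str, str] = {
    "http": "http://127.0.0.1:8080",
    "https": "http://127.0.0.1:8443",
}


def get_proxy(url: str) -> Optional[str]:
    # Single proxy URL for the scheme in use (aiohttp-style `proxy=` argument).
    if url.startswith("https"):
        return PROXIES.get("https")
    if url.startswith("http"):
        return PROXIES.get("http")
    return None


def get_proxies(url: str) -> Optional[Dict[str, str]]:
    # Whole scheme-to-proxy mapping (requests-style `proxies=` argument).
    return PROXIES if url.startswith("http") else None


assert get_proxy("https://example.com") == "http://127.0.0.1:8443"
assert get_proxies("https://example.com") == PROXIES
```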
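`verb.py` turns `Verb` from a plain class holding string constants into a `str`-mixin `Enum`. Because of the `str` base, members still compare equal to raw strings, so existing call sites that pass or check `"GET"` keep working; a small sketch:

```python
from enum import Enum


class Verb(str, Enum):
    GET = "GET"
    POST = "POST"
    PUT = "PUT"
    DELETE = "DELETE"


assert Verb.GET == "GET"           # str mixin: equal to the raw value
assert Verb("POST") is Verb.POST   # lookup by value still yields the member
assert Verb.PUT.value == "PUT"
```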
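`model_generic.mustache` drops the per-field `Field(alias=...)` declarations in favour of a module-level `_ALIASES` table wired in through `alias_generator` on `model_config`. A sketch of the same mechanism under pydantic v2 (which the generated `pyproject` pins with `pydantic = ">= 2"`); the field names here are made up for illustration:

```python
from pydantic import BaseModel, ConfigDict

_ALIASES = {
    "object_id": "objectID",
    "index_name": "indexName",
}


def _alias_generator(name: str) -> str:
    return _ALIASES.get(name, name)


class ExampleModel(BaseModel):
    model_config = ConfigDict(
        populate_by_name=True,            # accept either the alias or the field name
        alias_generator=_alias_generator,
    )

    object_id: str
    index_name: str


obj = ExampleModel.model_validate({"objectID": "42", "indexName": "movies"})
assert obj.model_dump(by_alias=True) == {"objectID": "42", "indexName": "movies"}
```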
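The `search_helpers.mustache` hunks stop storing the retry counter on the client (`self._retry_count`) and keep it in a closure updated with `nonlocal`, so waits no longer share mutable state on the client instance. A standalone sketch of that polling shape; `poll_until` is a hypothetical stand-in, not the client's actual `create_iterable_sync` helper:

```python
import time
from typing import Callable, Optional, TypeVar

T = TypeVar("T")


def poll_until(
    func: Callable[[Optional[T]], T],
    validate: Callable[[T], bool],
    timeout_ms: Callable[[int], int],
    max_retries: int = 50,
) -> T:
    retry_count = 0

    def bump() -> None:
        nonlocal retry_count          # closure state instead of instance state
        retry_count += 1

    resp: Optional[T] = None
    while retry_count < max_retries:
        resp = func(resp)
        if validate(resp):
            return resp
        bump()
        time.sleep(timeout_ms(retry_count) / 1000)
    raise TimeoutError(f"The maximum number of retries exceeded. ({retry_count}/{max_retries})")


# Usage sketch: pretend the third poll reports the task as published.
statuses = iter(["notPublished", "notPublished", "published"])
result = poll_until(
    func=lambda _prev: next(statuses),
    validate=lambda status: status == "published",
    timeout_ms=lambda retries: min(retries * 200, 5000),
)
assert result == "published"
```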