diff --git a/docs/01_overview/code/01_usage_async.py b/docs/01_overview/code/01_usage_async.py index 3ad4e883..4a45b1e2 100644 --- a/docs/01_overview/code/01_usage_async.py +++ b/docs/01_overview/code/01_usage_async.py @@ -16,6 +16,6 @@ async def main() -> None: return # Fetch results from the Actor run's default dataset. - dataset_client = apify_client.dataset(call_result['defaultDatasetId']) + dataset_client = apify_client.dataset(call_result.default_dataset_id) list_items_result = await dataset_client.list_items() print(f'Dataset: {list_items_result}') diff --git a/docs/01_overview/code/01_usage_sync.py b/docs/01_overview/code/01_usage_sync.py index afa15ffb..84e430fa 100644 --- a/docs/01_overview/code/01_usage_sync.py +++ b/docs/01_overview/code/01_usage_sync.py @@ -16,6 +16,6 @@ def main() -> None: return # Fetch results from the Actor run's default dataset. - dataset_client = apify_client.dataset(call_result['defaultDatasetId']) + dataset_client = apify_client.dataset(call_result.default_dataset_id) list_items_result = dataset_client.list_items() print(f'Dataset: {list_items_result}') diff --git a/docs/02_concepts/code/01_async_support.py b/docs/02_concepts/code/01_async_support.py index 22cc390e..e8fe81b0 100644 --- a/docs/02_concepts/code/01_async_support.py +++ b/docs/02_concepts/code/01_async_support.py @@ -11,7 +11,7 @@ async def main() -> None: # Start the Actor and get the run ID run_result = await actor_client.start() - run_client = apify_client.run(run_result['id']) + run_client = apify_client.run(run_result.id) log_client = run_client.log() # Stream the logs diff --git a/docs/03_examples/code/02_tasks_async.py b/docs/03_examples/code/02_tasks_async.py index d3e962fa..9894b20b 100644 --- a/docs/03_examples/code/02_tasks_async.py +++ b/docs/03_examples/code/02_tasks_async.py @@ -1,22 +1,22 @@ import asyncio from apify_client import ApifyClientAsync -from apify_client.clients.resource_clients import TaskClientAsync +from apify_client._models import 
Run, Task +from apify_client._resource_clients import TaskClientAsync TOKEN = 'MY-APIFY-TOKEN' HASHTAGS = ['zebra', 'lion', 'hippo'] -async def run_apify_task(client: TaskClientAsync) -> dict: - result = await client.call() - return result or {} +async def run_apify_task(client: TaskClientAsync) -> Run | None: + return await client.call() async def main() -> None: apify_client = ApifyClientAsync(token=TOKEN) # Create Apify tasks - apify_tasks = list[dict]() + apify_tasks = list[Task]() apify_tasks_client = apify_client.tasks() for hashtag in HASHTAGS: @@ -34,7 +34,7 @@ async def main() -> None: apify_task_clients = list[TaskClientAsync]() for apify_task in apify_tasks: - task_id = apify_task['id'] + task_id = apify_task.id apify_task_client = apify_client.task(task_id) apify_task_clients.append(apify_task_client) diff --git a/docs/03_examples/code/02_tasks_sync.py b/docs/03_examples/code/02_tasks_sync.py index 72437742..4e75d3dd 100644 --- a/docs/03_examples/code/02_tasks_sync.py +++ b/docs/03_examples/code/02_tasks_sync.py @@ -1,20 +1,20 @@ from apify_client import ApifyClient -from apify_client.clients.resource_clients import TaskClient +from apify_client._models import Run, Task +from apify_client._resource_clients import TaskClient TOKEN = 'MY-APIFY-TOKEN' HASHTAGS = ['zebra', 'lion', 'hippo'] -def run_apify_task(client: TaskClient) -> dict: - result = client.call() - return result or {} +def run_apify_task(client: TaskClient) -> Run | None: + return client.call() def main() -> None: apify_client = ApifyClient(token=TOKEN) # Create Apify tasks - apify_tasks = list[dict]() + apify_tasks = list[Task]() apify_tasks_client = apify_client.tasks() for hashtag in HASHTAGS: @@ -32,18 +32,19 @@ def main() -> None: apify_task_clients = list[TaskClient]() for apify_task in apify_tasks: - task_id = apify_task['id'] + task_id = apify_task.id apify_task_client = apify_client.task(task_id) apify_task_clients.append(apify_task_client) print('Task clients created:', 
apify_task_clients) # Execute Apify tasks - task_run_results = list[dict]() + task_run_results = list[Run]() for client in apify_task_clients: result = run_apify_task(client) - task_run_results.append(result) + if result is not None: + task_run_results.append(result) print('Task results:', task_run_results) diff --git a/docs/03_examples/code/03_retrieve_async.py b/docs/03_examples/code/03_retrieve_async.py index c6e35095..fc60d068 100644 --- a/docs/03_examples/code/03_retrieve_async.py +++ b/docs/03_examples/code/03_retrieve_async.py @@ -19,11 +19,11 @@ async def main() -> None: for dataset_item in actor_datasets.items: # Dataset items can be handled here. Dataset items can be paginated - dataset_client = apify_client.dataset(dataset_item['id']) + dataset_client = apify_client.dataset(dataset_item.id) dataset_items = await dataset_client.list_items(limit=1000) # Items can be pushed to single dataset - merging_dataset_client = apify_client.dataset(merging_dataset['id']) + merging_dataset_client = apify_client.dataset(merging_dataset.id) await merging_dataset_client.push_items(dataset_items.items) # ... diff --git a/docs/03_examples/code/03_retrieve_sync.py b/docs/03_examples/code/03_retrieve_sync.py index 7d92dd53..24e05e2f 100644 --- a/docs/03_examples/code/03_retrieve_sync.py +++ b/docs/03_examples/code/03_retrieve_sync.py @@ -17,11 +17,11 @@ def main() -> None: for dataset_item in actor_datasets.items: # Dataset items can be handled here. Dataset items can be paginated - dataset_client = apify_client.dataset(dataset_item['id']) + dataset_client = apify_client.dataset(dataset_item.id) dataset_items = dataset_client.list_items(limit=1000) # Items can be pushed to single dataset - merging_dataset_client = apify_client.dataset(merging_dataset['id']) + merging_dataset_client = apify_client.dataset(merging_dataset.id) merging_dataset_client.push_items(dataset_items.items) # ... 
diff --git a/pyproject.toml b/pyproject.toml index 1025934e..06f4b4a0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,6 +58,7 @@ dev = [ "setuptools", # setuptools are used by pytest but not explicitly required "types-colorama<0.5.0", "werkzeug<4.0.0", # Werkzeug is used by pytest-httpserver + "datamodel-code-generator[http,ruff]<1.0.0", ] [tool.hatch.build.targets.wheel] @@ -138,6 +139,10 @@ indent-style = "space" "N999", # Invalid module name "T201", # print found ] +"src/apify_client/_models.py" = [ + "D", # Everything from the pydocstyle + "E501", # Line too long +] [tool.ruff.lint.flake8-quotes] docstring-quotes = "double" @@ -187,3 +192,21 @@ exclude_lines = ["pragma: no cover", "if TYPE_CHECKING:", "assert_never()"] [tool.ipdb] context = 7 + +# https://koxudaxi.github.io/datamodel-code-generator/ +[tool.datamodel-codegen] +url = "https://docs.apify.com/api/openapi.json" +input_file_type = "openapi" +output = "src/apify_client/_models.py" +target_python_version = "3.10" +output_model_type = "pydantic_v2.BaseModel" +use_schema_description = true +use_field_description = true +use_union_operator = true +capitalise_enum_members = true +collapse_root_models = true +set_default_enum_member = true +use_annotated = true +wrap_string_literal = true +snake_case_field = true +formatters = ["ruff-check", "ruff-format"] diff --git a/scripts/utils.py b/scripts/utils.py index 7eb07a8a..379f3a9c 100644 --- a/scripts/utils.py +++ b/scripts/utils.py @@ -25,7 +25,7 @@ def get_current_package_version() -> str: # It replaces the version number on the line with the format `version = "1.2.3"` def set_current_package_version(version: str) -> None: with open(PYPROJECT_TOML_FILE_PATH, 'r+', encoding='utf-8') as pyproject_toml_file: - updated_pyproject_toml_file_lines = [] + updated_pyproject_toml_file_lines = list[str]() version_string_found = False for line in pyproject_toml_file: line_processed = line diff --git a/src/apify_client/__init__.py b/src/apify_client/__init__.py 
index d8845f71..c597a192 100644 --- a/src/apify_client/__init__.py +++ b/src/apify_client/__init__.py @@ -1,6 +1,6 @@ from importlib import metadata -from .client import ApifyClient, ApifyClientAsync +from ._client import ApifyClient, ApifyClientAsync __version__ = metadata.version('apify-client') diff --git a/src/apify_client/client.py b/src/apify_client/_client.py similarity index 99% rename from src/apify_client/client.py rename to src/apify_client/_client.py index b6ed7abf..8c7c63df 100644 --- a/src/apify_client/client.py +++ b/src/apify_client/_client.py @@ -1,8 +1,7 @@ from __future__ import annotations from apify_client._http_client import HTTPClient, HTTPClientAsync -from apify_client._statistics import Statistics -from apify_client.clients import ( +from apify_client._resource_clients import ( ActorClient, ActorClientAsync, ActorCollectionClient, @@ -50,6 +49,7 @@ WebhookDispatchCollectionClient, WebhookDispatchCollectionClientAsync, ) +from apify_client._types import Statistics DEFAULT_API_URL = 'https://api.apify.com' DEFAULT_TIMEOUT = 360 diff --git a/src/apify_client/_http_client.py b/src/apify_client/_http_client.py index 5f3c76d2..85fb1214 100644 --- a/src/apify_client/_http_client.py +++ b/src/apify_client/_http_client.py @@ -14,14 +14,14 @@ import impit from apify_client._logging import log_context, logger_name -from apify_client._statistics import Statistics +from apify_client._types import Statistics from apify_client._utils import is_retryable_error, retry_with_exp_backoff, retry_with_exp_backoff_async from apify_client.errors import ApifyApiError if TYPE_CHECKING: from collections.abc import Callable - from apify_client._types import JSONSerializable + from apify_client._types import JsonSerializable DEFAULT_BACKOFF_EXPONENTIAL_FACTOR = 2 DEFAULT_BACKOFF_RANDOM_FACTOR = 1 @@ -96,7 +96,7 @@ def _prepare_request_call( headers: dict | None = None, params: dict | None = None, data: Any = None, - json: JSONSerializable | None = None, + json: 
JsonSerializable | None = None, ) -> tuple[dict, dict | None, Any]: if json and data: raise ValueError('Cannot pass both "json" and "data" parameters at the same time!') @@ -125,7 +125,7 @@ def _build_url_with_params(self, url: str, params: dict | None = None) -> str: if not params: return url - param_pairs: list[tuple[str, str]] = [] + param_pairs = list[tuple[str, str]]() for key, value in params.items(): if isinstance(value, list): param_pairs.extend((key, str(v)) for v in value) @@ -146,7 +146,7 @@ def call( headers: dict | None = None, params: dict | None = None, data: Any = None, - json: JSONSerializable | None = None, + json: JsonSerializable | None = None, stream: bool | None = None, timeout_secs: int | None = None, ) -> impit.Response: @@ -225,7 +225,7 @@ async def call( headers: dict | None = None, params: dict | None = None, data: Any = None, - json: JSONSerializable | None = None, + json: JsonSerializable | None = None, stream: bool | None = None, timeout_secs: int | None = None, ) -> impit.Response: diff --git a/src/apify_client/_logging.py b/src/apify_client/_logging.py index cfdfe42c..86b8e957 100644 --- a/src/apify_client/_logging.py +++ b/src/apify_client/_logging.py @@ -2,7 +2,6 @@ import functools import inspect -import json import logging from contextvars import ContextVar from typing import TYPE_CHECKING, Any, NamedTuple @@ -12,18 +11,19 @@ if TYPE_CHECKING: from collections.abc import Callable - from apify_client.clients.base.base_client import _BaseBaseClient + from apify_client._resource_clients.base import BaseBaseClient -# Name of the logger used throughout the library logger_name = __name__.split('.')[0] +"""Name of the logger used throughout the library.""" -# Logger used throughout the library logger = logging.getLogger(logger_name) +"""Logger used throughout the library.""" -# Context containing the details of the request and the resource client making the request class LogContext(NamedTuple): + """Request context details for logging 
(attempt, client method, HTTP method, resource ID, URL).""" + attempt: ContextVar[int | None] client_method: ContextVar[str | None] method: ContextVar[str | None] @@ -40,10 +40,11 @@ class LogContext(NamedTuple): ) -# Metaclass for resource clients which wraps all their public methods -# With injection of their details to the log context vars class WithLogDetailsClient(type): + """Metaclass that wraps public methods to inject client details into log context.""" + def __new__(cls, name: str, bases: tuple, attrs: dict) -> WithLogDetailsClient: + """Wrap all public methods in the class with logging context injection.""" for attr_name, attr_value in attrs.items(): if not attr_name.startswith('_') and inspect.isfunction(attr_value): attrs[attr_name] = _injects_client_details_to_log_context(attr_value) @@ -51,47 +52,52 @@ def __new__(cls, name: str, bases: tuple, attrs: dict) -> WithLogDetailsClient: return type.__new__(cls, name, bases, attrs) -# Wraps an unbound method so that its call will inject the details -# of the resource client (which is the `self` argument of the method) -# to the log context vars -def _injects_client_details_to_log_context(fun: Callable) -> Callable: - if inspect.iscoroutinefunction(fun): +class RedirectLogFormatter(logging.Formatter): + """Log formatter that prepends colored logger name to messages.""" - @functools.wraps(fun) - async def async_wrapper(resource_client: _BaseBaseClient, *args: Any, **kwargs: Any) -> Any: - log_context.client_method.set(fun.__qualname__) - log_context.resource_id.set(resource_client.resource_id) + def format(self, record: logging.LogRecord) -> str: + """Format log by prepending colored logger name. - return await fun(resource_client, *args, **kwargs) + Args: + record: The log record to format. - return async_wrapper - elif inspect.isasyncgenfunction(fun): # noqa: RET505 + Returns: + Formatted log message with colored logger name prefix. 
+ """ + formatted_logger_name = f'{Fore.CYAN}[{record.name}]{Style.RESET_ALL}' + return f'{formatted_logger_name} -> {record.msg}' - @functools.wraps(fun) - async def async_generator_wrapper(resource_client: _BaseBaseClient, *args: Any, **kwargs: Any) -> Any: - log_context.client_method.set(fun.__qualname__) - log_context.resource_id.set(resource_client.resource_id) - async for item in fun(resource_client, *args, **kwargs): - yield item +def create_redirect_logger(name: str) -> logging.Logger: + """Create a logger for redirecting logs from another Actor. - return async_generator_wrapper - else: + Args: + name: Logger name. Use dot notation for hierarchy (e.g., "apify.xyz" creates "xyz" under "apify"). - @functools.wraps(fun) - def wrapper(resource_client: _BaseBaseClient, *args: Any, **kwargs: Any) -> Any: - log_context.client_method.set(fun.__qualname__) - log_context.resource_id.set(resource_client.resource_id) + Returns: + Configured logger with RedirectLogFormatter. + """ + to_logger = logging.getLogger(name) + to_logger.propagate = False - return fun(resource_client, *args, **kwargs) + # Remove filters and handlers in case this logger already exists and was set up in some way. 
+ for handler in to_logger.handlers: + to_logger.removeHandler(handler) + for log_filter in to_logger.filters: + to_logger.removeFilter(log_filter) - return wrapper + handler = logging.StreamHandler() + handler.setFormatter(RedirectLogFormatter()) + to_logger.addHandler(handler) + to_logger.setLevel(logging.DEBUG) + return to_logger -# A filter which lets every log record through, -# but adds the current logging context to the record class _ContextInjectingFilter(logging.Filter): + """Filter that injects current log context into all log records.""" + def filter(self, record: logging.LogRecord) -> bool: + """Add log context variables to the record.""" record.client_method = log_context.client_method.get() record.resource_id = log_context.resource_id.get() record.method = log_context.method.get() @@ -100,71 +106,39 @@ def filter(self, record: logging.LogRecord) -> bool: return True -logger.addFilter(_ContextInjectingFilter()) - - -# Log formatter useful for debugging of the client -# Will print out all the extra fields added to the log record -class _DebugLogFormatter(logging.Formatter): - empty_record = logging.LogRecord('dummy', 0, 'dummy', 0, 'dummy', None, None) - - # Gets the extra fields from the log record which are not present on an empty record - def _get_extra_fields(self, record: logging.LogRecord) -> dict: - extra_fields: dict = {} - for key, value in record.__dict__.items(): - if key not in self.empty_record.__dict__: - extra_fields[key] = value # noqa: PERF403 - - return extra_fields - - def format(self, record: logging.LogRecord) -> str: - extra = self._get_extra_fields(record) +def _injects_client_details_to_log_context(fun: Callable) -> Callable: + """Wrap a method to inject resource client details into log context before execution.""" + if inspect.iscoroutinefunction(fun): - log_string = super().format(record) - if extra: - log_string = f'{log_string} ({json.dumps(extra)})' - return log_string + @functools.wraps(fun) + async def 
async_wrapper(resource_client: BaseBaseClient, *args: Any, **kwargs: Any) -> Any: + log_context.client_method.set(fun.__qualname__) + log_context.resource_id.set(resource_client.resource_id) + return await fun(resource_client, *args, **kwargs) -def create_redirect_logger( - name: str, -) -> logging.Logger: - """Create a logger for redirecting logs from another Actor. + return async_wrapper - Args: - name: The name of the logger. It can be used to inherit from other loggers. Example: `apify.xyz` will use logger - named `xyz` and make it a children of `apify` logger. + if inspect.isasyncgenfunction(fun): - Returns: - The created logger. - """ - to_logger = logging.getLogger(name) - to_logger.propagate = False + @functools.wraps(fun) + async def async_generator_wrapper(resource_client: BaseBaseClient, *args: Any, **kwargs: Any) -> Any: + log_context.client_method.set(fun.__qualname__) + log_context.resource_id.set(resource_client.resource_id) - # Remove filters and handlers in case this logger already exists and was set up in some way. - for handler in to_logger.handlers: - to_logger.removeHandler(handler) - for log_filter in to_logger.filters: - to_logger.removeFilter(log_filter) + async for item in fun(resource_client, *args, **kwargs): + yield item - handler = logging.StreamHandler() - handler.setFormatter(RedirectLogFormatter()) - to_logger.addHandler(handler) - to_logger.setLevel(logging.DEBUG) - return to_logger + return async_generator_wrapper + @functools.wraps(fun) + def wrapper(resource_client: BaseBaseClient, *args: Any, **kwargs: Any) -> Any: + log_context.client_method.set(fun.__qualname__) + log_context.resource_id.set(resource_client.resource_id) -class RedirectLogFormatter(logging.Formatter): - """Formatter applied to default redirect logger.""" + return fun(resource_client, *args, **kwargs) - def format(self, record: logging.LogRecord) -> str: - """Format the log by prepending logger name to the original message. 
+ return wrapper - Args: - record: Log record to be formatted. - Returns: - Formatted log message. - """ - formatted_logger_name = f'{Fore.CYAN}[{record.name}]{Style.RESET_ALL}' - return f'{formatted_logger_name} -> {record.msg}' +logger.addFilter(_ContextInjectingFilter()) diff --git a/src/apify_client/_models.py b/src/apify_client/_models.py new file mode 100644 index 00000000..67a50742 --- /dev/null +++ b/src/apify_client/_models.py @@ -0,0 +1,2141 @@ +# generated by datamodel-codegen: +# filename: https://docs.apify.com/api/openapi.json +# timestamp: 2025-12-09T16:04:03+00:00 + +from __future__ import annotations + +from enum import Enum, IntEnum +from typing import Annotated, Any, Literal + +from pydantic import AwareDatetime, BaseModel, Field + + +class PaginationResponse(BaseModel): + total: Annotated[float, Field(examples=[2])] + offset: Annotated[float, Field(examples=[0])] + limit: Annotated[float, Field(examples=[1000])] + desc: Annotated[bool, Field(examples=[False])] + count: Annotated[float, Field(examples=[2])] + + +class ActorShort(BaseModel): + id: Annotated[str, Field(examples=['br9CKmk457'])] + created_at: Annotated[str, Field(alias='createdAt', examples=['2019-10-29T07:34:24.202Z'])] + modified_at: Annotated[str, Field(alias='modifiedAt', examples=['2019-10-30T07:34:24.202Z'])] + name: Annotated[str, Field(examples=['MyAct'])] + username: Annotated[str, Field(examples=['janedoe'])] + + +class Data(PaginationResponse): + items: list[ActorShort] + + +class GetListOfActorsResponse(BaseModel): + data: Data + + +class VersionSourceType(Enum): + SOURCE_FILES = 'SOURCE_FILES' + GIT_REPO = 'GIT_REPO' + TARBALL = 'TARBALL' + GITHUB_GIST = 'GITHUB_GIST' + + +class EnvVar(BaseModel): + name: Annotated[str, Field(examples=['MY_ENV_VAR'])] + value: Annotated[str, Field(examples=['my-value'])] + is_secret: Annotated[bool | None, Field(alias='isSecret', examples=[False])] = None + + +class Format(Enum): + BASE64 = 'BASE64' + TEXT = 'TEXT' + + +class 
VersionSourceFiles1(BaseModel): + format: Annotated[Format, Field(examples=['TEXT'])] + content: Annotated[str, Field(examples=["console.log('This is the main.js file');"])] + name: Annotated[str, Field(examples=['src/main.js'])] + + +class Folder(Enum): + BOOLEAN_TRUE = True + + +class VersionSourceFiles2(BaseModel): + name: Annotated[str, Field(examples=['src/placeholder'])] + folder: Annotated[Folder, Field(examples=[True])] + + +class Version(BaseModel): + version_number: Annotated[str, Field(alias='versionNumber', examples=['0.0'])] + source_type: Annotated[Any | VersionSourceType, Field(alias='sourceType')] + env_vars: Annotated[list[EnvVar] | None, Field(alias='envVars')] = None + apply_env_vars_to_build: Annotated[bool | None, Field(alias='applyEnvVarsToBuild', examples=[False])] = None + build_tag: Annotated[str, Field(alias='buildTag', examples=['latest'])] + source_files: Annotated[ + list[VersionSourceFiles1 | VersionSourceFiles2] | None, Field(alias='sourceFiles', title='VersionSourceFiles') + ] = None + + +class CommonActorPricingInfo(BaseModel): + apify_margin_percentage: Annotated[float, Field(alias='apifyMarginPercentage')] + """ + In [0, 1], fraction of pricePerUnitUsd that goes to Apify + """ + created_at: Annotated[AwareDatetime, Field(alias='createdAt')] + """ + When this pricing info record has been created + """ + started_at: Annotated[AwareDatetime, Field(alias='startedAt')] + """ + Since when is this pricing info record effective for a given Actor + """ + notified_about_future_change_at: Annotated[AwareDatetime | None, Field(alias='notifiedAboutFutureChangeAt')] = None + notified_about_change_at: Annotated[AwareDatetime | None, Field(alias='notifiedAboutChangeAt')] = None + reason_for_change: Annotated[str | None, Field(alias='reasonForChange')] = None + + +class ActorChargeEvent(BaseModel): + event_price_usd: Annotated[float, Field(alias='eventPriceUsd')] + event_title: Annotated[str, Field(alias='eventTitle')] + event_description: 
Annotated[str, Field(alias='eventDescription')] + + +class PricingModel(Enum): + PAY_PER_EVENT = 'PAY_PER_EVENT' + + +class PricingPerEvent(BaseModel): + actor_charge_events: Annotated[dict[str, ActorChargeEvent] | None, Field(alias='actorChargeEvents')] = None + + +class PayPerEventActorPricingInfo(CommonActorPricingInfo): + pricing_model: Annotated[Literal['PAY_PER_EVENT'], Field(alias='pricingModel')] + pricing_per_event: Annotated[PricingPerEvent, Field(alias='pricingPerEvent')] + minimal_max_total_charge_usd: Annotated[float | None, Field(alias='minimalMaxTotalChargeUsd')] = None + + +class PricingModel1(Enum): + PRICE_PER_DATASET_ITEM = 'PRICE_PER_DATASET_ITEM' + + +class PricePerDatasetItemActorPricingInfo(CommonActorPricingInfo): + pricing_model: Annotated[Literal['PRICE_PER_DATASET_ITEM'], Field(alias='pricingModel')] + unit_name: Annotated[str, Field(alias='unitName')] + """ + Name of the unit that is being charged + """ + price_per_unit_usd: Annotated[float, Field(alias='pricePerUnitUsd')] + + +class PricingModel2(Enum): + FLAT_PRICE_PER_MONTH = 'FLAT_PRICE_PER_MONTH' + + +class FlatPricePerMonthActorPricingInfo(CommonActorPricingInfo): + pricing_model: Annotated[Literal['FLAT_PRICE_PER_MONTH'], Field(alias='pricingModel')] + trial_minutes: Annotated[float, Field(alias='trialMinutes')] + """ + For how long this Actor can be used for free in trial period + """ + price_per_unit_usd: Annotated[float, Field(alias='pricePerUnitUsd')] + """ + Monthly flat price in USD + """ + + +class PricingModel3(Enum): + FREE = 'FREE' + + +class FreeActorPricingInfo(CommonActorPricingInfo): + pricing_model: Annotated[Literal['FREE'], Field(alias='pricingModel')] + + +class DefaultRunOptions(BaseModel): + build: Annotated[str, Field(examples=['latest'])] + timeout_secs: Annotated[float, Field(alias='timeoutSecs', examples=[3600])] + memory_mbytes: Annotated[float, Field(alias='memoryMbytes', examples=[2048])] + restart_on_error: Annotated[bool | None, 
Field(alias='restartOnError', examples=[False])] = None + + +class CreateActorRequest(BaseModel): + name: Annotated[str | None, Field(examples=['MyActor'])] = None + description: Annotated[str | None, Field(examples=['My favourite actor!'])] = None + title: Annotated[str | None, Field(examples=['My actor'])] = None + is_public: Annotated[bool | None, Field(alias='isPublic', examples=[False])] = None + seo_title: Annotated[str | None, Field(alias='seoTitle', examples=['My actor'])] = None + seo_description: Annotated[str | None, Field(alias='seoDescription', examples=['My actor is the best'])] = None + restart_on_error: Annotated[bool | None, Field(alias='restartOnError', examples=[False])] = None + versions: list[Version] | None = None + pricing_infos: Annotated[ + list[ + PayPerEventActorPricingInfo + | PricePerDatasetItemActorPricingInfo + | FlatPricePerMonthActorPricingInfo + | FreeActorPricingInfo + ] + | None, + Field(alias='pricingInfos'), + ] = None + categories: list[str] | None = None + default_run_options: Annotated[DefaultRunOptions | None, Field(alias='defaultRunOptions')] = None + + +class ActorStats(BaseModel): + total_builds: Annotated[float, Field(alias='totalBuilds', examples=[9])] + total_runs: Annotated[float, Field(alias='totalRuns', examples=[16])] + total_users: Annotated[float, Field(alias='totalUsers', examples=[6])] + total_users7_days: Annotated[float, Field(alias='totalUsers7Days', examples=[2])] + total_users30_days: Annotated[float, Field(alias='totalUsers30Days', examples=[6])] + total_users90_days: Annotated[float, Field(alias='totalUsers90Days', examples=[6])] + total_metamorphs: Annotated[float, Field(alias='totalMetamorphs', examples=[2])] + last_run_started_at: Annotated[str, Field(alias='lastRunStartedAt', examples=['2019-07-08T14:01:05.546Z'])] + + +class ExampleRunInput(BaseModel): + body: Annotated[str, Field(examples=[{'helloWorld': 123}])] + content_type: Annotated[str, Field(alias='contentType', examples=['application/json; 
charset=utf-8'])] + + +class Latest(BaseModel): + build_id: Annotated[str | None, Field(alias='buildId', examples=['z2EryhbfhgSyqj6Hn'])] = None + build_number: Annotated[str | None, Field(alias='buildNumber', examples=['0.0.2'])] = None + finished_at: Annotated[str | None, Field(alias='finishedAt', examples=['2019-06-10T11:15:49.286Z'])] = None + + +class TaggedBuilds(BaseModel): + latest: Any | Latest | None = None + + +class Actor(BaseModel): + id: Annotated[str, Field(examples=['zdc3Pyhyz3m8vjDeM'])] + user_id: Annotated[str, Field(alias='userId', examples=['wRsJZtadYvn4mBZmm'])] + name: Annotated[str, Field(examples=['MyActor'])] + username: Annotated[str, Field(examples=['jane35'])] + description: Annotated[str | None, Field(examples=['My favourite actor!'])] = None + restart_on_error: Annotated[bool | None, Field(alias='restartOnError', examples=[False])] = None + is_public: Annotated[bool, Field(alias='isPublic', examples=[False])] + created_at: Annotated[str, Field(alias='createdAt', examples=['2019-07-08T11:27:57.401Z'])] + modified_at: Annotated[str, Field(alias='modifiedAt', examples=['2019-07-08T14:01:05.546Z'])] + stats: ActorStats + versions: list[Version] + pricing_infos: Annotated[ + list[ + PayPerEventActorPricingInfo + | PricePerDatasetItemActorPricingInfo + | FlatPricePerMonthActorPricingInfo + | FreeActorPricingInfo + ] + | None, + Field(alias='pricingInfos'), + ] = None + default_run_options: Annotated[DefaultRunOptions, Field(alias='defaultRunOptions')] + example_run_input: Annotated[Any | ExampleRunInput | None, Field(alias='exampleRunInput')] = None + is_deprecated: Annotated[bool | None, Field(alias='isDeprecated', examples=[False])] = None + deployment_key: Annotated[str, Field(alias='deploymentKey', examples=['ssh-rsa AAAA ...'])] + title: Annotated[str | None, Field(examples=['My Actor'])] = None + tagged_builds: Annotated[Any | TaggedBuilds | None, Field(alias='taggedBuilds')] = None + + +class CreateActorResponse(BaseModel): + data: 
Actor + + +class GetActorResponse(BaseModel): + data: Actor + + +class CreateOrUpdateEnvVarRequest(BaseModel): + name: Annotated[str, Field(examples=['MY_ENV_VAR'])] + value: Annotated[str, Field(examples=['my-new-value'])] + is_secret: Annotated[bool | None, Field(alias='isSecret', examples=[False])] = None + + +class TaggedBuilds1(BaseModel): + build_id: Annotated[str, Field(alias='buildId')] + + +class UpdateActorRequest(BaseModel): + name: Annotated[str, Field(examples=['MyActor'])] + description: Annotated[str | None, Field(examples=['My favourite actor!'])] = None + is_public: Annotated[bool, Field(alias='isPublic', examples=[False])] + seo_title: Annotated[str | None, Field(alias='seoTitle', examples=['My actor'])] = None + seo_description: Annotated[str | None, Field(alias='seoDescription', examples=['My actor is the best'])] = None + title: Annotated[str | None, Field(examples=['My Actor'])] = None + restart_on_error: Annotated[bool | None, Field(alias='restartOnError', examples=[False])] = None + versions: list[CreateOrUpdateEnvVarRequest] + pricing_infos: Annotated[ + list[ + PayPerEventActorPricingInfo + | PricePerDatasetItemActorPricingInfo + | FlatPricePerMonthActorPricingInfo + | FreeActorPricingInfo + ] + | None, + Field(alias='pricingInfos'), + ] = None + categories: list[str] | None = None + default_run_options: Annotated[DefaultRunOptions | None, Field(alias='defaultRunOptions')] = None + tagged_builds: Annotated[ + dict[str, TaggedBuilds1] | None, + Field(alias='taggedBuilds', examples=[{'latest': {'buildId': 'z2EryhbfhgSyqj6Hn'}, 'beta': None}]), + ] = None + """ + An object to modify tags on the Actor's builds. The key is the tag name (e.g., _latest_), and the value is either an object with a `buildId` or `null`. + + This operation is a patch; any existing tags that you omit from this object will be preserved. + + - **To create or reassign a tag**, provide the tag name with a `buildId`. 
e.g., to assign the _latest_ tag: + +   + + ```json + { + "latest": { + "buildId": "z2EryhbfhgSyqj6Hn" + } + } + ``` + + - **To remove a tag**, provide the tag name with a `null` value. e.g., to remove the _beta_ tag: + +   + + ```json + { + "beta": null + } + ``` + + - **To perform multiple operations**, combine them. The following reassigns _latest_ and removes _beta_, while preserving any other existing tags. + +   + + ```json + { + "latest": { + "buildId": "z2EryhbfhgSyqj6Hn" + }, + "beta": null + } + ``` + + """ + + +class UpdateActorResponse(BaseModel): + data: Actor + + +class Data1(BaseModel): + total: Annotated[float, Field(examples=[5])] + items: list[Version] + + +class GetVersionListResponse(BaseModel): + data: Data1 + + +class CreateOrUpdateVersionRequest(BaseModel): + version_number: Annotated[str | None, Field(alias='versionNumber', examples=['0.0'])] = None + source_type: Annotated[Any | VersionSourceType | None, Field(alias='sourceType')] = None + env_vars: Annotated[list[EnvVar] | None, Field(alias='envVars')] = None + apply_env_vars_to_build: Annotated[bool | None, Field(alias='applyEnvVarsToBuild', examples=[False])] = None + build_tag: Annotated[str | None, Field(alias='buildTag', examples=['latest'])] = None + source_files: Annotated[ + list[VersionSourceFiles1 | VersionSourceFiles2] | None, Field(alias='sourceFiles', title='VersionSourceFiles') + ] = None + + +class GetVersionResponse(BaseModel): + data: Version + + +class Data2(BaseModel): + total: Annotated[float, Field(examples=[5])] + items: list[EnvVar] + + +class GetEnvVarListResponse(BaseModel): + data: Data2 + + +class GetEnvVarResponse(BaseModel): + data: EnvVar + + +class WebhookCondition(BaseModel): + actor_id: Annotated[str | None, Field(alias='actorId', examples=['hksJZtadYvn4mBuin'])] = None + actor_task_id: Annotated[str | None, Field(alias='actorTaskId', examples=['asdLZtadYvn4mBZmm'])] = None + actor_run_id: Annotated[str | None, Field(alias='actorRunId', 
examples=['hgdKZtadYvn4mBpoi'])] = None + + +class ExampleWebhookDispatch(BaseModel): + status: Annotated[str, Field(examples=['SUCCEEDED'])] + finished_at: Annotated[str, Field(alias='finishedAt', examples=['2019-12-13T08:36:13.202Z'])] + + +class WebhookStats(BaseModel): + total_dispatches: Annotated[float, Field(alias='totalDispatches', examples=[1])] + + +class WebhookShort(BaseModel): + id: Annotated[str, Field(examples=['YiKoxjkaS9gjGTqhF'])] + created_at: Annotated[str, Field(alias='createdAt', examples=['2019-12-12T07:34:14.202Z'])] + modified_at: Annotated[str, Field(alias='modifiedAt', examples=['2019-12-13T08:36:13.202Z'])] + user_id: Annotated[str, Field(alias='userId', examples=['wRsJZtadYvn4mBZmm'])] + is_ad_hoc: Annotated[bool | None, Field(alias='isAdHoc', examples=[False])] = None + should_interpolate_strings: Annotated[bool | None, Field(alias='shouldInterpolateStrings', examples=[False])] = None + event_types: Annotated[list[str], Field(alias='eventTypes', examples=[['ACTOR.RUN.SUCCEEDED']])] + condition: WebhookCondition + ignore_ssl_errors: Annotated[bool, Field(alias='ignoreSslErrors', examples=[False])] + do_not_retry: Annotated[bool, Field(alias='doNotRetry', examples=[False])] + request_url: Annotated[str, Field(alias='requestUrl', examples=['http://example.com/'])] + last_dispatch: Annotated[ExampleWebhookDispatch | None, Field(alias='lastDispatch')] = None + stats: WebhookStats | None = None + + +class Data3(PaginationResponse): + items: list[WebhookShort] | None = None + + +class GetListOfWebhooksResponse(BaseModel): + data: Data3 + + +class BuildsMeta(BaseModel): + origin: Annotated[str, Field(examples=['WEB'])] + client_ip: Annotated[str, Field(alias='clientIp', examples=['172.234.12.34'])] + user_agent: Annotated[str, Field(alias='userAgent', examples=['Mozilla/5.0 (iPad)'])] + + +class BuildShort(BaseModel): + id: Annotated[str, Field(examples=['HG7ML7M8z78YcAPEB'])] + act_id: Annotated[str | None, Field(alias='actId', 
examples=['janedoe~my-actor'])] = None + status: Annotated[str, Field(examples=['SUCCEEDED'])] + started_at: Annotated[str, Field(alias='startedAt', examples=['2019-11-30T07:34:24.202Z'])] + finished_at: Annotated[str, Field(alias='finishedAt', examples=['2019-12-12T09:30:12.202Z'])] + usage_total_usd: Annotated[float, Field(alias='usageTotalUsd', examples=[0.02])] + meta: BuildsMeta | None = None + + +class Data4(BaseModel): + total: Annotated[float, Field(examples=[2])] + offset: Annotated[float, Field(examples=[0])] + limit: Annotated[float, Field(examples=[1000])] + desc: Annotated[bool, Field(examples=[False])] + count: Annotated[float, Field(examples=[2])] + items: list[BuildShort] + + +class GetBuildListResponse(BaseModel): + data: Data4 + + +class BuildStats(BaseModel): + duration_millis: Annotated[float, Field(alias='durationMillis', examples=[1000])] + run_time_secs: Annotated[float, Field(alias='runTimeSecs', examples=[45.718])] + compute_units: Annotated[float, Field(alias='computeUnits', examples=[0.0126994444444444])] + + +class BuildOptions(BaseModel): + use_cache: Annotated[bool | None, Field(alias='useCache', examples=[False])] = None + beta_packages: Annotated[bool | None, Field(alias='betaPackages', examples=[False])] = None + memory_mbytes: Annotated[float | None, Field(alias='memoryMbytes', examples=[1024])] = None + disk_mbytes: Annotated[float | None, Field(alias='diskMbytes', examples=[2048])] = None + + +class BuildUsage(BaseModel): + actor_compute_units: Annotated[float | None, Field(alias='ACTOR_COMPUTE_UNITS', examples=[0.08])] = None + + +class ActorSpecification(IntEnum): + """The Actor specification version that this Actor follows. 
This property must be set to 1.""" + + INTEGER_1 = 1 + + +class Storages(BaseModel): + dataset: dict[str, Any] | None = None + """ + Defines the schema of items in your dataset, the full specification can be found in [Apify docs](https://docs.apify.com/platform/actors/development/actor-definition/dataset-schema) + """ + + +class ActorDefinition(BaseModel): + """The definition of the Actor, the full specification of this field can be found in [Apify docs](https://docs.apify.com/platform/actors/development/actor-definition/actor-json)""" + + actor_specification: Annotated[ActorSpecification, Field(alias='actorSpecification')] + """ + The Actor specification version that this Actor follows. This property must be set to 1. + """ + name: str + """ + The name of the Actor. + """ + version: Annotated[str, Field(pattern='^[0-9]+\\\\.[0-9]+$')] + """ + The version of the Actor, specified in the format [Number].[Number], e.g., 0.1, 1.0. + """ + build_tag: Annotated[str | None, Field(alias='buildTag')] = None + """ + The tag name to be applied to a successful build of the Actor. Defaults to 'latest' if not specified. + """ + environment_variables: Annotated[dict[str, str] | None, Field(alias='environmentVariables')] = None + """ + A map of environment variables to be used during local development and deployment. + """ + dockerfile: str | None = None + """ + The path to the Dockerfile used for building the Actor on the platform. + """ + docker_context_dir: Annotated[str | None, Field(alias='dockerContextDir')] = None + """ + The path to the directory used as the Docker context when building the Actor. + """ + readme: str | None = None + """ + The path to the README file for the Actor. 
+ """ + input: dict[str, Any] | None = None + """ + The input schema object, the full specification can be found in [Apify docs](https://docs.apify.com/platform/actors/development/actor-definition/input-schema) + """ + changelog: str | None = None + """ + The path to the CHANGELOG file displayed in the Actor's information tab. + """ + storages: Storages | None = None + min_memory_mbytes: Annotated[int | None, Field(alias='minMemoryMbytes', ge=256)] = None + """ + Specifies the minimum amount of memory in megabytes required by the Actor. + """ + max_memory_mbytes: Annotated[int | None, Field(alias='maxMemoryMbytes', ge=256)] = None + """ + Specifies the maximum amount of memory in megabytes required by the Actor. + """ + uses_standby_mode: Annotated[bool | None, Field(alias='usesStandbyMode')] = None + """ + Specifies whether the Actor will have Standby mode enabled. + """ + + +class Stats1(BaseModel): + pass + + +class Stats2(BuildStats, Stats1): + pass + + +class Options1(BaseModel): + pass + + +class Options2(BuildOptions, Options1): + pass + + +class Usage1(BaseModel): + pass + + +class Usage2(BuildUsage, Usage1): + pass + + +class UsageUsd1(BaseModel): + pass + + +class UsageUsd2(BuildUsage, UsageUsd1): + pass + + +class Build(BaseModel): + id: Annotated[str, Field(examples=['HG7ML7M8z78YcAPEB'])] + act_id: Annotated[str, Field(alias='actId', examples=['janedoe~my-actor'])] + user_id: Annotated[str, Field(alias='userId', examples=['klmdEpoiojmdEMlk3'])] + started_at: Annotated[str, Field(alias='startedAt', examples=['2019-11-30T07:34:24.202Z'])] + finished_at: Annotated[str | None, Field(alias='finishedAt', examples=['2019-12-12T09:30:12.202Z'])] = None + status: Annotated[str, Field(examples=['SUCCEEDED'])] + meta: BuildsMeta + stats: Stats2 | None = None + options: Options2 | None = None + usage: Usage2 | None = None + usage_total_usd: Annotated[float | None, Field(alias='usageTotalUsd', examples=[0.02])] = None + usage_usd: Annotated[UsageUsd2 | None, 
Field(alias='usageUsd')] = None + input_schema: Annotated[ + str | None, Field(alias='inputSchema', examples=['{\\n \\"title\\": \\"Schema for ... }']) + ] = None + readme: Annotated[str | None, Field(examples=['# Magic Actor\\nThis Actor is magic.'])] = None + build_number: Annotated[str, Field(alias='buildNumber', examples=['0.1.1'])] + actor_definition: Annotated[ActorDefinition | None, Field(alias='actorDefinition')] = None + + +class BuildActorResponse(BaseModel): + data: Build + + +class GetBuildResponse(BaseModel): + data: Build + + +class Info(BaseModel): + title: Annotated[str | None, Field(examples=['Your Magic Actor'])] = None + version: Annotated[str | None, Field(examples=['1.0'])] = None + x_build_id: Annotated[str | None, Field(alias='x-build-id', examples=['ID of build'])] = None + + +class Server(BaseModel): + url: Annotated[str | None, Field(examples=['https://api.apify.com/v2'])] = None + + +class Schema(BaseModel): + field_ref: Annotated[str | None, Field(alias='$ref', examples=['#/components/schemas/inputSchema'])] = None + + +class ApplicationJson(BaseModel): + schema_: Annotated[Schema | None, Field(alias='schema')] = None + + +class Content(BaseModel): + application_json: Annotated[ApplicationJson | None, Field(alias='application/json')] = None + + +class RequestBody(BaseModel): + required: Annotated[bool | None, Field(examples=[True])] = None + content: Content | None = None + + +class Schema1(BaseModel): + type: Annotated[str | None, Field(examples=['string'])] = None + + +class Parameter(BaseModel): + name: Annotated[str | None, Field(examples=['token'])] = None + in_: Annotated[str | None, Field(alias='in', examples=['query'])] = None + required: Annotated[bool | None, Field(examples=[True])] = None + schema_: Annotated[Schema1 | None, Field(alias='schema')] = None + description: Annotated[str | None, Field(examples=['Enter your Apify token here'])] = None + + +class Field200(BaseModel): + description: Annotated[str | None, 
Field(examples=['OK'])] = None + + +class Responses(BaseModel): + field_200: Annotated[Field200 | None, Field(alias='200')] = None + + +class Post(BaseModel): + operation_id: Annotated[str | None, Field(alias='operationId', examples=['run-sync-get-dataset-items'])] = None + x_openai_is_consequential: Annotated[bool | None, Field(alias='x-openai-isConsequential', examples=[False])] = None + summary: Annotated[ + str | None, + Field(examples=["Executes an Actor, waits for its completion, and returns Actor's dataset items in response."]), + ] = None + tags: Annotated[list[str] | None, Field(examples=[['Run Actor']])] = None + request_body: Annotated[RequestBody | None, Field(alias='requestBody')] = None + parameters: list[Parameter] | None = None + responses: Responses | None = None + + +class FieldActsUsernameActorRunSyncGetDatasetItems(BaseModel): + post: Post | None = None + + +class Schema2(BaseModel): + field_ref: Annotated[str | None, Field(alias='$ref', examples=['#/components/schemas/inputSchema'])] = None + + +class ApplicationJson1(BaseModel): + schema_: Annotated[Schema2 | None, Field(alias='schema')] = None + + +class Content1(BaseModel): + application_json: Annotated[ApplicationJson1 | None, Field(alias='application/json')] = None + + +class RequestBody1(BaseModel): + required: Annotated[bool | None, Field(examples=[True])] = None + content: Content1 | None = None + + +class Schema3(BaseModel): + type: str | None = None + + +class Parameter1(BaseModel): + name: str | None = None + in_: Annotated[str | None, Field(alias='in', examples=['query'])] = None + required: bool | None = None + schema_: Annotated[Schema3 | None, Field(alias='schema')] = None + description: str | None = None + + +class Schema4(BaseModel): + field_ref: Annotated[str | None, Field(alias='$ref', examples=['#/components/schemas/runsResponseSchema'])] = None + + +class ApplicationJson2(BaseModel): + schema_: Annotated[Schema4 | None, Field(alias='schema')] = None + + +class 
Content2(BaseModel): + application_json: Annotated[ApplicationJson2 | None, Field(alias='application/json')] = None + + +class Field2001(BaseModel): + description: Annotated[str | None, Field(examples=['OK'])] = None + content: Content2 | None = None + + +class Responses1(BaseModel): + field_200: Annotated[Field2001 | None, Field(alias='200')] = None + + +class Post1(BaseModel): + operation_id: Annotated[str | None, Field(alias='operationId', examples=['runs'])] = None + x_openai_is_consequential: Annotated[bool | None, Field(alias='x-openai-isConsequential', examples=[False])] = None + summary: Annotated[ + str | None, Field(examples=['Executes an Actor and returns information about the initiated run in response.']) + ] = None + tags: Annotated[list[str] | None, Field(examples=[['Run Actor']])] = None + request_body: Annotated[RequestBody1 | None, Field(alias='requestBody')] = None + parameters: list[Parameter1] | None = None + responses: Responses1 | None = None + + +class FieldActsUsernameActorRuns(BaseModel): + post: Post1 | None = None + + +class Schema5(BaseModel): + field_ref: Annotated[str | None, Field(alias='$ref', examples=['#/components/schemas/inputSchema'])] = None + + +class ApplicationJson3(BaseModel): + schema_: Annotated[Schema5 | None, Field(alias='schema')] = None + + +class Content3(BaseModel): + application_json: Annotated[ApplicationJson3 | None, Field(alias='application/json')] = None + + +class RequestBody2(BaseModel): + required: Annotated[bool | None, Field(examples=[True])] = None + content: Content3 | None = None + + +class Schema6(BaseModel): + type: str | None = None + + +class Parameter2(BaseModel): + name: str | None = None + in_: Annotated[str | None, Field(alias='in', examples=['query'])] = None + required: bool | None = None + schema_: Annotated[Schema6 | None, Field(alias='schema')] = None + description: str | None = None + + +class Field2002(BaseModel): + description: Annotated[str | None, Field(examples=['OK'])] = None + + 
+class Responses2(BaseModel): + field_200: Annotated[Field2002 | None, Field(alias='200')] = None + + +class Post2(BaseModel): + operation_id: Annotated[str | None, Field(alias='operationId', examples=['run-sync'])] = None + x_openai_is_consequential: Annotated[bool | None, Field(alias='x-openai-isConsequential', examples=[False])] = None + summary: Annotated[ + str | None, + Field( + examples=[ + 'Executes an Actor, waits for completion, and returns the OUTPUT from Key-value store in response.' + ] + ), + ] = None + tags: Annotated[list[str] | None, Field(examples=[['Run Actor']])] = None + request_body: Annotated[RequestBody2 | None, Field(alias='requestBody')] = None + parameters: list[Parameter2] | None = None + responses: Responses2 | None = None + + +class FieldActsUsernameActorRunSync(BaseModel): + post: Post2 | None = None + + +class Paths(BaseModel): + field_acts__username___actor__run_sync_get_dataset_items: Annotated[ + FieldActsUsernameActorRunSyncGetDatasetItems | None, + Field(alias='/acts/~/run-sync-get-dataset-items'), + ] = None + field_acts__username___actor__runs: Annotated[ + FieldActsUsernameActorRuns | None, Field(alias='/acts/~/runs') + ] = None + field_acts__username___actor__run_sync: Annotated[ + FieldActsUsernameActorRunSync | None, Field(alias='/acts/~/run-sync') + ] = None + + +class InputSchema(BaseModel): + type: Annotated[str | None, Field(examples=['object'])] = None + + +class Id(BaseModel): + type: Annotated[str | None, Field(examples=['string'])] = None + + +class ActId(BaseModel): + type: Annotated[str | None, Field(examples=['string'])] = None + + +class UserId(BaseModel): + type: Annotated[str | None, Field(examples=['string'])] = None + + +class StartedAt(BaseModel): + type: Annotated[str | None, Field(examples=['string'])] = None + format: Annotated[str | None, Field(examples=['date-time'])] = None + example: Annotated[str | None, Field(examples=['2025-01-08T00:00:00.000Z'])] = None + + +class FinishedAt(BaseModel): + type: 
Annotated[str | None, Field(examples=['string'])] = None + format: Annotated[str | None, Field(examples=['date-time'])] = None + example: Annotated[str | None, Field(examples=['2025-01-08T00:00:00.000Z'])] = None + + +class Status(BaseModel): + type: Annotated[str | None, Field(examples=['string'])] = None + example: Annotated[str | None, Field(examples=['READY'])] = None + + +class Origin(BaseModel): + type: Annotated[str | None, Field(examples=['string'])] = None + example: Annotated[str | None, Field(examples=['API'])] = None + + +class UserAgent(BaseModel): + type: Annotated[str | None, Field(examples=['string'])] = None + + +class Properties2(BaseModel): + origin: Origin | None = None + user_agent: Annotated[UserAgent | None, Field(alias='userAgent')] = None + + +class Meta(BaseModel): + type: Annotated[str | None, Field(examples=['object'])] = None + properties: Properties2 | None = None + + +class Properties1(BaseModel): + id: Id | None = None + act_id: Annotated[ActId | None, Field(alias='actId')] = None + user_id: Annotated[UserId | None, Field(alias='userId')] = None + started_at: Annotated[StartedAt | None, Field(alias='startedAt')] = None + finished_at: Annotated[FinishedAt | None, Field(alias='finishedAt')] = None + status: Status | None = None + meta: Meta | None = None + + +class Data5(BaseModel): + type: Annotated[str | None, Field(examples=['object'])] = None + properties: Properties1 | None = None + + +class Properties(BaseModel): + data: Data5 | None = None + + +class RunsResponseSchema(BaseModel): + type: Annotated[str | None, Field(examples=['object'])] = None + properties: Properties | None = None + + +class Schemas(BaseModel): + input_schema: Annotated[InputSchema | None, Field(alias='inputSchema')] = None + runs_response_schema: Annotated[RunsResponseSchema | None, Field(alias='runsResponseSchema')] = None + + +class Components(BaseModel): + schemas: Schemas | None = None + + +class GetOpenApiResponse(BaseModel): + openapi: Annotated[str | 
None, Field(examples=['3.0.1'])] = None + info: Info | None = None + servers: list[Server] | None = None + paths: Paths | None = None + components: Components | None = None + + +class PostAbortBuildResponse(BaseModel): + data: Build + + +class Origin1(Enum): + DEVELOPMENT = 'DEVELOPMENT' + WEB = 'WEB' + API = 'API' + SCHEDULER = 'SCHEDULER' + TEST = 'TEST' + WEBHOOK = 'WEBHOOK' + ACTOR = 'ACTOR' + CLI = 'CLI' + STANDBY = 'STANDBY' + + +class RunMeta(BaseModel): + origin: Origin1 + + +class RunShort(BaseModel): + id: Annotated[str, Field(examples=['HG7ML7M8z78YcAPEB'])] + act_id: Annotated[str, Field(alias='actId', examples=['HDSasDasz78YcAPEB'])] + actor_task_id: Annotated[str | None, Field(alias='actorTaskId', examples=['KJHSKHausidyaJKHs'])] = None + status: Annotated[str, Field(examples=['SUCCEEDED'])] + started_at: Annotated[str, Field(alias='startedAt', examples=['2019-11-30T07:34:24.202Z'])] + finished_at: Annotated[str, Field(alias='finishedAt', examples=['2019-12-12T09:30:12.202Z'])] + build_id: Annotated[str, Field(alias='buildId', examples=['HG7ML7M8z78YcAPEB'])] + build_number: Annotated[str, Field(alias='buildNumber', examples=['0.0.2'])] + meta: RunMeta + usage_total_usd: Annotated[float, Field(alias='usageTotalUsd', examples=[0.2])] + default_key_value_store_id: Annotated[str, Field(alias='defaultKeyValueStoreId', examples=['sfAjeR4QmeJCQzTfe'])] + default_dataset_id: Annotated[str, Field(alias='defaultDatasetId', examples=['3ZojQDdFTsyE7Moy4'])] + default_request_queue_id: Annotated[str, Field(alias='defaultRequestQueueId', examples=['so93g2shcDzK3pA85'])] + + +class Data6(PaginationResponse): + items: list[RunShort] + + +class GetUserRunsListResponse(BaseModel): + data: Data6 + + +class RunStats(BaseModel): + input_body_len: Annotated[float | None, Field(alias='inputBodyLen', examples=[240])] = None + migration_count: Annotated[float | None, Field(alias='migrationCount', examples=[0])] = None + restart_count: Annotated[float, 
Field(alias='restartCount', examples=[0])] + resurrect_count: Annotated[float, Field(alias='resurrectCount', examples=[2])] + mem_avg_bytes: Annotated[float | None, Field(alias='memAvgBytes', examples=[267874071.9])] = None + mem_max_bytes: Annotated[float | None, Field(alias='memMaxBytes', examples=[404713472])] = None + mem_current_bytes: Annotated[float | None, Field(alias='memCurrentBytes', examples=[0])] = None + cpu_avg_usage: Annotated[float | None, Field(alias='cpuAvgUsage', examples=[33.7532101107538])] = None + cpu_max_usage: Annotated[float | None, Field(alias='cpuMaxUsage', examples=[169.650735534941])] = None + cpu_current_usage: Annotated[float | None, Field(alias='cpuCurrentUsage', examples=[0])] = None + net_rx_bytes: Annotated[float | None, Field(alias='netRxBytes', examples=[103508042])] = None + net_tx_bytes: Annotated[float | None, Field(alias='netTxBytes', examples=[4854600])] = None + duration_millis: Annotated[float | None, Field(alias='durationMillis', examples=[248472])] = None + run_time_secs: Annotated[float | None, Field(alias='runTimeSecs', examples=[248.472])] = None + metamorph: Annotated[float | None, Field(examples=[0])] = None + compute_units: Annotated[float, Field(alias='computeUnits', examples=[0.13804])] + + +class RunOptions(BaseModel): + build: Annotated[str, Field(examples=['latest'])] + timeout_secs: Annotated[float, Field(alias='timeoutSecs', examples=[300])] + memory_mbytes: Annotated[float, Field(alias='memoryMbytes', examples=[1024])] + disk_mbytes: Annotated[float, Field(alias='diskMbytes', examples=[2048])] + max_items: Annotated[float | None, Field(alias='maxItems', examples=[1000])] = None + max_total_charge_usd: Annotated[float | None, Field(alias='maxTotalChargeUsd', examples=[5])] = None + + +class RunUsage(BaseModel): + actor_compute_units: Annotated[float | None, Field(alias='ACTOR_COMPUTE_UNITS', examples=[3])] = None + dataset_reads: Annotated[float | None, Field(alias='DATASET_READS', examples=[4])] = None + 
dataset_writes: Annotated[float | None, Field(alias='DATASET_WRITES', examples=[4])] = None + key_value_store_reads: Annotated[float | None, Field(alias='KEY_VALUE_STORE_READS', examples=[5])] = None + key_value_store_writes: Annotated[float | None, Field(alias='KEY_VALUE_STORE_WRITES', examples=[3])] = None + key_value_store_lists: Annotated[float | None, Field(alias='KEY_VALUE_STORE_LISTS', examples=[5])] = None + request_queue_reads: Annotated[float | None, Field(alias='REQUEST_QUEUE_READS', examples=[2])] = None + request_queue_writes: Annotated[float | None, Field(alias='REQUEST_QUEUE_WRITES', examples=[1])] = None + data_transfer_internal_gbytes: Annotated[ + float | None, Field(alias='DATA_TRANSFER_INTERNAL_GBYTES', examples=[1]) + ] = None + data_transfer_external_gbytes_: Annotated[ + float | None, Field(alias='DATA_TRANSFER_EXTERNAL_GBYTES?', examples=[3]) + ] = None + proxy_residential_transfer_gbytes: Annotated[ + float | None, Field(alias='PROXY_RESIDENTIAL_TRANSFER_GBYTES', examples=[34]) + ] = None + proxy_serps: Annotated[float | None, Field(alias='PROXY_SERPS', examples=[3])] = None + + +class Usage31(BaseModel): + pass + + +class Usage32(RunUsage, Usage31): + pass + + +class UsageUsd31(BaseModel): + pass + + +class UsageUsd32(RunUsage, UsageUsd31): + pass + + +class Run(BaseModel): + id: Annotated[str, Field(examples=['HG7ML7M8z78YcAPEB'])] + act_id: Annotated[str, Field(alias='actId', examples=['HDSasDasz78YcAPEB'])] + user_id: Annotated[str, Field(alias='userId', examples=['7sT5jcggjjA9fNcxF'])] + actor_task_id: Annotated[str | None, Field(alias='actorTaskId', examples=['KJHSKHausidyaJKHs'])] = None + started_at: Annotated[str, Field(alias='startedAt', examples=['2019-11-30T07:34:24.202Z'])] + finished_at: Annotated[str, Field(alias='finishedAt', examples=['2019-12-12T09:30:12.202Z'])] + status: Annotated[str, Field(examples=['RUNNING'])] + status_message: Annotated[str | None, Field(alias='statusMessage', examples=['Actor is running'])] = None 
+ is_status_message_terminal: Annotated[bool | None, Field(alias='isStatusMessageTerminal', examples=[False])] = None + meta: RunMeta + pricing_info: Annotated[ + PayPerEventActorPricingInfo + | PricePerDatasetItemActorPricingInfo + | FlatPricePerMonthActorPricingInfo + | FreeActorPricingInfo + | None, + Field(alias='pricingInfo', discriminator='pricing_model', title='ActorRunPricingInfo'), + ] = None + stats: RunStats + charged_event_counts: Annotated[dict[str, ActorChargeEvent] | None, Field(alias='chargedEventCounts')] = None + options: RunOptions + build_id: Annotated[str, Field(alias='buildId', examples=['7sT5jcggjjA9fNcxF'])] + exit_code: Annotated[float | None, Field(alias='exitCode', examples=[0])] = None + default_key_value_store_id: Annotated[str, Field(alias='defaultKeyValueStoreId', examples=['eJNzqsbPiopwJcgGQ'])] + default_dataset_id: Annotated[str, Field(alias='defaultDatasetId', examples=['wmKPijuyDnPZAPRMk'])] + default_request_queue_id: Annotated[str, Field(alias='defaultRequestQueueId', examples=['FL35cSF7jrxr3BY39'])] + build_number: Annotated[str, Field(alias='buildNumber', examples=['0.0.36'])] + container_url: Annotated[str, Field(alias='containerUrl', examples=['https://g8kd8kbc5ge8.runs.apify.net'])] + is_container_server_ready: Annotated[bool | None, Field(alias='isContainerServerReady', examples=[True])] = None + git_branch_name: Annotated[str | None, Field(alias='gitBranchName', examples=['master'])] = None + usage: Usage32 | None = None + usage_total_usd: Annotated[float | None, Field(alias='usageTotalUsd', examples=[0.2654])] = None + usage_usd: Annotated[UsageUsd32 | None, Field(alias='usageUsd')] = None + + +class RunResponse(BaseModel): + data: Run + + +class Error(BaseModel): + type: Annotated[str, Field(examples=['run-failed'])] + message: Annotated[str, Field(examples=['Actor run did not succeed (run ID: 55uatRrZib4xbZs, status: FAILED)'])] + + +class ErrorResponse(BaseModel): + error: Error + + +class TaskStats(BaseModel): + 
total_runs: Annotated[float, Field(alias='totalRuns', examples=[15])] + + +class Stats31(BaseModel): + pass + + +class Stats32(TaskStats, Stats31): + pass + + +class TaskShort(BaseModel): + id: Annotated[str, Field(examples=['zdc3Pyhyz3m8vjDeM'])] + user_id: Annotated[str, Field(alias='userId', examples=['wRsJZtadYvn4mBZmm'])] + act_id: Annotated[str, Field(alias='actId', examples=['asADASadYvn4mBZmm'])] + act_name: Annotated[str, Field(alias='actName', examples=['my-actor'])] + name: Annotated[str, Field(examples=['my-task'])] + username: Annotated[str | None, Field(examples=['janedoe'])] = None + act_username: Annotated[str, Field(alias='actUsername', examples=['janedoe'])] + created_at: Annotated[str, Field(alias='createdAt', examples=['2018-10-26T07:23:14.855Z'])] + modified_at: Annotated[str, Field(alias='modifiedAt', examples=['2018-10-26T13:30:49.578Z'])] + stats: Stats32 | None = None + + +class TaskOptions(BaseModel): + build: Annotated[str | None, Field(examples=['latest'])] = None + timeout_secs: Annotated[float | None, Field(alias='timeoutSecs', examples=[300])] = None + memory_mbytes: Annotated[float | None, Field(alias='memoryMbytes', examples=[128])] = None + restart_on_error: Annotated[bool | None, Field(alias='restartOnError', examples=[False])] = None + + +class TaskInput(BaseModel): + hello: Annotated[str | None, Field(examples=['world'])] = None + + +class Options31(BaseModel): + pass + + +class Options32(TaskOptions, Options31): + pass + + +class Input1(BaseModel): + pass + + +class Input2(TaskInput, Input1): + pass + + +class CreateTaskRequest(BaseModel): + act_id: Annotated[str, Field(alias='actId', examples=['asADASadYvn4mBZmm'])] + name: Annotated[str, Field(examples=['my-task'])] + options: Options32 | None = None + input: Input2 | None = None + + +class Stats41(BaseModel): + pass + + +class Stats42(TaskStats, Stats41): + pass + + +class Options41(BaseModel): + pass + + +class Options42(TaskOptions, Options41): + pass + + +class 
Input31(BaseModel): + pass + + +class Input32(TaskInput, Input31): + pass + + +class Task(BaseModel): + id: Annotated[str, Field(examples=['zdc3Pyhyz3m8vjDeM'])] + user_id: Annotated[str, Field(alias='userId', examples=['wRsJZtadYvn4mBZmm'])] + act_id: Annotated[str, Field(alias='actId', examples=['asADASadYvn4mBZmm'])] + name: Annotated[str, Field(examples=['my-task'])] + username: Annotated[str | None, Field(examples=['janedoe'])] = None + created_at: Annotated[str, Field(alias='createdAt', examples=['2018-10-26T07:23:14.855Z'])] + modified_at: Annotated[str, Field(alias='modifiedAt', examples=['2018-10-26T13:30:49.578Z'])] + removed_at: Annotated[str | None, Field(alias='removedAt')] = None + stats: Stats42 | None = None + options: Options42 | None = None + input: Input32 | None = None + + +class Stats51(BaseModel): + pass + + +class Stats52(TaskStats, Stats51): + pass + + +class Options51(BaseModel): + pass + + +class Options52(TaskOptions, Options51): + pass + + +class Input41(BaseModel): + pass + + +class Input42(Task, Input41): + pass + + +class UpdateTaskRequest(BaseModel): + id: Annotated[str, Field(examples=['ZxLNxrRaZrSjuhT9y'])] + user_id: Annotated[str, Field(alias='userId', examples=['BPWZBd7Z9c746JAnF'])] + act_id: Annotated[str, Field(alias='actId', examples=['asADASadYvn4mBZmm'])] + name: Annotated[str, Field(examples=['my-task'])] + username: Annotated[str | None, Field(examples=['janedoe'])] = None + created_at: Annotated[str, Field(alias='createdAt', examples=['2018-10-26T07:23:14.855Z'])] + modified_at: Annotated[str, Field(alias='modifiedAt', examples=['2018-10-26T13:30:49.578Z'])] + removed_at: Annotated[str | None, Field(alias='removedAt')] = None + stats: Stats52 | None = None + options: Options52 | None = None + input: Input42 | None = None + + +class Webhook(BaseModel): + id: Annotated[str, Field(examples=['YiKoxjkaS9gjGTqhF'])] + created_at: Annotated[str, Field(alias='createdAt', examples=['2019-12-12T07:34:14.202Z'])] + modified_at: 
Annotated[str, Field(alias='modifiedAt', examples=['2019-12-13T08:36:13.202Z'])] + user_id: Annotated[str, Field(alias='userId', examples=['wRsJZtadYvn4mBZmm'])] + is_ad_hoc: Annotated[bool | None, Field(alias='isAdHoc', examples=[False])] = None + should_interpolate_strings: Annotated[bool | None, Field(alias='shouldInterpolateStrings', examples=[False])] = None + event_types: Annotated[list[str], Field(alias='eventTypes', examples=[['ACTOR.RUN.SUCCEEDED']])] + condition: WebhookCondition + ignore_ssl_errors: Annotated[bool, Field(alias='ignoreSslErrors', examples=[False])] + do_not_retry: Annotated[bool | None, Field(alias='doNotRetry', examples=[False])] = None + request_url: Annotated[str, Field(alias='requestUrl', examples=['http://example.com/'])] + payload_template: Annotated[ + str | None, Field(alias='payloadTemplate', examples=['{\\n \\"userId\\": {{userId}}...']) + ] = None + headers_template: Annotated[ + str | None, Field(alias='headersTemplate', examples=['{\\n \\"Authorization\\": Bearer...']) + ] = None + description: Annotated[str | None, Field(examples=['this is webhook description'])] = None + last_dispatch: Annotated[ExampleWebhookDispatch | None, Field(alias='lastDispatch')] = None + stats: WebhookStats | None = None + + +class UpdateRunRequest(BaseModel): + run_id: Annotated[str, Field(alias='runId', examples=['3KH8gEpp4d8uQSe8T'])] + status_message: Annotated[str, Field(alias='statusMessage', examples=['Actor has finished'])] + is_status_message_terminal: Annotated[bool | None, Field(alias='isStatusMessageTerminal', examples=[True])] = None + + +class ChargeRunRequest(BaseModel): + event_name: Annotated[str, Field(alias='eventName', examples=['ANALYZE_PAGE'])] + count: Annotated[float, Field(examples=[1])] + + +class KeyValueStoreStats(BaseModel): + read_count: Annotated[float, Field(alias='readCount', examples=[9])] + write_count: Annotated[float, Field(alias='writeCount', examples=[3])] + delete_count: Annotated[float, 
Field(alias='deleteCount', examples=[6])] + list_count: Annotated[float, Field(alias='listCount', examples=[2])] + s3_storage_bytes: Annotated[float, Field(alias='s3StorageBytes', examples=[18])] + + +class KeyValueStore(BaseModel): + id: Annotated[str, Field(examples=['WkzbQMuFYuamGv3YF'])] + name: Annotated[str, Field(examples=['d7b9MDYsbtX5L7XAj'])] + user_id: Annotated[str | None, Field(alias='userId', examples=['BPWDBd7Z9c746JAnF'])] = None + username: Annotated[str | None, Field(examples=['janedoe'])] = None + created_at: Annotated[str, Field(alias='createdAt', examples=['2019-12-12T07:34:14.202Z'])] + modified_at: Annotated[str, Field(alias='modifiedAt', examples=['2019-12-13T08:36:13.202Z'])] + accessed_at: Annotated[str, Field(alias='accessedAt', examples=['2019-12-14T08:36:13.202Z'])] + act_id: Annotated[str | None, Field(alias='actId', examples=[None])] = None + act_run_id: Annotated[str | None, Field(alias='actRunId', examples=[None])] = None + console_url: Annotated[ + str, + Field(alias='consoleUrl', examples=['https://console.apify.com/storage/key-value-stores/27TmTznX9YPeAYhkC']), + ] + keys_public_url: Annotated[ + str, + Field( + alias='keysPublicUrl', + examples=['https://api.apify.com/v2/key-value-stores/WkzbQMuFYuamGv3YF/keys?signature=abc123'], + ), + ] + """ + A public link to access keys of the key-value store directly. + """ + url_signing_secret_key: Annotated[str | None, Field(alias='urlSigningSecretKey')] = None + """ + A secret key for generating signed public URLs. It is only provided to clients with WRITE permission for the key-value store. 
+ """ + stats: KeyValueStoreStats | None = None + + +class Data7(PaginationResponse): + items: list[KeyValueStore] + + +class GetListOfKeyValueStoresResponse(BaseModel): + data: Data7 + + +class CreateKeyValueStoreResponse(BaseModel): + data: KeyValueStore + + +class GetStoreResponse(BaseModel): + data: KeyValueStore + + +class UpdateStoreRequest(BaseModel): + name: str + + +class UpdateStoreResponse(BaseModel): + data: KeyValueStore + + +class Item(BaseModel): + key: Annotated[str, Field(examples=['second-key'])] + size: Annotated[float, Field(examples=[36])] + record_public_url: Annotated[ + str, + Field( + alias='recordPublicUrl', + examples=['https://api.apify.com/v2/key-value-stores/WkzbQMuFYuamGv3YF/records/some-key?signature=abc123'], + ), + ] + """ + A public link to access this record directly. + """ + + +class ListOfKeysResponse(BaseModel): + items: list[Item] + count: Annotated[float, Field(examples=[2])] + limit: Annotated[float, Field(examples=[2])] + exclusive_start_key: Annotated[str | None, Field(alias='exclusiveStartKey', examples=['some-key'])] = None + is_truncated: Annotated[bool, Field(alias='isTruncated', examples=[True])] + next_exclusive_start_key: Annotated[str | None, Field(alias='nextExclusiveStartKey', examples=['third-key'])] = None + + +class GetListOfKeysResponse(BaseModel): + data: ListOfKeysResponse + + +class GetRecordResponse(BaseModel): + foo: str + + +class PutRecordRequest(BaseModel): + foo: Annotated[str | None, Field(examples=['bar'])] = None + + +class DatasetListItem(BaseModel): + id: Annotated[str, Field(examples=['WkzbQMuFYuamGv3YF'])] + name: Annotated[str, Field(examples=['d7b9MDYsbtX5L7XAj'])] + user_id: Annotated[str, Field(alias='userId', examples=['tbXmWu7GCxnyYtSiL'])] + created_at: Annotated[str, Field(alias='createdAt', examples=['2019-12-12T07:34:14.202Z'])] + modified_at: Annotated[str, Field(alias='modifiedAt', examples=['2019-12-13T08:36:13.202Z'])] + accessed_at: Annotated[str, Field(alias='accessedAt', 
examples=['2019-12-14T08:36:13.202Z'])] + item_count: Annotated[float, Field(alias='itemCount', examples=[7])] + clean_item_count: Annotated[float, Field(alias='cleanItemCount', examples=[5])] + act_id: Annotated[str | None, Field(alias='actId')] = None + act_run_id: Annotated[str | None, Field(alias='actRunId')] = None + + +class Data8(PaginationResponse): + items: list[DatasetListItem] + + +class GetListOfDatasetsResponse(BaseModel): + data: Data8 + + +class DatasetStats(BaseModel): + read_count: Annotated[float, Field(alias='readCount', examples=[22])] + write_count: Annotated[float, Field(alias='writeCount', examples=[3])] + storage_bytes: Annotated[float, Field(alias='storageBytes', examples=[783])] + + +class Dataset(BaseModel): + id: Annotated[str, Field(examples=['WkzbQMuFYuamGv3YF'])] + name: Annotated[str, Field(examples=['d7b9MDYsbtX5L7XAj'])] + user_id: Annotated[str, Field(alias='userId', examples=['wRsJZtadYvn4mBZmm'])] + created_at: Annotated[str, Field(alias='createdAt', examples=['2019-12-12T07:34:14.202Z'])] + modified_at: Annotated[str, Field(alias='modifiedAt', examples=['2019-12-13T08:36:13.202Z'])] + accessed_at: Annotated[str, Field(alias='accessedAt', examples=['2019-12-14T08:36:13.202Z'])] + item_count: Annotated[float, Field(alias='itemCount', examples=[7])] + clean_item_count: Annotated[float, Field(alias='cleanItemCount', examples=[5])] + act_id: Annotated[str | None, Field(alias='actId')] = None + act_run_id: Annotated[str | None, Field(alias='actRunId')] = None + fields: list[str] | None = None + schema_: Annotated[ + dict[str, Any] | None, + Field( + alias='schema', + examples=[ + { + 'actorSpecification': 1, + 'title': 'My dataset', + 'views': { + 'overview': { + 'title': 'Overview', + 'transformation': {'fields': ['linkUrl']}, + 'display': { + 'component': 'table', + 'properties': {'linkUrl': {'label': 'Link URL', 'format': 'link'}}, + }, + } + }, + } + ], + ), + ] = None + """ + Defines the schema of items in your dataset, the full 
specification can be found in [Apify docs](/platform/actors/development/actor-definition/dataset-schema) + """ + console_url: Annotated[ + str, Field(alias='consoleUrl', examples=['https://console.apify.com/storage/datasets/27TmTznX9YPeAYhkC']) + ] + items_public_url: Annotated[ + str | None, + Field( + alias='itemsPublicUrl', + examples=['https://api.apify.com/v2/datasets/WkzbQMuFYuamGv3YF/items?signature=abc123'], + ), + ] = None + """ + A public link to access the dataset items directly. + """ + url_signing_secret_key: Annotated[str | None, Field(alias='urlSigningSecretKey')] = None + """ + A secret key for generating signed public URLs. It is only provided to clients with WRITE permission for the dataset. + """ + stats: DatasetStats | None = None + + +class DatasetResponse(BaseModel): + data: Dataset + + +class UpdateDatasetRequest(BaseModel): + name: str + + +class PutItemsRequest(BaseModel): + foo: str + + +class ValidationError(BaseModel): + instance_path: Annotated[str | None, Field(alias='instancePath')] = None + """ + The path to the instance being validated. + """ + schema_path: Annotated[str | None, Field(alias='schemaPath')] = None + """ + The path to the schema that failed the validation. + """ + keyword: str | None = None + """ + The validation keyword that caused the error. + """ + message: str | None = None + """ + A message describing the validation error. + """ + params: dict[str, Any] | None = None + """ + Additional parameters specific to the validation error. + """ + + +class InvalidItem(BaseModel): + item_position: Annotated[float | None, Field(alias='itemPosition', examples=[2])] = None + """ + The position of the invalid item in the array. + """ + validation_errors: Annotated[list[ValidationError] | None, Field(alias='validationErrors')] = None + """ + A complete list of AJV validation error objects for the invalid item. 
+ """ + + +class Data9(BaseModel): + invalid_items: Annotated[list[InvalidItem], Field(alias='invalidItems')] + """ + A list of invalid items in the received array of items. + """ + + +class Error1(BaseModel): + type: Annotated[str, Field(examples=['schema-validation-error'])] + """ + The type of the error. + """ + message: Annotated[str, Field(examples=['Schema validation failed'])] + """ + A human-readable message describing the error. + """ + data: Data9 + + +class DatasetSchemaValidationError(BaseModel): + error: Error1 | None = None + + +class PutItemResponseError(BaseModel): + error: DatasetSchemaValidationError + + +class DatasetFieldStatistics(BaseModel): + min: float | None = None + """ + Minimum value of the field. For numbers, this is calculated directly. For strings, this is the length of the shortest string. For arrays, this is the length of the shortest array. For objects, this is the number of keys in the smallest object. + """ + max: float | None = None + """ + Maximum value of the field. For numbers, this is calculated directly. For strings, this is the length of the longest string. For arrays, this is the length of the longest array. For objects, this is the number of keys in the largest object. + """ + null_count: Annotated[float | None, Field(alias='nullCount')] = None + """ + How many items in the dataset have a null value for this field. + """ + empty_count: Annotated[float | None, Field(alias='emptyCount')] = None + """ + How many items in the dataset are `undefined`, meaning that for example empty string is not considered empty. + """ + + +class Data10(BaseModel): + field_statistics: Annotated[dict[str, DatasetFieldStatistics] | None, Field(alias='fieldStatistics')] = None + """ + When you configure the dataset [fields schema](https://docs.apify.com/platform/actors/development/actor-definition/dataset-schema/validation), we measure the statistics such as `min`, `max`, `nullCount` and `emptyCount` for each field. 
This property provides statistics for each field from the dataset fields schema.

See dataset field statistics [documentation](https://docs.apify.com/platform/actors/development/actor-definition/dataset-schema/validation#dataset-field-statistics) for more information. + """ + + +class GetDatasetStatisticsResponse(BaseModel): + data: Data10 + + +class RequestQueueShort(BaseModel): + id: Annotated[str, Field(examples=['WkzbQMuFYuamGv3YF'])] + name: Annotated[str, Field(examples=['some-name'])] + user_id: Annotated[str, Field(alias='userId', examples=['wRsJZtadYvn4mBZmm'])] + username: Annotated[str, Field(examples=['janedoe'])] + created_at: Annotated[str, Field(alias='createdAt', examples=['2019-12-12T07:34:14.202Z'])] + modified_at: Annotated[str, Field(alias='modifiedAt', examples=['2019-12-13T08:36:13.202Z'])] + accessed_at: Annotated[str, Field(alias='accessedAt', examples=['2019-12-14T08:36:13.202Z'])] + expire_at: Annotated[str, Field(alias='expireAt', examples=['2019-06-02T17:15:06.751Z'])] + total_request_count: Annotated[float, Field(alias='totalRequestCount', examples=[100])] + handled_request_count: Annotated[float, Field(alias='handledRequestCount', examples=[50])] + pending_request_count: Annotated[float, Field(alias='pendingRequestCount', examples=[50])] + act_id: Annotated[str | None, Field(alias='actId')] = None + act_run_id: Annotated[str | None, Field(alias='actRunId')] = None + had_multiple_clients: Annotated[bool, Field(alias='hadMultipleClients', examples=[True])] + + +class Data11(BaseModel): + total: Annotated[float, Field(examples=[2])] + offset: Annotated[float, Field(examples=[0])] + limit: Annotated[float, Field(examples=[1000])] + desc: Annotated[bool, Field(examples=[False])] + count: Annotated[float, Field(examples=[2])] + items: list[RequestQueueShort] + + +class GetListOfRequestQueuesResponse(BaseModel): + data: Data11 + + +class RequestQueue(BaseModel): + id: Annotated[str, Field(examples=['WkzbQMuFYuamGv3YF'])] + name: Annotated[str | None, Field(examples=['some-name'])] = None + user_id: Annotated[str, 
Field(alias='userId', examples=['wRsJZtadYvn4mBZmm'])] + created_at: Annotated[str, Field(alias='createdAt', examples=['2019-12-12T07:34:14.202Z'])] + modified_at: Annotated[str, Field(alias='modifiedAt', examples=['2030-12-13T08:36:13.202Z'])] + """ + The modifiedAt is updated whenever the queue is modified. Modifications include adding, updating, or removing requests, as well as locking or unlocking requests in the queue. + """ + accessed_at: Annotated[str, Field(alias='accessedAt', examples=['2019-12-14T08:36:13.202Z'])] + total_request_count: Annotated[float, Field(alias='totalRequestCount', examples=[870])] + handled_request_count: Annotated[float, Field(alias='handledRequestCount', examples=[100])] + pending_request_count: Annotated[float, Field(alias='pendingRequestCount', examples=[670])] + had_multiple_clients: Annotated[bool, Field(alias='hadMultipleClients', examples=[True])] + console_url: Annotated[ + str, Field(alias='consoleUrl', examples=['https://api.apify.com/v2/request-queues/27TmTznX9YPeAYhkC']) + ] + + +class CreateRequestQueueResponse(BaseModel): + data: RequestQueue + + +class GetRequestQueueResponse(BaseModel): + data: RequestQueue + + +class UpdateRequestQueueRequest(BaseModel): + name: str + + +class UpdateRequestQueueResponse(BaseModel): + data: RequestQueue + + +class RequestWithoutId(BaseModel): + unique_key: Annotated[str, Field(alias='uniqueKey', examples=['http://example.com'])] + url: Annotated[str, Field(examples=['http://example.com'])] + method: Annotated[str, Field(examples=['GET'])] + + +class ProcessedRequest(BaseModel): + request_id: Annotated[str, Field(alias='requestId', examples=['sbJ7klsdf7ujN9l'])] + unique_key: Annotated[str, Field(alias='uniqueKey', examples=['http://example.com'])] + was_already_present: Annotated[bool, Field(alias='wasAlreadyPresent', examples=[False])] + was_already_handled: Annotated[bool, Field(alias='wasAlreadyHandled', examples=[False])] + + +class UnprocessedRequest(BaseModel): + unique_key: 
Annotated[str, Field(alias='uniqueKey', examples=['http://example.com'])] + url: Annotated[str, Field(examples=['http://example.com'])] + method: Annotated[str, Field(examples=['GET'])] + + +class Data12(BaseModel): + processed_requests: Annotated[list[ProcessedRequest], Field(alias='processedRequests')] + unprocessed_requests: Annotated[list[UnprocessedRequest], Field(alias='unprocessedRequests')] + + +class BatchOperationResponse(BaseModel): + data: Data12 + + +class UserData(BaseModel): + label: Annotated[str | None, Field(examples=['DETAIL'])] = None + image: Annotated[str | None, Field(examples=['https://picserver1.eu'])] = None + + +class RequestQueueItems(BaseModel): + id: Annotated[str, Field(examples=['dnjkDMKLmdlkmlkmld'])] + retry_count: Annotated[float, Field(alias='retryCount', examples=[0])] + unique_key: Annotated[str, Field(alias='uniqueKey', examples=['http://example.com'])] + url: Annotated[str, Field(examples=['http://example.com'])] + method: Annotated[str, Field(examples=['GET'])] + loaded_url: Annotated[str | None, Field(alias='loadedUrl', examples=['http://example.com/example-1'])] = None + payload: dict[str, Any] | None = None + no_retry: Annotated[bool | None, Field(alias='noRetry', examples=[False])] = None + error_messages: Annotated[list[str] | None, Field(alias='errorMessages')] = None + headers: dict[str, Any] | None = None + user_data: Annotated[UserData | None, Field(alias='userData')] = None + handled_at: Annotated[str | None, Field(alias='handledAt', examples=['2019-06-16T10:23:31.607Z'])] = None + + +class Data13(BaseModel): + items: list[RequestQueueItems] + count: Annotated[float, Field(examples=[2])] + limit: Annotated[float, Field(examples=[2])] + exclusive_start_id: Annotated[str | None, Field(alias='exclusiveStartId', examples=['Ihnsp8YrvJ8102Kj'])] = None + + +class ListRequestsResponse(BaseModel): + data: Data13 + + +class RequestOperationInfo(BaseModel): + request_id: Annotated[str, Field(alias='requestId', 
examples=['YiKoxjkaS9gjGTqhF'])] + was_already_present: Annotated[bool, Field(alias='wasAlreadyPresent', examples=[True])] + was_already_handled: Annotated[bool, Field(alias='wasAlreadyHandled', examples=[False])] + + +class AddRequestResponse(BaseModel): + data: RequestOperationInfo + + +class GetRequestResponse(BaseModel): + data: RequestQueueItems + + +class UpdateRequestResponse(BaseModel): + data: RequestOperationInfo + + +class Item1(BaseModel): + id: Annotated[str, Field(examples=['8OamqXBCpPHxyH9'])] + retry_count: Annotated[float, Field(alias='retryCount', examples=[0])] + unique_key: Annotated[str, Field(alias='uniqueKey', examples=['http://example.com'])] + url: Annotated[str, Field(examples=['http://example.com'])] + method: Annotated[str, Field(examples=['GET'])] + + +class Data14(BaseModel): + limit: Annotated[float, Field(examples=[1000])] + queue_modified_at: Annotated[str, Field(alias='queueModifiedAt', examples=['2018-03-14T23:00:00.000Z'])] + had_multiple_clients: Annotated[bool, Field(alias='hadMultipleClients', examples=[False])] + items: list[Item1] + + +class GetHeadResponse(BaseModel): + data: Data14 + + +class Item2(BaseModel): + id: Annotated[str, Field(examples=['8OamqXBCpPHxyj9'])] + retry_count: Annotated[float, Field(alias='retryCount', examples=[0])] + unique_key: Annotated[str, Field(alias='uniqueKey', examples=['http://example.com'])] + url: Annotated[str, Field(examples=['http://example.com'])] + method: Annotated[str, Field(examples=['GET'])] + lock_expires_at: Annotated[str, Field(alias='lockExpiresAt', examples=['2022-06-14T23:00:00.000Z'])] + + +class Data15(BaseModel): + limit: Annotated[float, Field(examples=[1000])] + queue_modified_at: Annotated[str, Field(alias='queueModifiedAt', examples=['2018-03-14T23:00:00.000Z'])] + """ + The modifiedAt is updated whenever the queue is modified. Modifications include adding, updating, or removing requests, as well as locking or unlocking requests in the queue. 
+ """ + queue_has_locked_requests: Annotated[bool | None, Field(alias='queueHasLockedRequests', examples=[True])] = None + """ + Whether the queue contains requests locked by any client (either the one calling the endpoint or a different one). + """ + client_key: Annotated[str | None, Field(alias='clientKey', examples=['client-one'])] = None + had_multiple_clients: Annotated[bool, Field(alias='hadMultipleClients', examples=[True])] + lock_secs: Annotated[float, Field(alias='lockSecs', examples=[60])] + items: list[Item2] + + +class GetHeadAndLockResponse(BaseModel): + data: Data15 + + +class Data16(BaseModel): + lock_expires_at: Annotated[str, Field(alias='lockExpiresAt', examples=['2022-01-01T00:00:00.000Z'])] + """ + Date when lock expires. + """ + + +class ProlongRequestLockResponse(BaseModel): + data: Data16 | None = None + + +class WebhookCreate(BaseModel): + is_ad_hoc: Annotated[bool | None, Field(alias='isAdHoc', examples=[False])] = None + event_types: Annotated[list[str], Field(alias='eventTypes', examples=[['ACTOR.RUN.SUCCEEDED']])] + condition: WebhookCondition + idempotency_key: Annotated[str | None, Field(alias='idempotencyKey', examples=['fdSJmdP3nfs7sfk3y'])] = None + ignore_ssl_errors: Annotated[bool | None, Field(alias='ignoreSslErrors', examples=[False])] = None + do_not_retry: Annotated[bool | None, Field(alias='doNotRetry', examples=[False])] = None + request_url: Annotated[str, Field(alias='requestUrl', examples=['http://example.com/'])] + payload_template: Annotated[ + str | None, Field(alias='payloadTemplate', examples=['{\\n \\"userId\\": {{userId}}...']) + ] = None + headers_template: Annotated[ + str | None, Field(alias='headersTemplate', examples=['{\\n \\"Authorization\\": Bearer...']) + ] = None + description: Annotated[str | None, Field(examples=['this is webhook description'])] = None + should_interpolate_strings: Annotated[bool | None, Field(alias='shouldInterpolateStrings', examples=[False])] = None + + +class 
CreateWebhookResponse(BaseModel): + data: Webhook + + +class GetWebhookResponse(BaseModel): + data: Webhook + + +class WebhookUpdate(BaseModel): + is_ad_hoc: Annotated[bool | None, Field(alias='isAdHoc', examples=[False])] = None + event_types: Annotated[list[str] | None, Field(alias='eventTypes', examples=[['ACTOR.RUN.SUCCEEDED']])] = None + condition: WebhookCondition | None = None + ignore_ssl_errors: Annotated[bool | None, Field(alias='ignoreSslErrors', examples=[False])] = None + do_not_retry: Annotated[bool | None, Field(alias='doNotRetry', examples=[False])] = None + request_url: Annotated[str | None, Field(alias='requestUrl', examples=['http://example.com/'])] = None + payload_template: Annotated[ + str | None, Field(alias='payloadTemplate', examples=['{\\n \\"userId\\": {{userId}}...']) + ] = None + headers_template: Annotated[ + str | None, Field(alias='headersTemplate', examples=['{\\n \\"Authorization\\": Bearer...']) + ] = None + description: Annotated[str | None, Field(examples=['this is webhook description'])] = None + should_interpolate_strings: Annotated[bool | None, Field(alias='shouldInterpolateStrings', examples=[False])] = None + + +class UpdateWebhookResponse(BaseModel): + data: Webhook + + +class EventData(BaseModel): + actor_id: Annotated[str, Field(alias='actorId', examples=['vvE7iMKuMc5qTHHsR'])] + actor_run_id: Annotated[str, Field(alias='actorRunId', examples=['JgwXN9BdwxGcu9MMF'])] + + +class Calls(BaseModel): + started_at: Annotated[str | None, Field(alias='startedAt', examples=['2019-12-12T07:34:14.202Z'])] = None + finished_at: Annotated[str | None, Field(alias='finishedAt', examples=['2019-12-12T07:34:14.202Z'])] = None + error_message: Annotated[str | None, Field(alias='errorMessage', examples=['Cannot send request'])] = None + response_status: Annotated[float | None, Field(alias='responseStatus', examples=[200])] = None + response_body: Annotated[str | None, Field(alias='responseBody', examples=[{'foo': 'bar'}])] = None + + +class 
WebhookDispatch(BaseModel): + id: Annotated[str, Field(examples=['asdLZtadYvn4mBZmm'])] + user_id: Annotated[str, Field(alias='userId', examples=['wRsJZtadYvn4mBZmm'])] + webhook_id: Annotated[str, Field(alias='webhookId', examples=['asdLZtadYvn4mBZmm'])] + created_at: Annotated[str, Field(alias='createdAt', examples=['2019-12-12T07:34:14.202Z'])] + status: Annotated[str, Field(examples=['SUCCEEDED'])] + event_type: Annotated[str, Field(alias='eventType', examples=['ACTOR.RUN.SUCCEEDED'])] + event_data: Annotated[EventData, Field(alias='eventData', title='eventData')] + calls: Annotated[Calls | None, Field(title='calls')] = None + + +class TestWebhookResponse(BaseModel): + data: WebhookDispatch + + +class Data17(PaginationResponse): + items: list[WebhookDispatch] + + +class WebhookDispatchList(BaseModel): + data: Data17 | None = None + + +class GetWebhookDispatchResponse(BaseModel): + data: WebhookDispatch + + +class GetListOfSchedulesResponseDataItemsActions(BaseModel): + id: Annotated[str, Field(examples=['ZReCs7hkdieq8ZUki'])] + type: Annotated[str, Field(examples=['RUN_ACTOR'])] + actor_id: Annotated[str, Field(alias='actorId', examples=['HKhKmiCMrDgu9eXeE'])] + + +class GetListOfSchedulesResponseDataItems(BaseModel): + id: Annotated[str, Field(examples=['asdLZtadYvn4mBZmm'])] + user_id: Annotated[str, Field(alias='userId', examples=['wRsJZtadYvn4mBZmm'])] + name: Annotated[str, Field(examples=['my-schedule'])] + created_at: Annotated[str, Field(alias='createdAt', examples=['2019-12-12T07:34:14.202Z'])] + modified_at: Annotated[str, Field(alias='modifiedAt', examples=['2019-12-20T06:33:11.202Z'])] + last_run_at: Annotated[str, Field(alias='lastRunAt', examples=['2019-04-12T07:33:10.202Z'])] + next_run_at: Annotated[str, Field(alias='nextRunAt', examples=['2019-04-12T07:34:10.202Z'])] + is_enabled: Annotated[bool, Field(alias='isEnabled', examples=[True])] + is_exclusive: Annotated[bool, Field(alias='isExclusive', examples=[True])] + cron_expression: 
Annotated[str, Field(alias='cronExpression', examples=['* * * * *'])] + timezone: Annotated[str, Field(examples=['UTC'])] + actions: list[GetListOfSchedulesResponseDataItemsActions] + + +class GetListOfSchedulesResponseData(BaseModel): + total: Annotated[float, Field(examples=[2])] + offset: Annotated[float, Field(examples=[0])] + limit: Annotated[float, Field(examples=[1000])] + desc: Annotated[bool, Field(examples=[False])] + count: Annotated[float, Field(examples=[2])] + items: list[GetListOfSchedulesResponseDataItems] + + +class GetListOfSchedulesResponse(BaseModel): + data: GetListOfSchedulesResponseData + + +class ScheduleActionsRunInput(BaseModel): + body: Annotated[str | None, Field(examples=['{\\n \\"foo\\": \\"actor\\"\\n}'])] = None + content_type: Annotated[str | None, Field(alias='contentType', examples=['application/json; charset=utf-8'])] = None + + +class ScheduleActionsRunOptions(BaseModel): + build: Annotated[str | None, Field(examples=['latest'])] = None + timeout_secs: Annotated[float | None, Field(alias='timeoutSecs', examples=[60])] = None + memory_mbytes: Annotated[float | None, Field(alias='memoryMbytes', examples=[1024])] = None + restart_on_error: Annotated[bool | None, Field(alias='restartOnError', examples=[False])] = None + + +class ScheduleCreateActions(BaseModel): + type: Annotated[str, Field(examples=['RUN_ACTOR'])] + actor_id: Annotated[str, Field(alias='actorId', examples=['jF8GGEvbEg4Au3NLA'])] + run_input: Annotated[ScheduleActionsRunInput | None, Field(alias='runInput')] = None + run_options: Annotated[ScheduleActionsRunOptions | None, Field(alias='runOptions')] = None + + +class ScheduleCreate(BaseModel): + name: Annotated[str | None, Field(examples=['my-schedule'])] = None + is_enabled: Annotated[bool | None, Field(alias='isEnabled', examples=[True])] = None + is_exclusive: Annotated[bool | None, Field(alias='isExclusive', examples=[True])] = None + cron_expression: Annotated[str | None, Field(alias='cronExpression', 
examples=['* * * * *'])] = None + timezone: Annotated[str | None, Field(examples=['UTC'])] = None + description: Annotated[str | None, Field(examples=['Schedule of actor ...'])] = None + actions: list[ScheduleCreateActions] | None = None + + +class ScheduleResponseDataActions(BaseModel): + id: Annotated[str, Field(examples=['c6KfSgoQzFhMk3etc'])] + type: Annotated[str, Field(examples=['RUN_ACTOR'])] + actor_id: Annotated[str, Field(alias='actorId', examples=['jF8GGEvbEg4Au3NLA'])] + run_input: Annotated[ScheduleActionsRunInput | None, Field(alias='runInput')] = None + run_options: Annotated[ScheduleActionsRunOptions | None, Field(alias='runOptions')] = None + + +class ScheduleResponseData(BaseModel): + id: Annotated[str, Field(examples=['asdLZtadYvn4mBZmm'])] + user_id: Annotated[str, Field(alias='userId', examples=['wRsJZtadYvn4mBZmm'])] + name: Annotated[str, Field(examples=['my-schedule'])] + cron_expression: Annotated[str, Field(alias='cronExpression', examples=['* * * * *'])] + timezone: Annotated[str, Field(examples=['UTC'])] + is_enabled: Annotated[bool, Field(alias='isEnabled', examples=[True])] + is_exclusive: Annotated[bool, Field(alias='isExclusive', examples=[True])] + description: Annotated[str | None, Field(examples=['Schedule of actor ...'])] = None + created_at: Annotated[str, Field(alias='createdAt', examples=['2019-12-12T07:34:14.202Z'])] + modified_at: Annotated[str, Field(alias='modifiedAt', examples=['2019-12-20T06:33:11.202Z'])] + next_run_at: Annotated[str | None, Field(alias='nextRunAt', examples=['2019-04-12T07:34:10.202Z'])] = None + last_run_at: Annotated[str | None, Field(alias='lastRunAt', examples=['2019-04-12T07:33:10.202Z'])] = None + actions: list[ScheduleResponseDataActions] + + +class ScheduleResponse(BaseModel): + data: ScheduleResponseData + + +class ScheduleInvoked(BaseModel): + message: Annotated[str, Field(examples=['Schedule invoked'])] + level: Annotated[str, Field(examples=['INFO'])] + created_at: Annotated[str, 
Field(alias='createdAt', examples=['2019-03-26T12:28:00.370Z'])] + + +class GetScheduleLogResponse(BaseModel): + data: list[ScheduleInvoked] + + +class CurrentPricingInfo(BaseModel): + pricing_model: Annotated[str, Field(alias='pricingModel', examples=['FREE'])] + + +class StoreListActor(BaseModel): + id: Annotated[str, Field(examples=['zdc3Pyhyz3m8vjDeM'])] + title: Annotated[str, Field(examples=['My Public Actor'])] + name: Annotated[str, Field(examples=['my-public-actor'])] + username: Annotated[str, Field(examples=['jane35'])] + user_full_name: Annotated[str, Field(alias='userFullName', examples=['Jane H. Doe'])] + description: Annotated[str, Field(examples=['My public actor!'])] + categories: Annotated[list[str] | None, Field(examples=[['MARKETING', 'LEAD_GENERATION']])] = None + notice: str | None = None + picture_url: Annotated[str | None, Field(alias='pictureUrl', examples=['https://...'])] = None + user_picture_url: Annotated[str | None, Field(alias='userPictureUrl', examples=['https://...'])] = None + url: Annotated[str | None, Field(examples=['https://...'])] = None + stats: ActorStats + current_pricing_info: Annotated[CurrentPricingInfo, Field(alias='currentPricingInfo')] + + +class StoreData(BaseModel): + total: Annotated[float, Field(examples=[100])] + offset: Annotated[float, Field(examples=[0])] + limit: Annotated[float, Field(examples=[1000])] + desc: Annotated[bool, Field(examples=[False])] + count: Annotated[float, Field(examples=[1])] + items: list[StoreListActor] + + +class GetListOfActorsInStoreResponse(BaseModel): + data: StoreData + + +class Profile(BaseModel): + bio: Annotated[str | None, Field(examples=['I started web scraping in 1985 using Altair BASIC.'])] = None + name: Annotated[str | None, Field(examples=['Jane Doe'])] = None + picture_url: Annotated[str | None, Field(alias='pictureUrl', examples=['/img/anonymous_user_picture.png'])] = None + github_username: Annotated[str | None, Field(alias='githubUsername', examples=['torvalds.'])] 
= None + website_url: Annotated[str | None, Field(alias='websiteUrl', examples=['http://www.example.com'])] = None + twitter_username: Annotated[str | None, Field(alias='twitterUsername', examples=['@BillGates'])] = None + + +class UserPublicInfo(BaseModel): + username: Annotated[str, Field(examples=['d7b9MDYsbtX5L7XAj'])] + profile: Profile + + +class GetPublicUserDataResponse(BaseModel): + data: UserPublicInfo + + +class ProxyGroup(BaseModel): + name: Annotated[str, Field(examples=['Group1'])] + description: Annotated[str, Field(examples=['Group1 description'])] + available_count: Annotated[float, Field(alias='availableCount', examples=[10])] + + +class Proxy(BaseModel): + password: Annotated[str, Field(examples=['ad78knd9Jkjd86'])] + groups: list[ProxyGroup] + + +class AvailableProxyGroups(BaseModel): + somegroup: Annotated[float, Field(alias='SOMEGROUP', examples=[20])] + anothergroup: Annotated[float, Field(alias='ANOTHERGROUP', examples=[200])] + + +class Plan(BaseModel): + id: Annotated[str, Field(examples=['Personal'])] + description: Annotated[str, Field(examples=['Cost-effective plan for freelancers, developers and students.'])] + is_enabled: Annotated[bool, Field(alias='isEnabled', examples=[True])] + monthly_base_price_usd: Annotated[float, Field(alias='monthlyBasePriceUsd', examples=[49])] + monthly_usage_credits_usd: Annotated[float, Field(alias='monthlyUsageCreditsUsd', examples=[49])] + usage_discount_percent: Annotated[float, Field(alias='usageDiscountPercent', examples=[0])] + enabled_platform_features: Annotated[ + list[list[Any]], + Field( + alias='enabledPlatformFeatures', + examples=[[['ACTORS'], ['STORAGE'], ['PROXY_SERPS'], ['SCHEDULER'], ['WEBHOOKS']]], + ), + ] + max_monthly_usage_usd: Annotated[float, Field(alias='maxMonthlyUsageUsd', examples=[9999])] + max_actor_memory_gbytes: Annotated[float, Field(alias='maxActorMemoryGbytes', examples=[32])] + max_monthly_actor_compute_units: Annotated[float, 
Field(alias='maxMonthlyActorComputeUnits', examples=[1000])] + max_monthly_residential_proxy_gbytes: Annotated[ + float, Field(alias='maxMonthlyResidentialProxyGbytes', examples=[10]) + ] + max_monthly_proxy_serps: Annotated[float, Field(alias='maxMonthlyProxySerps', examples=[30000])] + max_monthly_external_data_transfer_gbytes: Annotated[ + float, Field(alias='maxMonthlyExternalDataTransferGbytes', examples=[1000]) + ] + max_actor_count: Annotated[float, Field(alias='maxActorCount', examples=[100])] + max_actor_task_count: Annotated[float, Field(alias='maxActorTaskCount', examples=[1000])] + data_retention_days: Annotated[float, Field(alias='dataRetentionDays', examples=[14])] + available_proxy_groups: Annotated[AvailableProxyGroups, Field(alias='availableProxyGroups')] + team_account_seat_count: Annotated[float, Field(alias='teamAccountSeatCount', examples=[1])] + support_level: Annotated[str, Field(alias='supportLevel', examples=['COMMUNITY'])] + available_add_ons: Annotated[list[str], Field(alias='availableAddOns', examples=[[]])] + + +class EffectivePlatformFeature(BaseModel): + is_enabled: Annotated[bool, Field(alias='isEnabled', examples=[True])] + disabled_reason: Annotated[ + str, + Field( + alias='disabledReason', + examples=[ + 'The "Selected public Actors for developers" feature is not enabled for your account. 
Please upgrade your plan or contact support@apify.com' + ], + ), + ] + disabled_reason_type: Annotated[str, Field(alias='disabledReasonType', examples=['DISABLED'])] + is_trial: Annotated[bool, Field(alias='isTrial', examples=[False])] + trial_expiration_at: Annotated[str, Field(alias='trialExpirationAt', examples=['2025-01-01T14:00:00.000Z'])] + + +class EffectivePlatformFeatures(BaseModel): + actors: Annotated[EffectivePlatformFeature, Field(alias='ACTORS')] + storage: Annotated[EffectivePlatformFeature, Field(alias='STORAGE')] + scheduler: Annotated[EffectivePlatformFeature, Field(alias='SCHEDULER')] + proxy: Annotated[EffectivePlatformFeature, Field(alias='PROXY')] + proxy_external_access: Annotated[EffectivePlatformFeature, Field(alias='PROXY_EXTERNAL_ACCESS')] + proxy_residential: Annotated[EffectivePlatformFeature, Field(alias='PROXY_RESIDENTIAL')] + proxy_serps: Annotated[EffectivePlatformFeature, Field(alias='PROXY_SERPS')] + webhooks: Annotated[EffectivePlatformFeature, Field(alias='WEBHOOKS')] + actors_public_all: Annotated[EffectivePlatformFeature, Field(alias='ACTORS_PUBLIC_ALL')] + actors_public_developer: Annotated[EffectivePlatformFeature, Field(alias='ACTORS_PUBLIC_DEVELOPER')] + + +class UserPrivateInfo(BaseModel): + id: Annotated[str, Field(examples=['YiKoxjkaS9gjGTqhF'])] + username: Annotated[str, Field(examples=['myusername'])] + profile: Profile + email: Annotated[str, Field(examples=['bob@example.com'])] + proxy: Proxy + plan: Plan + effective_platform_features: Annotated[EffectivePlatformFeatures, Field(alias='effectivePlatformFeatures')] + created_at: Annotated[str, Field(alias='createdAt', examples=['2022-11-29T14:48:29.381Z'])] + is_paying: Annotated[bool, Field(alias='isPaying', examples=[True])] + + +class GetPrivateUserDataResponse(BaseModel): + data: UserPrivateInfo + + +class UsageCycle(BaseModel): + start_at: Annotated[str, Field(alias='startAt', examples=['2022-10-02T00:00:00.000Z'])] + end_at: Annotated[str, Field(alias='endAt', 
examples=['2022-11-01T23:59:59.999Z'])] + + +class PriceTiers(BaseModel): + quantity_above: Annotated[float, Field(alias='quantityAbove', examples=[0])] + discount_percent: Annotated[float, Field(alias='discountPercent', examples=[100])] + tier_quantity: Annotated[float, Field(alias='tierQuantity', examples=[0.39])] + unit_price_usd: Annotated[float, Field(alias='unitPriceUsd', examples=[0])] + price_usd: Annotated[float, Field(alias='priceUsd', examples=[0])] + + +class UsageItem(BaseModel): + quantity: Annotated[float, Field(examples=[2.784475])] + base_amount_usd: Annotated[float, Field(alias='baseAmountUsd', examples=[0.69611875])] + base_unit_price_usd: Annotated[float, Field(alias='baseUnitPriceUsd', examples=[0.25])] + amount_after_volume_discount_usd: Annotated[ + float, Field(alias='amountAfterVolumeDiscountUsd', examples=[0.69611875]) + ] + price_tiers: Annotated[list[PriceTiers], Field(alias='priceTiers')] + + +class MonthlyServiceUsage(BaseModel): + usage_item: Annotated[UsageItem, Field(alias='USAGE_ITEM')] + + +class ServiceUsage(BaseModel): + service_usage_item: Annotated[UsageItem, Field(alias='SERVICE_USAGE_ITEM')] + + +class DailyServiceUsages(BaseModel): + date: Annotated[str, Field(examples=['2022-10-02T00:00:00.000Z'])] + service_usage: Annotated[ServiceUsage, Field(alias='serviceUsage')] + total_usage_credits_usd: Annotated[float, Field(alias='totalUsageCreditsUsd', examples=[0.0474385791970591])] + + +class MonthlyUsage(BaseModel): + usage_cycle: Annotated[UsageCycle, Field(alias='usageCycle')] + monthly_service_usage: Annotated[MonthlyServiceUsage, Field(alias='monthlyServiceUsage')] + daily_service_usages: Annotated[list[DailyServiceUsages], Field(alias='dailyServiceUsages')] + total_usage_credits_usd_before_volume_discount: Annotated[ + float, Field(alias='totalUsageCreditsUsdBeforeVolumeDiscount', examples=[0.786143673840067]) + ] + total_usage_credits_usd_after_volume_discount: Annotated[ + float, 
Field(alias='totalUsageCreditsUsdAfterVolumeDiscount', examples=[0.786143673840067]) + ] + + +class GetMonthlyUsageResponse(BaseModel): + data: MonthlyUsage + + +class MonthlyUsageCycle(BaseModel): + start_at: Annotated[str, Field(alias='startAt', examples=['2022-10-02T00:00:00.000Z'])] + end_at: Annotated[str, Field(alias='endAt', examples=['2022-11-01T23:59:59.999Z'])] + + +class Limits(BaseModel): + max_monthly_usage_usd: Annotated[float, Field(alias='maxMonthlyUsageUsd', examples=[300])] + max_monthly_actor_compute_units: Annotated[float, Field(alias='maxMonthlyActorComputeUnits', examples=[1000])] + max_monthly_external_data_transfer_gbytes: Annotated[ + float, Field(alias='maxMonthlyExternalDataTransferGbytes', examples=[7]) + ] + max_monthly_proxy_serps: Annotated[float, Field(alias='maxMonthlyProxySerps', examples=[50])] + max_monthly_residential_proxy_gbytes: Annotated[ + float, Field(alias='maxMonthlyResidentialProxyGbytes', examples=[0.5]) + ] + max_actor_memory_gbytes: Annotated[float, Field(alias='maxActorMemoryGbytes', examples=[16])] + max_actor_count: Annotated[float, Field(alias='maxActorCount', examples=[100])] + max_actor_task_count: Annotated[float, Field(alias='maxActorTaskCount', examples=[1000])] + max_concurrent_actor_jobs: Annotated[float, Field(alias='maxConcurrentActorJobs', examples=[256])] + max_team_account_seat_count: Annotated[float, Field(alias='maxTeamAccountSeatCount', examples=[9])] + data_retention_days: Annotated[float, Field(alias='dataRetentionDays', examples=[90])] + + +class Current(BaseModel): + monthly_usage_usd: Annotated[float, Field(alias='monthlyUsageUsd', examples=[43])] + monthly_actor_compute_units: Annotated[float, Field(alias='monthlyActorComputeUnits', examples=[500.784475])] + monthly_external_data_transfer_gbytes: Annotated[ + float, Field(alias='monthlyExternalDataTransferGbytes', examples=[3.00861903931946]) + ] + monthly_proxy_serps: Annotated[float, Field(alias='monthlyProxySerps', examples=[34])] + 
monthly_residential_proxy_gbytes: Annotated[float, Field(alias='monthlyResidentialProxyGbytes', examples=[0.4])] + actor_memory_gbytes: Annotated[float, Field(alias='actorMemoryGbytes', examples=[8])] + actor_count: Annotated[float, Field(alias='actorCount', examples=[31])] + actor_task_count: Annotated[float, Field(alias='actorTaskCount', examples=[130])] + active_actor_job_count: Annotated[float, Field(alias='activeActorJobCount', examples=[0])] + team_account_seat_count: Annotated[float, Field(alias='teamAccountSeatCount', examples=[5])] + + +class AccountLimits(BaseModel): + monthly_usage_cycle: Annotated[MonthlyUsageCycle, Field(alias='monthlyUsageCycle')] + limits: Limits + current: Current + + +class GetLimitsResponse(BaseModel): + data: AccountLimits + + +class UpdateLimitsRequest(BaseModel): + max_monthly_usage_usd: Annotated[float | None, Field(alias='maxMonthlyUsageUsd', examples=[300])] = None + """ + If your platform usage in the billing period exceeds the prepaid usage, you will be charged extra. + Setting this property you can update your hard limit on monthly platform usage to prevent accidental overage or to limit the extra charges + + """ + data_retention_days: Annotated[float | None, Field(alias='dataRetentionDays', examples=[90])] = None + """ + Apify securely stores your ten most recent Actor runs indefinitely, ensuring they are always accessible. + Unnamed storages and other Actor runs are automatically deleted after the retention period. + If you're subscribed, you can change it to keep data for longer or to limit your usage. 
[Lear more](https://docs.apify.com/platform/storage/usage#data-retention) + + """ diff --git a/src/apify_client/clients/resource_clients/__init__.py b/src/apify_client/_resource_clients/__init__.py similarity index 96% rename from src/apify_client/clients/resource_clients/__init__.py rename to src/apify_client/_resource_clients/__init__.py index e818ce34..154e0132 100644 --- a/src/apify_client/clients/resource_clients/__init__.py +++ b/src/apify_client/_resource_clients/__init__.py @@ -4,6 +4,7 @@ from .actor_env_var_collection import ActorEnvVarCollectionClient, ActorEnvVarCollectionClientAsync from .actor_version import ActorVersionClient, ActorVersionClientAsync from .actor_version_collection import ActorVersionCollectionClient, ActorVersionCollectionClientAsync +from .base import ActorJobBaseClient, ActorJobBaseClientAsync from .build import BuildClient, BuildClientAsync from .build_collection import BuildCollectionClient, BuildCollectionClientAsync from .dataset import DatasetClient, DatasetClientAsync @@ -35,6 +36,8 @@ 'ActorEnvVarClientAsync', 'ActorEnvVarCollectionClient', 'ActorEnvVarCollectionClientAsync', + 'ActorJobBaseClient', + 'ActorJobBaseClientAsync', 'ActorVersionClient', 'ActorVersionClientAsync', 'ActorVersionCollectionClient', diff --git a/src/apify_client/clients/resource_clients/actor.py b/src/apify_client/_resource_clients/actor.py similarity index 94% rename from src/apify_client/clients/resource_clients/actor.py rename to src/apify_client/_resource_clients/actor.py index 675783c4..c03e9ba1 100644 --- a/src/apify_client/clients/resource_clients/actor.py +++ b/src/apify_client/_resource_clients/actor.py @@ -2,28 +2,31 @@ from typing import TYPE_CHECKING, Any, Literal -from apify_client._utils import ( - encode_key_value_store_record_value, - encode_webhook_list_to_base64, - filter_out_none_values_recursively, - maybe_extract_enum_member_value, - parse_date_fields, - pluck_data, -) -from apify_client.clients.base import ResourceClient, 
ResourceClientAsync -from apify_client.clients.resource_clients.actor_version import ActorVersionClient, ActorVersionClientAsync -from apify_client.clients.resource_clients.actor_version_collection import ( +from apify_client._models import Actor, Build, Run +from apify_client._resource_clients.actor_version import ActorVersionClient, ActorVersionClientAsync +from apify_client._resource_clients.actor_version_collection import ( ActorVersionCollectionClient, ActorVersionCollectionClientAsync, ) -from apify_client.clients.resource_clients.build import BuildClient, BuildClientAsync -from apify_client.clients.resource_clients.build_collection import BuildCollectionClient, BuildCollectionClientAsync -from apify_client.clients.resource_clients.run import RunClient, RunClientAsync -from apify_client.clients.resource_clients.run_collection import RunCollectionClient, RunCollectionClientAsync -from apify_client.clients.resource_clients.webhook_collection import ( +from apify_client._resource_clients.base import ResourceClient, ResourceClientAsync +from apify_client._resource_clients.build import BuildClient, BuildClientAsync +from apify_client._resource_clients.build_collection import ( + BuildCollectionClient, + BuildCollectionClientAsync, +) +from apify_client._resource_clients.run import RunClient, RunClientAsync +from apify_client._resource_clients.run_collection import RunCollectionClient, RunCollectionClientAsync +from apify_client._resource_clients.webhook_collection import ( WebhookCollectionClient, WebhookCollectionClientAsync, ) +from apify_client._utils import ( + encode_key_value_store_record_value, + encode_webhook_list_to_base64, + filter_out_none_values_recursively, + maybe_extract_enum_member_value, + response_to_dict, +) if TYPE_CHECKING: from decimal import Decimal @@ -105,7 +108,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'acts') super().__init__(*args, resource_path=resource_path, **kwargs) - 
def get(self) -> dict | None: + def get(self) -> Actor | None: """Retrieve the Actor. https://docs.apify.com/api/v2#/reference/actors/actor-object/get-actor @@ -113,7 +116,8 @@ def get(self) -> dict | None: Returns: The retrieved Actor. """ - return self._get() + result = self._get() + return Actor.model_validate(result) if result is not None else None def update( self, @@ -143,7 +147,7 @@ def update( actor_standby_memory_mbytes: int | None = None, pricing_infos: list[dict] | None = None, actor_permission_level: ActorPermissionLevel | None = None, - ) -> dict: + ) -> Actor: """Update the Actor with the specified fields. https://docs.apify.com/api/v2#/reference/actors/actor-object/update-actor @@ -211,7 +215,8 @@ def update( actor_permission_level=actor_permission_level, ) - return self._update(filter_out_none_values_recursively(actor_representation)) + result = self._update(filter_out_none_values_recursively(actor_representation)) + return Actor.model_validate(result) def delete(self) -> None: """Delete the Actor. @@ -234,7 +239,7 @@ def start( force_permission_level: ActorPermissionLevel | None = None, wait_for_finish: int | None = None, webhooks: list[dict] | None = None, - ) -> dict: + ) -> Run: """Start the Actor and immediately return the Run object. https://docs.apify.com/api/v2#/reference/actors/run-collection/run-actor @@ -290,7 +295,8 @@ def start( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + data = response_to_dict(response) + return Run.model_validate(data) def call( self, @@ -307,7 +313,7 @@ def call( force_permission_level: ActorPermissionLevel | None = None, wait_secs: int | None = None, logger: Logger | None | Literal['default'] = 'default', - ) -> dict | None: + ) -> Run | None: """Start the Actor and wait for it to finish before returning the Run object. It waits indefinitely, unless the wait_secs argument is provided. 
@@ -356,15 +362,15 @@ def call( force_permission_level=force_permission_level, ) if not logger: - return self.root_client.run(started_run['id']).wait_for_finish(wait_secs=wait_secs) + return self.root_client.run(started_run.id).wait_for_finish(wait_secs=wait_secs) - run_client = self.root_client.run(run_id=started_run['id']) + run_client = self.root_client.run(run_id=started_run.id) if logger == 'default': logger = None with run_client.get_status_message_watcher(to_logger=logger), run_client.get_streamed_log(to_logger=logger): - return self.root_client.run(started_run['id']).wait_for_finish(wait_secs=wait_secs) + return self.root_client.run(started_run.id).wait_for_finish(wait_secs=wait_secs) def build( self, @@ -374,7 +380,7 @@ def build( tag: str | None = None, use_cache: bool | None = None, wait_for_finish: int | None = None, - ) -> dict: + ) -> Build: """Build the Actor. https://docs.apify.com/api/v2#/reference/actors/build-collection/build-actor @@ -408,7 +414,7 @@ def build( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + return Build.model_validate(response_to_dict(response)) def builds(self) -> BuildCollectionClient: """Retrieve a client for the builds of this Actor.""" @@ -439,7 +445,7 @@ async def default_build( ) response = self.http_client.call(url=self._url('builds/default'), method='GET', params=request_params) - data = pluck_data(response.json()) + data = response_to_dict(response) return BuildClient( base_url=self.base_url, @@ -528,7 +534,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'acts') super().__init__(*args, resource_path=resource_path, **kwargs) - async def get(self) -> dict | None: + async def get(self) -> Actor | None: """Retrieve the Actor. https://docs.apify.com/api/v2#/reference/actors/actor-object/get-actor @@ -536,7 +542,8 @@ async def get(self) -> dict | None: Returns: The retrieved Actor. 
""" - return await self._get() + result = await self._get() + return Actor.model_validate(result) if result is not None else None async def update( self, @@ -566,7 +573,7 @@ async def update( actor_standby_memory_mbytes: int | None = None, pricing_infos: list[dict] | None = None, actor_permission_level: ActorPermissionLevel | None = None, - ) -> dict: + ) -> Actor: """Update the Actor with the specified fields. https://docs.apify.com/api/v2#/reference/actors/actor-object/update-actor @@ -634,7 +641,8 @@ async def update( actor_permission_level=actor_permission_level, ) - return await self._update(filter_out_none_values_recursively(actor_representation)) + result = await self._update(filter_out_none_values_recursively(actor_representation)) + return Actor.model_validate(result) async def delete(self) -> None: """Delete the Actor. @@ -657,7 +665,7 @@ async def start( force_permission_level: ActorPermissionLevel | None = None, wait_for_finish: int | None = None, webhooks: list[dict] | None = None, - ) -> dict: + ) -> Run: """Start the Actor and immediately return the Run object. https://docs.apify.com/api/v2#/reference/actors/run-collection/run-actor @@ -713,7 +721,8 @@ async def start( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + data = response_to_dict(response) + return Run.model_validate(data) async def call( self, @@ -730,7 +739,7 @@ async def call( force_permission_level: ActorPermissionLevel | None = None, wait_secs: int | None = None, logger: Logger | None | Literal['default'] = 'default', - ) -> dict | None: + ) -> Run | None: """Start the Actor and wait for it to finish before returning the Run object. It waits indefinitely, unless the wait_secs argument is provided. 
@@ -780,9 +789,9 @@ async def call( ) if not logger: - return await self.root_client.run(started_run['id']).wait_for_finish(wait_secs=wait_secs) + return await self.root_client.run(started_run.id).wait_for_finish(wait_secs=wait_secs) - run_client = self.root_client.run(run_id=started_run['id']) + run_client = self.root_client.run(run_id=started_run.id) if logger == 'default': logger = None @@ -791,7 +800,7 @@ async def call( streamed_log = await run_client.get_streamed_log(to_logger=logger) async with status_redirector, streamed_log: - return await self.root_client.run(started_run['id']).wait_for_finish(wait_secs=wait_secs) + return await self.root_client.run(started_run.id).wait_for_finish(wait_secs=wait_secs) async def build( self, @@ -801,7 +810,7 @@ async def build( tag: str | None = None, use_cache: bool | None = None, wait_for_finish: int | None = None, - ) -> dict: + ) -> Build: """Build the Actor. https://docs.apify.com/api/v2#/reference/actors/build-collection/build-actor @@ -835,7 +844,8 @@ async def build( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + data = response_to_dict(response) + return Build.model_validate(data) def builds(self) -> BuildCollectionClientAsync: """Retrieve a client for the builds of this Actor.""" @@ -870,7 +880,7 @@ async def default_build( method='GET', params=request_params, ) - data = pluck_data(response.json()) + data = response_to_dict(response) return BuildClientAsync( base_url=self.base_url, diff --git a/src/apify_client/clients/resource_clients/actor_collection.py b/src/apify_client/_resource_clients/actor_collection.py similarity index 95% rename from src/apify_client/clients/resource_clients/actor_collection.py rename to src/apify_client/_resource_clients/actor_collection.py index 95c7b105..0889f8bd 100644 --- a/src/apify_client/clients/resource_clients/actor_collection.py +++ b/src/apify_client/_resource_clients/actor_collection.py @@ -2,12 +2,13 @@ from typing import TYPE_CHECKING, 
Any, Literal +from apify_client._models import Actor, ActorShort +from apify_client._resource_clients.actor import get_actor_representation +from apify_client._resource_clients.base import ResourceCollectionClient, ResourceCollectionClientAsync from apify_client._utils import filter_out_none_values_recursively -from apify_client.clients.base import ResourceCollectionClient, ResourceCollectionClientAsync -from apify_client.clients.resource_clients.actor import get_actor_representation if TYPE_CHECKING: - from apify_client.clients.base.resource_collection_client import ListPage + from apify_client._types import ListPage class ActorCollectionClient(ResourceCollectionClient): @@ -25,7 +26,7 @@ def list( offset: int | None = None, desc: bool | None = None, sort_by: Literal['createdAt', 'stats.lastRunStartedAt'] | None = 'createdAt', - ) -> ListPage[dict]: + ) -> ListPage[ActorShort]: """List the Actors the user has created or used. https://docs.apify.com/api/v2#/reference/actors/actor-collection/get-list-of-actors @@ -68,7 +69,7 @@ def create( actor_standby_idle_timeout_secs: int | None = None, actor_standby_build: str | None = None, actor_standby_memory_mbytes: int | None = None, - ) -> dict: + ) -> Actor: """Create a new Actor. https://docs.apify.com/api/v2#/reference/actors/actor-collection/create-actor @@ -132,7 +133,8 @@ def create( actor_standby_memory_mbytes=actor_standby_memory_mbytes, ) - return self._create(filter_out_none_values_recursively(actor_representation)) + result = self._create(filter_out_none_values_recursively(actor_representation)) + return Actor.model_validate(result) class ActorCollectionClientAsync(ResourceCollectionClientAsync): @@ -150,7 +152,7 @@ async def list( offset: int | None = None, desc: bool | None = None, sort_by: Literal['createdAt', 'stats.lastRunStartedAt'] | None = 'createdAt', - ) -> ListPage[dict]: + ) -> ListPage[ActorShort]: """List the Actors the user has created or used. 
https://docs.apify.com/api/v2#/reference/actors/actor-collection/get-list-of-actors @@ -193,7 +195,7 @@ async def create( actor_standby_idle_timeout_secs: int | None = None, actor_standby_build: str | None = None, actor_standby_memory_mbytes: int | None = None, - ) -> dict: + ) -> Actor: """Create a new Actor. https://docs.apify.com/api/v2#/reference/actors/actor-collection/create-actor @@ -257,4 +259,5 @@ async def create( actor_standby_memory_mbytes=actor_standby_memory_mbytes, ) - return await self._create(filter_out_none_values_recursively(actor_representation)) + result = await self._create(filter_out_none_values_recursively(actor_representation)) + return Actor.model_validate(result) diff --git a/src/apify_client/clients/resource_clients/actor_env_var.py b/src/apify_client/_resource_clients/actor_env_var.py similarity index 83% rename from src/apify_client/clients/resource_clients/actor_env_var.py rename to src/apify_client/_resource_clients/actor_env_var.py index 4fcc3968..df8001c4 100644 --- a/src/apify_client/clients/resource_clients/actor_env_var.py +++ b/src/apify_client/_resource_clients/actor_env_var.py @@ -2,8 +2,9 @@ from typing import Any +from apify_client._models import EnvVar +from apify_client._resource_clients.base import ResourceClient, ResourceClientAsync from apify_client._utils import filter_out_none_values_recursively -from apify_client.clients.base import ResourceClient, ResourceClientAsync def get_actor_env_var_representation( @@ -27,7 +28,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'env-vars') super().__init__(*args, resource_path=resource_path, **kwargs) - def get(self) -> dict | None: + def get(self) -> EnvVar | None: """Return information about the Actor environment variable. 
https://docs.apify.com/api/v2#/reference/actors/environment-variable-object/get-environment-variable @@ -35,7 +36,8 @@ def get(self) -> dict | None: Returns: The retrieved Actor environment variable data. """ - return self._get() + result = self._get() + return EnvVar.model_validate(result) if result is not None else None def update( self, @@ -43,7 +45,7 @@ def update( is_secret: bool | None = None, name: str, value: str, - ) -> dict: + ) -> EnvVar: """Update the Actor environment variable with specified fields. https://docs.apify.com/api/v2#/reference/actors/environment-variable-object/update-environment-variable @@ -62,7 +64,8 @@ def update( value=value, ) - return self._update(filter_out_none_values_recursively(actor_env_var_representation)) + result = self._update(filter_out_none_values_recursively(actor_env_var_representation)) + return EnvVar.model_validate(result) def delete(self) -> None: """Delete the Actor environment variable. @@ -79,7 +82,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'env-vars') super().__init__(*args, resource_path=resource_path, **kwargs) - async def get(self) -> dict | None: + async def get(self) -> EnvVar | None: """Return information about the Actor environment variable. https://docs.apify.com/api/v2#/reference/actors/environment-variable-object/get-environment-variable @@ -87,7 +90,8 @@ async def get(self) -> dict | None: Returns: The retrieved Actor environment variable data. """ - return await self._get() + result = await self._get() + return EnvVar.model_validate(result) if result is not None else None async def update( self, @@ -95,7 +99,7 @@ async def update( is_secret: bool | None = None, name: str, value: str, - ) -> dict: + ) -> EnvVar: """Update the Actor environment variable with specified fields. 
https://docs.apify.com/api/v2#/reference/actors/environment-variable-object/update-environment-variable @@ -114,7 +118,8 @@ async def update( value=value, ) - return await self._update(filter_out_none_values_recursively(actor_env_var_representation)) + result = await self._update(filter_out_none_values_recursively(actor_env_var_representation)) + return EnvVar.model_validate(result) async def delete(self) -> None: """Delete the Actor environment variable. diff --git a/src/apify_client/clients/resource_clients/actor_env_var_collection.py b/src/apify_client/_resource_clients/actor_env_var_collection.py similarity index 80% rename from src/apify_client/clients/resource_clients/actor_env_var_collection.py rename to src/apify_client/_resource_clients/actor_env_var_collection.py index 217bdd22..9c232780 100644 --- a/src/apify_client/clients/resource_clients/actor_env_var_collection.py +++ b/src/apify_client/_resource_clients/actor_env_var_collection.py @@ -2,12 +2,13 @@ from typing import TYPE_CHECKING, Any +from apify_client._models import EnvVar +from apify_client._resource_clients.actor_env_var import get_actor_env_var_representation +from apify_client._resource_clients.base import ResourceCollectionClient, ResourceCollectionClientAsync from apify_client._utils import filter_out_none_values_recursively -from apify_client.clients.base import ResourceCollectionClient, ResourceCollectionClientAsync -from apify_client.clients.resource_clients.actor_env_var import get_actor_env_var_representation if TYPE_CHECKING: - from apify_client.clients.base.resource_collection_client import ListPage + from apify_client._types import ListPage class ActorEnvVarCollectionClient(ResourceCollectionClient): @@ -17,7 +18,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'env-vars') super().__init__(*args, resource_path=resource_path, **kwargs) - def list(self) -> ListPage[dict]: + def list(self) -> ListPage[EnvVar]: """List the available 
actor environment variables. https://docs.apify.com/api/v2#/reference/actors/environment-variable-collection/get-list-of-environment-variables @@ -33,7 +34,7 @@ def create( is_secret: bool | None = None, name: str, value: str, - ) -> dict: + ) -> EnvVar: """Create a new actor environment variable. https://docs.apify.com/api/v2#/reference/actors/environment-variable-collection/create-environment-variable @@ -52,7 +53,8 @@ def create( value=value, ) - return self._create(filter_out_none_values_recursively(actor_env_var_representation)) + result = self._create(filter_out_none_values_recursively(actor_env_var_representation)) + return EnvVar.model_validate(result) class ActorEnvVarCollectionClientAsync(ResourceCollectionClientAsync): @@ -62,7 +64,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'env-vars') super().__init__(*args, resource_path=resource_path, **kwargs) - async def list(self) -> ListPage[dict]: + async def list(self) -> ListPage[EnvVar]: """List the available actor environment variables. https://docs.apify.com/api/v2#/reference/actors/environment-variable-collection/get-list-of-environment-variables @@ -78,7 +80,7 @@ async def create( is_secret: bool | None = None, name: str, value: str, - ) -> dict: + ) -> EnvVar: """Create a new actor environment variable. 
https://docs.apify.com/api/v2#/reference/actors/environment-variable-collection/create-environment-variable @@ -97,4 +99,5 @@ async def create( value=value, ) - return await self._create(filter_out_none_values_recursively(actor_env_var_representation)) + result = await self._create(filter_out_none_values_recursively(actor_env_var_representation)) + return EnvVar.model_validate(result) diff --git a/src/apify_client/clients/resource_clients/actor_version.py b/src/apify_client/_resource_clients/actor_version.py similarity index 90% rename from src/apify_client/clients/resource_clients/actor_version.py rename to src/apify_client/_resource_clients/actor_version.py index fe40e772..7e530cbc 100644 --- a/src/apify_client/clients/resource_clients/actor_version.py +++ b/src/apify_client/_resource_clients/actor_version.py @@ -2,13 +2,14 @@ from typing import TYPE_CHECKING, Any -from apify_client._utils import filter_out_none_values_recursively, maybe_extract_enum_member_value -from apify_client.clients.base import ResourceClient, ResourceClientAsync -from apify_client.clients.resource_clients.actor_env_var import ActorEnvVarClient, ActorEnvVarClientAsync -from apify_client.clients.resource_clients.actor_env_var_collection import ( +from apify_client._models import Version +from apify_client._resource_clients.actor_env_var import ActorEnvVarClient, ActorEnvVarClientAsync +from apify_client._resource_clients.actor_env_var_collection import ( ActorEnvVarCollectionClient, ActorEnvVarCollectionClientAsync, ) +from apify_client._resource_clients.base import ResourceClient, ResourceClientAsync +from apify_client._utils import filter_out_none_values_recursively, maybe_extract_enum_member_value if TYPE_CHECKING: from apify_shared.consts import ActorSourceType @@ -46,7 +47,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'versions') super().__init__(*args, resource_path=resource_path, **kwargs) - def get(self) -> dict | None: + def 
get(self) -> Version | None: """Return information about the Actor version. https://docs.apify.com/api/v2#/reference/actors/version-object/get-version @@ -54,7 +55,8 @@ def get(self) -> dict | None: Returns: The retrieved Actor version data. """ - return self._get() + result = self._get() + return Version.model_validate(result) if result is not None else None def update( self, @@ -67,7 +69,7 @@ def update( git_repo_url: str | None = None, tarball_url: str | None = None, github_gist_url: str | None = None, - ) -> dict: + ) -> Version: """Update the Actor version with specified fields. https://docs.apify.com/api/v2#/reference/actors/version-object/update-version @@ -102,7 +104,8 @@ def update( github_gist_url=github_gist_url, ) - return self._update(filter_out_none_values_recursively(actor_version_representation)) + result = self._update(filter_out_none_values_recursively(actor_version_representation)) + return Version.model_validate(result) def delete(self) -> None: """Delete the Actor version. @@ -134,7 +137,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'versions') super().__init__(*args, resource_path=resource_path, **kwargs) - async def get(self) -> dict | None: + async def get(self) -> Version | None: """Return information about the Actor version. https://docs.apify.com/api/v2#/reference/actors/version-object/get-version @@ -142,7 +145,8 @@ async def get(self) -> dict | None: Returns: The retrieved Actor version data. """ - return await self._get() + result = await self._get() + return Version.model_validate(result) if result is not None else None async def update( self, @@ -155,7 +159,7 @@ async def update( git_repo_url: str | None = None, tarball_url: str | None = None, github_gist_url: str | None = None, - ) -> dict: + ) -> Version: """Update the Actor version with specified fields. 
https://docs.apify.com/api/v2#/reference/actors/version-object/update-version @@ -190,7 +194,8 @@ async def update( github_gist_url=github_gist_url, ) - return await self._update(filter_out_none_values_recursively(actor_version_representation)) + result = await self._update(filter_out_none_values_recursively(actor_version_representation)) + return Version.model_validate(result) async def delete(self) -> None: """Delete the Actor version. diff --git a/src/apify_client/clients/resource_clients/actor_version_collection.py b/src/apify_client/_resource_clients/actor_version_collection.py similarity index 90% rename from src/apify_client/clients/resource_clients/actor_version_collection.py rename to src/apify_client/_resource_clients/actor_version_collection.py index 6c3b1b5d..cf6d44ef 100644 --- a/src/apify_client/clients/resource_clients/actor_version_collection.py +++ b/src/apify_client/_resource_clients/actor_version_collection.py @@ -2,14 +2,15 @@ from typing import TYPE_CHECKING, Any +from apify_client._models import Version +from apify_client._resource_clients.actor_version import _get_actor_version_representation +from apify_client._resource_clients.base import ResourceCollectionClient, ResourceCollectionClientAsync from apify_client._utils import filter_out_none_values_recursively -from apify_client.clients.base import ResourceCollectionClient, ResourceCollectionClientAsync -from apify_client.clients.resource_clients.actor_version import _get_actor_version_representation if TYPE_CHECKING: from apify_shared.consts import ActorSourceType - from apify_client.clients.base.resource_collection_client import ListPage + from apify_client._types import ListPage class ActorVersionCollectionClient(ResourceCollectionClient): @@ -19,7 +20,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'versions') super().__init__(*args, resource_path=resource_path, **kwargs) - def list(self) -> ListPage[dict]: + def list(self) -> 
ListPage[Version]: """List the available Actor versions. https://docs.apify.com/api/v2#/reference/actors/version-collection/get-list-of-versions @@ -41,7 +42,7 @@ def create( git_repo_url: str | None = None, tarball_url: str | None = None, github_gist_url: str | None = None, - ) -> dict: + ) -> Version: """Create a new Actor version. https://docs.apify.com/api/v2#/reference/actors/version-collection/create-version @@ -78,7 +79,8 @@ def create( github_gist_url=github_gist_url, ) - return self._create(filter_out_none_values_recursively(actor_version_representation)) + result = self._create(filter_out_none_values_recursively(actor_version_representation)) + return Version.model_validate(result) class ActorVersionCollectionClientAsync(ResourceCollectionClientAsync): @@ -88,7 +90,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'versions') super().__init__(*args, resource_path=resource_path, **kwargs) - async def list(self) -> ListPage[dict]: + async def list(self) -> ListPage[Version]: """List the available Actor versions. https://docs.apify.com/api/v2#/reference/actors/version-collection/get-list-of-versions @@ -110,7 +112,7 @@ async def create( git_repo_url: str | None = None, tarball_url: str | None = None, github_gist_url: str | None = None, - ) -> dict: + ) -> Version: """Create a new Actor version. 
https://docs.apify.com/api/v2#/reference/actors/version-collection/create-version @@ -147,4 +149,5 @@ async def create( github_gist_url=github_gist_url, ) - return await self._create(filter_out_none_values_recursively(actor_version_representation)) + result = await self._create(filter_out_none_values_recursively(actor_version_representation)) + return Version.model_validate(result) diff --git a/src/apify_client/clients/base/__init__.py b/src/apify_client/_resource_clients/base/__init__.py similarity index 83% rename from src/apify_client/clients/base/__init__.py rename to src/apify_client/_resource_clients/base/__init__.py index 27d879c9..ee3e164e 100644 --- a/src/apify_client/clients/base/__init__.py +++ b/src/apify_client/_resource_clients/base/__init__.py @@ -1,11 +1,12 @@ from .actor_job_base_client import ActorJobBaseClient, ActorJobBaseClientAsync -from .base_client import BaseClient, BaseClientAsync +from .base_client import BaseBaseClient, BaseClient, BaseClientAsync from .resource_client import ResourceClient, ResourceClientAsync from .resource_collection_client import ResourceCollectionClient, ResourceCollectionClientAsync __all__ = [ 'ActorJobBaseClient', 'ActorJobBaseClientAsync', + 'BaseBaseClient', 'BaseClient', 'BaseClientAsync', 'ResourceClient', diff --git a/src/apify_client/clients/base/actor_job_base_client.py b/src/apify_client/_resource_clients/base/actor_job_base_client.py similarity index 90% rename from src/apify_client/clients/base/actor_job_base_client.py rename to src/apify_client/_resource_clients/base/actor_job_base_client.py index abf7e274..c61c888e 100644 --- a/src/apify_client/clients/base/actor_job_base_client.py +++ b/src/apify_client/_resource_clients/base/actor_job_base_client.py @@ -7,8 +7,8 @@ from apify_shared.consts import ActorJobStatus -from apify_client._utils import catch_not_found_or_throw, parse_date_fields, pluck_data -from apify_client.clients.base.resource_client import ResourceClient, ResourceClientAsync +from 
apify_client._resource_clients.base.resource_client import ResourceClient, ResourceClientAsync +from apify_client._utils import catch_not_found_or_throw, response_to_dict from apify_client.errors import ApifyApiError DEFAULT_WAIT_FOR_FINISH_SEC = 999999 @@ -37,7 +37,7 @@ def _wait_for_finish(self, wait_secs: int | None = None) -> dict | None: method='GET', params=self._params(waitForFinish=wait_for_finish), ) - job = parse_date_fields(pluck_data(response.json())) + job = response_to_dict(response) seconds_elapsed = math.floor((datetime.now(timezone.utc) - started_at).total_seconds()) if ActorJobStatus(job['status']).is_terminal or ( @@ -68,7 +68,7 @@ def _abort(self, *, gracefully: bool | None = None) -> dict: method='POST', params=self._params(gracefully=gracefully), ) - return parse_date_fields(pluck_data(response.json())) + return response_to_dict(response) class ActorJobBaseClientAsync(ResourceClientAsync): @@ -91,7 +91,7 @@ async def _wait_for_finish(self, wait_secs: int | None = None) -> dict | None: method='GET', params=self._params(waitForFinish=wait_for_finish), ) - job = parse_date_fields(pluck_data(response.json())) + job = response_to_dict(response) seconds_elapsed = math.floor((datetime.now(timezone.utc) - started_at).total_seconds()) if ActorJobStatus(job['status']).is_terminal or ( @@ -122,4 +122,4 @@ async def _abort(self, *, gracefully: bool | None = None) -> dict: method='POST', params=self._params(gracefully=gracefully), ) - return parse_date_fields(pluck_data(response.json())) + return response_to_dict(response) diff --git a/src/apify_client/clients/base/base_client.py b/src/apify_client/_resource_clients/base/base_client.py similarity index 93% rename from src/apify_client/clients/base/base_client.py rename to src/apify_client/_resource_clients/base/base_client.py index c5aa744c..60fa3246 100644 --- a/src/apify_client/clients/base/base_client.py +++ b/src/apify_client/_resource_clients/base/base_client.py @@ -5,13 +5,12 @@ from 
apify_client._logging import WithLogDetailsClient from apify_client._utils import to_safe_id -# Conditional import only executed when type checking, otherwise we'd get circular dependency issues if TYPE_CHECKING: - from apify_client import ApifyClient, ApifyClientAsync + from apify_client._client import ApifyClient, ApifyClientAsync from apify_client._http_client import HTTPClient, HTTPClientAsync -class _BaseBaseClient(metaclass=WithLogDetailsClient): +class BaseBaseClient(metaclass=WithLogDetailsClient): resource_id: str | None url: str params: dict @@ -47,7 +46,7 @@ def _sub_resource_init_options(self, **kwargs: Any) -> dict: } -class BaseClient(_BaseBaseClient): +class BaseClient(BaseBaseClient): """Base class for sub-clients.""" http_client: HTTPClient @@ -88,7 +87,7 @@ def __init__( self.url = f'{self.url}/{self.safe_id}' -class BaseClientAsync(_BaseBaseClient): +class BaseClientAsync(BaseBaseClient): """Base class for async sub-clients.""" http_client: HTTPClientAsync diff --git a/src/apify_client/clients/base/resource_client.py b/src/apify_client/_resource_clients/base/resource_client.py similarity index 85% rename from src/apify_client/clients/base/resource_client.py rename to src/apify_client/_resource_clients/base/resource_client.py index ddf30adf..01cbd36c 100644 --- a/src/apify_client/clients/base/resource_client.py +++ b/src/apify_client/_resource_clients/base/resource_client.py @@ -1,7 +1,7 @@ from __future__ import annotations -from apify_client._utils import catch_not_found_or_throw, parse_date_fields, pluck_data -from apify_client.clients.base.base_client import BaseClient, BaseClientAsync +from apify_client._resource_clients.base.base_client import BaseClient, BaseClientAsync +from apify_client._utils import catch_not_found_or_throw, response_to_dict from apify_client.errors import ApifyApiError @@ -16,8 +16,7 @@ def _get(self, timeout_secs: int | None = None) -> dict | None: params=self._params(), timeout_secs=timeout_secs, ) - - return 
parse_date_fields(pluck_data(response.json())) + return response_to_dict(response) except ApifyApiError as exc: catch_not_found_or_throw(exc) @@ -33,7 +32,7 @@ def _update(self, updated_fields: dict, timeout_secs: int | None = None) -> dict timeout_secs=timeout_secs, ) - return parse_date_fields(pluck_data(response.json())) + return response_to_dict(response) def _delete(self, timeout_secs: int | None = None) -> None: try: @@ -60,7 +59,7 @@ async def _get(self, timeout_secs: int | None = None) -> dict | None: timeout_secs=timeout_secs, ) - return parse_date_fields(pluck_data(response.json())) + return response_to_dict(response) except ApifyApiError as exc: catch_not_found_or_throw(exc) @@ -76,7 +75,7 @@ async def _update(self, updated_fields: dict, timeout_secs: int | None = None) - timeout_secs=timeout_secs, ) - return parse_date_fields(pluck_data(response.json())) + return response_to_dict(response) async def _delete(self, timeout_secs: int | None = None) -> None: try: diff --git a/src/apify_client/clients/base/resource_collection_client.py b/src/apify_client/_resource_clients/base/resource_collection_client.py similarity index 52% rename from src/apify_client/clients/base/resource_collection_client.py rename to src/apify_client/_resource_clients/base/resource_collection_client.py index 2e9c6063..a4ce6b45 100644 --- a/src/apify_client/clients/base/resource_collection_client.py +++ b/src/apify_client/_resource_clients/base/resource_collection_client.py @@ -1,42 +1,10 @@ from __future__ import annotations -from typing import Any, Generic, TypeVar +from typing import Any -from apify_client._utils import parse_date_fields, pluck_data -from apify_client.clients.base.base_client import BaseClient, BaseClientAsync - -T = TypeVar('T') - - -class ListPage(Generic[T]): - """A single page of items returned from a list() method.""" - - items: list[T] - """List of returned objects on this page""" - - count: int - """Count of the returned objects on this page""" - - offset: 
int - """The limit on the number of returned objects offset specified in the API call""" - - limit: int - """The offset of the first object specified in the API call""" - - total: int - """Total number of objects matching the API call criteria""" - - desc: bool - """Whether the listing is descending or not""" - - def __init__(self, data: dict) -> None: - """Initialize a ListPage instance from the API response data.""" - self.items = data.get('items', []) - self.offset = data.get('offset', 0) - self.limit = data.get('limit', 0) - self.count = data['count'] if 'count' in data else len(self.items) - self.total = data['total'] if 'total' in data else self.offset + self.count - self.desc = data.get('desc', False) +from apify_client._resource_clients.base.base_client import BaseClient, BaseClientAsync +from apify_client._types import ListPage +from apify_client._utils import response_to_dict class ResourceCollectionClient(BaseClient): @@ -49,7 +17,8 @@ def _list(self, **kwargs: Any) -> ListPage: params=self._params(**kwargs), ) - return ListPage(parse_date_fields(pluck_data(response.json()))) + data = response_to_dict(response) + return ListPage(data) def _create(self, resource: dict) -> dict: response = self.http_client.call( @@ -59,7 +28,7 @@ def _create(self, resource: dict) -> dict: json=resource, ) - return parse_date_fields(pluck_data(response.json())) + return response_to_dict(response) def _get_or_create(self, name: str | None = None, resource: dict | None = None) -> dict: response = self.http_client.call( @@ -69,7 +38,7 @@ def _get_or_create(self, name: str | None = None, resource: dict | None = None) json=resource, ) - return parse_date_fields(pluck_data(response.json())) + return response_to_dict(response) class ResourceCollectionClientAsync(BaseClientAsync): @@ -82,7 +51,8 @@ async def _list(self, **kwargs: Any) -> ListPage: params=self._params(**kwargs), ) - return ListPage(parse_date_fields(pluck_data(response.json()))) + data = response_to_dict(response) + 
return ListPage(data) async def _create(self, resource: dict) -> dict: response = await self.http_client.call( @@ -92,7 +62,7 @@ async def _create(self, resource: dict) -> dict: json=resource, ) - return parse_date_fields(pluck_data(response.json())) + return response_to_dict(response) async def _get_or_create( self, @@ -106,4 +76,4 @@ async def _get_or_create( json=resource, ) - return parse_date_fields(pluck_data(response.json())) + return response_to_dict(response) diff --git a/src/apify_client/clients/resource_clients/build.py b/src/apify_client/_resource_clients/build.py similarity index 81% rename from src/apify_client/clients/resource_clients/build.py rename to src/apify_client/_resource_clients/build.py index e4f7d6cf..19b4d46c 100644 --- a/src/apify_client/clients/resource_clients/build.py +++ b/src/apify_client/_resource_clients/build.py @@ -2,8 +2,9 @@ from typing import Any -from apify_client.clients.base import ActorJobBaseClient, ActorJobBaseClientAsync -from apify_client.clients.resource_clients.log import LogClient, LogClientAsync +from apify_client._models import Build +from apify_client._resource_clients.base import ActorJobBaseClient, ActorJobBaseClientAsync +from apify_client._resource_clients.log import LogClient, LogClientAsync class BuildClient(ActorJobBaseClient): @@ -13,7 +14,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'actor-builds') super().__init__(*args, resource_path=resource_path, **kwargs) - def get(self) -> dict | None: + def get(self) -> Build | None: """Return information about the Actor build. https://docs.apify.com/api/v2#/reference/actor-builds/build-object/get-build @@ -21,7 +22,8 @@ def get(self) -> dict | None: Returns: The retrieved Actor build data. """ - return self._get() + result = self._get() + return Build.model_validate(result) if result is not None else None def delete(self) -> None: """Delete the build. 
@@ -30,7 +32,7 @@ def delete(self) -> None: """ return self._delete() - def abort(self) -> dict: + def abort(self) -> Build: """Abort the Actor build which is starting or currently running and return its details. https://docs.apify.com/api/v2#/reference/actor-builds/abort-build/abort-build @@ -38,7 +40,8 @@ def abort(self) -> dict: Returns: The data of the aborted Actor build. """ - return self._abort() + result = self._abort() + return Build.model_validate(result) def get_open_api_definition(self) -> dict | None: """Return OpenAPI definition of the Actor's build. @@ -57,7 +60,7 @@ def get_open_api_definition(self) -> dict | None: return response_data - def wait_for_finish(self, *, wait_secs: int | None = None) -> dict | None: + def wait_for_finish(self, *, wait_secs: int | None = None) -> Build | None: """Wait synchronously until the build finishes or the server times out. Args: @@ -67,7 +70,8 @@ def wait_for_finish(self, *, wait_secs: int | None = None) -> dict | None: The Actor build data. If the status on the object is not one of the terminal statuses (SUCCEEDED, FAILED, TIMED_OUT, ABORTED), then the build has not yet finished. """ - return self._wait_for_finish(wait_secs=wait_secs) + result = self._wait_for_finish(wait_secs=wait_secs) + return Build.model_validate(result) if result is not None else None def log(self) -> LogClient: """Get the client for the log of the Actor build. @@ -89,7 +93,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'actor-builds') super().__init__(*args, resource_path=resource_path, **kwargs) - async def get(self) -> dict | None: + async def get(self) -> Build | None: """Return information about the Actor build. https://docs.apify.com/api/v2#/reference/actor-builds/build-object/get-build @@ -97,9 +101,10 @@ async def get(self) -> dict | None: Returns: The retrieved Actor build data. 
""" - return await self._get() + result = await self._get() + return Build.model_validate(result) if result is not None else None - async def abort(self) -> dict: + async def abort(self) -> Build: """Abort the Actor build which is starting or currently running and return its details. https://docs.apify.com/api/v2#/reference/actor-builds/abort-build/abort-build @@ -107,7 +112,8 @@ async def abort(self) -> dict: Returns: The data of the aborted Actor build. """ - return await self._abort() + result = await self._abort() + return Build.model_validate(result) async def delete(self) -> None: """Delete the build. @@ -133,7 +139,7 @@ async def get_open_api_definition(self) -> dict | None: return response_data - async def wait_for_finish(self, *, wait_secs: int | None = None) -> dict | None: + async def wait_for_finish(self, *, wait_secs: int | None = None) -> Build | None: """Wait synchronously until the build finishes or the server times out. Args: @@ -143,7 +149,8 @@ async def wait_for_finish(self, *, wait_secs: int | None = None) -> dict | None: The Actor build data. If the status on the object is not one of the terminal statuses (SUCCEEDED, FAILED, TIMED_OUT, ABORTED), then the build has not yet finished. """ - return await self._wait_for_finish(wait_secs=wait_secs) + result = await self._wait_for_finish(wait_secs=wait_secs) + return Build.model_validate(result) if result is not None else None def log(self) -> LogClientAsync: """Get the client for the log of the Actor build. 
diff --git a/src/apify_client/clients/resource_clients/build_collection.py b/src/apify_client/_resource_clients/build_collection.py similarity index 90% rename from src/apify_client/clients/resource_clients/build_collection.py rename to src/apify_client/_resource_clients/build_collection.py index 4eada958..2e4d7d97 100644 --- a/src/apify_client/clients/resource_clients/build_collection.py +++ b/src/apify_client/_resource_clients/build_collection.py @@ -2,10 +2,11 @@ from typing import TYPE_CHECKING, Any -from apify_client.clients.base import ResourceCollectionClient, ResourceCollectionClientAsync +from apify_client._resource_clients.base import ResourceCollectionClient, ResourceCollectionClientAsync if TYPE_CHECKING: - from apify_client.clients.base.resource_collection_client import ListPage + from apify_client._models import BuildShort + from apify_client._types import ListPage class BuildCollectionClient(ResourceCollectionClient): @@ -21,7 +22,7 @@ def list( limit: int | None = None, offset: int | None = None, desc: bool | None = None, - ) -> ListPage[dict]: + ) -> ListPage[BuildShort]: """List all Actor builds. List all Actor builds, either of a single Actor, or all user's Actors, depending on where this client @@ -54,7 +55,7 @@ async def list( limit: int | None = None, offset: int | None = None, desc: bool | None = None, - ) -> ListPage[dict]: + ) -> ListPage[BuildShort]: """List all Actor builds. 
List all Actor builds, either of a single Actor, or all user's Actors, depending on where this client diff --git a/src/apify_client/clients/resource_clients/dataset.py b/src/apify_client/_resource_clients/dataset.py similarity index 96% rename from src/apify_client/clients/resource_clients/dataset.py rename to src/apify_client/_resource_clients/dataset.py index 87d6aab5..c3c6fcd0 100644 --- a/src/apify_client/clients/resource_clients/dataset.py +++ b/src/apify_client/_resource_clients/dataset.py @@ -7,13 +7,10 @@ from apify_shared.utils import create_storage_content_signature +from apify_client._models import Dataset, GetDatasetStatisticsResponse +from apify_client._resource_clients.base import ResourceClient, ResourceClientAsync from apify_client._types import ListPage -from apify_client._utils import ( - catch_not_found_or_throw, - filter_out_none_values_recursively, - pluck_data, -) -from apify_client.clients.base import ResourceClient, ResourceClientAsync +from apify_client._utils import catch_not_found_or_throw, filter_out_none_values_recursively, response_to_dict from apify_client.errors import ApifyApiError if TYPE_CHECKING: @@ -22,7 +19,7 @@ import impit from apify_shared.consts import StorageGeneralAccess - from apify_client._types import JSONSerializable + from apify_client._types import JsonSerializable _SMALL_TIMEOUT = 5 # For fast and common actions. Suitable for idempotent actions. _MEDIUM_TIMEOUT = 30 # For actions that may take longer. @@ -35,7 +32,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'datasets') super().__init__(*args, resource_path=resource_path, **kwargs) - def get(self) -> dict | None: + def get(self) -> Dataset | None: """Retrieve the dataset. https://docs.apify.com/api/v2#/reference/datasets/dataset/get-dataset @@ -43,9 +40,10 @@ def get(self) -> dict | None: Returns: The retrieved dataset, or None, if it does not exist. 
""" - return self._get(timeout_secs=_SMALL_TIMEOUT) + result = self._get(timeout_secs=_SMALL_TIMEOUT) + return Dataset.model_validate(result) if result is not None else None - def update(self, *, name: str | None = None, general_access: StorageGeneralAccess | None = None) -> dict: + def update(self, *, name: str | None = None, general_access: StorageGeneralAccess | None = None) -> Dataset: """Update the dataset with specified fields. https://docs.apify.com/api/v2#/reference/datasets/dataset/update-dataset @@ -62,7 +60,8 @@ def update(self, *, name: str | None = None, general_access: StorageGeneralAcces 'generalAccess': general_access, } - return self._update(filter_out_none_values_recursively(updated_fields), timeout_secs=_SMALL_TIMEOUT) + result = self._update(filter_out_none_values_recursively(updated_fields), timeout_secs=_SMALL_TIMEOUT) + return Dataset.model_validate(result) def delete(self) -> None: """Delete the dataset. @@ -143,7 +142,7 @@ def list_items( params=request_params, ) - data = response.json() + data = response_to_dict(response) return ListPage( { @@ -531,7 +530,7 @@ def stream_items( if response: response.close() - def push_items(self, items: JSONSerializable) -> None: + def push_items(self, items: JsonSerializable) -> None: """Push items to the dataset. https://docs.apify.com/api/v2#/reference/datasets/item-collection/put-items @@ -558,7 +557,7 @@ def push_items(self, items: JSONSerializable) -> None: timeout_secs=_MEDIUM_TIMEOUT, ) - def get_statistics(self) -> dict | None: + def get_statistics(self) -> GetDatasetStatisticsResponse | None: """Get the dataset statistics. 
https://docs.apify.com/api/v2#tag/DatasetsStatistics/operation/dataset_statistics_get @@ -573,7 +572,8 @@ def get_statistics(self) -> dict | None: params=self._params(), timeout_secs=_SMALL_TIMEOUT, ) - return pluck_data(response.json()) + result = response.json() + return GetDatasetStatisticsResponse.model_validate(result) if result is not None else None except ApifyApiError as exc: catch_not_found_or_throw(exc) @@ -625,10 +625,10 @@ def create_items_public_url( view=view, ) - if dataset and 'urlSigningSecretKey' in dataset: + if dataset and dataset.url_signing_secret_key: signature = create_storage_content_signature( - resource_id=dataset['id'], - url_signing_secret_key=dataset['urlSigningSecretKey'], + resource_id=dataset.id, + url_signing_secret_key=dataset.url_signing_secret_key, expires_in_millis=expires_in_secs * 1000 if expires_in_secs is not None else None, ) request_params['signature'] = signature @@ -648,7 +648,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'datasets') super().__init__(*args, resource_path=resource_path, **kwargs) - async def get(self) -> dict | None: + async def get(self) -> Dataset | None: """Retrieve the dataset. https://docs.apify.com/api/v2#/reference/datasets/dataset/get-dataset @@ -656,9 +656,10 @@ async def get(self) -> dict | None: Returns: The retrieved dataset, or None, if it does not exist. """ - return await self._get(timeout_secs=_SMALL_TIMEOUT) + result = await self._get(timeout_secs=_SMALL_TIMEOUT) + return Dataset.model_validate(result) if result is not None else None - async def update(self, *, name: str | None = None, general_access: StorageGeneralAccess | None = None) -> dict: + async def update(self, *, name: str | None = None, general_access: StorageGeneralAccess | None = None) -> Dataset: """Update the dataset with specified fields. 
https://docs.apify.com/api/v2#/reference/datasets/dataset/update-dataset @@ -675,7 +676,8 @@ async def update(self, *, name: str | None = None, general_access: StorageGenera 'generalAccess': general_access, } - return await self._update(filter_out_none_values_recursively(updated_fields), timeout_secs=_SMALL_TIMEOUT) + result = await self._update(filter_out_none_values_recursively(updated_fields), timeout_secs=_SMALL_TIMEOUT) + return Dataset.model_validate(result) async def delete(self) -> None: """Delete the dataset. @@ -756,7 +758,7 @@ async def list_items( params=request_params, ) - data = response.json() + data = response_to_dict(response) return ListPage( { @@ -1050,7 +1052,7 @@ async def stream_items( if response: await response.aclose() - async def push_items(self, items: JSONSerializable) -> None: + async def push_items(self, items: JsonSerializable) -> None: """Push items to the dataset. https://docs.apify.com/api/v2#/reference/datasets/item-collection/put-items @@ -1077,7 +1079,7 @@ async def push_items(self, items: JSONSerializable) -> None: timeout_secs=_MEDIUM_TIMEOUT, ) - async def get_statistics(self) -> dict | None: + async def get_statistics(self) -> GetDatasetStatisticsResponse | None: """Get the dataset statistics. 
https://docs.apify.com/api/v2#tag/DatasetsStatistics/operation/dataset_statistics_get @@ -1092,7 +1094,8 @@ async def get_statistics(self) -> dict | None: params=self._params(), timeout_secs=_SMALL_TIMEOUT, ) - return pluck_data(response.json()) + result = response.json() + return GetDatasetStatisticsResponse.model_validate(result) if result is not None else None except ApifyApiError as exc: catch_not_found_or_throw(exc) @@ -1144,10 +1147,10 @@ async def create_items_public_url( view=view, ) - if dataset and 'urlSigningSecretKey' in dataset: + if dataset and dataset.url_signing_secret_key: signature = create_storage_content_signature( - resource_id=dataset['id'], - url_signing_secret_key=dataset['urlSigningSecretKey'], + resource_id=dataset.id, + url_signing_secret_key=dataset.url_signing_secret_key, expires_in_millis=expires_in_secs * 1000 if expires_in_secs is not None else None, ) request_params['signature'] = signature diff --git a/src/apify_client/clients/resource_clients/dataset_collection.py b/src/apify_client/_resource_clients/dataset_collection.py similarity index 83% rename from src/apify_client/clients/resource_clients/dataset_collection.py rename to src/apify_client/_resource_clients/dataset_collection.py index 602497ce..7b10486a 100644 --- a/src/apify_client/clients/resource_clients/dataset_collection.py +++ b/src/apify_client/_resource_clients/dataset_collection.py @@ -2,11 +2,12 @@ from typing import TYPE_CHECKING, Any +from apify_client._models import Dataset, DatasetListItem +from apify_client._resource_clients.base import ResourceCollectionClient, ResourceCollectionClientAsync from apify_client._utils import filter_out_none_values_recursively -from apify_client.clients.base import ResourceCollectionClient, ResourceCollectionClientAsync if TYPE_CHECKING: - from apify_client.clients.base.resource_collection_client import ListPage + from apify_client._types import ListPage class DatasetCollectionClient(ResourceCollectionClient): @@ -23,7 +24,7 @@ def 
list( limit: int | None = None, offset: int | None = None, desc: bool | None = None, - ) -> ListPage[dict]: + ) -> ListPage[DatasetListItem]: """List the available datasets. https://docs.apify.com/api/v2#/reference/datasets/dataset-collection/get-list-of-datasets @@ -39,7 +40,7 @@ def list( """ return self._list(unnamed=unnamed, limit=limit, offset=offset, desc=desc) - def get_or_create(self, *, name: str | None = None, schema: dict | None = None) -> dict: + def get_or_create(self, *, name: str | None = None, schema: dict | None = None) -> Dataset: """Retrieve a named dataset, or create a new one when it doesn't exist. https://docs.apify.com/api/v2#/reference/datasets/dataset-collection/create-dataset @@ -51,7 +52,8 @@ def get_or_create(self, *, name: str | None = None, schema: dict | None = None) Returns: The retrieved or newly-created dataset. """ - return self._get_or_create(name=name, resource=filter_out_none_values_recursively({'schema': schema})) + result = self._get_or_create(name=name, resource=filter_out_none_values_recursively({'schema': schema})) + return Dataset.model_validate(result) class DatasetCollectionClientAsync(ResourceCollectionClientAsync): @@ -68,7 +70,7 @@ async def list( limit: int | None = None, offset: int | None = None, desc: bool | None = None, - ) -> ListPage[dict]: + ) -> ListPage[DatasetListItem]: """List the available datasets. https://docs.apify.com/api/v2#/reference/datasets/dataset-collection/get-list-of-datasets @@ -89,7 +91,7 @@ async def get_or_create( *, name: str | None = None, schema: dict | None = None, - ) -> dict: + ) -> Dataset: """Retrieve a named dataset, or create a new one when it doesn't exist. https://docs.apify.com/api/v2#/reference/datasets/dataset-collection/create-dataset @@ -101,4 +103,5 @@ async def get_or_create( Returns: The retrieved or newly-created dataset. 
""" - return await self._get_or_create(name=name, resource=filter_out_none_values_recursively({'schema': schema})) + result = await self._get_or_create(name=name, resource=filter_out_none_values_recursively({'schema': schema})) + return Dataset.model_validate(result) diff --git a/src/apify_client/clients/resource_clients/key_value_store.py b/src/apify_client/_resource_clients/key_value_store.py similarity index 92% rename from src/apify_client/clients/resource_clients/key_value_store.py rename to src/apify_client/_resource_clients/key_value_store.py index 46d415f1..40812462 100644 --- a/src/apify_client/clients/resource_clients/key_value_store.py +++ b/src/apify_client/_resource_clients/key_value_store.py @@ -7,15 +7,14 @@ from apify_shared.utils import create_hmac_signature, create_storage_content_signature +from apify_client._models import KeyValueStore, ListOfKeysResponse +from apify_client._resource_clients.base import ResourceClient, ResourceClientAsync from apify_client._utils import ( catch_not_found_or_throw, encode_key_value_store_record_value, filter_out_none_values_recursively, maybe_parse_response, - parse_date_fields, - pluck_data, ) -from apify_client.clients.base import ResourceClient, ResourceClientAsync from apify_client.errors import ApifyApiError if TYPE_CHECKING: @@ -34,7 +33,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'key-value-stores') super().__init__(*args, resource_path=resource_path, **kwargs) - def get(self) -> dict | None: + def get(self) -> KeyValueStore | None: """Retrieve the key-value store. https://docs.apify.com/api/v2#/reference/key-value-stores/store-object/get-store @@ -42,9 +41,10 @@ def get(self) -> dict | None: Returns: The retrieved key-value store, or None if it does not exist. 
""" - return self._get(timeout_secs=_SMALL_TIMEOUT) + result = self._get(timeout_secs=_SMALL_TIMEOUT) + return KeyValueStore.model_validate(result) if result is not None else None - def update(self, *, name: str | None = None, general_access: StorageGeneralAccess | None = None) -> dict: + def update(self, *, name: str | None = None, general_access: StorageGeneralAccess | None = None) -> KeyValueStore: """Update the key-value store with specified fields. https://docs.apify.com/api/v2#/reference/key-value-stores/store-object/update-store @@ -61,7 +61,8 @@ def update(self, *, name: str | None = None, general_access: StorageGeneralAcces 'generalAccess': general_access, } - return self._update(filter_out_none_values_recursively(updated_fields)) + result = self._update(filter_out_none_values_recursively(updated_fields)) + return KeyValueStore.model_validate(result) def delete(self) -> None: """Delete the key-value store. @@ -78,7 +79,7 @@ def list_keys( collection: str | None = None, prefix: str | None = None, signature: str | None = None, - ) -> dict: + ) -> ListOfKeysResponse: """List the keys in the key-value store. https://docs.apify.com/api/v2#/reference/key-value-stores/key-collection/get-list-of-keys @@ -108,7 +109,8 @@ def list_keys( timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return ListOfKeysResponse.model_validate(result) def get_record(self, key: str, signature: str | None = None) -> dict | None: """Retrieve the given record from the key-value store. 
@@ -291,8 +293,8 @@ def get_record_public_url(self, key: str) -> str: request_params = self._params() - if metadata and 'urlSigningSecretKey' in metadata: - request_params['signature'] = create_hmac_signature(metadata['urlSigningSecretKey'], key) + if metadata and metadata.url_signing_secret_key: + request_params['signature'] = create_hmac_signature(metadata.url_signing_secret_key, key) key_public_url = urlparse(self._url(f'records/{key}', public=True)) filtered_params = {k: v for k, v in request_params.items() if v is not None} @@ -334,10 +336,10 @@ def create_keys_public_url( prefix=prefix, ) - if metadata and 'urlSigningSecretKey' in metadata: + if metadata and metadata.url_signing_secret_key: signature = create_storage_content_signature( - resource_id=metadata['id'], - url_signing_secret_key=metadata['urlSigningSecretKey'], + resource_id=metadata.id, + url_signing_secret_key=metadata.url_signing_secret_key, expires_in_millis=expires_in_secs * 1000 if expires_in_secs is not None else None, ) request_params['signature'] = signature @@ -358,7 +360,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'key-value-stores') super().__init__(*args, resource_path=resource_path, **kwargs) - async def get(self) -> dict | None: + async def get(self) -> KeyValueStore | None: """Retrieve the key-value store. https://docs.apify.com/api/v2#/reference/key-value-stores/store-object/get-store @@ -366,9 +368,15 @@ async def get(self) -> dict | None: Returns: The retrieved key-value store, or None if it does not exist. 
""" - return await self._get(timeout_secs=_SMALL_TIMEOUT) + result = await self._get(timeout_secs=_SMALL_TIMEOUT) + return KeyValueStore.model_validate(result) if result is not None else None - async def update(self, *, name: str | None = None, general_access: StorageGeneralAccess | None = None) -> dict: + async def update( + self, + *, + name: str | None = None, + general_access: StorageGeneralAccess | None = None, + ) -> KeyValueStore: """Update the key-value store with specified fields. https://docs.apify.com/api/v2#/reference/key-value-stores/store-object/update-store @@ -385,7 +393,8 @@ async def update(self, *, name: str | None = None, general_access: StorageGenera 'generalAccess': general_access, } - return await self._update(filter_out_none_values_recursively(updated_fields)) + result = await self._update(filter_out_none_values_recursively(updated_fields)) + return KeyValueStore.model_validate(result) async def delete(self) -> None: """Delete the key-value store. @@ -402,7 +411,7 @@ async def list_keys( collection: str | None = None, prefix: str | None = None, signature: str | None = None, - ) -> dict: + ) -> ListOfKeysResponse: """List the keys in the key-value store. https://docs.apify.com/api/v2#/reference/key-value-stores/key-collection/get-list-of-keys @@ -432,7 +441,8 @@ async def list_keys( timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return ListOfKeysResponse.model_validate(result) async def get_record(self, key: str, signature: str | None = None) -> dict | None: """Retrieve the given record from the key-value store. 
@@ -615,8 +625,8 @@ async def get_record_public_url(self, key: str) -> str: request_params = self._params() - if metadata and 'urlSigningSecretKey' in metadata: - request_params['signature'] = create_hmac_signature(metadata['urlSigningSecretKey'], key) + if metadata and metadata.url_signing_secret_key: + request_params['signature'] = create_hmac_signature(metadata.url_signing_secret_key, key) key_public_url = urlparse(self._url(f'records/{key}', public=True)) filtered_params = {k: v for k, v in request_params.items() if v is not None} @@ -660,10 +670,10 @@ async def create_keys_public_url( prefix=prefix, ) - if metadata and 'urlSigningSecretKey' in metadata: + if metadata and metadata.url_signing_secret_key: signature = create_storage_content_signature( - resource_id=metadata['id'], - url_signing_secret_key=metadata['urlSigningSecretKey'], + resource_id=metadata.id, + url_signing_secret_key=metadata.url_signing_secret_key, expires_in_millis=expires_in_secs * 1000 if expires_in_secs is not None else None, ) request_params['signature'] = signature diff --git a/src/apify_client/clients/resource_clients/key_value_store_collection.py b/src/apify_client/_resource_clients/key_value_store_collection.py similarity index 85% rename from src/apify_client/clients/resource_clients/key_value_store_collection.py rename to src/apify_client/_resource_clients/key_value_store_collection.py index 8af38903..98cecd92 100644 --- a/src/apify_client/clients/resource_clients/key_value_store_collection.py +++ b/src/apify_client/_resource_clients/key_value_store_collection.py @@ -2,11 +2,12 @@ from typing import TYPE_CHECKING, Any +from apify_client._models import KeyValueStore +from apify_client._resource_clients.base import ResourceCollectionClient, ResourceCollectionClientAsync from apify_client._utils import filter_out_none_values_recursively -from apify_client.clients.base import ResourceCollectionClient, ResourceCollectionClientAsync if TYPE_CHECKING: - from 
apify_client.clients.base.resource_collection_client import ListPage + from apify_client._types import ListPage class KeyValueStoreCollectionClient(ResourceCollectionClient): @@ -23,7 +24,7 @@ def list( limit: int | None = None, offset: int | None = None, desc: bool | None = None, - ) -> ListPage[dict]: + ) -> ListPage[KeyValueStore]: """List the available key-value stores. https://docs.apify.com/api/v2#/reference/key-value-stores/store-collection/get-list-of-key-value-stores @@ -44,7 +45,7 @@ def get_or_create( *, name: str | None = None, schema: dict | None = None, - ) -> dict: + ) -> KeyValueStore: """Retrieve a named key-value store, or create a new one when it doesn't exist. https://docs.apify.com/api/v2#/reference/key-value-stores/store-collection/create-key-value-store @@ -56,7 +57,8 @@ def get_or_create( Returns: The retrieved or newly-created key-value store. """ - return self._get_or_create(name=name, resource=filter_out_none_values_recursively({'schema': schema})) + result = self._get_or_create(name=name, resource=filter_out_none_values_recursively({'schema': schema})) + return KeyValueStore.model_validate(result) class KeyValueStoreCollectionClientAsync(ResourceCollectionClientAsync): @@ -73,7 +75,7 @@ async def list( limit: int | None = None, offset: int | None = None, desc: bool | None = None, - ) -> ListPage[dict]: + ) -> ListPage[KeyValueStore]: """List the available key-value stores. https://docs.apify.com/api/v2#/reference/key-value-stores/store-collection/get-list-of-key-value-stores @@ -94,7 +96,7 @@ async def get_or_create( *, name: str | None = None, schema: dict | None = None, - ) -> dict: + ) -> KeyValueStore: """Retrieve a named key-value store, or create a new one when it doesn't exist. https://docs.apify.com/api/v2#/reference/key-value-stores/store-collection/create-key-value-store @@ -106,4 +108,5 @@ async def get_or_create( Returns: The retrieved or newly-created key-value store. 
""" - return await self._get_or_create(name=name, resource=filter_out_none_values_recursively({'schema': schema})) + result = await self._get_or_create(name=name, resource=filter_out_none_values_recursively({'schema': schema})) + return KeyValueStore.model_validate(result) diff --git a/src/apify_client/clients/resource_clients/log.py b/src/apify_client/_resource_clients/log.py similarity index 97% rename from src/apify_client/clients/resource_clients/log.py rename to src/apify_client/_resource_clients/log.py index d333fb6e..ea03541b 100644 --- a/src/apify_client/clients/resource_clients/log.py +++ b/src/apify_client/_resource_clients/log.py @@ -11,8 +11,8 @@ from threading import Thread from typing import TYPE_CHECKING, Any, cast +from apify_client._resource_clients.base import ResourceClient, ResourceClientAsync from apify_client._utils import catch_not_found_or_throw -from apify_client.clients.base import ResourceClient, ResourceClientAsync from apify_client.errors import ApifyApiError if TYPE_CHECKING: @@ -22,7 +22,8 @@ import impit from typing_extensions import Self - from apify_client.clients import RunClient, RunClientAsync + from apify_client._models import Run + from apify_client._resource_clients import RunClient, RunClientAsync class LogClient(ResourceClient): @@ -404,25 +405,25 @@ def __init__(self, *, to_logger: logging.Logger, check_period: timedelta = timed self._check_period = check_period.total_seconds() self._last_status_message = '' - def _log_run_data(self, run_data: dict[str, Any] | None) -> bool: + def _log_run_data(self, run_data: Run | None) -> bool: """Get relevant run data, log them if changed and return `True` if more data is expected. Args: - run_data: The dictionary that contains the run data. + run_data: The Run model that contains the run data. Returns: `True` if more data is expected, `False` otherwise. 
""" if run_data is not None: - status = run_data.get('status', 'Unknown status') - status_message = run_data.get('statusMessage', '') + status = run_data.status if run_data.status else 'Unknown status' + status_message = run_data.status_message or '' new_status_message = f'Status: {status}, Message: {status_message}' if new_status_message != self._last_status_message: self._last_status_message = new_status_message self._to_logger.info(new_status_message) - return not (run_data.get('isStatusMessageTerminal', False)) + return not (run_data.is_status_message_terminal or False) return True diff --git a/src/apify_client/clients/resource_clients/request_queue.py b/src/apify_client/_resource_clients/request_queue.py similarity index 82% rename from src/apify_client/clients/resource_clients/request_queue.py rename to src/apify_client/_resource_clients/request_queue.py index c3ee1bf6..8f9e8358 100644 --- a/src/apify_client/clients/resource_clients/request_queue.py +++ b/src/apify_client/_resource_clients/request_queue.py @@ -5,17 +5,25 @@ import math from collections.abc import Iterable from queue import Queue -from typing import TYPE_CHECKING, Any, TypedDict +from typing import TYPE_CHECKING, Any from more_itertools import constrained_batches -from apify_client._utils import ( - catch_not_found_or_throw, - filter_out_none_values_recursively, - parse_date_fields, - pluck_data, +from apify_client._models import ( + BatchOperationResponse, + Data12, + GetHeadAndLockResponse, + GetHeadResponse, + ListRequestsResponse, + ProcessedRequest, + ProlongRequestLockResponse, + RequestOperationInfo, + RequestQueue, + RequestQueueItems, + UnprocessedRequest, ) -from apify_client.clients.base import ResourceClient, ResourceClientAsync +from apify_client._resource_clients.base import ResourceClient, ResourceClientAsync +from apify_client._utils import catch_not_found_or_throw, filter_out_none_values_recursively from apify_client.errors import ApifyApiError if TYPE_CHECKING: @@ -33,18 
+41,6 @@ _MEDIUM_TIMEOUT = 30 # For actions that may take longer. -class BatchAddRequestsResult(TypedDict): - """Result of the batch add requests operation. - - Args: - processedRequests: List of successfully added requests. - unprocessedRequests: List of requests that failed to be added. - """ - - processedRequests: list[dict] - unprocessedRequests: list[dict] - - class RequestQueueClient(ResourceClient): """Sub-client for manipulating a single request queue.""" @@ -63,7 +59,7 @@ def __init__( # noqa: D417 super().__init__(*args, resource_path=resource_path, **kwargs) self.client_key = client_key - def get(self) -> dict | None: + def get(self) -> RequestQueue | None: """Retrieve the request queue. https://docs.apify.com/api/v2#/reference/request-queues/queue/get-request-queue @@ -71,9 +67,10 @@ def get(self) -> dict | None: Returns: The retrieved request queue, or None, if it does not exist. """ - return self._get(timeout_secs=_SMALL_TIMEOUT) + result = self._get(timeout_secs=_SMALL_TIMEOUT) + return RequestQueue.model_validate(result) if result is not None else None - def update(self, *, name: str | None = None, general_access: StorageGeneralAccess | None = None) -> dict: + def update(self, *, name: str | None = None, general_access: StorageGeneralAccess | None = None) -> RequestQueue: """Update the request queue with specified fields. https://docs.apify.com/api/v2#/reference/request-queues/queue/update-request-queue @@ -90,7 +87,8 @@ def update(self, *, name: str | None = None, general_access: StorageGeneralAcces 'generalAccess': general_access, } - return self._update(filter_out_none_values_recursively(updated_fields), timeout_secs=_SMALL_TIMEOUT) + result = self._update(filter_out_none_values_recursively(updated_fields), timeout_secs=_SMALL_TIMEOUT) + return RequestQueue.model_validate(result) def delete(self) -> None: """Delete the request queue. 
@@ -99,7 +97,7 @@ def delete(self) -> None: """ return self._delete(timeout_secs=_SMALL_TIMEOUT) - def list_head(self, *, limit: int | None = None) -> dict: + def list_head(self, *, limit: int | None = None) -> GetHeadResponse: """Retrieve a given number of requests from the beginning of the queue. https://docs.apify.com/api/v2#/reference/request-queues/queue-head/get-head @@ -119,9 +117,10 @@ def list_head(self, *, limit: int | None = None) -> dict: timeout_secs=_SMALL_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return GetHeadResponse.model_validate(result) - def list_and_lock_head(self, *, lock_secs: int, limit: int | None = None) -> dict: + def list_and_lock_head(self, *, lock_secs: int, limit: int | None = None) -> GetHeadAndLockResponse: """Retrieve a given number of unlocked requests from the beginning of the queue and lock them for a given time. https://docs.apify.com/api/v2#/reference/request-queues/queue-head-with-locks/get-head-and-lock @@ -142,9 +141,10 @@ def list_and_lock_head(self, *, lock_secs: int, limit: int | None = None) -> dic timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return GetHeadAndLockResponse.model_validate(result) - def add_request(self, request: dict, *, forefront: bool | None = None) -> dict: + def add_request(self, request: dict, *, forefront: bool | None = None) -> RequestOperationInfo: """Add a request to the queue. 
https://docs.apify.com/api/v2#/reference/request-queues/request-collection/add-request @@ -166,9 +166,10 @@ def add_request(self, request: dict, *, forefront: bool | None = None) -> dict: timeout_secs=_SMALL_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return RequestOperationInfo.model_validate(result) - def get_request(self, request_id: str) -> dict | None: + def get_request(self, request_id: str) -> RequestQueueItems | None: """Retrieve a request from the queue. https://docs.apify.com/api/v2#/reference/request-queues/request/get-request @@ -186,14 +187,15 @@ def get_request(self, request_id: str) -> dict | None: params=self._params(), timeout_secs=_SMALL_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return RequestQueueItems.model_validate(result) except ApifyApiError as exc: catch_not_found_or_throw(exc) return None - def update_request(self, request: dict, *, forefront: bool | None = None) -> dict: + def update_request(self, request: dict, *, forefront: bool | None = None) -> RequestOperationInfo: """Update a request in the queue. https://docs.apify.com/api/v2#/reference/request-queues/request/update-request @@ -217,7 +219,8 @@ def update_request(self, request: dict, *, forefront: bool | None = None) -> dic timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return RequestOperationInfo.model_validate(result) def delete_request(self, request_id: str) -> None: """Delete a request from the queue. @@ -244,7 +247,7 @@ def prolong_request_lock( *, forefront: bool | None = None, lock_secs: int, - ) -> dict: + ) -> ProlongRequestLockResponse: """Prolong the lock on a request. 
https://docs.apify.com/api/v2#/reference/request-queues/request-lock/prolong-request-lock @@ -263,7 +266,8 @@ def prolong_request_lock( timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return ProlongRequestLockResponse.model_validate(result) def delete_request_lock(self, request_id: str, *, forefront: bool | None = None) -> None: """Delete the lock on a request. @@ -291,7 +295,7 @@ def batch_add_requests( max_parallel: int = 1, max_unprocessed_requests_retries: int | None = None, min_delay_between_unprocessed_requests_retries: timedelta | None = None, - ) -> BatchAddRequestsResult: + ) -> BatchOperationResponse: """Add requests to the request queue in batches. Requests are split into batches based on size and processed in parallel. @@ -336,8 +340,8 @@ def batch_add_requests( for batch in batches: queue.put(batch) - processed_requests = list[dict]() - unprocessed_requests = list[dict]() + processed_requests = list[ProcessedRequest]() + unprocessed_requests = list[UnprocessedRequest]() # Process all batches in the queue sequentially. while not queue.empty(): @@ -352,16 +356,18 @@ def batch_add_requests( timeout_secs=_MEDIUM_TIMEOUT, ) - response_parsed = parse_date_fields(pluck_data(response.json())) + response_parsed = response.json() processed_requests.extend(response_parsed.get('processedRequests', [])) unprocessed_requests.extend(response_parsed.get('unprocessedRequests', [])) - return { - 'processedRequests': processed_requests, - 'unprocessedRequests': unprocessed_requests, - } + return BatchOperationResponse( + data=Data12( + processed_requests=processed_requests, + unprocessed_requests=unprocessed_requests, + ) + ) - def batch_delete_requests(self, requests: list[dict]) -> dict: + def batch_delete_requests(self, requests: list[dict]) -> BatchOperationResponse: """Delete given requests from the queue. 
https://docs.apify.com/api/v2#/reference/request-queues/batch-request-operations/delete-requests @@ -379,14 +385,15 @@ def batch_delete_requests(self, requests: list[dict]) -> dict: timeout_secs=_SMALL_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return BatchOperationResponse.model_validate(result) def list_requests( self, *, limit: int | None = None, exclusive_start_id: str | None = None, - ) -> dict: + ) -> ListRequestsResponse: """List requests in the queue. https://docs.apify.com/api/v2#/reference/request-queues/request-collection/list-requests @@ -404,15 +411,16 @@ def list_requests( timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return ListRequestsResponse.model_validate(result) - def unlock_requests(self: RequestQueueClient) -> dict: + def unlock_requests(self: RequestQueueClient) -> BatchOperationResponse: """Unlock all requests in the queue, which were locked by the same clientKey or from the same Actor run. https://docs.apify.com/api/v2#/reference/request-queues/request-collection/unlock-requests Returns: - dict: Result of the unlock operation + Result of the unlock operation """ request_params = self._params(clientKey=self.client_key) @@ -422,7 +430,8 @@ def unlock_requests(self: RequestQueueClient) -> dict: params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return BatchOperationResponse.model_validate(result) class RequestQueueClientAsync(ResourceClientAsync): @@ -443,7 +452,7 @@ def __init__( # noqa: D417 super().__init__(*args, resource_path=resource_path, **kwargs) self.client_key = client_key - async def get(self) -> dict | None: + async def get(self) -> RequestQueue | None: """Retrieve the request queue. 
https://docs.apify.com/api/v2#/reference/request-queues/queue/get-request-queue @@ -451,9 +460,15 @@ async def get(self) -> dict | None: Returns: The retrieved request queue, or None, if it does not exist. """ - return await self._get(timeout_secs=_SMALL_TIMEOUT) + result = await self._get(timeout_secs=_SMALL_TIMEOUT) + return RequestQueue.model_validate(result) if result is not None else None - async def update(self, *, name: str | None = None, general_access: StorageGeneralAccess | None = None) -> dict: + async def update( + self, + *, + name: str | None = None, + general_access: StorageGeneralAccess | None = None, + ) -> RequestQueue: """Update the request queue with specified fields. https://docs.apify.com/api/v2#/reference/request-queues/queue/update-request-queue @@ -470,7 +485,8 @@ async def update(self, *, name: str | None = None, general_access: StorageGenera 'generalAccess': general_access, } - return await self._update(filter_out_none_values_recursively(updated_fields), timeout_secs=_SMALL_TIMEOUT) + result = await self._update(filter_out_none_values_recursively(updated_fields), timeout_secs=_SMALL_TIMEOUT) + return RequestQueue.model_validate(result) async def delete(self) -> None: """Delete the request queue. @@ -479,7 +495,7 @@ async def delete(self) -> None: """ return await self._delete(timeout_secs=_SMALL_TIMEOUT) - async def list_head(self, *, limit: int | None = None) -> dict: + async def list_head(self, *, limit: int | None = None) -> GetHeadResponse: """Retrieve a given number of requests from the beginning of the queue. 
https://docs.apify.com/api/v2#/reference/request-queues/queue-head/get-head @@ -499,9 +515,10 @@ async def list_head(self, *, limit: int | None = None) -> dict: timeout_secs=_SMALL_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return GetHeadResponse.model_validate(result) - async def list_and_lock_head(self, *, lock_secs: int, limit: int | None = None) -> dict: + async def list_and_lock_head(self, *, lock_secs: int, limit: int | None = None) -> GetHeadAndLockResponse: """Retrieve a given number of unlocked requests from the beginning of the queue and lock them for a given time. https://docs.apify.com/api/v2#/reference/request-queues/queue-head-with-locks/get-head-and-lock @@ -522,9 +539,10 @@ async def list_and_lock_head(self, *, lock_secs: int, limit: int | None = None) timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return GetHeadAndLockResponse.model_validate(result) - async def add_request(self, request: dict, *, forefront: bool | None = None) -> dict: + async def add_request(self, request: dict, *, forefront: bool | None = None) -> RequestOperationInfo: """Add a request to the queue. https://docs.apify.com/api/v2#/reference/request-queues/request-collection/add-request @@ -546,9 +564,10 @@ async def add_request(self, request: dict, *, forefront: bool | None = None) -> timeout_secs=_SMALL_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return RequestOperationInfo.model_validate(result) - async def get_request(self, request_id: str) -> dict | None: + async def get_request(self, request_id: str) -> RequestQueueItems | None: """Retrieve a request from the queue. 
https://docs.apify.com/api/v2#/reference/request-queues/request/get-request @@ -566,14 +585,15 @@ async def get_request(self, request_id: str) -> dict | None: params=self._params(), timeout_secs=_SMALL_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return RequestQueueItems.model_validate(result) if result is not None else None except ApifyApiError as exc: catch_not_found_or_throw(exc) return None - async def update_request(self, request: dict, *, forefront: bool | None = None) -> dict: + async def update_request(self, request: dict, *, forefront: bool | None = None) -> RequestOperationInfo: """Update a request in the queue. https://docs.apify.com/api/v2#/reference/request-queues/request/update-request @@ -597,7 +617,8 @@ async def update_request(self, request: dict, *, forefront: bool | None = None) timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return RequestOperationInfo.model_validate(result) async def delete_request(self, request_id: str) -> None: """Delete a request from the queue. @@ -622,7 +643,7 @@ async def prolong_request_lock( *, forefront: bool | None = None, lock_secs: int, - ) -> dict: + ) -> ProlongRequestLockResponse: """Prolong the lock on a request. https://docs.apify.com/api/v2#/reference/request-queues/request-lock/prolong-request-lock @@ -641,7 +662,8 @@ async def prolong_request_lock( timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return ProlongRequestLockResponse.model_validate(result) async def delete_request_lock( self, @@ -670,15 +692,15 @@ async def _batch_add_requests_worker( self, queue: asyncio.Queue[Iterable[dict]], request_params: dict, - ) -> BatchAddRequestsResult: + ) -> BatchOperationResponse: """Worker function to process a batch of requests. This worker will process batches from the queue. 
Return result containing lists of processed and unprocessed requests by the worker. """ - processed_requests = list[dict]() - unprocessed_requests = list[dict]() + processed_requests = list[ProcessedRequest]() + unprocessed_requests = list[UnprocessedRequest]() while True: # Get the next batch from the queue. @@ -697,7 +719,7 @@ async def _batch_add_requests_worker( timeout_secs=_MEDIUM_TIMEOUT, ) - response_parsed = parse_date_fields(pluck_data(response.json())) + response_parsed = response.json() processed_requests.extend(response_parsed.get('processedRequests', [])) unprocessed_requests.extend(response_parsed.get('unprocessedRequests', [])) @@ -705,10 +727,12 @@ async def _batch_add_requests_worker( # Mark the batch as done whether it succeeded or failed. queue.task_done() - return { - 'processedRequests': processed_requests, - 'unprocessedRequests': unprocessed_requests, - } + return BatchOperationResponse( + data=Data12( + processed_requests=processed_requests, + unprocessed_requests=unprocessed_requests, + ) + ) async def batch_add_requests( self, @@ -718,7 +742,7 @@ async def batch_add_requests( max_parallel: int = 5, max_unprocessed_requests_retries: int | None = None, min_delay_between_unprocessed_requests_retries: timedelta | None = None, - ) -> BatchAddRequestsResult: + ) -> BatchOperationResponse: """Add requests to the request queue in batches. Requests are split into batches based on size and processed in parallel. @@ -743,7 +767,7 @@ async def batch_add_requests( logger.warning('`min_delay_between_unprocessed_requests_retries` is deprecated and not used anymore.') tasks = set[asyncio.Task]() - queue: asyncio.Queue[Iterable[dict]] = asyncio.Queue() + asyncio_queue: asyncio.Queue[Iterable[dict]] = asyncio.Queue() request_params = self._params(clientKey=self.client_key, forefront=forefront) # Compute the payload size limit to ensure it doesn't exceed the maximum allowed size. 
@@ -757,40 +781,42 @@ async def batch_add_requests( ) for batch in batches: - await queue.put(batch) + await asyncio_queue.put(batch) # Start a required number of worker tasks to process the batches. for i in range(max_parallel): coro = self._batch_add_requests_worker( - queue, + asyncio_queue, request_params, ) task = asyncio.create_task(coro, name=f'batch_add_requests_worker_{i}') tasks.add(task) # Wait for all batches to be processed. - await queue.join() + await asyncio_queue.join() # Send cancellation signals to all worker tasks and wait for them to finish. for task in tasks: task.cancel() - results: list[BatchAddRequestsResult] = await asyncio.gather(*tasks) + results: list[BatchOperationResponse] = await asyncio.gather(*tasks) # Combine the results from all workers and return them. - processed_requests = [] - unprocessed_requests = [] + processed_requests = list[ProcessedRequest]() + unprocessed_requests = list[UnprocessedRequest]() for result in results: - processed_requests.extend(result['processedRequests']) - unprocessed_requests.extend(result['unprocessedRequests']) + processed_requests.extend(result.data.processed_requests) + unprocessed_requests.extend(result.data.unprocessed_requests) - return { - 'processedRequests': processed_requests, - 'unprocessedRequests': unprocessed_requests, - } + return BatchOperationResponse( + data=Data12( + processed_requests=processed_requests, + unprocessed_requests=unprocessed_requests, + ) + ) - async def batch_delete_requests(self, requests: list[dict]) -> dict: + async def batch_delete_requests(self, requests: list[dict]) -> BatchOperationResponse: """Delete given requests from the queue. 
https://docs.apify.com/api/v2#/reference/request-queues/batch-request-operations/delete-requests @@ -807,14 +833,15 @@ async def batch_delete_requests(self, requests: list[dict]) -> dict: json=requests, timeout_secs=_SMALL_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return BatchOperationResponse.model_validate(result) async def list_requests( self, *, limit: int | None = None, exclusive_start_id: str | None = None, - ) -> dict: + ) -> ListRequestsResponse: """List requests in the queue. https://docs.apify.com/api/v2#/reference/request-queues/request-collection/list-requests @@ -832,15 +859,16 @@ async def list_requests( timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return ListRequestsResponse.model_validate(result) - async def unlock_requests(self: RequestQueueClientAsync) -> dict: + async def unlock_requests(self: RequestQueueClientAsync) -> BatchOperationResponse: """Unlock all requests in the queue, which were locked by the same clientKey or from the same Actor run. 
https://docs.apify.com/api/v2#/reference/request-queues/request-collection/unlock-requests Returns: - dict: Result of the unlock operation + Result of the unlock operation """ request_params = self._params(clientKey=self.client_key) @@ -850,4 +878,5 @@ async def unlock_requests(self: RequestQueueClientAsync) -> dict: params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return BatchOperationResponse.model_validate(result) diff --git a/src/apify_client/clients/resource_clients/request_queue_collection.py b/src/apify_client/_resource_clients/request_queue_collection.py similarity index 84% rename from src/apify_client/clients/resource_clients/request_queue_collection.py rename to src/apify_client/_resource_clients/request_queue_collection.py index f2ee80bb..6235a246 100644 --- a/src/apify_client/clients/resource_clients/request_queue_collection.py +++ b/src/apify_client/_resource_clients/request_queue_collection.py @@ -2,10 +2,11 @@ from typing import TYPE_CHECKING, Any -from apify_client.clients.base import ResourceCollectionClient, ResourceCollectionClientAsync +from apify_client._models import RequestQueue, RequestQueueShort +from apify_client._resource_clients.base import ResourceCollectionClient, ResourceCollectionClientAsync if TYPE_CHECKING: - from apify_client.clients.base.resource_collection_client import ListPage + from apify_client._types import ListPage class RequestQueueCollectionClient(ResourceCollectionClient): @@ -22,7 +23,7 @@ def list( limit: int | None = None, offset: int | None = None, desc: bool | None = None, - ) -> ListPage[dict]: + ) -> ListPage[RequestQueueShort]: """List the available request queues. 
https://docs.apify.com/api/v2#/reference/request-queues/queue-collection/get-list-of-request-queues @@ -38,7 +39,7 @@ def list( """ return self._list(unnamed=unnamed, limit=limit, offset=offset, desc=desc) - def get_or_create(self, *, name: str | None = None) -> dict: + def get_or_create(self, *, name: str | None = None) -> RequestQueue: """Retrieve a named request queue, or create a new one when it doesn't exist. https://docs.apify.com/api/v2#/reference/request-queues/queue-collection/create-request-queue @@ -49,7 +50,8 @@ def get_or_create(self, *, name: str | None = None) -> dict: Returns: The retrieved or newly-created request queue. """ - return self._get_or_create(name=name) + result = self._get_or_create(name=name) + return RequestQueue.model_validate(result) class RequestQueueCollectionClientAsync(ResourceCollectionClientAsync): @@ -66,7 +68,7 @@ async def list( limit: int | None = None, offset: int | None = None, desc: bool | None = None, - ) -> ListPage[dict]: + ) -> ListPage[RequestQueueShort]: """List the available request queues. https://docs.apify.com/api/v2#/reference/request-queues/queue-collection/get-list-of-request-queues @@ -82,7 +84,7 @@ async def list( """ return await self._list(unnamed=unnamed, limit=limit, offset=offset, desc=desc) - async def get_or_create(self, *, name: str | None = None) -> dict: + async def get_or_create(self, *, name: str | None = None) -> RequestQueue: """Retrieve a named request queue, or create a new one when it doesn't exist. https://docs.apify.com/api/v2#/reference/request-queues/queue-collection/create-request-queue @@ -93,4 +95,5 @@ async def get_or_create(self, *, name: str | None = None) -> dict: Returns: The retrieved or newly-created request queue. 
""" - return await self._get_or_create(name=name) + result = await self._get_or_create(name=name) + return RequestQueue.model_validate(result) diff --git a/src/apify_client/clients/resource_clients/run.py b/src/apify_client/_resource_clients/run.py similarity index 88% rename from src/apify_client/clients/resource_clients/run.py rename to src/apify_client/_resource_clients/run.py index 7889709e..e3556055 100644 --- a/src/apify_client/clients/resource_clients/run.py +++ b/src/apify_client/_resource_clients/run.py @@ -9,17 +9,11 @@ from typing import TYPE_CHECKING, Any from apify_client._logging import create_redirect_logger -from apify_client._utils import ( - encode_key_value_store_record_value, - filter_out_none_values_recursively, - parse_date_fields, - pluck_data, - to_safe_id, -) -from apify_client.clients.base import ActorJobBaseClient, ActorJobBaseClientAsync -from apify_client.clients.resource_clients.dataset import DatasetClient, DatasetClientAsync -from apify_client.clients.resource_clients.key_value_store import KeyValueStoreClient, KeyValueStoreClientAsync -from apify_client.clients.resource_clients.log import ( +from apify_client._models import Run +from apify_client._resource_clients.base import ActorJobBaseClient, ActorJobBaseClientAsync +from apify_client._resource_clients.dataset import DatasetClient, DatasetClientAsync +from apify_client._resource_clients.key_value_store import KeyValueStoreClient, KeyValueStoreClientAsync +from apify_client._resource_clients.log import ( LogClient, LogClientAsync, StatusMessageWatcherAsync, @@ -27,7 +21,13 @@ StreamedLogAsync, StreamedLogSync, ) -from apify_client.clients.resource_clients.request_queue import RequestQueueClient, RequestQueueClientAsync +from apify_client._resource_clients.request_queue import RequestQueueClient, RequestQueueClientAsync +from apify_client._utils import ( + encode_key_value_store_record_value, + filter_out_none_values_recursively, + response_to_dict, + to_safe_id, +) if 
TYPE_CHECKING: import logging @@ -43,7 +43,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'actor-runs') super().__init__(*args, resource_path=resource_path, **kwargs) - def get(self) -> dict | None: + def get(self) -> Run | None: """Return information about the Actor run. https://docs.apify.com/api/v2#/reference/actor-runs/run-object/get-run @@ -51,7 +51,8 @@ def get(self) -> dict | None: Returns: The retrieved Actor run data. """ - return self._get() + result = self._get() + return Run.model_validate(result) if result is not None else None def update( self, @@ -59,7 +60,7 @@ def update( status_message: str | None = None, is_status_message_terminal: bool | None = None, general_access: RunGeneralAccess | None = None, - ) -> dict: + ) -> Run: """Update the run with the specified fields. https://docs.apify.com/api/v2#/reference/actor-runs/run-object/update-run @@ -78,7 +79,8 @@ def update( 'generalAccess': general_access, } - return self._update(filter_out_none_values_recursively(updated_fields)) + result = self._update(filter_out_none_values_recursively(updated_fields)) + return Run.model_validate(result) def delete(self) -> None: """Delete the run. @@ -87,7 +89,7 @@ def delete(self) -> None: """ return self._delete() - def abort(self, *, gracefully: bool | None = None) -> dict: + def abort(self, *, gracefully: bool | None = None) -> Run: """Abort the Actor run which is starting or currently running and return its details. https://docs.apify.com/api/v2#/reference/actor-runs/abort-run/abort-run @@ -100,9 +102,10 @@ def abort(self, *, gracefully: bool | None = None) -> dict: Returns: The data of the aborted Actor run. 
""" - return self._abort(gracefully=gracefully) + result = self._abort(gracefully=gracefully) + return Run.model_validate(result) - def wait_for_finish(self, *, wait_secs: int | None = None) -> dict | None: + def wait_for_finish(self, *, wait_secs: int | None = None) -> Run | None: """Wait synchronously until the run finishes or the server times out. Args: @@ -112,7 +115,8 @@ def wait_for_finish(self, *, wait_secs: int | None = None) -> dict | None: The Actor run data. If the status on the object is not one of the terminal statuses (SUCCEEDED, FAILED, TIMED_OUT, ABORTED), then the run has not yet finished. """ - return self._wait_for_finish(wait_secs=wait_secs) + result = self._wait_for_finish(wait_secs=wait_secs) + return Run.model_validate(result) if result is not None else None def metamorph( self, @@ -121,7 +125,7 @@ def metamorph( target_actor_build: str | None = None, run_input: Any = None, content_type: str | None = None, - ) -> dict: + ) -> Run: """Transform an Actor run into a run of another Actor with a new input. https://docs.apify.com/api/v2#/reference/actor-runs/metamorph-run/metamorph-run @@ -151,7 +155,8 @@ def metamorph( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + data = response_to_dict(response) + return Run.model_validate(data) def resurrect( self, @@ -162,7 +167,7 @@ def resurrect( max_items: int | None = None, max_total_charge_usd: Decimal | None = None, restart_on_error: bool | None = None, - ) -> dict: + ) -> Run: """Resurrect a finished Actor run. Only finished runs, i.e. runs with status FINISHED, FAILED, ABORTED and TIMED-OUT can be resurrected. @@ -202,9 +207,10 @@ def resurrect( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + data = response_to_dict(response) + return Run.model_validate(data) - def reboot(self) -> dict: + def reboot(self) -> Run: """Reboot an Actor run. Only runs that are running, i.e. runs with status RUNNING can be rebooted. 
https://docs.apify.com/api/v2#/reference/actor-runs/reboot-run/reboot-run @@ -216,7 +222,8 @@ def reboot(self) -> dict: url=self._url('reboot'), method='POST', ) - return parse_date_fields(pluck_data(response.json())) + data = response_to_dict(response) + return Run.model_validate(data) def dataset(self) -> DatasetClient: """Get the client for the default dataset of the Actor run. @@ -281,11 +288,11 @@ def get_streamed_log(self, to_logger: logging.Logger | None = None, *, from_star `StreamedLog` instance for redirected logs. """ run_data = self.get() - run_id = f'runId:{run_data.get("id", "")}' if run_data else '' + run_id = f'runId:{run_data.id}' if run_data and run_data.id else '' - actor_id = run_data.get('actId', '') if run_data else '' - actor_data = self.root_client.actor(actor_id=actor_id).get() or {} - actor_name = actor_data.get('name', '') if run_data else '' + actor_id = run_data.act_id if run_data and run_data.act_id else '' + actor_data = self.root_client.actor(actor_id=actor_id).get() if actor_id else None + actor_name = actor_data.name if actor_data and hasattr(actor_data, 'name') and actor_data.name else '' if not to_logger: name = ' '.join(part for part in (actor_name, run_id) if part) @@ -345,11 +352,11 @@ def get_status_message_watcher( `StatusMessageWatcher` instance. 
""" run_data = self.get() - run_id = f'runId:{run_data.get("id", "")}' if run_data else '' + run_id = f'runId:{run_data.id}' if run_data and run_data.id else '' - actor_id = run_data.get('actId', '') if run_data else '' - actor_data = self.root_client.actor(actor_id=actor_id).get() or {} - actor_name = actor_data.get('name', '') if run_data else '' + actor_id = run_data.act_id if run_data and run_data.act_id else '' + actor_data = self.root_client.actor(actor_id=actor_id).get() if actor_id else None + actor_name = actor_data.name if actor_data and hasattr(actor_data, 'name') and actor_data.name else '' if not to_logger: name = ' '.join(part for part in (actor_name, run_id) if part) @@ -365,7 +372,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'actor-runs') super().__init__(*args, resource_path=resource_path, **kwargs) - async def get(self) -> dict | None: + async def get(self) -> Run | None: """Return information about the Actor run. https://docs.apify.com/api/v2#/reference/actor-runs/run-object/get-run @@ -373,7 +380,8 @@ async def get(self) -> dict | None: Returns: The retrieved Actor run data. """ - return await self._get() + result = await self._get() + return Run.model_validate(result) if result is not None else None async def update( self, @@ -381,7 +389,7 @@ async def update( status_message: str | None = None, is_status_message_terminal: bool | None = None, general_access: RunGeneralAccess | None = None, - ) -> dict: + ) -> Run: """Update the run with the specified fields. 
https://docs.apify.com/api/v2#/reference/actor-runs/run-object/update-run @@ -400,9 +408,10 @@ async def update( 'generalAccess': general_access, } - return await self._update(filter_out_none_values_recursively(updated_fields)) + result = await self._update(filter_out_none_values_recursively(updated_fields)) + return Run.model_validate(result) - async def abort(self, *, gracefully: bool | None = None) -> dict: + async def abort(self, *, gracefully: bool | None = None) -> Run: """Abort the Actor run which is starting or currently running and return its details. https://docs.apify.com/api/v2#/reference/actor-runs/abort-run/abort-run @@ -415,9 +424,10 @@ async def abort(self, *, gracefully: bool | None = None) -> dict: Returns: The data of the aborted Actor run. """ - return await self._abort(gracefully=gracefully) + result = await self._abort(gracefully=gracefully) + return Run.model_validate(result) - async def wait_for_finish(self, *, wait_secs: int | None = None) -> dict | None: + async def wait_for_finish(self, *, wait_secs: int | None = None) -> Run | None: """Wait synchronously until the run finishes or the server times out. Args: @@ -427,7 +437,8 @@ async def wait_for_finish(self, *, wait_secs: int | None = None) -> dict | None: The Actor run data. If the status on the object is not one of the terminal statuses (SUCCEEDED, FAILED, TIMED_OUT, ABORTED), then the run has not yet finished. """ - return await self._wait_for_finish(wait_secs=wait_secs) + result = await self._wait_for_finish(wait_secs=wait_secs) + return Run.model_validate(result) if result is not None else None async def delete(self) -> None: """Delete the run. @@ -443,7 +454,7 @@ async def metamorph( target_actor_build: str | None = None, run_input: Any = None, content_type: str | None = None, - ) -> dict: + ) -> Run: """Transform an Actor run into a run of another Actor with a new input. 
https://docs.apify.com/api/v2#/reference/actor-runs/metamorph-run/metamorph-run @@ -476,7 +487,8 @@ async def metamorph( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + data = response_to_dict(response) + return Run.model_validate(data) async def resurrect( self, @@ -487,7 +499,7 @@ async def resurrect( max_items: int | None = None, max_total_charge_usd: Decimal | None = None, restart_on_error: bool | None = None, - ) -> dict: + ) -> Run: """Resurrect a finished Actor run. Only finished runs, i.e. runs with status FINISHED, FAILED, ABORTED and TIMED-OUT can be resurrected. @@ -527,9 +539,10 @@ async def resurrect( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + data = response_to_dict(response) + return Run.model_validate(data) - async def reboot(self) -> dict: + async def reboot(self) -> Run: """Reboot an Actor run. Only runs that are running, i.e. runs with status RUNNING can be rebooted. https://docs.apify.com/api/v2#/reference/actor-runs/reboot-run/reboot-run @@ -541,7 +554,8 @@ async def reboot(self) -> dict: url=self._url('reboot'), method='POST', ) - return parse_date_fields(pluck_data(response.json())) + data = response_to_dict(response) + return Run.model_validate(data) def dataset(self) -> DatasetClientAsync: """Get the client for the default dataset of the Actor run. @@ -608,11 +622,11 @@ async def get_streamed_log( `StreamedLog` instance for redirected logs. 
""" run_data = await self.get() - run_id = f'runId:{run_data.get("id", "")}' if run_data else '' + run_id = f'runId:{run_data.id}' if run_data and run_data.id else '' - actor_id = run_data.get('actId', '') if run_data else '' - actor_data = await self.root_client.actor(actor_id=actor_id).get() or {} - actor_name = actor_data.get('name', '') if run_data else '' + actor_id = run_data.act_id if run_data and run_data.act_id else '' + actor_data = await self.root_client.actor(actor_id=actor_id).get() if actor_id else None + actor_name = actor_data.name if actor_data and hasattr(actor_data, 'name') and actor_data.name else '' if not to_logger: name = ' '.join(part for part in (actor_name, run_id) if part) @@ -673,11 +687,11 @@ async def get_status_message_watcher( `StatusMessageWatcher` instance. """ run_data = await self.get() - run_id = f'runId:{run_data.get("id", "")}' if run_data else '' + run_id = f'runId:{run_data.id}' if run_data else '' - actor_id = run_data.get('actId', '') if run_data else '' - actor_data = await self.root_client.actor(actor_id=actor_id).get() or {} - actor_name = actor_data.get('name', '') if run_data else '' + actor_id = run_data.act_id if run_data else '' + actor_data = await self.root_client.actor(actor_id=actor_id).get() + actor_name = actor_data.name if actor_data else '' if not to_logger: name = ' '.join(part for part in (actor_name, run_id) if part) diff --git a/src/apify_client/clients/resource_clients/run_collection.py b/src/apify_client/_resource_clients/run_collection.py similarity index 94% rename from src/apify_client/clients/resource_clients/run_collection.py rename to src/apify_client/_resource_clients/run_collection.py index 77c5bc38..4303044f 100644 --- a/src/apify_client/clients/resource_clients/run_collection.py +++ b/src/apify_client/_resource_clients/run_collection.py @@ -2,15 +2,16 @@ from typing import TYPE_CHECKING, Any +from apify_client._resource_clients.base import ResourceCollectionClient, 
ResourceCollectionClientAsync from apify_client._utils import maybe_extract_enum_member_value -from apify_client.clients.base import ResourceCollectionClient, ResourceCollectionClientAsync if TYPE_CHECKING: from datetime import datetime from apify_shared.consts import ActorJobStatus - from apify_client.clients.base.resource_collection_client import ListPage + from apify_client._models import RunShort + from apify_client._types import ListPage class RunCollectionClient(ResourceCollectionClient): @@ -29,7 +30,7 @@ def list( status: ActorJobStatus | list[ActorJobStatus] | None = None, started_before: str | datetime | None = None, started_after: str | datetime | None = None, - ) -> ListPage[dict]: + ) -> ListPage[RunShort]: """List all Actor runs. List all Actor runs, either of a single Actor, or all user's Actors, depending on where this client @@ -80,7 +81,7 @@ async def list( status: ActorJobStatus | list[ActorJobStatus] | None = None, started_before: str | datetime | None = None, started_after: str | datetime | None = None, - ) -> ListPage[dict]: + ) -> ListPage[RunShort]: """List all Actor runs. 
List all Actor runs, either of a single Actor, or all user's Actors, depending on where this client diff --git a/src/apify_client/clients/resource_clients/schedule.py b/src/apify_client/_resource_clients/schedule.py similarity index 83% rename from src/apify_client/clients/resource_clients/schedule.py rename to src/apify_client/_resource_clients/schedule.py index b8908853..5f588dee 100644 --- a/src/apify_client/clients/resource_clients/schedule.py +++ b/src/apify_client/_resource_clients/schedule.py @@ -2,8 +2,9 @@ from typing import Any -from apify_client._utils import catch_not_found_or_throw, filter_out_none_values_recursively, pluck_data_as_list -from apify_client.clients.base import ResourceClient, ResourceClientAsync +from apify_client._models import ScheduleInvoked, ScheduleResponseData +from apify_client._resource_clients.base import ResourceClient, ResourceClientAsync +from apify_client._utils import catch_not_found_or_throw, filter_out_none_values_recursively, response_to_list from apify_client.errors import ApifyApiError @@ -37,7 +38,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'schedules') super().__init__(*args, resource_path=resource_path, **kwargs) - def get(self) -> dict | None: + def get(self) -> ScheduleResponseData | None: """Return information about the schedule. https://docs.apify.com/api/v2#/reference/schedules/schedule-object/get-schedule @@ -45,7 +46,8 @@ def get(self) -> dict | None: Returns: The retrieved schedule. """ - return self._get() + result = self._get() + return ScheduleResponseData.model_validate(result) if result is not None else None def update( self, @@ -58,7 +60,7 @@ def update( description: str | None = None, timezone: str | None = None, title: str | None = None, - ) -> dict: + ) -> ScheduleResponseData: """Update the schedule with specified fields. 
https://docs.apify.com/api/v2#/reference/schedules/schedule-object/update-schedule @@ -89,7 +91,8 @@ def update( title=title, ) - return self._update(filter_out_none_values_recursively(schedule_representation)) + result = self._update(filter_out_none_values_recursively(schedule_representation)) + return ScheduleResponseData.model_validate(result) def delete(self) -> None: """Delete the schedule. @@ -98,7 +101,7 @@ def delete(self) -> None: """ self._delete() - def get_log(self) -> list | None: + def get_log(self) -> list[ScheduleInvoked] | None: """Return log for the given schedule. https://docs.apify.com/api/v2#/reference/schedules/schedule-log/get-schedule-log @@ -112,7 +115,8 @@ def get_log(self) -> list | None: method='GET', params=self._params(), ) - return pluck_data_as_list(response.json()) + data = response_to_list(response) + return [ScheduleInvoked.model_validate(item) for item in data] if data else None except ApifyApiError as exc: catch_not_found_or_throw(exc) @@ -126,7 +130,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'schedules') super().__init__(*args, resource_path=resource_path, **kwargs) - async def get(self) -> dict | None: + async def get(self) -> ScheduleResponseData | None: """Return information about the schedule. https://docs.apify.com/api/v2#/reference/schedules/schedule-object/get-schedule @@ -134,7 +138,8 @@ async def get(self) -> dict | None: Returns: The retrieved schedule. """ - return await self._get() + result = await self._get() + return ScheduleResponseData.model_validate(result) if result is not None else None async def update( self, @@ -147,7 +152,7 @@ async def update( description: str | None = None, timezone: str | None = None, title: str | None = None, - ) -> dict: + ) -> ScheduleResponseData: """Update the schedule with specified fields. 
https://docs.apify.com/api/v2#/reference/schedules/schedule-object/update-schedule @@ -178,7 +183,8 @@ async def update( title=title, ) - return await self._update(filter_out_none_values_recursively(schedule_representation)) + result = await self._update(filter_out_none_values_recursively(schedule_representation)) + return ScheduleResponseData.model_validate(result) async def delete(self) -> None: """Delete the schedule. @@ -187,7 +193,7 @@ async def delete(self) -> None: """ await self._delete() - async def get_log(self) -> list | None: + async def get_log(self) -> list[ScheduleInvoked] | None: """Return log for the given schedule. https://docs.apify.com/api/v2#/reference/schedules/schedule-log/get-schedule-log @@ -201,7 +207,8 @@ async def get_log(self) -> list | None: method='GET', params=self._params(), ) - return pluck_data_as_list(response.json()) + data = response_to_list(response) + return [ScheduleInvoked.model_validate(item) for item in data] if data else None except ApifyApiError as exc: catch_not_found_or_throw(exc) diff --git a/src/apify_client/clients/resource_clients/schedule_collection.py b/src/apify_client/_resource_clients/schedule_collection.py similarity index 87% rename from src/apify_client/clients/resource_clients/schedule_collection.py rename to src/apify_client/_resource_clients/schedule_collection.py index e8386edf..055914f9 100644 --- a/src/apify_client/clients/resource_clients/schedule_collection.py +++ b/src/apify_client/_resource_clients/schedule_collection.py @@ -2,12 +2,13 @@ from typing import TYPE_CHECKING, Any +from apify_client._models import GetListOfSchedulesResponseDataItems, ScheduleResponseData +from apify_client._resource_clients.base import ResourceCollectionClient, ResourceCollectionClientAsync +from apify_client._resource_clients.schedule import _get_schedule_representation from apify_client._utils import filter_out_none_values_recursively -from apify_client.clients.base import ResourceCollectionClient, 
ResourceCollectionClientAsync -from apify_client.clients.resource_clients.schedule import _get_schedule_representation if TYPE_CHECKING: - from apify_client.clients.base.resource_collection_client import ListPage + from apify_client._types import ListPage class ScheduleCollectionClient(ResourceCollectionClient): @@ -23,7 +24,7 @@ def list( limit: int | None = None, offset: int | None = None, desc: bool | None = None, - ) -> ListPage[dict]: + ) -> ListPage[GetListOfSchedulesResponseDataItems]: """List the available schedules. https://docs.apify.com/api/v2#/reference/schedules/schedules-collection/get-list-of-schedules @@ -49,7 +50,7 @@ def create( description: str | None = None, timezone: str | None = None, title: str | None = None, - ) -> dict: + ) -> ScheduleResponseData: """Create a new schedule. https://docs.apify.com/api/v2#/reference/schedules/schedules-collection/create-schedule @@ -83,7 +84,8 @@ def create( title=title, ) - return self._create(filter_out_none_values_recursively(schedule_representation)) + result = self._create(filter_out_none_values_recursively(schedule_representation)) + return ScheduleResponseData.model_validate(result) class ScheduleCollectionClientAsync(ResourceCollectionClientAsync): @@ -99,7 +101,7 @@ async def list( limit: int | None = None, offset: int | None = None, desc: bool | None = None, - ) -> ListPage[dict]: + ) -> ListPage[GetListOfSchedulesResponseDataItems]: """List the available schedules. https://docs.apify.com/api/v2#/reference/schedules/schedules-collection/get-list-of-schedules @@ -125,7 +127,7 @@ async def create( description: str | None = None, timezone: str | None = None, title: str | None = None, - ) -> dict: + ) -> ScheduleResponseData: """Create a new schedule. 
https://docs.apify.com/api/v2#/reference/schedules/schedules-collection/create-schedule @@ -159,4 +161,5 @@ async def create( title=title, ) - return await self._create(filter_out_none_values_recursively(schedule_representation)) + result = await self._create(filter_out_none_values_recursively(schedule_representation)) + return ScheduleResponseData.model_validate(result) diff --git a/src/apify_client/clients/resource_clients/store_collection.py b/src/apify_client/_resource_clients/store_collection.py similarity index 92% rename from src/apify_client/clients/resource_clients/store_collection.py rename to src/apify_client/_resource_clients/store_collection.py index f04200a0..229caa0a 100644 --- a/src/apify_client/clients/resource_clients/store_collection.py +++ b/src/apify_client/_resource_clients/store_collection.py @@ -2,10 +2,11 @@ from typing import TYPE_CHECKING, Any -from apify_client.clients.base import ResourceCollectionClient, ResourceCollectionClientAsync +from apify_client._resource_clients.base import ResourceCollectionClient, ResourceCollectionClientAsync if TYPE_CHECKING: - from apify_client.clients.base.resource_collection_client import ListPage + from apify_client._models import ActorShort + from apify_client._types import ListPage class StoreCollectionClient(ResourceCollectionClient): @@ -25,7 +26,7 @@ def list( category: str | None = None, username: str | None = None, pricing_model: str | None = None, - ) -> ListPage[dict]: + ) -> ListPage[ActorShort]: """List Actors in Apify store. https://docs.apify.com/api/v2/#/reference/store/store-actors-collection/get-list-of-actors-in-store @@ -71,7 +72,7 @@ async def list( category: str | None = None, username: str | None = None, pricing_model: str | None = None, - ) -> ListPage[dict]: + ) -> ListPage[ActorShort]: """List Actors in Apify store. 
https://docs.apify.com/api/v2/#/reference/store/store-actors-collection/get-list-of-actors-in-store diff --git a/src/apify_client/clients/resource_clients/task.py b/src/apify_client/_resource_clients/task.py similarity index 94% rename from src/apify_client/clients/resource_clients/task.py rename to src/apify_client/_resource_clients/task.py index da0837d2..827b104c 100644 --- a/src/apify_client/clients/resource_clients/task.py +++ b/src/apify_client/_resource_clients/task.py @@ -2,20 +2,16 @@ from typing import TYPE_CHECKING, Any, cast +from apify_client._models import Run, Task +from apify_client._resource_clients.base import ResourceClient, ResourceClientAsync +from apify_client._resource_clients.run import RunClient, RunClientAsync +from apify_client._resource_clients.run_collection import RunCollectionClient, RunCollectionClientAsync +from apify_client._resource_clients.webhook_collection import WebhookCollectionClient, WebhookCollectionClientAsync from apify_client._utils import ( catch_not_found_or_throw, encode_webhook_list_to_base64, filter_out_none_values_recursively, maybe_extract_enum_member_value, - parse_date_fields, - pluck_data, -) -from apify_client.clients.base import ResourceClient, ResourceClientAsync -from apify_client.clients.resource_clients.run import RunClient, RunClientAsync -from apify_client.clients.resource_clients.run_collection import RunCollectionClient, RunCollectionClientAsync -from apify_client.clients.resource_clients.webhook_collection import ( - WebhookCollectionClient, - WebhookCollectionClientAsync, ) from apify_client.errors import ApifyApiError @@ -70,7 +66,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'actor-tasks') super().__init__(*args, resource_path=resource_path, **kwargs) - def get(self) -> dict | None: + def get(self) -> Task | None: """Retrieve the task. 
https://docs.apify.com/api/v2#/reference/actor-tasks/task-object/get-task @@ -78,7 +74,8 @@ def get(self) -> dict | None: Returns: The retrieved task. """ - return self._get() + result = self._get() + return Task.model_validate(result) if result is not None else None def update( self, @@ -96,7 +93,7 @@ def update( actor_standby_idle_timeout_secs: int | None = None, actor_standby_build: str | None = None, actor_standby_memory_mbytes: int | None = None, - ) -> dict: + ) -> Task: """Update the task with specified fields. https://docs.apify.com/api/v2#/reference/actor-tasks/task-object/update-task @@ -143,7 +140,8 @@ def update( actor_standby_memory_mbytes=actor_standby_memory_mbytes, ) - return self._update(filter_out_none_values_recursively(task_representation)) + result = self._update(filter_out_none_values_recursively(task_representation)) + return Task.model_validate(result) def delete(self) -> None: """Delete the task. @@ -163,7 +161,7 @@ def start( restart_on_error: bool | None = None, wait_for_finish: int | None = None, webhooks: list[dict] | None = None, - ) -> dict: + ) -> Run: """Start the task and immediately return the Run object. https://docs.apify.com/api/v2#/reference/actor-tasks/run-collection/run-task @@ -211,7 +209,8 @@ def start( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return Run.model_validate(result) def call( self, @@ -224,7 +223,7 @@ def call( restart_on_error: bool | None = None, webhooks: list[dict] | None = None, wait_secs: int | None = None, - ) -> dict | None: + ) -> Run | None: """Start a task and wait for it to finish before returning the Run object. It waits indefinitely, unless the wait_secs argument is provided. 
@@ -262,7 +261,7 @@ def call( webhooks=webhooks, ) - return self.root_client.run(started_run['id']).wait_for_finish(wait_secs=wait_secs) + return self.root_client.run(started_run.id).wait_for_finish(wait_secs=wait_secs) def get_input(self) -> dict | None: """Retrieve the default input for this task. @@ -338,7 +337,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'actor-tasks') super().__init__(*args, resource_path=resource_path, **kwargs) - async def get(self) -> dict | None: + async def get(self) -> Task | None: """Retrieve the task. https://docs.apify.com/api/v2#/reference/actor-tasks/task-object/get-task @@ -346,7 +345,8 @@ async def get(self) -> dict | None: Returns: The retrieved task. """ - return await self._get() + result = await self._get() + return Task.model_validate(result) if result is not None else None async def update( self, @@ -364,7 +364,7 @@ async def update( actor_standby_idle_timeout_secs: int | None = None, actor_standby_build: str | None = None, actor_standby_memory_mbytes: int | None = None, - ) -> dict: + ) -> Task: """Update the task with specified fields. https://docs.apify.com/api/v2#/reference/actor-tasks/task-object/update-task @@ -411,7 +411,8 @@ async def update( actor_standby_memory_mbytes=actor_standby_memory_mbytes, ) - return await self._update(filter_out_none_values_recursively(task_representation)) + result = await self._update(filter_out_none_values_recursively(task_representation)) + return Task.model_validate(result) async def delete(self) -> None: """Delete the task. @@ -431,7 +432,7 @@ async def start( restart_on_error: bool | None = None, wait_for_finish: int | None = None, webhooks: list[dict] | None = None, - ) -> dict: + ) -> Run: """Start the task and immediately return the Run object. 
https://docs.apify.com/api/v2#/reference/actor-tasks/run-collection/run-task @@ -479,7 +480,8 @@ async def start( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return Run.model_validate(result) async def call( self, @@ -492,7 +494,7 @@ async def call( restart_on_error: bool | None = None, webhooks: list[dict] | None = None, wait_secs: int | None = None, - ) -> dict | None: + ) -> Run | None: """Start a task and wait for it to finish before returning the Run object. It waits indefinitely, unless the wait_secs argument is provided. @@ -529,8 +531,8 @@ async def call( restart_on_error=restart_on_error, webhooks=webhooks, ) - - return await self.root_client.run(started_run['id']).wait_for_finish(wait_secs=wait_secs) + run_client = self.root_client.run(started_run.id) + return await run_client.wait_for_finish(wait_secs=wait_secs) async def get_input(self) -> dict | None: """Retrieve the default input for this task. diff --git a/src/apify_client/clients/resource_clients/task_collection.py b/src/apify_client/_resource_clients/task_collection.py similarity index 93% rename from src/apify_client/clients/resource_clients/task_collection.py rename to src/apify_client/_resource_clients/task_collection.py index 0f8fe188..11be2e93 100644 --- a/src/apify_client/clients/resource_clients/task_collection.py +++ b/src/apify_client/_resource_clients/task_collection.py @@ -2,12 +2,13 @@ from typing import TYPE_CHECKING, Any +from apify_client._models import Task, TaskShort +from apify_client._resource_clients.base import ResourceCollectionClient, ResourceCollectionClientAsync +from apify_client._resource_clients.task import get_task_representation from apify_client._utils import filter_out_none_values_recursively -from apify_client.clients.base import ResourceCollectionClient, ResourceCollectionClientAsync -from apify_client.clients.resource_clients.task import get_task_representation if TYPE_CHECKING: - from 
apify_client.clients.base.resource_collection_client import ListPage + from apify_client._types import ListPage class TaskCollectionClient(ResourceCollectionClient): @@ -23,7 +24,7 @@ def list( limit: int | None = None, offset: int | None = None, desc: bool | None = None, - ) -> ListPage[dict]: + ) -> ListPage[TaskShort]: """List the available tasks. https://docs.apify.com/api/v2#/reference/actor-tasks/task-collection/get-list-of-tasks @@ -55,7 +56,7 @@ def create( actor_standby_idle_timeout_secs: int | None = None, actor_standby_build: str | None = None, actor_standby_memory_mbytes: int | None = None, - ) -> dict: + ) -> Task: """Create a new task. https://docs.apify.com/api/v2#/reference/actor-tasks/task-collection/create-task @@ -104,7 +105,8 @@ def create( actor_standby_memory_mbytes=actor_standby_memory_mbytes, ) - return self._create(filter_out_none_values_recursively(task_representation)) + result = self._create(filter_out_none_values_recursively(task_representation)) + return Task.model_validate(result) class TaskCollectionClientAsync(ResourceCollectionClientAsync): @@ -120,7 +122,7 @@ async def list( limit: int | None = None, offset: int | None = None, desc: bool | None = None, - ) -> ListPage[dict]: + ) -> ListPage[TaskShort]: """List the available tasks. https://docs.apify.com/api/v2#/reference/actor-tasks/task-collection/get-list-of-tasks @@ -152,7 +154,7 @@ async def create( actor_standby_idle_timeout_secs: int | None = None, actor_standby_build: str | None = None, actor_standby_memory_mbytes: int | None = None, - ) -> dict: + ) -> Task: """Create a new task. 
https://docs.apify.com/api/v2#/reference/actor-tasks/task-collection/create-task @@ -201,4 +203,5 @@ async def create( actor_standby_memory_mbytes=actor_standby_memory_mbytes, ) - return await self._create(filter_out_none_values_recursively(task_representation)) + result = await self._create(filter_out_none_values_recursively(task_representation)) + return Task.model_validate(result) diff --git a/src/apify_client/clients/resource_clients/user.py b/src/apify_client/_resource_clients/user.py similarity index 76% rename from src/apify_client/clients/resource_clients/user.py rename to src/apify_client/_resource_clients/user.py index 86a81c07..1d58a5bb 100644 --- a/src/apify_client/clients/resource_clients/user.py +++ b/src/apify_client/_resource_clients/user.py @@ -2,13 +2,9 @@ from typing import Any -from apify_client._utils import ( - catch_not_found_or_throw, - filter_out_none_values_recursively, - parse_date_fields, - pluck_data, -) -from apify_client.clients.base import ResourceClient, ResourceClientAsync +from apify_client._models import AccountLimits, MonthlyUsage, UserPrivateInfo, UserPublicInfo +from apify_client._resource_clients.base import ResourceClient, ResourceClientAsync +from apify_client._utils import catch_not_found_or_throw, filter_out_none_values_recursively, response_to_dict from apify_client.errors import ApifyApiError @@ -22,7 +18,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'users') super().__init__(*args, resource_id=resource_id, resource_path=resource_path, **kwargs) - def get(self) -> dict | None: + def get(self) -> UserPublicInfo | UserPrivateInfo | None: """Return information about user account. You receive all or only public info based on your token permissions. @@ -32,9 +28,16 @@ def get(self) -> dict | None: Returns: The retrieved user data, or None if the user does not exist. 
""" - return self._get() + result = self._get() + if result is None: + return None + # Try to parse as UserPrivateInfo first (has more fields), fall back to UserPublicInfo + try: + return UserPrivateInfo.model_validate(result) + except Exception: + return UserPublicInfo.model_validate(result) - def monthly_usage(self) -> dict | None: + def monthly_usage(self) -> MonthlyUsage | None: """Return monthly usage of the user account. This includes a complete usage summary for the current usage cycle, an overall sum, as well as a daily breakdown @@ -52,14 +55,15 @@ def monthly_usage(self) -> dict | None: method='GET', params=self._params(), ) - return parse_date_fields(pluck_data(response.json())) + data = response_to_dict(response) + return MonthlyUsage.model_validate(data) if data is not None else None except ApifyApiError as exc: catch_not_found_or_throw(exc) return None - def limits(self) -> dict | None: + def limits(self) -> AccountLimits | None: """Return a complete summary of the user account's limits. It is the same information which is available on the account's Limits page. The returned data includes @@ -76,7 +80,8 @@ def limits(self) -> dict | None: method='GET', params=self._params(), ) - return parse_date_fields(pluck_data(response.json())) + data = response_to_dict(response) + return AccountLimits.model_validate(data) if data is not None else None except ApifyApiError as exc: catch_not_found_or_throw(exc) @@ -113,7 +118,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'users') super().__init__(*args, resource_id=resource_id, resource_path=resource_path, **kwargs) - async def get(self) -> dict | None: + async def get(self) -> UserPublicInfo | UserPrivateInfo | None: """Return information about user account. You receive all or only public info based on your token permissions. @@ -123,9 +128,16 @@ async def get(self) -> dict | None: Returns: The retrieved user data, or None if the user does not exist. 
""" - return await self._get() + result = await self._get() + if result is None: + return None + # Try to parse as UserPrivateInfo first (has more fields), fall back to UserPublicInfo + try: + return UserPrivateInfo.model_validate(result) + except Exception: + return UserPublicInfo.model_validate(result) - async def monthly_usage(self) -> dict | None: + async def monthly_usage(self) -> MonthlyUsage | None: """Return monthly usage of the user account. This includes a complete usage summary for the current usage cycle, an overall sum, as well as a daily breakdown @@ -143,14 +155,15 @@ async def monthly_usage(self) -> dict | None: method='GET', params=self._params(), ) - return parse_date_fields(pluck_data(response.json())) + data = response_to_dict(response) + return MonthlyUsage.model_validate(data) if data is not None else None except ApifyApiError as exc: catch_not_found_or_throw(exc) return None - async def limits(self) -> dict | None: + async def limits(self) -> AccountLimits | None: """Return a complete summary of the user account's limits. It is the same information which is available on the account's Limits page. 
The returned data includes @@ -167,7 +180,8 @@ async def limits(self) -> dict | None: method='GET', params=self._params(), ) - return parse_date_fields(pluck_data(response.json())) + data = response_to_dict(response) + return AccountLimits.model_validate(data) if data is not None else None except ApifyApiError as exc: catch_not_found_or_throw(exc) diff --git a/src/apify_client/clients/resource_clients/webhook.py b/src/apify_client/_resource_clients/webhook.py similarity index 88% rename from src/apify_client/clients/resource_clients/webhook.py rename to src/apify_client/_resource_clients/webhook.py index 559485a6..7764c087 100644 --- a/src/apify_client/clients/resource_clients/webhook.py +++ b/src/apify_client/_resource_clients/webhook.py @@ -2,17 +2,16 @@ from typing import TYPE_CHECKING, Any +from apify_client._models import Webhook, WebhookDispatch +from apify_client._resource_clients.base import ResourceClient, ResourceClientAsync +from apify_client._resource_clients.webhook_dispatch_collection import ( + WebhookDispatchCollectionClient, + WebhookDispatchCollectionClientAsync, +) from apify_client._utils import ( catch_not_found_or_throw, filter_out_none_values_recursively, maybe_extract_enum_member_value, - parse_date_fields, - pluck_data, -) -from apify_client.clients.base import ResourceClient, ResourceClientAsync -from apify_client.clients.resource_clients.webhook_dispatch_collection import ( - WebhookDispatchCollectionClient, - WebhookDispatchCollectionClientAsync, ) from apify_client.errors import ApifyApiError @@ -66,7 +65,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'webhooks') super().__init__(*args, resource_path=resource_path, **kwargs) - def get(self) -> dict | None: + def get(self) -> Webhook | None: """Retrieve the webhook. 
https://docs.apify.com/api/v2#/reference/webhooks/webhook-object/get-webhook @@ -74,7 +73,8 @@ def get(self) -> dict | None: Returns: The retrieved webhook, or None if it does not exist. """ - return self._get() + result = self._get() + return Webhook.model_validate(result) if result is not None else None def update( self, @@ -89,7 +89,7 @@ def update( ignore_ssl_errors: bool | None = None, do_not_retry: bool | None = None, is_ad_hoc: bool | None = None, - ) -> dict: + ) -> Webhook: """Update the webhook. https://docs.apify.com/api/v2#/reference/webhooks/webhook-object/update-webhook @@ -123,7 +123,8 @@ def update( is_ad_hoc=is_ad_hoc, ) - return self._update(filter_out_none_values_recursively(webhook_representation)) + result = self._update(filter_out_none_values_recursively(webhook_representation)) + return Webhook.model_validate(result) def delete(self) -> None: """Delete the webhook. @@ -132,7 +133,7 @@ def delete(self) -> None: """ return self._delete() - def test(self) -> dict | None: + def test(self) -> WebhookDispatch | None: """Test a webhook. Creates a webhook dispatch with a dummy payload. @@ -149,7 +150,8 @@ def test(self) -> dict | None: params=self._params(), ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return WebhookDispatch.model_validate(result) if result is not None else None except ApifyApiError as exc: catch_not_found_or_throw(exc) @@ -176,7 +178,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'webhooks') super().__init__(*args, resource_path=resource_path, **kwargs) - async def get(self) -> dict | None: + async def get(self) -> Webhook | None: """Retrieve the webhook. https://docs.apify.com/api/v2#/reference/webhooks/webhook-object/get-webhook @@ -184,7 +186,8 @@ async def get(self) -> dict | None: Returns: The retrieved webhook, or None if it does not exist. 
""" - return await self._get() + result = await self._get() + return Webhook.model_validate(result) if result is not None else None async def update( self, @@ -199,7 +202,7 @@ async def update( ignore_ssl_errors: bool | None = None, do_not_retry: bool | None = None, is_ad_hoc: bool | None = None, - ) -> dict: + ) -> Webhook: """Update the webhook. https://docs.apify.com/api/v2#/reference/webhooks/webhook-object/update-webhook @@ -233,7 +236,8 @@ async def update( is_ad_hoc=is_ad_hoc, ) - return await self._update(filter_out_none_values_recursively(webhook_representation)) + result = await self._update(filter_out_none_values_recursively(webhook_representation)) + return Webhook.model_validate(result) async def delete(self) -> None: """Delete the webhook. @@ -242,7 +246,7 @@ async def delete(self) -> None: """ return await self._delete() - async def test(self) -> dict | None: + async def test(self) -> WebhookDispatch | None: """Test a webhook. Creates a webhook dispatch with a dummy payload. 
@@ -259,7 +263,8 @@ async def test(self) -> dict | None: params=self._params(), ) - return parse_date_fields(pluck_data(response.json())) + result = response.json() + return WebhookDispatch.model_validate(result) if result is not None else None except ApifyApiError as exc: catch_not_found_or_throw(exc) diff --git a/src/apify_client/clients/resource_clients/webhook_collection.py b/src/apify_client/_resource_clients/webhook_collection.py similarity index 91% rename from src/apify_client/clients/resource_clients/webhook_collection.py rename to src/apify_client/_resource_clients/webhook_collection.py index 2add4361..9c77dfeb 100644 --- a/src/apify_client/clients/resource_clients/webhook_collection.py +++ b/src/apify_client/_resource_clients/webhook_collection.py @@ -2,14 +2,15 @@ from typing import TYPE_CHECKING, Any +from apify_client._models import Webhook, WebhookShort +from apify_client._resource_clients.base import ResourceCollectionClient, ResourceCollectionClientAsync +from apify_client._resource_clients.webhook import get_webhook_representation from apify_client._utils import filter_out_none_values_recursively -from apify_client.clients.base import ResourceCollectionClient, ResourceCollectionClientAsync -from apify_client.clients.resource_clients.webhook import get_webhook_representation if TYPE_CHECKING: from apify_shared.consts import WebhookEventType - from apify_client.clients.base.resource_collection_client import ListPage + from apify_client._types import ListPage class WebhookCollectionClient(ResourceCollectionClient): @@ -25,7 +26,7 @@ def list( limit: int | None = None, offset: int | None = None, desc: bool | None = None, - ) -> ListPage[dict]: + ) -> ListPage[WebhookShort]: """List the available webhooks. 
https://docs.apify.com/api/v2#/reference/webhooks/webhook-collection/get-list-of-webhooks @@ -54,7 +55,7 @@ def create( do_not_retry: bool | None = None, idempotency_key: str | None = None, is_ad_hoc: bool | None = None, - ) -> dict: + ) -> Webhook: """Create a new webhook. You have to specify exactly one out of actor_id, actor_task_id or actor_run_id. @@ -93,7 +94,8 @@ def create( is_ad_hoc=is_ad_hoc, ) - return self._create(filter_out_none_values_recursively(webhook_representation)) + result = self._create(filter_out_none_values_recursively(webhook_representation)) + return Webhook.model_validate(result) class WebhookCollectionClientAsync(ResourceCollectionClientAsync): @@ -109,7 +111,7 @@ async def list( limit: int | None = None, offset: int | None = None, desc: bool | None = None, - ) -> ListPage[dict]: + ) -> ListPage[WebhookShort]: """List the available webhooks. https://docs.apify.com/api/v2#/reference/webhooks/webhook-collection/get-list-of-webhooks @@ -138,7 +140,7 @@ async def create( do_not_retry: bool | None = None, idempotency_key: str | None = None, is_ad_hoc: bool | None = None, - ) -> dict: + ) -> Webhook: """Create a new webhook. You have to specify exactly one out of actor_id, actor_task_id or actor_run_id. 
@@ -177,4 +179,5 @@ async def create( is_ad_hoc=is_ad_hoc, ) - return await self._create(filter_out_none_values_recursively(webhook_representation)) + result = await self._create(filter_out_none_values_recursively(webhook_representation)) + return Webhook.model_validate(result) diff --git a/src/apify_client/clients/resource_clients/webhook_dispatch.py b/src/apify_client/_resource_clients/webhook_dispatch.py similarity index 72% rename from src/apify_client/clients/resource_clients/webhook_dispatch.py rename to src/apify_client/_resource_clients/webhook_dispatch.py index 30a2a26e..c76150f2 100644 --- a/src/apify_client/clients/resource_clients/webhook_dispatch.py +++ b/src/apify_client/_resource_clients/webhook_dispatch.py @@ -2,7 +2,8 @@ from typing import Any -from apify_client.clients.base import ResourceClient, ResourceClientAsync +from apify_client._models import WebhookDispatch +from apify_client._resource_clients.base import ResourceClient, ResourceClientAsync class WebhookDispatchClient(ResourceClient): @@ -12,7 +13,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'webhook-dispatches') super().__init__(*args, resource_path=resource_path, **kwargs) - def get(self) -> dict | None: + def get(self) -> WebhookDispatch | None: """Retrieve the webhook dispatch. https://docs.apify.com/api/v2#/reference/webhook-dispatches/webhook-dispatch-object/get-webhook-dispatch @@ -20,7 +21,8 @@ def get(self) -> dict | None: Returns: The retrieved webhook dispatch, or None if it does not exist. 
""" - return self._get() + result = self._get() + return WebhookDispatch.model_validate(result) if result is not None else None class WebhookDispatchClientAsync(ResourceClientAsync): @@ -30,7 +32,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'webhook-dispatches') super().__init__(*args, resource_path=resource_path, **kwargs) - async def get(self) -> dict | None: + async def get(self) -> WebhookDispatch | None: """Retrieve the webhook dispatch. https://docs.apify.com/api/v2#/reference/webhook-dispatches/webhook-dispatch-object/get-webhook-dispatch @@ -38,4 +40,5 @@ async def get(self) -> dict | None: Returns: The retrieved webhook dispatch, or None if it does not exist. """ - return await self._get() + result = await self._get() + return WebhookDispatch.model_validate(result) if result is not None else None diff --git a/src/apify_client/clients/resource_clients/webhook_dispatch_collection.py b/src/apify_client/_resource_clients/webhook_dispatch_collection.py similarity index 89% rename from src/apify_client/clients/resource_clients/webhook_dispatch_collection.py rename to src/apify_client/_resource_clients/webhook_dispatch_collection.py index 60ac1df1..4e38268c 100644 --- a/src/apify_client/clients/resource_clients/webhook_dispatch_collection.py +++ b/src/apify_client/_resource_clients/webhook_dispatch_collection.py @@ -2,10 +2,11 @@ from typing import TYPE_CHECKING, Any -from apify_client.clients.base import ResourceCollectionClient, ResourceCollectionClientAsync +from apify_client._resource_clients.base import ResourceCollectionClient, ResourceCollectionClientAsync if TYPE_CHECKING: - from apify_client.clients.base.resource_collection_client import ListPage + from apify_client._models import WebhookDispatch + from apify_client._types import ListPage class WebhookDispatchCollectionClient(ResourceCollectionClient): @@ -21,7 +22,7 @@ def list( limit: int | None = None, offset: int | None = None, desc: bool | 
None = None, - ) -> ListPage[dict]: + ) -> ListPage[WebhookDispatch]: """List all webhook dispatches of a user. https://docs.apify.com/api/v2#/reference/webhook-dispatches/webhook-dispatches-collection/get-list-of-webhook-dispatches @@ -50,7 +51,7 @@ async def list( limit: int | None = None, offset: int | None = None, desc: bool | None = None, - ) -> ListPage[dict]: + ) -> ListPage[WebhookDispatch]: """List all webhook dispatches of a user. https://docs.apify.com/api/v2#/reference/webhook-dispatches/webhook-dispatches-collection/get-list-of-webhook-dispatches diff --git a/src/apify_client/_statistics.py b/src/apify_client/_statistics.py deleted file mode 100644 index d06d8d82..00000000 --- a/src/apify_client/_statistics.py +++ /dev/null @@ -1,27 +0,0 @@ -from collections import defaultdict -from dataclasses import dataclass, field - - -@dataclass -class Statistics: - """Statistics about API client usage and rate limit errors.""" - - calls: int = 0 - """Total number of API method calls made by the client.""" - - requests: int = 0 - """Total number of HTTP requests sent, including retries.""" - - rate_limit_errors: defaultdict[int, int] = field(default_factory=lambda: defaultdict(int)) - """List tracking which retry attempts encountered rate limit (429) errors.""" - - def add_rate_limit_error(self, attempt: int) -> None: - """Add rate limit error for specific attempt. - - Args: - attempt: The attempt number (1-based indexing). 
- """ - if attempt < 1: - raise ValueError('Attempt must be greater than 0') - - self.rate_limit_errors[attempt - 1] += 1 diff --git a/src/apify_client/_types.py b/src/apify_client/_types.py index 5cfe08c9..7371eaa2 100644 --- a/src/apify_client/_types.py +++ b/src/apify_client/_types.py @@ -1,15 +1,17 @@ from __future__ import annotations +from collections import defaultdict +from dataclasses import dataclass, field from typing import Any, Generic, TypeVar -JSONSerializable = str | int | float | bool | None | dict[str, Any] | list[Any] +T = TypeVar('T') + +JsonSerializable = str | int | float | bool | None | dict[str, Any] | list[Any] """Type for representing json-serializable values. It's close enough to the real thing supported by json.parse, and the best we can do until mypy supports recursive types. It was suggested in a discussion with (and approved by) Guido van Rossum, so I'd consider it correct enough. """ -T = TypeVar('T') - class ListPage(Generic[T]): """A single page of items returned from a list() method.""" @@ -24,7 +26,7 @@ class ListPage(Generic[T]): """The limit on the number of returned objects offset specified in the API call.""" limit: int - """The offset of the first object specified in the API call""" + """The offset of the first object specified in the API call.""" total: int """Total number of objects matching the API call criteria.""" @@ -32,11 +34,36 @@ class ListPage(Generic[T]): desc: bool """Whether the listing is descending or not.""" - def __init__(self: ListPage, data: dict) -> None: - """Initialize a ListPage instance from the API response data.""" + def __init__(self, data: dict) -> None: + """Initialize a new instance.""" self.items = data.get('items', []) self.offset = data.get('offset', 0) self.limit = data.get('limit', 0) self.count = data['count'] if 'count' in data else len(self.items) self.total = data.get('total', self.offset + self.count) self.desc = data.get('desc', False) + + +@dataclass +class Statistics: + 
"""Statistics about API client usage and rate limit errors.""" + + calls: int = 0 + """Total number of API method calls made by the client.""" + + requests: int = 0 + """Total number of HTTP requests sent, including retries.""" + + rate_limit_errors: defaultdict[int, int] = field(default_factory=lambda: defaultdict(int)) + """List tracking which retry attempts encountered rate limit (429) errors.""" + + def add_rate_limit_error(self, attempt: int) -> None: + """Add rate limit error for specific attempt. + + Args: + attempt: The attempt number (1-based indexing). + """ + if attempt < 1: + raise ValueError('Attempt must be greater than 0') + + self.rate_limit_errors[attempt - 1] += 1 diff --git a/src/apify_client/_utils.py b/src/apify_client/_utils.py index e33b76d1..371f4edb 100644 --- a/src/apify_client/_utils.py +++ b/src/apify_client/_utils.py @@ -2,15 +2,11 @@ import asyncio import base64 -import contextlib import io import json -import json as jsonlib import random import re import time -from collections.abc import Callable -from datetime import datetime, timezone from enum import Enum from http import HTTPStatus from typing import TYPE_CHECKING, Any, TypeVar, cast @@ -20,141 +16,121 @@ from apify_client.errors import InvalidResponseBodyError if TYPE_CHECKING: - from collections.abc import Awaitable + from collections.abc import Awaitable, Callable from impit import Response from apify_client.errors import ApifyApiError -PARSE_DATE_FIELDS_MAX_DEPTH = 3 -PARSE_DATE_FIELDS_KEY_SUFFIX = 'At' -RECORD_NOT_FOUND_EXCEPTION_TYPES = ['record-not-found', 'record-or-token-not-found'] - T = TypeVar('T') -StopRetryingType = Callable[[], None] -ListOrDict = TypeVar('ListOrDict', list, dict) - -def filter_out_none_values_recursively(dictionary: dict) -> dict: - """Return copy of the dictionary, recursively omitting all keys for which values are None.""" - return cast('dict', filter_out_none_values_recursively_internal(dictionary)) - -def 
filter_out_none_values_recursively_internal( +def filter_out_none_values_recursively( dictionary: dict, *, remove_empty_dicts: bool | None = None, -) -> dict | None: - """Recursively filters out None values from a dictionary. - - Unfortunately, it's necessary to have an internal function for the correct result typing, - without having to create complicated overloads - """ - result = {} - for k, v in dictionary.items(): - if isinstance(v, dict): - v = filter_out_none_values_recursively_internal( # noqa: PLW2901 - v, remove_empty_dicts=remove_empty_dicts is True or remove_empty_dicts is None - ) - if v is not None: - result[k] = v - if not result and remove_empty_dicts: - return None - return result - - -def parse_date_fields(data: ListOrDict, max_depth: int = PARSE_DATE_FIELDS_MAX_DEPTH) -> ListOrDict: - """Recursively parse date fields in a list or dictionary up to the specified depth.""" - if max_depth < 0: - return data - - if isinstance(data, list): - return [parse_date_fields(item, max_depth - 1) for item in data] - - if isinstance(data, dict): - - def parse(key: str, value: object) -> object: - parsed_value = value - if key.endswith(PARSE_DATE_FIELDS_KEY_SUFFIX) and isinstance(value, str): - with contextlib.suppress(ValueError): - parsed_value = datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%fZ').replace(tzinfo=timezone.utc) - elif isinstance(value, dict): - parsed_value = parse_date_fields(value, max_depth - 1) - elif isinstance(value, list): - parsed_value = parse_date_fields(value, max_depth) - return parsed_value +) -> dict: + """Return a copy of the dictionary with all None values recursively removed. - return {key: parse(key, value) for (key, value) in data.items()} - - return data + Args: + dictionary: The dictionary to filter. + remove_empty_dicts: If True, also remove empty dictionaries after filtering. + Returns: + A new dictionary without None values. 
+ """ -def is_content_type_json(content_type: str) -> bool: - """Check if the given content type is JSON.""" - return bool(re.search(r'^application/json', content_type, flags=re.IGNORECASE)) + def _internal(dictionary: dict, *, remove_empty: bool | None = None) -> dict | None: + result = {} + for key, val in dictionary.items(): + if isinstance(val, dict): + val = _internal(val, remove_empty=remove_empty) # noqa: PLW2901 + if val is not None: + result[key] = val + if not result and remove_empty: + return None + return result + return cast('dict', _internal(dictionary, remove_empty=remove_empty_dicts)) -def is_content_type_xml(content_type: str) -> bool: - """Check if the given content type is XML.""" - return bool(re.search(r'^application/.*xml$', content_type, flags=re.IGNORECASE)) +def maybe_extract_enum_member_value(maybe_enum_member: Any) -> Any: + """Extract the value from an Enum member, or return the input unchanged if not an Enum.""" + if isinstance(maybe_enum_member, Enum): + return maybe_enum_member.value + return maybe_enum_member -def is_content_type_text(content_type: str) -> bool: - """Check if the given content type is text.""" - return bool(re.search(r'^text/', content_type, flags=re.IGNORECASE)) +def to_safe_id(id: str) -> str: + """Convert a resource ID to URL-safe format by replacing `/` with `~`. -def is_file_or_bytes(value: Any) -> bool: - """Check if the input value is a file-like object or bytes. + Args: + id: The resource identifier (format: `resource_id` or `username/resource_id`). - The check for IOBase is not ideal, it would be better to use duck typing, - but then the check would be super complex, judging from how the 'requests' library does it. - This way should be good enough for the vast majority of use cases, if it causes issues, we can improve it later. + Returns: + The resource identifier with `/` replaced by `~`. 
""" - return isinstance(value, (bytes, bytearray, io.IOBase)) + return id.replace('/', '~') -def json_dumps(obj: Any) -> str: - """Dump JSON to a string with the correct settings and serializer.""" - return json.dumps(obj, ensure_ascii=False, indent=2, default=str) +def response_to_dict(response: impit.Response) -> dict: + """Ensure the API response is a dictionary. + Args: + response: The parsed API response (typically from `response.json()`). -def maybe_extract_enum_member_value(maybe_enum_member: Any) -> Any: - """Extract the value of an enumeration member if it is an Enum, otherwise return the original value.""" - if isinstance(maybe_enum_member, Enum): - return maybe_enum_member.value - return maybe_enum_member + Returns: + The response as a dictionary. + Raises: + ValueError: If the response is not a dictionary. + """ + data = response.json() + if isinstance(data, dict): + return data -def to_safe_id(id: str) -> str: - # Identificators of resources in the API are either in the format `resource_id` or `username/resource_id`. - # Since the `/` character has a special meaning in URL paths, - # we replace it with `~` for proper route parsing on the API, where after parsing the URL it's replaced back to `/`. - return id.replace('/', '~') + raise ValueError('The response is not a dictionary.') -def pluck_data(parsed_response: Any) -> dict: - if isinstance(parsed_response, dict) and 'data' in parsed_response: - return cast('dict', parsed_response['data']) +def response_to_list(response: impit.Response) -> list: + """Ensure the API response is a list. - raise ValueError('The "data" property is missing in the response.') + Args: + response: The parsed API response (typically from `response.json()`). + Returns: + The response as a list. -def pluck_data_as_list(parsed_response: Any) -> list: - if isinstance(parsed_response, dict) and 'data' in parsed_response: - return cast('list', parsed_response['data']) + Raises: + ValueError: If the response is not a list. 
+ """ + data = response.json() + if isinstance(data, list): + return data - raise ValueError('The "data" property is missing in the response.') + raise ValueError('The response is not a list.') def retry_with_exp_backoff( - func: Callable[[StopRetryingType, int], T], + func: Callable[[Callable[[], None], int], T], *, max_retries: int = 8, backoff_base_millis: int = 500, backoff_factor: float = 2, random_factor: float = 1, ) -> T: + """Retry a function with exponential backoff. + + Args: + func: Function to retry. Receives a stop_retrying callback and attempt number. + max_retries: Maximum number of retry attempts. + backoff_base_millis: Base backoff delay in milliseconds. + backoff_factor: Exponential backoff multiplier (1-10). + random_factor: Random jitter factor (0-1). + + Returns: + The return value of the function. + """ random_factor = min(max(0, random_factor), 1) backoff_factor = min(max(1, backoff_factor), 10) swallow = True @@ -181,13 +157,25 @@ def stop_retrying() -> None: async def retry_with_exp_backoff_async( - async_func: Callable[[StopRetryingType, int], Awaitable[T]], + async_func: Callable[[Callable[[], None], int], Awaitable[T]], *, max_retries: int = 8, backoff_base_millis: int = 500, backoff_factor: float = 2, random_factor: float = 1, ) -> T: + """Retry an async function with exponential backoff. + + Args: + async_func: Async function to retry. Receives a stop_retrying callback and attempt number. + max_retries: Maximum number of retry attempts. + backoff_base_millis: Base backoff delay in milliseconds. + backoff_factor: Exponential backoff multiplier (1-10). + random_factor: Random jitter factor (0-1). + + Returns: + The return value of the async function. + """ random_factor = min(max(0, random_factor), 1) backoff_factor = min(max(1, backoff_factor), 10) swallow = True @@ -214,15 +202,30 @@ def stop_retrying() -> None: def catch_not_found_or_throw(exc: ApifyApiError) -> None: + """Suppress 404 Not Found errors, re-raise all other exceptions. 
+ + Args: + exc: The API error to check. + + Raises: + ApifyApiError: If the error is not a 404 Not Found error. + """ is_not_found_status = exc.status_code == HTTPStatus.NOT_FOUND - is_not_found_type = exc.type in RECORD_NOT_FOUND_EXCEPTION_TYPES + is_not_found_type = exc.type in ['record-not-found', 'record-or-token-not-found'] if not (is_not_found_status and is_not_found_type): raise exc def encode_webhook_list_to_base64(webhooks: list[dict]) -> str: - """Encode a list of dictionaries representing webhooks to their base64-encoded representation for the API.""" - data = [] + """Encode a list of webhook dictionaries to base64 for API transmission. + + Args: + webhooks: List of webhook dictionaries with keys like "event_types", "request_url", etc. + + Returns: + Base64-encoded JSON string. + """ + data = list[dict]() for webhook in webhooks: webhook_representation = { 'eventTypes': [maybe_extract_enum_member_value(event_type) for event_type in webhook['event_types']], @@ -234,25 +237,49 @@ def encode_webhook_list_to_base64(webhooks: list[dict]) -> str: webhook_representation['headersTemplate'] = webhook['headers_template'] data.append(webhook_representation) - return base64.b64encode(jsonlib.dumps(data).encode('utf-8')).decode('ascii') + return base64.b64encode(json.dumps(data).encode('utf-8')).decode('ascii') def encode_key_value_store_record_value(value: Any, content_type: str | None = None) -> tuple[Any, str]: + """Encode a value for storage in a key-value store record. + + Args: + value: The value to encode (can be dict, str, bytes, or file-like object). + content_type: The content type. If None, it's inferred from the value type. + + Returns: + A tuple of (encoded_value, content_type). 
+ """ if not content_type: - if is_file_or_bytes(value): + if isinstance(value, (bytes, bytearray, io.IOBase)): content_type = 'application/octet-stream' elif isinstance(value, str): content_type = 'text/plain; charset=utf-8' else: content_type = 'application/json; charset=utf-8' - if 'application/json' in content_type and not is_file_or_bytes(value) and not isinstance(value, str): - value = jsonlib.dumps(value, ensure_ascii=False, indent=2, allow_nan=False, default=str).encode('utf-8') + if ( + 'application/json' in content_type + and not isinstance(value, (bytes, bytearray, io.IOBase)) + and not isinstance(value, str) + ): + value = json.dumps(value, ensure_ascii=False, indent=2, allow_nan=False, default=str).encode('utf-8') return (value, content_type) def maybe_parse_response(response: Response) -> Any: + """Parse an HTTP response based on its content type. + + Args: + response: The HTTP response to parse. + + Returns: + Parsed response data (JSON dict/list, text string, or raw bytes). + + Raises: + InvalidResponseBodyError: If the response body cannot be parsed. + """ if response.status_code == HTTPStatus.NO_CONTENT: return None @@ -261,9 +288,11 @@ def maybe_parse_response(response: Response) -> Any: content_type = response.headers['content-type'].split(';')[0].strip() try: - if is_content_type_json(content_type): + if re.search(r'^application/json', content_type, flags=re.IGNORECASE): response_data = response.json() - elif is_content_type_xml(content_type) or is_content_type_text(content_type): + elif re.search(r'^application/.*xml$', content_type, flags=re.IGNORECASE) or re.search( + r'^text/', content_type, flags=re.IGNORECASE + ): response_data = response.text else: response_data = response.content @@ -274,7 +303,14 @@ def maybe_parse_response(response: Response) -> Any: def is_retryable_error(exc: Exception) -> bool: - """Check if the given error is retryable.""" + """Check if an exception should be retried. + + Args: + exc: The exception to check. 
+ + Returns: + True if the exception is retryable (network errors, timeouts, etc.). + """ return isinstance( exc, ( diff --git a/src/apify_client/clients/__init__.py b/src/apify_client/clients/__init__.py deleted file mode 100644 index 6f1fdaaa..00000000 --- a/src/apify_client/clients/__init__.py +++ /dev/null @@ -1,131 +0,0 @@ -from .base import ( - ActorJobBaseClient, - ActorJobBaseClientAsync, - BaseClient, - BaseClientAsync, - ResourceClient, - ResourceClientAsync, - ResourceCollectionClient, - ResourceCollectionClientAsync, -) -from .resource_clients import ( - ActorClient, - ActorClientAsync, - ActorCollectionClient, - ActorCollectionClientAsync, - ActorEnvVarClient, - ActorEnvVarClientAsync, - ActorEnvVarCollectionClient, - ActorEnvVarCollectionClientAsync, - ActorVersionClient, - ActorVersionClientAsync, - ActorVersionCollectionClient, - ActorVersionCollectionClientAsync, - BuildClient, - BuildClientAsync, - BuildCollectionClient, - BuildCollectionClientAsync, - DatasetClient, - DatasetClientAsync, - DatasetCollectionClient, - DatasetCollectionClientAsync, - KeyValueStoreClient, - KeyValueStoreClientAsync, - KeyValueStoreCollectionClient, - KeyValueStoreCollectionClientAsync, - LogClient, - LogClientAsync, - RequestQueueClient, - RequestQueueClientAsync, - RequestQueueCollectionClient, - RequestQueueCollectionClientAsync, - RunClient, - RunClientAsync, - RunCollectionClient, - RunCollectionClientAsync, - ScheduleClient, - ScheduleClientAsync, - ScheduleCollectionClient, - ScheduleCollectionClientAsync, - StoreCollectionClient, - StoreCollectionClientAsync, - TaskClient, - TaskClientAsync, - TaskCollectionClient, - TaskCollectionClientAsync, - UserClient, - UserClientAsync, - WebhookClient, - WebhookClientAsync, - WebhookCollectionClient, - WebhookCollectionClientAsync, - WebhookDispatchClient, - WebhookDispatchClientAsync, - WebhookDispatchCollectionClient, - WebhookDispatchCollectionClientAsync, -) - -__all__ = [ - 'ActorClient', - 'ActorClientAsync', - 
'ActorCollectionClient', - 'ActorCollectionClientAsync', - 'ActorEnvVarClient', - 'ActorEnvVarClientAsync', - 'ActorEnvVarCollectionClient', - 'ActorEnvVarCollectionClientAsync', - 'ActorJobBaseClient', - 'ActorJobBaseClientAsync', - 'ActorVersionClient', - 'ActorVersionClientAsync', - 'ActorVersionCollectionClient', - 'ActorVersionCollectionClientAsync', - 'BaseClient', - 'BaseClientAsync', - 'BuildClient', - 'BuildClientAsync', - 'BuildCollectionClient', - 'BuildCollectionClientAsync', - 'DatasetClient', - 'DatasetClientAsync', - 'DatasetCollectionClient', - 'DatasetCollectionClientAsync', - 'KeyValueStoreClient', - 'KeyValueStoreClientAsync', - 'KeyValueStoreCollectionClient', - 'KeyValueStoreCollectionClientAsync', - 'LogClient', - 'LogClientAsync', - 'RequestQueueClient', - 'RequestQueueClientAsync', - 'RequestQueueCollectionClient', - 'RequestQueueCollectionClientAsync', - 'ResourceClient', - 'ResourceClientAsync', - 'ResourceCollectionClient', - 'ResourceCollectionClientAsync', - 'RunClient', - 'RunClientAsync', - 'RunCollectionClient', - 'RunCollectionClientAsync', - 'ScheduleClient', - 'ScheduleClientAsync', - 'ScheduleCollectionClient', - 'ScheduleCollectionClientAsync', - 'StoreCollectionClient', - 'StoreCollectionClientAsync', - 'TaskClient', - 'TaskClientAsync', - 'TaskCollectionClient', - 'TaskCollectionClientAsync', - 'UserClient', - 'UserClientAsync', - 'WebhookClient', - 'WebhookClientAsync', - 'WebhookCollectionClient', - 'WebhookCollectionClientAsync', - 'WebhookDispatchClient', - 'WebhookDispatchClientAsync', - 'WebhookDispatchCollectionClient', - 'WebhookDispatchCollectionClientAsync', -] diff --git a/src/apify_client/errors.py b/src/apify_client/errors.py index 1b83003c..3b6b0801 100644 --- a/src/apify_client/errors.py +++ b/src/apify_client/errors.py @@ -11,20 +11,19 @@ class ApifyClientError(Exception): class ApifyApiError(ApifyClientError): - """Error specific to requests to the Apify API. 
+ """Error from Apify API responses (rate limits, validation errors, internal errors). - An `ApifyApiError` is thrown for successful HTTP requests that reach the API, but the API responds with - an error response. Typically, those are rate limit errors and internal errors, which are automatically retried, - or validation errors, which are thrown immediately, because a correction by the user is needed. + Thrown when HTTP request succeeds but API returns an error response. Rate limit and internal errors are + retried automatically, while validation errors are thrown immediately for user correction. """ def __init__(self, response: impit.Response, attempt: int, method: str = 'GET') -> None: - """Initialize a new instance. + """Initialize an API error from a failed response. Args: - response: The response to the failed API call. - attempt: Which attempt was the request that failed. - method: The HTTP method used for the request. + response: The failed API response. + attempt: The attempt number when the request failed. + method: The HTTP method used. """ self.message: str | None = None self.type: str | None = None @@ -33,7 +32,7 @@ def __init__(self, response: impit.Response, attempt: int, method: str = 'GET') self.message = f'Unexpected error: {response.text}' try: response_data = response.json() - if 'error' in response_data: + if isinstance(response_data, dict) and 'error' in response_data: self.message = response_data['error']['message'] self.type = response_data['error']['type'] if 'data' in response_data['error']: @@ -48,25 +47,18 @@ def __init__(self, response: impit.Response, attempt: int, method: str = 'GET') self.attempt = attempt self.http_method = method - # TODO: self.client_method # noqa: TD003 - # TODO: self.original_stack # noqa: TD003 - # TODO: self.path # noqa: TD003 - # TODO: self.stack # noqa: TD003 - class InvalidResponseBodyError(ApifyClientError): - """Error caused by the response body failing to be parsed. 
+ """Error when response body cannot be parsed (e.g., partial JSON). - This error exists for the quite common situation, where only a partial JSON response is received and an attempt - to parse the JSON throws an error. In most cases this can be resolved by retrying the request. We do that by - identifying this error in the HTTPClient. + Commonly occurs when only partial JSON is received. Usually resolved by retrying the request. """ def __init__(self, response: impit.Response) -> None: """Initialize a new instance. Args: - response: The response which failed to be parsed. + response: The response that failed to parse. """ super().__init__('Response body could not be parsed') diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 5e1d4de1..f299f00f 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -59,19 +59,22 @@ def test_dataset_of_another_user(api_token_2: str) -> Generator[TestDataset]: dataset_name = f'API-test-permissions-{crypto_random_object_id()}' dataset = client.datasets().get_or_create(name=dataset_name) - dataset_client = client.dataset(dataset_id=dataset['id']) + dataset_client = client.dataset(dataset_id=dataset.id) expected_content = [{'item1': 1, 'item2': 2, 'item3': 3}, {'item1': 4, 'item2': 5, 'item3': 6}] # Push data to dataset dataset_client.push_items(json.dumps(expected_content)) + assert dataset.url_signing_secret_key is not None + # Generate signature for the test signature = create_storage_content_signature( - resource_id=dataset['id'], url_signing_secret_key=dataset['urlSigningSecretKey'] + resource_id=dataset.id, + url_signing_secret_key=dataset.url_signing_secret_key, ) yield TestDataset( - id=dataset['id'], + id=dataset.id, signature=signature, expected_content=[{'item1': 1, 'item2': 2, 'item3': 3}, {'item1': 4, 'item2': 5, 'item3': 6}], ) @@ -86,7 +89,7 @@ def test_kvs_of_another_user(api_token_2: str) -> Generator[TestKvs]: kvs_name = 
f'API-test-permissions-{crypto_random_object_id()}' kvs = client.key_value_stores().get_or_create(name=kvs_name) - kvs_client = client.key_value_store(key_value_store_id=kvs['id']) + kvs_client = client.key_value_store(key_value_store_id=kvs.id) expected_content = {'key1': 1, 'key2': 2, 'key3': 3} # Push data to kvs @@ -95,14 +98,14 @@ def test_kvs_of_another_user(api_token_2: str) -> Generator[TestKvs]: # Generate signature for the test signature = create_storage_content_signature( - resource_id=kvs['id'], url_signing_secret_key=kvs['urlSigningSecretKey'] + resource_id=kvs.id, url_signing_secret_key=kvs.url_signing_secret_key or '' ) yield TestKvs( - id=kvs['id'], + id=kvs.id, signature=signature, expected_content=expected_content, - keys_signature={key: create_hmac_signature(kvs['urlSigningSecretKey'], key) for key in expected_content}, + keys_signature={key: create_hmac_signature(kvs.url_signing_secret_key or '', key) for key in expected_content}, ) kvs_client.delete() diff --git a/tests/integration/test_apify_client.py b/tests/integration/test_apify_client.py new file mode 100644 index 00000000..30a67fa6 --- /dev/null +++ b/tests/integration/test_apify_client.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from apify_client._models import UserPrivateInfo + +if TYPE_CHECKING: + from apify_client import ApifyClient, ApifyClientAsync + + +def test_apify_client_sync(apify_client: ApifyClient) -> None: + user_client = apify_client.user('me') + me = user_client.get() + assert isinstance(me, UserPrivateInfo) + assert me.id is not None + assert me.username is not None + + +async def test_apify_client_async(apify_client_async: ApifyClientAsync) -> None: + user_client = apify_client_async.user('me') + me = await user_client.get() + assert isinstance(me, UserPrivateInfo) + assert me.id is not None + assert me.username is not None diff --git a/tests/integration/test_basic.py b/tests/integration/test_basic.py deleted file mode 
100644 index b8eec5f4..00000000 --- a/tests/integration/test_basic.py +++ /dev/null @@ -1,22 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from apify_client import ApifyClient, ApifyClientAsync - - -class TestBasicSync: - def test_basic(self, apify_client: ApifyClient) -> None: - me = apify_client.user('me').get() - assert me is not None - assert me.get('id') is not None - assert me.get('username') is not None - - -class TestBasicAsync: - async def test_basic(self, apify_client_async: ApifyClientAsync) -> None: - me = await apify_client_async.user('me').get() - assert me is not None - assert me.get('id') is not None - assert me.get('username') is not None diff --git a/tests/integration/test_dataset.py b/tests/integration/test_dataset.py index cb33f426..2d01cbbf 100644 --- a/tests/integration/test_dataset.py +++ b/tests/integration/test_dataset.py @@ -10,7 +10,7 @@ from integration.integration_test_utils import TestDataset, parametrized_api_urls, random_resource_name from apify_client import ApifyClient, ApifyClientAsync -from apify_client.client import DEFAULT_API_URL +from apify_client._client import DEFAULT_API_URL from apify_client.errors import ApifyApiError MOCKED_API_DATASET_RESPONSE = """{ @@ -40,217 +40,219 @@ }""" -class TestDatasetSync: - def test_dataset_should_create_public_items_expiring_url_with_params(self, apify_client: ApifyClient) -> None: - created_dataset = apify_client.datasets().get_or_create(name=random_resource_name('dataset')) +def test_dataset_should_create_public_items_expiring_url_with_params_sync(apify_client: ApifyClient) -> None: + created_dataset = apify_client.datasets().get_or_create(name=random_resource_name('dataset')) - dataset = apify_client.dataset(created_dataset['id']) - items_public_url = dataset.create_items_public_url( - expires_in_secs=2000, - limit=10, - offset=0, - ) + dataset = apify_client.dataset(created_dataset.id) + items_public_url = 
dataset.create_items_public_url( + expires_in_secs=2000, + limit=10, + offset=0, + ) - assert 'signature=' in items_public_url - assert 'limit=10' in items_public_url - assert 'offset=0' in items_public_url - - impit_client = impit.Client() - response = impit_client.get(items_public_url, timeout=5) - assert response.status_code == 200 - - dataset.delete() - assert apify_client.dataset(created_dataset['id']).get() is None - - def test_dataset_should_create_public_items_non_expiring_url(self, apify_client: ApifyClient) -> None: - created_dataset = apify_client.datasets().get_or_create(name=random_resource_name('dataset')) - - dataset = apify_client.dataset(created_dataset['id']) - items_public_url = dataset.create_items_public_url() - - assert 'signature=' in items_public_url - - impit_client = impit.Client() - response = impit_client.get(items_public_url, timeout=5) - assert response.status_code == 200 - - dataset.delete() - assert apify_client.dataset(created_dataset['id']).get() is None - - @parametrized_api_urls - def test_public_url(self, api_token: str, api_url: str, api_public_url: str) -> None: - apify_client = ApifyClient(token=api_token, api_url=api_url, api_public_url=api_public_url) - dataset = apify_client.dataset('someID') - - # Mock the API call to return predefined response - mock_response = Mock() - mock_response.json.return_value = json.loads(MOCKED_API_DATASET_RESPONSE) - with mock.patch.object(apify_client.http_client, 'call', return_value=mock_response): - public_url = dataset.create_items_public_url() - assert public_url == ( - f'{(api_public_url or DEFAULT_API_URL).strip("/")}/v2/datasets/' - f'someID/items?signature={public_url.split("signature=")[1]}' - ) - - def test_list_items_signature(self, apify_client: ApifyClient, test_dataset_of_another_user: TestDataset) -> None: - dataset = apify_client.dataset(dataset_id=test_dataset_of_another_user.id) - - # Permission error without valid signature - with pytest.raises( - ApifyApiError, - 
match=r"Insufficient permissions for the dataset. Make sure you're passing a " - r'correct API token and that it has the required permissions.', - ): - dataset.list_items() - - # Dataset content retrieved with correct signature - assert ( - test_dataset_of_another_user.expected_content - == dataset.list_items(signature=test_dataset_of_another_user.signature).items - ) + assert 'signature=' in items_public_url + assert 'limit=10' in items_public_url + assert 'offset=0' in items_public_url - def test_iterate_items_signature( - self, apify_client: ApifyClient, test_dataset_of_another_user: TestDataset - ) -> None: - dataset = apify_client.dataset(dataset_id=test_dataset_of_another_user.id) - - # Permission error without valid signature - with pytest.raises( - ApifyApiError, - match=r"Insufficient permissions for the dataset. Make sure you're passing a " - r'correct API token and that it has the required permissions.', - ): - list(dataset.iterate_items()) - - # Dataset content retrieved with correct signature - assert test_dataset_of_another_user.expected_content == list( - dataset.iterate_items(signature=test_dataset_of_another_user.signature) - ) + impit_client = impit.Client() + response = impit_client.get(items_public_url, timeout=5) + assert response.status_code == 200 + + dataset.delete() + assert apify_client.dataset(created_dataset.id).get() is None + + +def test_dataset_should_create_public_items_non_expiring_url_sync(apify_client: ApifyClient) -> None: + created_dataset = apify_client.datasets().get_or_create(name=random_resource_name('dataset')) + + dataset = apify_client.dataset(created_dataset.id) + items_public_url = dataset.create_items_public_url() + + assert 'signature=' in items_public_url + + impit_client = impit.Client() + response = impit_client.get(items_public_url, timeout=5) + assert response.status_code == 200 + + dataset.delete() + assert apify_client.dataset(created_dataset.id).get() is None - def test_get_items_as_bytes_signature( - self, 
apify_client: ApifyClient, test_dataset_of_another_user: TestDataset - ) -> None: - dataset = apify_client.dataset(dataset_id=test_dataset_of_another_user.id) - - # Permission error without valid signature - with pytest.raises( - ApifyApiError, - match=r"Insufficient permissions for the dataset. Make sure you're passing a " - r'correct API token and that it has the required permissions.', - ): - dataset.get_items_as_bytes() - - # Dataset content retrieved with correct signature - raw_data = dataset.get_items_as_bytes(signature=test_dataset_of_another_user.signature) - assert test_dataset_of_another_user.expected_content == json.loads(raw_data.decode('utf-8')) - - -class TestDatasetAsync: - async def test_dataset_should_create_public_items_expiring_url_with_params( - self, apify_client_async: ApifyClientAsync - ) -> None: - created_dataset = await apify_client_async.datasets().get_or_create(name=random_resource_name('dataset')) - - dataset = apify_client_async.dataset(created_dataset['id']) - items_public_url = await dataset.create_items_public_url( - expires_in_secs=2000, - limit=10, - offset=0, + +@parametrized_api_urls +def test_public_url_sync(api_token: str, api_url: str, api_public_url: str) -> None: + apify_client = ApifyClient(token=api_token, api_url=api_url, api_public_url=api_public_url) + dataset = apify_client.dataset('someID') + + # Mock the API call to return predefined response + mock_response = Mock() + mock_response.json.return_value = json.loads(MOCKED_API_DATASET_RESPONSE) + with mock.patch.object(apify_client.http_client, 'call', return_value=mock_response): + public_url = dataset.create_items_public_url() + assert public_url == ( + f'{(api_public_url or DEFAULT_API_URL).strip("/")}/v2/datasets/' + f'someID/items?signature={public_url.split("signature=")[1]}' ) - assert 'signature=' in items_public_url - assert 'limit=10' in items_public_url - assert 'offset=0' in items_public_url - - impit_async_client = impit.AsyncClient() - response = await 
impit_async_client.get(items_public_url, timeout=5) - assert response.status_code == 200 - - await dataset.delete() - assert await apify_client_async.dataset(created_dataset['id']).get() is None - - async def test_dataset_should_create_public_items_non_expiring_url( - self, apify_client_async: ApifyClientAsync - ) -> None: - created_dataset = await apify_client_async.datasets().get_or_create(name=random_resource_name('dataset')) - - dataset = apify_client_async.dataset(created_dataset['id']) - items_public_url = await dataset.create_items_public_url() - - assert 'signature=' in items_public_url - - impit_async_client = impit.AsyncClient() - response = await impit_async_client.get(items_public_url, timeout=5) - assert response.status_code == 200 - - await dataset.delete() - assert await apify_client_async.dataset(created_dataset['id']).get() is None - - @parametrized_api_urls - async def test_public_url(self, api_token: str, api_url: str, api_public_url: str) -> None: - apify_client = ApifyClientAsync(token=api_token, api_url=api_url, api_public_url=api_public_url) - dataset = apify_client.dataset('someID') - - # Mock the API call to return predefined response - mock_response = Mock() - mock_response.json.return_value = json.loads(MOCKED_API_DATASET_RESPONSE) - with mock.patch.object(apify_client.http_client, 'call', return_value=mock_response): - public_url = await dataset.create_items_public_url() - assert public_url == ( - f'{(api_public_url or DEFAULT_API_URL).strip("/")}/v2/datasets/' - f'someID/items?signature={public_url.split("signature=")[1]}' - ) - - async def test_list_items_signature( - self, apify_client_async: ApifyClientAsync, test_dataset_of_another_user: TestDataset - ) -> None: - dataset = apify_client_async.dataset(dataset_id=test_dataset_of_another_user.id) - - # Permission error without valid signature - with pytest.raises( - ApifyApiError, - match=r"Insufficient permissions for the dataset. 
Make sure you're passing a " - r'correct API token and that it has the required permissions.', - ): - await dataset.list_items() - - # Dataset content retrieved with correct signature - assert ( - test_dataset_of_another_user.expected_content - == (await dataset.list_items(signature=test_dataset_of_another_user.signature)).items + +def test_list_items_signature_sync(apify_client: ApifyClient, test_dataset_of_another_user: TestDataset) -> None: + dataset = apify_client.dataset(dataset_id=test_dataset_of_another_user.id) + + # Permission error without valid signature + with pytest.raises( + ApifyApiError, + match=r"Insufficient permissions for the dataset. Make sure you're passing a " + r'correct API token and that it has the required permissions.', + ): + dataset.list_items() + + # Dataset content retrieved with correct signature + assert ( + test_dataset_of_another_user.expected_content + == dataset.list_items(signature=test_dataset_of_another_user.signature).items + ) + + +def test_iterate_items_signature_sync(apify_client: ApifyClient, test_dataset_of_another_user: TestDataset) -> None: + dataset = apify_client.dataset(dataset_id=test_dataset_of_another_user.id) + + # Permission error without valid signature + with pytest.raises( + ApifyApiError, + match=r"Insufficient permissions for the dataset. Make sure you're passing a " + r'correct API token and that it has the required permissions.', + ): + list(dataset.iterate_items()) + + # Dataset content retrieved with correct signature + assert test_dataset_of_another_user.expected_content == list( + dataset.iterate_items(signature=test_dataset_of_another_user.signature) + ) + + +def test_get_items_as_bytes_signature(apify_client: ApifyClient, test_dataset_of_another_user: TestDataset) -> None: + dataset = apify_client.dataset(dataset_id=test_dataset_of_another_user.id) + + # Permission error without valid signature + with pytest.raises( + ApifyApiError, + match=r"Insufficient permissions for the dataset. 
Make sure you're passing a " + r'correct API token and that it has the required permissions.', + ): + dataset.get_items_as_bytes() + + # Dataset content retrieved with correct signature + raw_data = dataset.get_items_as_bytes(signature=test_dataset_of_another_user.signature) + assert test_dataset_of_another_user.expected_content == json.loads(raw_data.decode('utf-8')) + + +async def test_dataset_should_create_public_items_expiring_url_with_params_async( + apify_client_async: ApifyClientAsync, +) -> None: + created_dataset = await apify_client_async.datasets().get_or_create(name=random_resource_name('dataset')) + + dataset = apify_client_async.dataset(created_dataset.id) + items_public_url = await dataset.create_items_public_url( + expires_in_secs=2000, + limit=10, + offset=0, + ) + + assert 'signature=' in items_public_url + assert 'limit=10' in items_public_url + assert 'offset=0' in items_public_url + + impit_async_client = impit.AsyncClient() + response = await impit_async_client.get(items_public_url, timeout=5) + assert response.status_code == 200 + + await dataset.delete() + assert await apify_client_async.dataset(created_dataset.id).get() is None + + +async def test_dataset_should_create_public_items_non_expiring_url_async(apify_client_async: ApifyClientAsync) -> None: + created_dataset = await apify_client_async.datasets().get_or_create(name=random_resource_name('dataset')) + + dataset = apify_client_async.dataset(created_dataset.id) + items_public_url = await dataset.create_items_public_url() + + assert 'signature=' in items_public_url + + impit_async_client = impit.AsyncClient() + response = await impit_async_client.get(items_public_url, timeout=5) + assert response.status_code == 200 + + await dataset.delete() + assert await apify_client_async.dataset(created_dataset.id).get() is None + + +@parametrized_api_urls +async def test_public_url_async(api_token: str, api_url: str, api_public_url: str) -> None: + apify_client = ApifyClientAsync(token=api_token, 
api_url=api_url, api_public_url=api_public_url) + dataset = apify_client.dataset('someID') + + # Mock the API call to return predefined response + mock_response = Mock() + mock_response.json.return_value = json.loads(MOCKED_API_DATASET_RESPONSE) + with mock.patch.object(apify_client.http_client, 'call', return_value=mock_response): + public_url = await dataset.create_items_public_url() + assert public_url == ( + f'{(api_public_url or DEFAULT_API_URL).strip("/")}/v2/datasets/' + f'someID/items?signature={public_url.split("signature=")[1]}' ) - async def test_iterate_items_signature( - self, apify_client_async: ApifyClientAsync, test_dataset_of_another_user: TestDataset - ) -> None: - dataset = apify_client_async.dataset(dataset_id=test_dataset_of_another_user.id) - - # Permission error without valid signature - with pytest.raises( - ApifyApiError, - match=r"Insufficient permissions for the dataset. Make sure you're passing a " - r'correct API token and that it has the required permissions.', - ): - [item async for item in dataset.iterate_items()] - - # Dataset content retrieved with correct signature - assert test_dataset_of_another_user.expected_content == [ - item async for item in dataset.iterate_items(signature=test_dataset_of_another_user.signature) - ] - - async def test_get_items_as_bytes_signature( - self, apify_client_async: ApifyClientAsync, test_dataset_of_another_user: TestDataset - ) -> None: - dataset = apify_client_async.dataset(dataset_id=test_dataset_of_another_user.id) - - # Permission error without valid signature - with pytest.raises( - ApifyApiError, - match=r"Insufficient permissions for the dataset. 
Make sure you're passing a " - r'correct API token and that it has the required permissions.', - ): - await dataset.get_items_as_bytes() - - # Dataset content retrieved with correct signature - raw_data = await dataset.get_items_as_bytes(signature=test_dataset_of_another_user.signature) - assert test_dataset_of_another_user.expected_content == json.loads(raw_data.decode('utf-8')) + +async def test_list_items_signature_async( + apify_client_async: ApifyClientAsync, test_dataset_of_another_user: TestDataset +) -> None: + dataset = apify_client_async.dataset(dataset_id=test_dataset_of_another_user.id) + + # Permission error without valid signature + with pytest.raises( + ApifyApiError, + match=r"Insufficient permissions for the dataset. Make sure you're passing a " + r'correct API token and that it has the required permissions.', + ): + await dataset.list_items() + + # Dataset content retrieved with correct signature + assert ( + test_dataset_of_another_user.expected_content + == (await dataset.list_items(signature=test_dataset_of_another_user.signature)).items + ) + + +async def test_iterate_items_signature_async( + apify_client_async: ApifyClientAsync, test_dataset_of_another_user: TestDataset +) -> None: + dataset = apify_client_async.dataset(dataset_id=test_dataset_of_another_user.id) + + # Permission error without valid signature + with pytest.raises( + ApifyApiError, + match=r"Insufficient permissions for the dataset. 
Make sure you're passing a " + r'correct API token and that it has the required permissions.', + ): + [item async for item in dataset.iterate_items()] + + # Dataset content retrieved with correct signature + assert test_dataset_of_another_user.expected_content == [ + item async for item in dataset.iterate_items(signature=test_dataset_of_another_user.signature) + ] + + +async def test_get_items_as_bytes_signature_async( + apify_client_async: ApifyClientAsync, test_dataset_of_another_user: TestDataset +) -> None: + dataset = apify_client_async.dataset(dataset_id=test_dataset_of_another_user.id) + + # Permission error without valid signature + with pytest.raises( + ApifyApiError, + match=r"Insufficient permissions for the dataset. Make sure you're passing a " + r'correct API token and that it has the required permissions.', + ): + await dataset.get_items_as_bytes() + + # Dataset content retrieved with correct signature + raw_data = await dataset.get_items_as_bytes(signature=test_dataset_of_another_user.signature) + assert test_dataset_of_another_user.expected_content == json.loads(raw_data.decode('utf-8')) diff --git a/tests/integration/test_key_value_store.py b/tests/integration/test_key_value_store.py index 470d8ec8..90067aa3 100644 --- a/tests/integration/test_key_value_store.py +++ b/tests/integration/test_key_value_store.py @@ -10,7 +10,7 @@ from .integration_test_utils import TestKvs, parametrized_api_urls, random_resource_name from apify_client import ApifyClient, ApifyClientAsync -from apify_client.client import DEFAULT_API_URL +from apify_client._client import DEFAULT_API_URL from apify_client.errors import ApifyApiError MOCKED_ID = 'someID' @@ -42,319 +42,331 @@ def _get_mocked_api_kvs_response(signing_key: str | None = None) -> Mock: return mock_response -class TestKeyValueStoreSync: - def test_key_value_store_should_create_expiring_keys_public_url_with_params( - self, apify_client: ApifyClient - ) -> None: - created_store = 
apify_client.key_value_stores().get_or_create(name=random_resource_name('key-value-store')) +def test_key_value_store_should_create_expiring_keys_public_url_with_params_sync( + apify_client: ApifyClient, +) -> None: + created_store = apify_client.key_value_stores().get_or_create(name=random_resource_name('key-value-store')) - store = apify_client.key_value_store(created_store['id']) - keys_public_url = store.create_keys_public_url( - expires_in_secs=2000, - limit=10, - ) + store = apify_client.key_value_store(created_store.id) + keys_public_url = store.create_keys_public_url( + expires_in_secs=2000, + limit=10, + ) - assert 'signature=' in keys_public_url - assert 'limit=10' in keys_public_url - - impit_client = impit.Client() - response = impit_client.get(keys_public_url, timeout=5) - assert response.status_code == 200 - - store.delete() - assert apify_client.key_value_store(created_store['id']).get() is None - - def test_key_value_store_should_create_public_keys_non_expiring_url(self, apify_client: ApifyClient) -> None: - created_store = apify_client.key_value_stores().get_or_create(name=random_resource_name('key-value-store')) - - store = apify_client.key_value_store(created_store['id']) - keys_public_url = store.create_keys_public_url() - - assert 'signature=' in keys_public_url - - impit_client = impit.Client() - response = impit_client.get(keys_public_url, timeout=5) - assert response.status_code == 200 - - store.delete() - assert apify_client.key_value_store(created_store['id']).get() is None - - @pytest.mark.parametrize('signing_key', [None, 'custom-signing-key']) - @parametrized_api_urls - def test_public_url(self, api_token: str, api_url: str, api_public_url: str, signing_key: str) -> None: - apify_client = ApifyClient(token=api_token, api_url=api_url, api_public_url=api_public_url) - kvs = apify_client.key_value_store(MOCKED_ID) - - # Mock the API call to return predefined response - with mock.patch.object( - apify_client.http_client, - 'call', - 
return_value=_get_mocked_api_kvs_response(signing_key=signing_key), - ): - public_url = kvs.create_keys_public_url() - if signing_key: - signature_value = create_storage_content_signature( - resource_id=MOCKED_ID, url_signing_secret_key=signing_key - ) - expected_signature = f'?signature={signature_value}' - else: - expected_signature = '' - assert public_url == ( - f'{(api_public_url or DEFAULT_API_URL).strip("/")}/v2/key-value-stores/someID/keys{expected_signature}' - ) + assert 'signature=' in keys_public_url + assert 'limit=10' in keys_public_url - @pytest.mark.parametrize('signing_key', [None, 'custom-signing-key']) - @parametrized_api_urls - def test_record_public_url(self, api_token: str, api_url: str, api_public_url: str, signing_key: str) -> None: - apify_client = ApifyClient(token=api_token, api_url=api_url, api_public_url=api_public_url) - key = 'some_key' - kvs = apify_client.key_value_store(MOCKED_ID) - - # Mock the API call to return predefined response - with mock.patch.object( - apify_client.http_client, - 'call', - return_value=_get_mocked_api_kvs_response(signing_key=signing_key), - ): - public_url = kvs.get_record_public_url(key=key) - expected_signature = f'?signature={create_hmac_signature(signing_key, key)}' if signing_key else '' - assert public_url == ( - f'{(api_public_url or DEFAULT_API_URL).strip("/")}/v2/key-value-stores/someID/' - f'records/{key}{expected_signature}' - ) + impit_client = impit.Client() + response = impit_client.get(keys_public_url, timeout=5) + assert response.status_code == 200 - def test_list_keys_signature(self, apify_client: ApifyClient, test_kvs_of_another_user: TestKvs) -> None: - kvs = apify_client.key_value_store(key_value_store_id=test_kvs_of_another_user.id) + store.delete() + assert apify_client.key_value_store(created_store.id).get() is None - # Permission error without valid signature - with pytest.raises( - ApifyApiError, - match=r"Insufficient permissions for the key-value store. 
Make sure you're passing a correct" - r' API token and that it has the required permissions.', - ): - kvs.list_keys() - # Kvs content retrieved with correct signature - raw_items = kvs.list_keys(signature=test_kvs_of_another_user.signature)['items'] +def test_key_value_store_should_create_public_keys_non_expiring_url_sync(apify_client: ApifyClient) -> None: + created_store = apify_client.key_value_stores().get_or_create(name=random_resource_name('key-value-store')) - assert set(test_kvs_of_another_user.expected_content) == {item['key'] for item in raw_items} + store = apify_client.key_value_store(created_store.id) + keys_public_url = store.create_keys_public_url() - def test_get_record_signature(self, apify_client: ApifyClient, test_kvs_of_another_user: TestKvs) -> None: - key = 'key1' - kvs = apify_client.key_value_store(key_value_store_id=test_kvs_of_another_user.id) + assert 'signature=' in keys_public_url - # Permission error without valid signature - with pytest.raises( - ApifyApiError, - match=r"Insufficient permissions for the key-value store. Make sure you're passing a correct" - r' API token and that it has the required permissions.', - ): - kvs.get_record(key=key) + impit_client = impit.Client() + response = impit_client.get(keys_public_url, timeout=5) + assert response.status_code == 200 - # Kvs content retrieved with correct signature - record = kvs.get_record(key=key, signature=test_kvs_of_another_user.keys_signature[key]) - assert record - assert test_kvs_of_another_user.expected_content[key] == record['value'] + store.delete() + assert apify_client.key_value_store(created_store.id).get() is None - def test_get_record_as_bytes_signature(self, apify_client: ApifyClient, test_kvs_of_another_user: TestKvs) -> None: - key = 'key1' - kvs = apify_client.key_value_store(key_value_store_id=test_kvs_of_another_user.id) - # Permission error without valid signature - with pytest.raises( - ApifyApiError, - match=r"Insufficient permissions for the key-value store. 
Make sure you're passing a correct" - r' API token and that it has the required permissions.', - ): - kvs.get_record_as_bytes(key=key) - - # Kvs content retrieved with correct signature - item = kvs.get_record_as_bytes(key=key, signature=test_kvs_of_another_user.keys_signature[key]) - assert item - assert test_kvs_of_another_user.expected_content[key] == json.loads(item['value'].decode('utf-8')) - - def test_stream_record_signature(self, apify_client: ApifyClient, test_kvs_of_another_user: TestKvs) -> None: - key = 'key1' - kvs = apify_client.key_value_store(key_value_store_id=test_kvs_of_another_user.id) - - # Permission error without valid signature - with ( - pytest.raises( - ApifyApiError, - match=r"Insufficient permissions for the key-value store. Make sure you're passing a correct" - r' API token and that it has the required permissions.', - ), - kvs.stream_record(key=key), - ): - pass +@pytest.mark.parametrize('signing_key', [None, 'custom-signing-key']) +@parametrized_api_urls +def test_public_url_sync(api_token: str, api_url: str, api_public_url: str, signing_key: str) -> None: + apify_client = ApifyClient(token=api_token, api_url=api_url, api_public_url=api_public_url) + kvs = apify_client.key_value_store(MOCKED_ID) - # Kvs content retrieved with correct signature - with kvs.stream_record(key=key, signature=test_kvs_of_another_user.keys_signature[key]) as stream: - assert stream - value = json.loads(stream['value'].content.decode('utf-8')) - assert test_kvs_of_another_user.expected_content[key] == value + # Mock the API call to return predefined response + with mock.patch.object( + apify_client.http_client, + 'call', + return_value=_get_mocked_api_kvs_response(signing_key=signing_key), + ): + public_url = kvs.create_keys_public_url() + if signing_key: + signature_value = create_storage_content_signature( + resource_id=MOCKED_ID, url_signing_secret_key=signing_key + ) + expected_signature = f'?signature={signature_value}' + else: + expected_signature = '' 
+ assert public_url == ( + f'{(api_public_url or DEFAULT_API_URL).strip("/")}/v2/key-value-stores/someID/keys{expected_signature}' + ) -class TestKeyValueStoreAsync: - async def test_key_value_store_should_create_expiring_keys_public_url_with_params( - self, apify_client_async: ApifyClientAsync - ) -> None: - created_store = await apify_client_async.key_value_stores().get_or_create( - name=random_resource_name('key-value-store') +@pytest.mark.parametrize('signing_key', [None, 'custom-signing-key']) +@parametrized_api_urls +def test_record_public_url_sync(api_token: str, api_url: str, api_public_url: str, signing_key: str) -> None: + apify_client = ApifyClient(token=api_token, api_url=api_url, api_public_url=api_public_url) + key = 'some_key' + kvs = apify_client.key_value_store(MOCKED_ID) + + # Mock the API call to return predefined response + with mock.patch.object( + apify_client.http_client, + 'call', + return_value=_get_mocked_api_kvs_response(signing_key=signing_key), + ): + public_url = kvs.get_record_public_url(key=key) + expected_signature = f'?signature={create_hmac_signature(signing_key, key)}' if signing_key else '' + assert public_url == ( + f'{(api_public_url or DEFAULT_API_URL).strip("/")}/v2/key-value-stores/someID/' + f'records/{key}{expected_signature}' ) - store = apify_client_async.key_value_store(created_store['id']) - keys_public_url = await store.create_keys_public_url( - expires_in_secs=2000, - limit=10, - ) - assert 'signature=' in keys_public_url - assert 'limit=10' in keys_public_url +def test_list_keys_signature_sync(apify_client: ApifyClient, test_kvs_of_another_user: TestKvs) -> None: + kvs = apify_client.key_value_store(key_value_store_id=test_kvs_of_another_user.id) - impit_async_client = impit.AsyncClient() - response = await impit_async_client.get(keys_public_url, timeout=5) - assert response.status_code == 200 + # Permission error without valid signature + with pytest.raises( + ApifyApiError, + match=r"Insufficient permissions for 
the key-value store. Make sure you're passing a correct" + r' API token and that it has the required permissions.', + ): + kvs.list_keys() - await store.delete() - assert await apify_client_async.key_value_store(created_store['id']).get() is None + # Kvs content retrieved with correct signature + raw_items = kvs.list_keys(signature=test_kvs_of_another_user.signature).items - async def test_key_value_store_should_create_public_keys_non_expiring_url( - self, apify_client_async: ApifyClientAsync - ) -> None: - created_store = await apify_client_async.key_value_stores().get_or_create( - name=random_resource_name('key-value-store') - ) + assert set(test_kvs_of_another_user.expected_content) == {item.key for item in raw_items} - store = apify_client_async.key_value_store(created_store['id']) - keys_public_url = await store.create_keys_public_url() - - assert 'signature=' in keys_public_url - - impit_async_client = impit.AsyncClient() - response = await impit_async_client.get(keys_public_url, timeout=5) - assert response.status_code == 200 - - await store.delete() - assert await apify_client_async.key_value_store(created_store['id']).get() is None - - @pytest.mark.parametrize('signing_key', [None, 'custom-signing-key']) - @parametrized_api_urls - async def test_public_url(self, api_token: str, api_url: str, api_public_url: str, signing_key: str) -> None: - apify_client = ApifyClientAsync(token=api_token, api_url=api_url, api_public_url=api_public_url) - kvs = apify_client.key_value_store(MOCKED_ID) - - # Mock the API call to return predefined response - with mock.patch.object( - apify_client.http_client, - 'call', - return_value=_get_mocked_api_kvs_response(signing_key=signing_key), - ): - public_url = await kvs.create_keys_public_url() - if signing_key: - signature_value = create_storage_content_signature( - resource_id=MOCKED_ID, url_signing_secret_key=signing_key - ) - expected_signature = f'?signature={signature_value}' - else: - expected_signature = '' - assert 
public_url == ( - f'{(api_public_url or DEFAULT_API_URL).strip("/")}/v2/key-value-stores/someID/keys{expected_signature}' - ) - @pytest.mark.parametrize('signing_key', [None, 'custom-signing-key']) - @parametrized_api_urls - async def test_record_public_url(self, api_token: str, api_url: str, api_public_url: str, signing_key: str) -> None: - apify_client = ApifyClientAsync(token=api_token, api_url=api_url, api_public_url=api_public_url) - key = 'some_key' - kvs = apify_client.key_value_store(MOCKED_ID) - - # Mock the API call to return predefined response - with mock.patch.object( - apify_client.http_client, - 'call', - return_value=_get_mocked_api_kvs_response(signing_key=signing_key), - ): - public_url = await kvs.get_record_public_url(key=key) - expected_signature = f'?signature={create_hmac_signature(signing_key, key)}' if signing_key else '' - assert public_url == ( - f'{(api_public_url or DEFAULT_API_URL).strip("/")}/v2/key-value-stores/someID/' - f'records/{key}{expected_signature}' - ) +def test_get_record_signature_sync(apify_client: ApifyClient, test_kvs_of_another_user: TestKvs) -> None: + key = 'key1' + kvs = apify_client.key_value_store(key_value_store_id=test_kvs_of_another_user.id) - async def test_list_keys_signature( - self, apify_client_async: ApifyClientAsync, test_kvs_of_another_user: TestKvs - ) -> None: - kvs = apify_client_async.key_value_store(key_value_store_id=test_kvs_of_another_user.id) + # Permission error without valid signature + with pytest.raises( + ApifyApiError, + match=r"Insufficient permissions for the key-value store. Make sure you're passing a correct" + r' API token and that it has the required permissions.', + ): + kvs.get_record(key=key) - # Permission error without valid signature - with pytest.raises( - ApifyApiError, - match=r"Insufficient permissions for the key-value store. 
Make sure you're passing a correct" - r' API token and that it has the required permissions.', - ): - await kvs.list_keys() + # Kvs content retrieved with correct signature + record = kvs.get_record(key=key, signature=test_kvs_of_another_user.keys_signature[key]) + assert record + assert test_kvs_of_another_user.expected_content[key] == record['value'] - # Kvs content retrieved with correct signature - raw_items = (await kvs.list_keys(signature=test_kvs_of_another_user.signature))['items'] - assert set(test_kvs_of_another_user.expected_content) == {item['key'] for item in raw_items} +def test_get_record_as_bytes_signature_sync(apify_client: ApifyClient, test_kvs_of_another_user: TestKvs) -> None: + key = 'key1' + kvs = apify_client.key_value_store(key_value_store_id=test_kvs_of_another_user.id) - async def test_get_record_signature( - self, apify_client_async: ApifyClientAsync, test_kvs_of_another_user: TestKvs - ) -> None: - key = 'key1' - kvs = apify_client_async.key_value_store(key_value_store_id=test_kvs_of_another_user.id) + # Permission error without valid signature + with pytest.raises( + ApifyApiError, + match=r"Insufficient permissions for the key-value store. Make sure you're passing a correct" + r' API token and that it has the required permissions.', + ): + kvs.get_record_as_bytes(key=key) - # Permission error without valid signature - with pytest.raises( - ApifyApiError, - match=r"Insufficient permissions for the key-value store. 
Make sure you're passing a correct" - r' API token and that it has the required permissions.', - ): - await kvs.get_record(key=key) - - # Kvs content retrieved with correct signature - record = await kvs.get_record(key=key, signature=test_kvs_of_another_user.keys_signature[key]) - assert record - assert test_kvs_of_another_user.expected_content[key] == record['value'] - - async def test_get_record_as_bytes_signature( - self, apify_client_async: ApifyClientAsync, test_kvs_of_another_user: TestKvs - ) -> None: - key = 'key1' - kvs = apify_client_async.key_value_store(key_value_store_id=test_kvs_of_another_user.id) - - # Permission error without valid signature - with pytest.raises( - ApifyApiError, - match=r"Insufficient permissions for the key-value store. Make sure you're passing a correct" - r' API token and that it has the required permissions.', - ): - await kvs.get_record_as_bytes(key=key) - - # Kvs content retrieved with correct signature - item = await kvs.get_record_as_bytes(key=key, signature=test_kvs_of_another_user.keys_signature[key]) - assert item - assert test_kvs_of_another_user.expected_content[key] == json.loads(item['value'].decode('utf-8')) - - async def test_stream_record_signature( - self, apify_client_async: ApifyClientAsync, test_kvs_of_another_user: TestKvs - ) -> None: - key = 'key1' - kvs = apify_client_async.key_value_store(key_value_store_id=test_kvs_of_another_user.id) - - # Permission error without valid signature - with pytest.raises( + # Kvs content retrieved with correct signature + item = kvs.get_record_as_bytes(key=key, signature=test_kvs_of_another_user.keys_signature[key]) + assert item + assert test_kvs_of_another_user.expected_content[key] == json.loads(item['value'].decode('utf-8')) + + +def test_stream_record_signature_sync(apify_client: ApifyClient, test_kvs_of_another_user: TestKvs) -> None: + key = 'key1' + kvs = apify_client.key_value_store(key_value_store_id=test_kvs_of_another_user.id) + + # Permission error without 
valid signature + with ( + pytest.raises( ApifyApiError, match=r"Insufficient permissions for the key-value store. Make sure you're passing a correct" r' API token and that it has the required permissions.', - ): - async with kvs.stream_record(key=key): - pass - - # Kvs content retrieved with correct signature - async with kvs.stream_record(key=key, signature=test_kvs_of_another_user.keys_signature[key]) as stream: - assert stream - value = json.loads(stream['value'].content.decode('utf-8')) - assert test_kvs_of_another_user.expected_content[key] == value + ), + kvs.stream_record(key=key), + ): + pass + + # Kvs content retrieved with correct signature + with kvs.stream_record(key=key, signature=test_kvs_of_another_user.keys_signature[key]) as stream: + assert stream + value = json.loads(stream['value'].content.decode('utf-8')) + assert test_kvs_of_another_user.expected_content[key] == value + + +async def test_key_value_store_should_create_expiring_keys_public_url_with_params_async( + apify_client_async: ApifyClientAsync, +) -> None: + created_store = await apify_client_async.key_value_stores().get_or_create( + name=random_resource_name('key-value-store') + ) + + store = apify_client_async.key_value_store(created_store.id) + keys_public_url = await store.create_keys_public_url( + expires_in_secs=2000, + limit=10, + ) + + assert 'signature=' in keys_public_url + assert 'limit=10' in keys_public_url + + impit_async_client = impit.AsyncClient() + response = await impit_async_client.get(keys_public_url, timeout=5) + assert response.status_code == 200 + + await store.delete() + assert await apify_client_async.key_value_store(created_store.id).get() is None + + +async def test_key_value_store_should_create_public_keys_non_expiring_url_async( + apify_client_async: ApifyClientAsync, +) -> None: + created_store = await apify_client_async.key_value_stores().get_or_create( + name=random_resource_name('key-value-store') + ) + + store = 
apify_client_async.key_value_store(created_store.id) + keys_public_url = await store.create_keys_public_url() + + assert 'signature=' in keys_public_url + + impit_async_client = impit.AsyncClient() + response = await impit_async_client.get(keys_public_url, timeout=5) + assert response.status_code == 200 + + await store.delete() + assert await apify_client_async.key_value_store(created_store.id).get() is None + + +@pytest.mark.parametrize('signing_key', [None, 'custom-signing-key']) +@parametrized_api_urls +async def test_public_url_async(api_token: str, api_url: str, api_public_url: str, signing_key: str) -> None: + apify_client = ApifyClientAsync(token=api_token, api_url=api_url, api_public_url=api_public_url) + kvs = apify_client.key_value_store(MOCKED_ID) + + # Mock the API call to return predefined response + with mock.patch.object( + apify_client.http_client, + 'call', + return_value=_get_mocked_api_kvs_response(signing_key=signing_key), + ): + public_url = await kvs.create_keys_public_url() + if signing_key: + signature_value = create_storage_content_signature( + resource_id=MOCKED_ID, url_signing_secret_key=signing_key + ) + expected_signature = f'?signature={signature_value}' + else: + expected_signature = '' + assert public_url == ( + f'{(api_public_url or DEFAULT_API_URL).strip("/")}/v2/key-value-stores/someID/keys{expected_signature}' + ) + + +@pytest.mark.parametrize('signing_key', [None, 'custom-signing-key']) +@parametrized_api_urls +async def test_record_public_url_async(api_token: str, api_url: str, api_public_url: str, signing_key: str) -> None: + apify_client = ApifyClientAsync(token=api_token, api_url=api_url, api_public_url=api_public_url) + key = 'some_key' + kvs = apify_client.key_value_store(MOCKED_ID) + + # Mock the API call to return predefined response + with mock.patch.object( + apify_client.http_client, + 'call', + return_value=_get_mocked_api_kvs_response(signing_key=signing_key), + ): + public_url = await 
kvs.get_record_public_url(key=key) + expected_signature = f'?signature={create_hmac_signature(signing_key, key)}' if signing_key else '' + assert public_url == ( + f'{(api_public_url or DEFAULT_API_URL).strip("/")}/v2/key-value-stores/someID/' + f'records/{key}{expected_signature}' + ) + + +async def test_list_keys_signature_async( + apify_client_async: ApifyClientAsync, test_kvs_of_another_user: TestKvs +) -> None: + kvs = apify_client_async.key_value_store(key_value_store_id=test_kvs_of_another_user.id) + + # Permission error without valid signature + with pytest.raises( + ApifyApiError, + match=r"Insufficient permissions for the key-value store. Make sure you're passing a correct" + r' API token and that it has the required permissions.', + ): + await kvs.list_keys() + + # Kvs content retrieved with correct signature + raw_items = (await kvs.list_keys(signature=test_kvs_of_another_user.signature)).items + + assert set(test_kvs_of_another_user.expected_content) == {item.key for item in raw_items} + + +async def test_get_record_signature_async( + apify_client_async: ApifyClientAsync, test_kvs_of_another_user: TestKvs +) -> None: + key = 'key1' + kvs = apify_client_async.key_value_store(key_value_store_id=test_kvs_of_another_user.id) + + # Permission error without valid signature + with pytest.raises( + ApifyApiError, + match=r"Insufficient permissions for the key-value store. 
Make sure you're passing a correct" + r' API token and that it has the required permissions.', + ): + await kvs.get_record(key=key) + + # Kvs content retrieved with correct signature + record = await kvs.get_record(key=key, signature=test_kvs_of_another_user.keys_signature[key]) + assert record + assert test_kvs_of_another_user.expected_content[key] == record['value'] + + +async def test_get_record_as_bytes_signature_async( + apify_client_async: ApifyClientAsync, test_kvs_of_another_user: TestKvs +) -> None: + key = 'key1' + kvs = apify_client_async.key_value_store(key_value_store_id=test_kvs_of_another_user.id) + + # Permission error without valid signature + with pytest.raises( + ApifyApiError, + match=r"Insufficient permissions for the key-value store. Make sure you're passing a correct" + r' API token and that it has the required permissions.', + ): + await kvs.get_record_as_bytes(key=key) + + # Kvs content retrieved with correct signature + item = await kvs.get_record_as_bytes(key=key, signature=test_kvs_of_another_user.keys_signature[key]) + assert item + assert test_kvs_of_another_user.expected_content[key] == json.loads(item['value'].decode('utf-8')) + + +async def test_stream_record_signature_async( + apify_client_async: ApifyClientAsync, test_kvs_of_another_user: TestKvs +) -> None: + key = 'key1' + kvs = apify_client_async.key_value_store(key_value_store_id=test_kvs_of_another_user.id) + + # Permission error without valid signature + with pytest.raises( + ApifyApiError, + match=r"Insufficient permissions for the key-value store. 
Make sure you're passing a correct" + r' API token and that it has the required permissions.', + ): + async with kvs.stream_record(key=key): + pass + + # Kvs content retrieved with correct signature + async with kvs.stream_record(key=key, signature=test_kvs_of_another_user.keys_signature[key]) as stream: + assert stream + value = json.loads(stream['value'].content.decode('utf-8')) + assert test_kvs_of_another_user.expected_content[key] == value diff --git a/tests/integration/test_request_queue.py b/tests/integration/test_request_queue.py index 64759e47..79cc9aee 100644 --- a/tests/integration/test_request_queue.py +++ b/tests/integration/test_request_queue.py @@ -8,108 +8,137 @@ from apify_client import ApifyClient, ApifyClientAsync -class TestRequestQueueSync: - def test_request_queue_lock(self, apify_client: ApifyClient) -> None: - created_queue = apify_client.request_queues().get_or_create(name=random_resource_name('queue')) - queue = apify_client.request_queue(created_queue['id'], client_key=random_string(10)) - - # Add requests and check if correct number of requests was locked - for i in range(15): - queue.add_request({'url': f'http://test-lock.com/{i}', 'uniqueKey': f'http://test-lock.com/{i}'}) - locked_requests_list = queue.list_and_lock_head(limit=10, lock_secs=10) - locked_requests = locked_requests_list['items'] - for locked_request in locked_requests: - assert locked_request['lockExpiresAt'] is not None - - # Check if the delete request works - queue.delete_request_lock(locked_requests[1]['id']) - delete_lock_request = queue.get_request(locked_requests[1]['id']) - assert delete_lock_request is not None - assert delete_lock_request.get('lockExpiresAt') is None - queue.delete_request_lock(locked_requests[2]['id'], forefront=True) - delete_lock_request2 = queue.get_request(locked_requests[2]['id']) - assert delete_lock_request2 is not None - assert delete_lock_request2.get('lockExpiresAt') is None - - # Check if the prolong request works - assert 
queue.prolong_request_lock(locked_requests[3]['id'], lock_secs=15)['lockExpiresAt'] is not None - - queue.delete() - assert apify_client.request_queue(created_queue['id']).get() is None - - def test_request_batch_operations(self, apify_client: ApifyClient) -> None: - created_queue = apify_client.request_queues().get_or_create(name=random_resource_name('queue')) - queue = apify_client.request_queue(created_queue['id']) - - # Add requests to queue and check if they were added - requests_to_add = [ - {'url': f'http://test-batch.com/{i}', 'uniqueKey': f'http://test-batch.com/{i}'} for i in range(25) - ] - added_requests = queue.batch_add_requests(requests_to_add) - assert len(added_requests.get('processedRequests', [])) > 0 - requests_in_queue = queue.list_requests() - assert len(requests_in_queue['items']) == len(added_requests['processedRequests']) - - # Delete requests from queue and check if they were deleted - requests_to_delete = requests_in_queue['items'][:20] - delete_response = queue.batch_delete_requests( - [{'uniqueKey': req.get('uniqueKey')} for req in requests_to_delete] - ) - requests_in_queue2 = queue.list_requests() - assert len(requests_in_queue2['items']) == 25 - len(delete_response['processedRequests']) - - queue.delete() - - -class TestRequestQueueAsync: - async def test_request_queue_lock(self, apify_client_async: ApifyClientAsync) -> None: - created_queue = await apify_client_async.request_queues().get_or_create(name=random_resource_name('queue')) - queue = apify_client_async.request_queue(created_queue['id'], client_key=random_string(10)) - - # Add requests and check if correct number of requests was locked - for i in range(15): - await queue.add_request({'url': f'http://test-lock.com/{i}', 'uniqueKey': f'http://test-lock.com/{i}'}) - locked_requests_list = await queue.list_and_lock_head(limit=10, lock_secs=10) - locked_requests = locked_requests_list['items'] - for locked_request in locked_requests: - assert locked_request['lockExpiresAt'] is 
not None - - # Check if the delete request works - await queue.delete_request_lock(locked_requests[1]['id']) - delete_lock_request = await queue.get_request(locked_requests[1]['id']) - assert delete_lock_request is not None - assert delete_lock_request.get('lockExpiresAt') is None - await queue.delete_request_lock(locked_requests[2]['id'], forefront=True) - delete_lock_request2 = await queue.get_request(locked_requests[2]['id']) - assert delete_lock_request2 is not None - assert delete_lock_request2.get('lockExpiresAt') is None - - # Check if the prolong request works - prolonged_request = await queue.prolong_request_lock(locked_requests[3]['id'], lock_secs=15) - assert prolonged_request['lockExpiresAt'] is not None - - await queue.delete() - assert await apify_client_async.request_queue(created_queue['id']).get() is None - - async def test_request_batch_operations(self, apify_client_async: ApifyClientAsync) -> None: - created_queue = await apify_client_async.request_queues().get_or_create(name=random_resource_name('queue')) - queue = apify_client_async.request_queue(created_queue['id']) - - # Add requests to queue and check if they were added - requests_to_add = [ - {'url': f'http://test-batch.com/{i}', 'uniqueKey': f'http://test-batch.com/{i}'} for i in range(25) - ] - added_requests = await queue.batch_add_requests(requests_to_add) - assert len(added_requests.get('processedRequests', [])) > 0 - requests_in_queue = await queue.list_requests() - assert len(requests_in_queue['items']) == len(added_requests['processedRequests']) - - # Delete requests from queue and check if they were deleted - requests_to_delete = requests_in_queue['items'][:20] - delete_response = await queue.batch_delete_requests( - [{'uniqueKey': req.get('uniqueKey')} for req in requests_to_delete] - ) - requests_in_queue2 = await queue.list_requests() - assert len(requests_in_queue2['items']) == 25 - len(delete_response['processedRequests']) - - await queue.delete() +def 
test_request_queue_lock_sync(apify_client: ApifyClient) -> None: + created_rq = apify_client.request_queues().get_or_create(name=random_resource_name('queue')) + rq = apify_client.request_queue(created_rq.id, client_key=random_string(10)) + + # Add requests and check if correct number of requests was locked + for i in range(15): + rq.add_request({'url': f'http://test-lock.com/{i}', 'uniqueKey': f'http://test-lock.com/{i}'}) + + get_head_and_lock_response = rq.list_and_lock_head(limit=10, lock_secs=10) + + for locked_request in get_head_and_lock_response.data.items: + assert locked_request.lock_expires_at is not None + + # Check if the delete request works + rq.delete_request_lock(get_head_and_lock_response.data.items[1].id) + + """This is probably not working: + delete_lock_request = rq.get_request(get_head_and_lock_response.data.items[1].id) + assert delete_lock_request is not None + assert delete_lock_request.lock_expires_at is None + """ + + rq.delete_request_lock(get_head_and_lock_response.data.items[2].id, forefront=True) + + """This is probably not working: + delete_lock_request2 = rq.get_request(get_head_and_lock_response.data.items[2].id) + assert delete_lock_request2 is not None + assert delete_lock_request2.lock_expires_at is None + """ + + # Check if the prolong request works + prolong_request_lock_response = rq.prolong_request_lock( + get_head_and_lock_response.data.items[3].id, + lock_secs=15, + ) + assert prolong_request_lock_response.data is not None + assert prolong_request_lock_response.data.lock_expires_at is not None + + rq.delete() + assert apify_client.request_queue(created_rq.id).get() is None + + +def test_request_batch_operations_sync(apify_client: ApifyClient) -> None: + created_rq = apify_client.request_queues().get_or_create(name=random_resource_name('queue')) + rq = apify_client.request_queue(created_rq.id) + + # Add requests to queue and check if they were added + requests_to_add = [ + {'url': f'http://test-batch.com/{i}', 'uniqueKey': 
f'http://test-batch.com/{i}'} for i in range(25) + ] + + batch_response = rq.batch_add_requests(requests_to_add) + assert len(batch_response.data.processed_requests or []) > 0 + + list_requests_response = rq.list_requests() + assert len(list_requests_response.data.items) == len(batch_response.data.processed_requests or []) + + # Delete requests from queue and check if they were deleted + requests_to_delete = list_requests_response.data.items[:20] + delete_response = rq.batch_delete_requests([{'uniqueKey': req.unique_key} for req in requests_to_delete]) + requests_in_queue2 = rq.list_requests() + assert len(requests_in_queue2.data.items) == 25 - len(delete_response.data.processed_requests or []) + + rq.delete() + + +async def test_request_queue_lock_async(apify_client_async: ApifyClientAsync) -> None: + created_rq = await apify_client_async.request_queues().get_or_create(name=random_resource_name('queue')) + rq = apify_client_async.request_queue(created_rq.id, client_key=random_string(10)) + + # Add requests and check if correct number of requests was locked + for i in range(15): + await rq.add_request({'url': f'http://test-lock.com/{i}', 'uniqueKey': f'http://test-lock.com/{i}'}) + + get_head_and_lock_response = await rq.list_and_lock_head(limit=10, lock_secs=10) + + for locked_request in get_head_and_lock_response.data.items: + assert locked_request.lock_expires_at is not None + + # Check if the delete request works + await rq.delete_request_lock(get_head_and_lock_response.data.items[1].id) + + """This is probably not working: + delete_lock_request = await rq.get_request(get_head_and_lock_response.data.items[1].id) + assert delete_lock_request is not None + assert delete_lock_request.lock_expires_at is None + """ + + await rq.delete_request_lock(get_head_and_lock_response.data.items[2].id, forefront=True) + + """This is probably not working: + delete_lock_request2 = await rq.get_request(get_head_and_lock_response.data.items[2].id) + assert delete_lock_request2 is 
not None + assert delete_lock_request2.lock_expires_at is None + """ + + # Check if the prolong request works + prolong_request_lock_response = await rq.prolong_request_lock( + get_head_and_lock_response.data.items[3].id, + lock_secs=15, + ) + assert prolong_request_lock_response.data is not None + assert prolong_request_lock_response.data.lock_expires_at is not None + + await rq.delete() + assert await apify_client_async.request_queue(created_rq.id).get() is None + + +async def test_request_batch_operations_async(apify_client_async: ApifyClientAsync) -> None: + created_rq = await apify_client_async.request_queues().get_or_create(name=random_resource_name('queue')) + rq = apify_client_async.request_queue(created_rq.id) + + # Add requests to queue and check if they were added + requests_to_add = [ + { + 'url': f'http://test-batch.com/{i}', + 'uniqueKey': f'http://test-batch.com/{i}', + } + for i in range(25) + ] + + batch_response = await rq.batch_add_requests(requests_to_add) + assert len(batch_response.data.processed_requests or []) > 0 + + requests_in_queue = await rq.list_requests() + assert len(requests_in_queue.data.items) == len(batch_response.data.processed_requests or []) + + # Delete requests from queue and check if they were deleted + requests_to_delete = requests_in_queue.data.items[:20] + delete_response = await rq.batch_delete_requests([{'uniqueKey': req.unique_key} for req in requests_to_delete]) + requests_in_queue2 = await rq.list_requests() + assert len(requests_in_queue2.data.items) == 25 - len(delete_response.data.processed_requests or []) + + await rq.delete() diff --git a/tests/integration/test_run_collection.py b/tests/integration/test_run_collection.py index bfff83cf..a3763065 100644 --- a/tests/integration/test_run_collection.py +++ b/tests/integration/test_run_collection.py @@ -3,73 +3,71 @@ from datetime import datetime, timezone from typing import TYPE_CHECKING -import pytest +from apify_shared.consts import ActorJobStatus + +from 
apify_client._models import Run if TYPE_CHECKING: from apify_client import ApifyClient -from apify_shared.consts import ActorJobStatus - -pytestmark = pytest.mark.integration +APIFY_HELLO_WORLD_ACTOR = 'apify/hello-world' -class TestRunCollectionSync: - APIFY_HELLO_WORLD_ACTOR = 'apify/hello-world' - created_runs: list[dict] +def test_run_collection_list_multiple_statuses_sync(apify_client: ApifyClient) -> None: + created_runs = list[Run]() - def setup_runs(self, apify_client: ApifyClient) -> None: - self.created_runs = [] + successful_run = apify_client.actor(APIFY_HELLO_WORLD_ACTOR).call() + if successful_run is not None: + created_runs.append(successful_run) - successful_run = apify_client.actor(self.APIFY_HELLO_WORLD_ACTOR).call() - if successful_run is not None: - self.created_runs.append(successful_run) + timed_out_run = apify_client.actor(APIFY_HELLO_WORLD_ACTOR).call(timeout_secs=1) + if timed_out_run is not None: + created_runs.append(timed_out_run) - timed_out_run = apify_client.actor(self.APIFY_HELLO_WORLD_ACTOR).call(timeout_secs=1) - if timed_out_run is not None: - self.created_runs.append(timed_out_run) + run_collection = apify_client.actor(APIFY_HELLO_WORLD_ACTOR).runs() - def teadown_runs(self, apify_client: ApifyClient) -> None: - for run in self.created_runs: - run_id = run.get('id') - if isinstance(run_id, str): - apify_client.run(run_id).delete() + multiple_status_runs = run_collection.list(status=[ActorJobStatus.SUCCEEDED, ActorJobStatus.TIMED_OUT]) + single_status_runs = run_collection.list(status=ActorJobStatus.SUCCEEDED) - async def test_run_collection_list_multiple_statuses(self, apify_client: ApifyClient) -> None: - self.setup_runs(apify_client) + assert multiple_status_runs is not None + assert single_status_runs is not None - run_collection = apify_client.actor(self.APIFY_HELLO_WORLD_ACTOR).runs() + assert hasattr(multiple_status_runs, 'items') + assert hasattr(single_status_runs, 'items') - multiple_status_runs = 
run_collection.list(status=[ActorJobStatus.SUCCEEDED, ActorJobStatus.TIMED_OUT]) - single_status_runs = run_collection.list(status=ActorJobStatus.SUCCEEDED) + assert all(run.status in [ActorJobStatus.SUCCEEDED, ActorJobStatus.TIMED_OUT] for run in multiple_status_runs.items) + assert all(run.status == ActorJobStatus.SUCCEEDED for run in single_status_runs.items) - assert multiple_status_runs is not None - assert single_status_runs is not None + for run in created_runs: + run_id = run.id + if isinstance(run_id, str): + apify_client.run(run_id).delete() - assert hasattr(multiple_status_runs, 'items') - assert hasattr(single_status_runs, 'items') - assert all( - run.get('status') in [ActorJobStatus.SUCCEEDED, ActorJobStatus.TIMED_OUT] - for run in multiple_status_runs.items - ) - assert all(run.get('status') == ActorJobStatus.SUCCEEDED for run in single_status_runs.items) +def test_run_collection_list_accept_date_range_sync(apify_client: ApifyClient) -> None: + created_runs = list[Run]() - self.teadown_runs(apify_client) + successful_run = apify_client.actor(APIFY_HELLO_WORLD_ACTOR).call() + if successful_run is not None: + created_runs.append(successful_run) - # Here we test that date fields can be passed both as datetime objects and as ISO 8601 strings - async def test_run_collection_list_accept_date_range(self, apify_client: ApifyClient) -> None: - self.setup_runs(apify_client) + timed_out_run = apify_client.actor(APIFY_HELLO_WORLD_ACTOR).call(timeout_secs=1) + if timed_out_run is not None: + created_runs.append(timed_out_run) - run_collection = apify_client.runs() + run_collection = apify_client.runs() - date_obj = datetime(2100, 1, 1, 0, 0, 0, tzinfo=timezone.utc) - iso_date_str = date_obj.strftime('%Y-%m-%dT%H:%M:%SZ') + date_obj = datetime(2100, 1, 1, 0, 0, 0, tzinfo=timezone.utc) + iso_date_str = date_obj.strftime('%Y-%m-%dT%H:%M:%SZ') - # Here we test that date fields can be passed both as datetime objects and as ISO 8601 strings - runs_in_range_date_format = 
run_collection.list(started_before=date_obj, started_after=date_obj) - runs_in_range_string_format = run_collection.list(started_before=iso_date_str, started_after=iso_date_str) + # Here we test that date fields can be passed both as datetime objects and as ISO 8601 strings + runs_in_range_date_format = run_collection.list(started_before=date_obj, started_after=date_obj) + runs_in_range_string_format = run_collection.list(started_before=iso_date_str, started_after=iso_date_str) - assert hasattr(runs_in_range_date_format, 'items') - assert hasattr(runs_in_range_string_format, 'items') + assert hasattr(runs_in_range_date_format, 'items') + assert hasattr(runs_in_range_string_format, 'items') - self.teadown_runs(apify_client) + for run in created_runs: + run_id = run.id + if isinstance(run_id, str): + apify_client.run(run_id).delete() diff --git a/tests/integration/test_store.py b/tests/integration/test_store.py index fa2ce27b..a0e0fe53 100644 --- a/tests/integration/test_store.py +++ b/tests/integration/test_store.py @@ -6,15 +6,13 @@ from apify_client import ApifyClient, ApifyClientAsync -class TestStoreCollectionSync: - def test_list(self, apify_client: ApifyClient) -> None: - actors_list = apify_client.store().list() - assert actors_list is not None - assert len(actors_list.items) != 0 +def test_store_list_sync(apify_client: ApifyClient) -> None: + actors_list = apify_client.store().list() + assert actors_list is not None + assert len(actors_list.items) != 0 -class TestStoreCollectionAsync: - async def test_list(self, apify_client_async: ApifyClientAsync) -> None: - actors_list = await apify_client_async.store().list() - assert actors_list is not None - assert len(actors_list.items) != 0 +async def test_store_list_async(apify_client_async: ApifyClientAsync) -> None: + actors_list = await apify_client_async.store().list() + assert actors_list is not None + assert len(actors_list.items) != 0 diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 
747a99bd..131179bd 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -28,6 +28,6 @@ def httpserver(make_httpserver: HTTPServer) -> Iterable[HTTPServer]: @pytest.fixture def patch_basic_url(httpserver: HTTPServer, monkeypatch: pytest.MonkeyPatch) -> Iterator[None]: server_url = httpserver.url_for('/').removesuffix('/') - monkeypatch.setattr('apify_client.client.DEFAULT_API_URL', server_url) + monkeypatch.setattr('apify_client._client.DEFAULT_API_URL', server_url) yield monkeypatch.undo() diff --git a/tests/unit/test_client_request_queue.py b/tests/unit/test_client_request_queue.py index bc082c90..1b601578 100644 --- a/tests/unit/test_client_request_queue.py +++ b/tests/unit/test_client_request_queue.py @@ -5,8 +5,8 @@ import pytest -import apify_client from apify_client import ApifyClient, ApifyClientAsync +from apify_client.errors import ApifyApiError if TYPE_CHECKING: from pytest_httpserver import HTTPServer @@ -43,7 +43,7 @@ async def test_batch_not_processed_raises_exception_async(httpserver: HTTPServer ] rq_client = client.request_queue(request_queue_id='whatever') - with pytest.raises(apify_client.errors.ApifyApiError): + with pytest.raises(ApifyApiError): await rq_client.batch_add_requests(requests=requests) @@ -60,9 +60,10 @@ async def test_batch_processed_partially_async(httpserver: HTTPServer) -> None: ] rq_client = client.request_queue(request_queue_id='whatever') - response = await rq_client.batch_add_requests(requests=requests) - assert requests[0]['uniqueKey'] in {request['uniqueKey'] for request in response['processedRequests']} - assert response['unprocessedRequests'] == [requests[1]] + batch_response = await rq_client.batch_add_requests(requests=requests) + assert requests[0]['uniqueKey'] in {request.unique_key for request in batch_response.data.processed_requests} + assert len(batch_response.data.unprocessed_requests) == 1 + assert batch_response.data.unprocessed_requests[0].unique_key == requests[1]['uniqueKey'] 
@pytest.mark.usefixtures('patch_basic_url') @@ -77,7 +78,7 @@ def test_batch_not_processed_raises_exception_sync(httpserver: HTTPServer) -> No ] rq_client = client.request_queue(request_queue_id='whatever') - with pytest.raises(apify_client.errors.ApifyApiError): + with pytest.raises(ApifyApiError): rq_client.batch_add_requests(requests=requests) @@ -94,6 +95,7 @@ async def test_batch_processed_partially_sync(httpserver: HTTPServer) -> None: ] rq_client = client.request_queue(request_queue_id='whatever') - response = rq_client.batch_add_requests(requests=requests) - assert requests[0]['uniqueKey'] in {request['uniqueKey'] for request in response['processedRequests']} - assert response['unprocessedRequests'] == [requests[1]] + batch_response = rq_client.batch_add_requests(requests=requests) + assert requests[0]['uniqueKey'] in {request.unique_key for request in batch_response.data.processed_requests} + assert len(batch_response.data.unprocessed_requests) == 1 + assert batch_response.data.unprocessed_requests[0].unique_key == requests[1]['uniqueKey'] diff --git a/tests/unit/test_client_timeouts.py b/tests/unit/test_client_timeouts.py index 70d09b90..eba86992 100644 --- a/tests/unit/test_client_timeouts.py +++ b/tests/unit/test_client_timeouts.py @@ -7,11 +7,16 @@ from impit import Response, TimeoutException from apify_client import ApifyClient +from apify_client._client import DEFAULT_TIMEOUT from apify_client._http_client import HTTPClient, HTTPClientAsync -from apify_client.client import DEFAULT_TIMEOUT -from apify_client.clients import DatasetClient, KeyValueStoreClient, RequestQueueClient -from apify_client.clients.resource_clients import dataset, request_queue -from apify_client.clients.resource_clients import key_value_store as kvs +from apify_client._resource_clients import ( + DatasetClient, + KeyValueStoreClient, + RequestQueueClient, + dataset, + request_queue, +) +from apify_client._resource_clients import key_value_store as kvs if TYPE_CHECKING: from 
collections.abc import Iterator diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py index 61f7113f..709550a7 100644 --- a/tests/unit/test_logging.py +++ b/tests/unit/test_logging.py @@ -14,7 +14,7 @@ from apify_client import ApifyClient, ApifyClientAsync from apify_client._logging import RedirectLogFormatter -from apify_client.clients.resource_clients.log import StatusMessageWatcher, StreamedLog +from apify_client._resource_clients.log import StatusMessageWatcher, StreamedLog if TYPE_CHECKING: from collections.abc import Iterator @@ -192,7 +192,7 @@ async def test_redirected_logs_async( run_client = ApifyClientAsync(token='mocked_token', api_url=api_url).run(run_id=_MOCKED_RUN_ID) - with patch('apify_client.clients.resource_clients.log.datetime') as mocked_datetime: + with patch('apify_client._resource_clients.log.datetime') as mocked_datetime: # Mock `now()` so that it has timestamp bigger than the first 3 logs mocked_datetime.now.return_value = datetime.fromisoformat('2025-05-13T07:24:14.132+00:00') streamed_log = await run_client.get_streamed_log(from_start=log_from_start) @@ -232,7 +232,7 @@ def test_redirected_logs_sync( run_client = ApifyClient(token='mocked_token', api_url=api_url).run(run_id=_MOCKED_RUN_ID) - with patch('apify_client.clients.resource_clients.log.datetime') as mocked_datetime: + with patch('apify_client._resource_clients.log.datetime') as mocked_datetime: # Mock `now()` so that it has timestamp bigger than the first 3 logs mocked_datetime.now.return_value = datetime.fromisoformat('2025-05-13T07:24:14.132+00:00') streamed_log = run_client.get_streamed_log(from_start=log_from_start) diff --git a/tests/unit/test_statistics.py b/tests/unit/test_statistics.py index 0372ee07..f9087ccc 100644 --- a/tests/unit/test_statistics.py +++ b/tests/unit/test_statistics.py @@ -1,6 +1,6 @@ import pytest -from apify_client._statistics import Statistics +from apify_client._types import Statistics @pytest.mark.parametrize( diff --git 
a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 95f359f8..f127bed2 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -7,7 +7,6 @@ from apify_client._utils import ( encode_webhook_list_to_base64, - pluck_data, retry_with_exp_backoff, retry_with_exp_backoff_async, to_safe_id, @@ -20,20 +19,6 @@ def test__to_safe_id() -> None: assert to_safe_id('abc~def') == 'abc~def' -def test_pluck_data() -> None: - # works correctly when data is present - assert pluck_data({'data': {}}) == {} - assert pluck_data({'a': 'b', 'data': {'b': 'c'}}) == {'b': 'c'} - - # throws the right error when it is not - with pytest.raises(ValueError, match=r'The "data" property is missing in the response.'): - pluck_data({'a': 'b'}) - with pytest.raises(ValueError, match=r'The "data" property is missing in the response.'): - pluck_data(None) - with pytest.raises(ValueError, match=r'The "data" property is missing in the response.'): - pluck_data('{"a": "b"}') - - def test__retry_with_exp_backoff() -> None: attempt_counter = 0 diff --git a/uv.lock b/uv.lock index 2fe1d321..221eeb15 100644 --- a/uv.lock +++ b/uv.lock @@ -2,6 +2,29 @@ version = 1 revision = 3 requires-python = ">=3.10" +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.12.0" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/16/ce/8a777047513153587e5434fd752e89334ac33e379aa3497db860eeb60377/anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0", size = 228266, upload-time = "2025-11-28T23:37:38.911Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/9c/36c5c37947ebfb8c7f22e0eb6e4d188ee2d53aa3880f3f2744fb894f0cb1/anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb", size = 113362, upload-time = "2025-11-28T23:36:57.897Z" }, +] + [[package]] name = "apify-client" version = "2.3.1" @@ -15,6 +38,7 @@ dependencies = [ [package.dev-dependencies] dev = [ + { name = "datamodel-code-generator", extra = ["http", "ruff"] }, { name = "dycw-pytest-only" }, { name = "griffe" }, { name = "mypy" }, @@ -43,6 +67,7 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ + { name = "datamodel-code-generator", extras = ["http", "ruff"], specifier = "<1.0.0" }, { name = "dycw-pytest-only", specifier = "<3.0.0" }, { name = "griffe" }, { name = "mypy", specifier = "~=1.19.0" }, @@ -79,6 +104,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3b/00/2344469e2084fb287c2e0b57b72910309874c3245463acd6cf5e3db69324/appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128", size = 9566, upload-time = "2020-05-11T07:59:49.499Z" }, ] +[[package]] +name = "argcomplete" +version = "3.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/38/61/0b9ae6399dd4a58d8c1b1dc5a27d6f2808023d0b5dd3104bb99f45a33ff6/argcomplete-3.6.3.tar.gz", hash = 
"sha256:62e8ed4fd6a45864acc8235409461b72c9a28ee785a2011cc5eb78318786c89c", size = 73754, upload-time = "2025-10-20T03:33:34.741Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/74/f5/9373290775639cb67a2fce7f629a1c240dce9f12fe927bc32b2736e16dfc/argcomplete-3.6.3-py3-none-any.whl", hash = "sha256:f5007b3a600ccac5d25bbce33089211dfd49eab4a7718da3f10e3082525a92ce", size = 43846, upload-time = "2025-10-20T03:33:33.021Z" }, +] + [[package]] name = "backports-asyncio-runner" version = "1.2.0" @@ -102,7 +136,7 @@ wheels = [ [[package]] name = "black" -version = "23.12.1" +version = "25.12.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -110,24 +144,38 @@ dependencies = [ { name = "packaging" }, { name = "pathspec" }, { name = "platformdirs" }, + { name = "pytokens" }, { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fd/f4/a57cde4b60da0e249073009f4a9087e9e0a955deae78d3c2a493208d0c5c/black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5", size = 620809, upload-time = "2023-12-22T23:06:17.382Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/58/677da52d845b59505a8a787ff22eff9cfd9046b5789aa2bd387b236db5c5/black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2", size = 1560531, upload-time = "2023-12-22T23:18:20.555Z" }, - { url = "https://files.pythonhosted.org/packages/11/92/522a4f1e4b2b8da62e4ec0cb8acf2d257e6d39b31f4214f0fd94d2eeb5bd/black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba", size = 1404644, upload-time = "2023-12-22T23:17:46.425Z" }, - { url = 
"https://files.pythonhosted.org/packages/a4/dc/af67d8281e9a24f73d24b060f3f03f6d9ad6be259b3c6acef2845e17d09c/black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0", size = 1711153, upload-time = "2023-12-22T23:08:34.4Z" }, - { url = "https://files.pythonhosted.org/packages/7e/0f/94d7c36b421ea187359c413be7b9fc66dc105620c3a30b1c94310265830a/black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3", size = 1332918, upload-time = "2023-12-22T23:10:28.188Z" }, - { url = "https://files.pythonhosted.org/packages/ed/2c/d9b1a77101e6e5f294f6553d76c39322122bfea2a438aeea4eb6d4b22749/black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba", size = 1541926, upload-time = "2023-12-22T23:23:17.72Z" }, - { url = "https://files.pythonhosted.org/packages/72/e2/d981a3ff05ba9abe3cfa33e70c986facb0614fd57c4f802ef435f4dd1697/black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b", size = 1388465, upload-time = "2023-12-22T23:19:00.611Z" }, - { url = "https://files.pythonhosted.org/packages/eb/59/1f5c8eb7bba8a8b1bb5c87f097d16410c93a48a6655be3773db5d2783deb/black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59", size = 1691993, upload-time = "2023-12-22T23:08:32.018Z" }, - { url = "https://files.pythonhosted.org/packages/37/bf/a80abc6fcdb00f0d4d3d74184b172adbf2197f6b002913fa0fb6af4dc6db/black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50", size = 1340929, upload-time = "2023-12-22T23:09:37.088Z" }, - { url = 
"https://files.pythonhosted.org/packages/66/16/8726cedc83be841dfa854bbeef1288ee82272282a71048d7935292182b0b/black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e", size = 1569989, upload-time = "2023-12-22T23:20:22.158Z" }, - { url = "https://files.pythonhosted.org/packages/d2/1e/30f5eafcc41b8378890ba39b693fa111f7dca8a2620ba5162075d95ffe46/black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec", size = 1398647, upload-time = "2023-12-22T23:19:57.225Z" }, - { url = "https://files.pythonhosted.org/packages/99/de/ddb45cc044256431d96d846ce03164d149d81ca606b5172224d1872e0b58/black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e", size = 1720450, upload-time = "2023-12-22T23:08:52.675Z" }, - { url = "https://files.pythonhosted.org/packages/98/2b/54e5dbe9be5a10cbea2259517206ff7b6a452bb34e07508c7e1395950833/black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9", size = 1351070, upload-time = "2023-12-22T23:09:32.762Z" }, - { url = "https://files.pythonhosted.org/packages/7b/14/4da7b12a9abc43a601c215cb5a3d176734578da109f0dbf0a832ed78be09/black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e", size = 194363, upload-time = "2023-12-22T23:06:14.278Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/c4/d9/07b458a3f1c525ac392b5edc6b191ff140b596f9d77092429417a54e249d/black-25.12.0.tar.gz", hash = "sha256:8d3dd9cea14bff7ddc0eb243c811cdb1a011ebb4800a5f0335a01a68654796a7", size = 659264, upload-time = "2025-12-08T01:40:52.501Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/37/d5/8d3145999d380e5d09bb00b0f7024bf0a8ccb5c07b5648e9295f02ec1d98/black-25.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f85ba1ad15d446756b4ab5f3044731bf68b777f8f9ac9cdabd2425b97cd9c4e8", size = 1895720, upload-time = "2025-12-08T01:46:58.197Z" }, + { url = "https://files.pythonhosted.org/packages/06/97/7acc85c4add41098f4f076b21e3e4e383ad6ed0a3da26b2c89627241fc11/black-25.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:546eecfe9a3a6b46f9d69d8a642585a6eaf348bcbbc4d87a19635570e02d9f4a", size = 1727193, upload-time = "2025-12-08T01:52:26.674Z" }, + { url = "https://files.pythonhosted.org/packages/24/f0/fdf0eb8ba907ddeb62255227d29d349e8256ef03558fbcadfbc26ecfe3b2/black-25.12.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:17dcc893da8d73d8f74a596f64b7c98ef5239c2cd2b053c0f25912c4494bf9ea", size = 1774506, upload-time = "2025-12-08T01:46:25.721Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f5/9203a78efe00d13336786b133c6180a9303d46908a9aa72d1104ca214222/black-25.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:09524b0e6af8ba7a3ffabdfc7a9922fb9adef60fed008c7cd2fc01f3048e6e6f", size = 1416085, upload-time = "2025-12-08T01:46:06.073Z" }, + { url = "https://files.pythonhosted.org/packages/ba/cc/7a6090e6b081c3316282c05c546e76affdce7bf7a3b7d2c3a2a69438bd01/black-25.12.0-cp310-cp310-win_arm64.whl", hash = "sha256:b162653ed89eb942758efeb29d5e333ca5bb90e5130216f8369857db5955a7da", size = 1226038, upload-time = "2025-12-08T01:45:29.388Z" }, + { url = "https://files.pythonhosted.org/packages/60/ad/7ac0d0e1e0612788dbc48e62aef8a8e8feffac7eb3d787db4e43b8462fa8/black-25.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0cfa263e85caea2cff57d8f917f9f51adae8e20b610e2b23de35b5b11ce691a", size = 1877003, upload-time = "2025-12-08T01:43:29.967Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/dd/a237e9f565f3617a88b49284b59cbca2a4f56ebe68676c1aad0ce36a54a7/black-25.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1a2f578ae20c19c50a382286ba78bfbeafdf788579b053d8e4980afb079ab9be", size = 1712639, upload-time = "2025-12-08T01:52:46.756Z" }, + { url = "https://files.pythonhosted.org/packages/12/80/e187079df1ea4c12a0c63282ddd8b81d5107db6d642f7d7b75a6bcd6fc21/black-25.12.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e1b65634b0e471d07ff86ec338819e2ef860689859ef4501ab7ac290431f9b", size = 1758143, upload-time = "2025-12-08T01:45:29.137Z" }, + { url = "https://files.pythonhosted.org/packages/93/b5/3096ccee4f29dc2c3aac57274326c4d2d929a77e629f695f544e159bfae4/black-25.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a3fa71e3b8dd9f7c6ac4d818345237dfb4175ed3bf37cd5a581dbc4c034f1ec5", size = 1420698, upload-time = "2025-12-08T01:45:53.379Z" }, + { url = "https://files.pythonhosted.org/packages/7e/39/f81c0ffbc25ffbe61c7d0385bf277e62ffc3e52f5ee668d7369d9854fadf/black-25.12.0-cp311-cp311-win_arm64.whl", hash = "sha256:51e267458f7e650afed8445dc7edb3187143003d52a1b710c7321aef22aa9655", size = 1229317, upload-time = "2025-12-08T01:46:35.606Z" }, + { url = "https://files.pythonhosted.org/packages/d1/bd/26083f805115db17fda9877b3c7321d08c647df39d0df4c4ca8f8450593e/black-25.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:31f96b7c98c1ddaeb07dc0f56c652e25bdedaac76d5b68a059d998b57c55594a", size = 1924178, upload-time = "2025-12-08T01:49:51.048Z" }, + { url = "https://files.pythonhosted.org/packages/89/6b/ea00d6651561e2bdd9231c4177f4f2ae19cc13a0b0574f47602a7519b6ca/black-25.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:05dd459a19e218078a1f98178c13f861fe6a9a5f88fc969ca4d9b49eb1809783", size = 1742643, upload-time = "2025-12-08T01:49:59.09Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/f3/360fa4182e36e9875fabcf3a9717db9d27a8d11870f21cff97725c54f35b/black-25.12.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1f68c5eff61f226934be6b5b80296cf6939e5d2f0c2f7d543ea08b204bfaf59", size = 1800158, upload-time = "2025-12-08T01:44:27.301Z" }, + { url = "https://files.pythonhosted.org/packages/f8/08/2c64830cb6616278067e040acca21d4f79727b23077633953081c9445d61/black-25.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:274f940c147ddab4442d316b27f9e332ca586d39c85ecf59ebdea82cc9ee8892", size = 1426197, upload-time = "2025-12-08T01:45:51.198Z" }, + { url = "https://files.pythonhosted.org/packages/d4/60/a93f55fd9b9816b7432cf6842f0e3000fdd5b7869492a04b9011a133ee37/black-25.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:169506ba91ef21e2e0591563deda7f00030cb466e747c4b09cb0a9dae5db2f43", size = 1237266, upload-time = "2025-12-08T01:45:10.556Z" }, + { url = "https://files.pythonhosted.org/packages/c8/52/c551e36bc95495d2aa1a37d50566267aa47608c81a53f91daa809e03293f/black-25.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a05ddeb656534c3e27a05a29196c962877c83fa5503db89e68857d1161ad08a5", size = 1923809, upload-time = "2025-12-08T01:46:55.126Z" }, + { url = "https://files.pythonhosted.org/packages/a0/f7/aac9b014140ee56d247e707af8db0aae2e9efc28d4a8aba92d0abd7ae9d1/black-25.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9ec77439ef3e34896995503865a85732c94396edcc739f302c5673a2315e1e7f", size = 1742384, upload-time = "2025-12-08T01:49:37.022Z" }, + { url = "https://files.pythonhosted.org/packages/74/98/38aaa018b2ab06a863974c12b14a6266badc192b20603a81b738c47e902e/black-25.12.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e509c858adf63aa61d908061b52e580c40eae0dfa72415fa47ac01b12e29baf", size = 1798761, upload-time = "2025-12-08T01:46:05.386Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/3a/a8ac542125f61574a3f015b521ca83b47321ed19bb63fe6d7560f348bfe1/black-25.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:252678f07f5bac4ff0d0e9b261fbb029fa530cfa206d0a636a34ab445ef8ca9d", size = 1429180, upload-time = "2025-12-08T01:45:34.903Z" }, + { url = "https://files.pythonhosted.org/packages/e6/2d/bdc466a3db9145e946762d52cd55b1385509d9f9004fec1c97bdc8debbfb/black-25.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:bc5b1c09fe3c931ddd20ee548511c64ebf964ada7e6f0763d443947fd1c603ce", size = 1239350, upload-time = "2025-12-08T01:46:09.458Z" }, + { url = "https://files.pythonhosted.org/packages/35/46/1d8f2542210c502e2ae1060b2e09e47af6a5e5963cb78e22ec1a11170b28/black-25.12.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:0a0953b134f9335c2434864a643c842c44fba562155c738a2a37a4d61f00cad5", size = 1917015, upload-time = "2025-12-08T01:53:27.987Z" }, + { url = "https://files.pythonhosted.org/packages/41/37/68accadf977672beb8e2c64e080f568c74159c1aaa6414b4cd2aef2d7906/black-25.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2355bbb6c3b76062870942d8cc450d4f8ac71f9c93c40122762c8784df49543f", size = 1741830, upload-time = "2025-12-08T01:54:36.861Z" }, + { url = "https://files.pythonhosted.org/packages/ac/76/03608a9d8f0faad47a3af3a3c8c53af3367f6c0dd2d23a84710456c7ac56/black-25.12.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9678bd991cc793e81d19aeeae57966ee02909877cb65838ccffef24c3ebac08f", size = 1791450, upload-time = "2025-12-08T01:44:52.581Z" }, + { url = "https://files.pythonhosted.org/packages/06/99/b2a4bd7dfaea7964974f947e1c76d6886d65fe5d24f687df2d85406b2609/black-25.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:97596189949a8aad13ad12fcbb4ae89330039b96ad6742e6f6b45e75ad5cfd83", size = 1452042, upload-time = "2025-12-08T01:46:13.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/7c/d9825de75ae5dd7795d007681b752275ea85a1c5d83269b4b9c754c2aaab/black-25.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:778285d9ea197f34704e3791ea9404cd6d07595745907dd2ce3da7a13627b29b", size = 1267446, upload-time = "2025-12-08T01:46:14.497Z" }, + { url = "https://files.pythonhosted.org/packages/68/11/21331aed19145a952ad28fca2756a1433ee9308079bd03bd898e903a2e53/black-25.12.0-py3-none-any.whl", hash = "sha256:48ceb36c16dbc84062740049eef990bb2ce07598272e673c17d1a7720c71c828", size = 206191, upload-time = "2025-12-08T01:40:50.963Z" }, ] [[package]] @@ -402,6 +450,35 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/46/0f/a2f53f5e7be49bfa98dcb4e552382a6dc8c74ea74e755723654b85062316/databind.json-4.5.2-py3-none-any.whl", hash = "sha256:a803bf440634685984361cb2a5a975887e487c854ed48d81ff7aaf3a1ed1e94c", size = 1473, upload-time = "2024-05-31T15:29:05.857Z" }, ] +[[package]] +name = "datamodel-code-generator" +version = "0.42.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "argcomplete" }, + { name = "black" }, + { name = "genson" }, + { name = "inflect" }, + { name = "isort" }, + { name = "jinja2" }, + { name = "packaging" }, + { name = "pydantic" }, + { name = "pyyaml" }, + { name = "tomli", marker = "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/88/e0/372aed4838be433829b0309868f1cf8776475fc8f8f7f47d784ecb394ea7/datamodel_code_generator-0.42.2.tar.gz", hash = "sha256:f7ac71eab3aa4bb1da2ebe0aabd4b7fe8abd2a66dd03cc492d57807e90a3a54e", size = 511348, upload-time = "2025-12-08T21:04:54.865Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/bd/fde595e1c48e100673fc6e1075b0a0e038b54f1c8f30770133d4e9f473aa/datamodel_code_generator-0.42.2-py3-none-any.whl", hash = "sha256:54430d6be3ea54ce5e70203d42e42881ef8156b6da395bca7355db5587653d91", size = 172014, upload-time = "2025-12-08T21:04:53.472Z" }, +] + 
+[package.optional-dependencies] +http = [ + { name = "httpx" }, +] +ruff = [ + { name = "ruff" }, +] + [[package]] name = "deprecated" version = "1.3.1" @@ -425,30 +502,30 @@ wheels = [ [[package]] name = "docspec" -version = "2.2.2" +version = "2.2.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "databind-core" }, { name = "databind-json" }, { name = "deprecated" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8b/fe/1ad244d0ca186b5386050ec30dfd59bd3dbeea5baec33ca861dd43b922e6/docspec-2.2.2.tar.gz", hash = "sha256:c772c6facfce839176b647701082c7a22b3d22d872d392552cf5d65e0348c919", size = 14086, upload-time = "2025-05-06T12:39:59.466Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/39/7a71382107445b2cd50c67c6194e3e584f19748a817c3b29e8be8a14f00f/docspec-2.2.1.tar.gz", hash = "sha256:4854e77edc0e2de40e785e57e95880f7095a05fe978f8b54cef7a269586e15ff", size = 8646, upload-time = "2023-05-28T11:24:18.68Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/38/57/1011f2e88743a818cced9a95d54200ba6a05decaf43fd91d8c6ed9f6470d/docspec-2.2.2-py3-none-any.whl", hash = "sha256:854d25401e7ec2d155b0c1e001e25819d16b6df3a7575212a7f340ae8b00122e", size = 9726, upload-time = "2025-05-06T12:39:58.047Z" }, + { url = "https://files.pythonhosted.org/packages/33/aa/0c9d71cc9d450afd3993d09835e2910810a45b0703f585e1aee1d9b78969/docspec-2.2.1-py3-none-any.whl", hash = "sha256:7538f750095a9688c6980ff9a4e029a823a500f64bd00b6b4bdb27951feb31cb", size = 9844, upload-time = "2023-05-28T11:24:15.419Z" }, ] [[package]] name = "docspec-python" -version = "2.2.1" +version = "2.2.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "black" }, { name = "docspec" }, { name = "nr-util" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/52/88/99c5e27a894f01290364563c84838cf68f1a8629474b5bbfc3bf35a8d923/docspec_python-2.2.1.tar.gz", hash = 
"sha256:c41b850b4d6f4de30999ea6f82c9cdb9183d9bcba45559ee9173d3dab7281559", size = 13838, upload-time = "2023-05-28T11:24:19.846Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/ea/e6d9d9c2f805c6ac8072d0e3ee5b1da2dd61886c662327df937dec9f282c/docspec_python-2.2.2.tar.gz", hash = "sha256:429be834d09549461b95bf45eb53c16859f3dfb3e9220408b3bfb12812ccb3fb", size = 22154, upload-time = "2025-05-06T12:40:33.286Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/49/b8d1a2fa01b6f7a1a9daa1d485efc7684489028d6a356fc2bc5b40131061/docspec_python-2.2.1-py3-none-any.whl", hash = "sha256:76ac41d35a8face35b2d766c2e8a416fb8832359785d396f0d53bcb00f178e54", size = 16093, upload-time = "2023-05-28T11:24:17.261Z" }, + { url = "https://files.pythonhosted.org/packages/03/c2/b3226746fb6b91893da270a60e77bb420d59cf33a7b9a4e719a236955971/docspec_python-2.2.2-py3-none-any.whl", hash = "sha256:caa32dc1e8c470af8a5ecad67cca614e68c1563ac01dab0c0486c4d7f709d6b1", size = 15988, upload-time = "2025-05-06T12:40:31.554Z" }, ] [[package]] @@ -499,6 +576,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" }, ] +[[package]] +name = "genson" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c5/cf/2303c8ad276dcf5ee2ad6cf69c4338fd86ef0f471a5207b069adf7a393cf/genson-1.3.0.tar.gz", hash = "sha256:e02db9ac2e3fd29e65b5286f7135762e2cd8a986537c075b06fc5f1517308e37", size = 34919, upload-time = "2024-05-15T22:08:49.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/5c/e226de133afd8bb267ec27eead9ae3d784b95b39a287ed404caab39a5f50/genson-1.3.0-py3-none-any.whl", hash = 
"sha256:468feccd00274cc7e4c09e84b08704270ba8d95232aa280f65b986139cec67f7", size = 21470, upload-time = "2024-05-15T22:08:47.056Z" }, +] + [[package]] name = "griffe" version = "1.15.0" @@ -511,6 +597,43 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9c/83/3b1d03d36f224edded98e9affd0467630fc09d766c0e56fb1498cbb04a9b/griffe-1.15.0-py3-none-any.whl", hash = "sha256:6f6762661949411031f5fcda9593f586e6ce8340f0ba88921a0f2ef7a81eb9a3", size = 150705, upload-time = "2025-11-10T15:03:13.549Z" }, ] +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version 
= "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + [[package]] name = "identify" version = "2.6.15" @@ -584,6 +707,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fe/ff/ba31bb3e0fa715251f3c9f344644a77a0a2294b0a8d3409ff53920c59fba/impit-0.9.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3b5383d7d0c3ea3fedf53416646da1570e1c0377037a8b294ca23cea14fe1c86", size = 6476703, upload-time = "2025-11-26T16:06:44.232Z" }, ] +[[package]] +name = "inflect" +version = "7.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "more-itertools" }, + { name = "typeguard" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/c6/943357d44a21fd995723d07ccaddd78023eace03c1846049a2645d4324a3/inflect-7.5.0.tar.gz", hash = "sha256:faf19801c3742ed5a05a8ce388e0d8fe1a07f8d095c82201eb904f5d27ad571f", size = 73751, upload-time = "2024-12-28T17:11:18.897Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/eb/427ed2b20a38a4ee29f24dbe4ae2dafab198674fe9a85e3d6adf9e5f5f41/inflect-7.5.0-py3-none-any.whl", hash = "sha256:2aea70e5e70c35d8350b8097396ec155ffd68def678c7ff97f51aa69c1d92344", size = 35197, upload-time = "2024-12-28T17:11:15.931Z" }, +] + [[package]] name = "iniconfig" version = "2.3.0" @@ -593,6 
+729,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, ] +[[package]] +name = "isort" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/63/53/4f3c058e3bace40282876f9b553343376ee687f3c35a525dc79dbd450f88/isort-7.0.0.tar.gz", hash = "sha256:5513527951aadb3ac4292a41a16cbc50dd1642432f5e8c20057d414bdafb4187", size = 805049, upload-time = "2025-10-11T13:30:59.107Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/ed/e3705d6d02b4f7aea715a353c8ce193efd0b5db13e204df895d38734c244/isort-7.0.0-py3-none-any.whl", hash = "sha256:1bcabac8bc3c36c7fb7b98a76c8abb18e0f841a3ba81decac7691008592499c1", size = 94672, upload-time = "2025-10-11T13:30:57.665Z" }, +] + [[package]] name = "jinja2" version = "3.1.6" @@ -919,6 +1064,139 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5d/c4/b2d28e9d2edf4f1713eb3c29307f1a63f3d67cf09bdda29715a36a68921a/pre_commit-4.5.0-py2.py3-none-any.whl", hash = "sha256:25e2ce09595174d9c97860a95609f9f852c0614ba602de3561e267547f2335e1", size = 226429, upload-time = "2025-11-22T21:02:40.836Z" }, ] +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/90/32c9941e728d564b411d574d8ee0cf09b12ec978cb22b294995bae5549a5/pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146", size = 2107298, upload-time = "2025-11-04T13:39:04.116Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a8/61c96a77fe28993d9a6fb0f4127e05430a267b235a124545d79fea46dd65/pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2", size = 1901475, upload-time = "2025-11-04T13:39:06.055Z" }, + { url = "https://files.pythonhosted.org/packages/5d/b6/338abf60225acc18cdc08b4faef592d0310923d19a87fba1faf05af5346e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97", size = 1918815, upload-time = "2025-11-04T13:39:10.41Z" }, + { url = "https://files.pythonhosted.org/packages/d1/1c/2ed0433e682983d8e8cba9c8d8ef274d4791ec6a6f24c58935b90e780e0a/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9", size = 2065567, upload-time = "2025-11-04T13:39:12.244Z" }, + { url = "https://files.pythonhosted.org/packages/b3/24/cf84974ee7d6eae06b9e63289b7b8f6549d416b5c199ca2d7ce13bbcf619/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52", size = 2230442, upload-time = "2025-11-04T13:39:13.962Z" }, + { url = "https://files.pythonhosted.org/packages/fd/21/4e287865504b3edc0136c89c9c09431be326168b1eb7841911cbc877a995/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941", size = 2350956, upload-time = "2025-11-04T13:39:15.889Z" }, + { url = "https://files.pythonhosted.org/packages/a8/76/7727ef2ffa4b62fcab916686a68a0426b9b790139720e1934e8ba797e238/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a", size = 2068253, upload-time = "2025-11-04T13:39:17.403Z" }, + { url = "https://files.pythonhosted.org/packages/d5/8c/a4abfc79604bcb4c748e18975c44f94f756f08fb04218d5cb87eb0d3a63e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c", size = 2177050, upload-time = "2025-11-04T13:39:19.351Z" }, + { url = "https://files.pythonhosted.org/packages/67/b1/de2e9a9a79b480f9cb0b6e8b6ba4c50b18d4e89852426364c66aa82bb7b3/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2", size = 2147178, upload-time = "2025-11-04T13:39:21Z" }, + { url = "https://files.pythonhosted.org/packages/16/c1/dfb33f837a47b20417500efaa0378adc6635b3c79e8369ff7a03c494b4ac/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = 
"sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556", size = 2341833, upload-time = "2025-11-04T13:39:22.606Z" }, + { url = "https://files.pythonhosted.org/packages/47/36/00f398642a0f4b815a9a558c4f1dca1b4020a7d49562807d7bc9ff279a6c/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49", size = 2321156, upload-time = "2025-11-04T13:39:25.843Z" }, + { url = "https://files.pythonhosted.org/packages/7e/70/cad3acd89fde2010807354d978725ae111ddf6d0ea46d1ea1775b5c1bd0c/pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba", size = 1989378, upload-time = "2025-11-04T13:39:27.92Z" }, + { url = "https://files.pythonhosted.org/packages/76/92/d338652464c6c367e5608e4488201702cd1cbb0f33f7b6a85a60fe5f3720/pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9", size = 2013622, upload-time = "2025-11-04T13:39:29.848Z" }, + { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 
1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = 
"2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", 
size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = 
"2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = 
"2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 
2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/e6/b0/1a2aa41e3b5a4ba11420aba2d091b2d17959c8d1519ece3627c371951e73/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8", size = 2103351, upload-time = "2025-11-04T13:43:02.058Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ee/31b1f0020baaf6d091c87900ae05c6aeae101fa4e188e1613c80e4f1ea31/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a", size = 1925363, upload-time = "2025-11-04T13:43:05.159Z" }, + { url = "https://files.pythonhosted.org/packages/e1/89/ab8e86208467e467a80deaca4e434adac37b10a9d134cd2f99b28a01e483/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b", size = 2135615, upload-time = "2025-11-04T13:43:08.116Z" }, + { url = "https://files.pythonhosted.org/packages/99/0a/99a53d06dd0348b2008f2f30884b34719c323f16c3be4e6cc1203b74a91d/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2", size = 2175369, upload-time = "2025-11-04T13:43:12.49Z" }, + { url = "https://files.pythonhosted.org/packages/6d/94/30ca3b73c6d485b9bb0bc66e611cff4a7138ff9736b7e66bcf0852151636/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093", size = 2144218, upload-time = "2025-11-04T13:43:15.431Z" }, + { url = "https://files.pythonhosted.org/packages/87/57/31b4f8e12680b739a91f472b5671294236b82586889ef764b5fbc6669238/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a", size = 2329951, upload-time = "2025-11-04T13:43:18.062Z" }, + { url = "https://files.pythonhosted.org/packages/7d/73/3c2c8edef77b8f7310e6fb012dbc4b8551386ed575b9eb6fb2506e28a7eb/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963", size = 2318428, upload-time = "2025-11-04T13:43:20.679Z" }, + { url = "https://files.pythonhosted.org/packages/2f/02/8559b1f26ee0d502c74f9cca5c0d2fd97e967e083e006bbbb4e97f3a043a/pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a", size = 2147009, upload-time = "2025-11-04T13:43:23.286Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, +] + [[package]] name = "pydoc-markdown" version = "4.8.2" @@ -1036,6 +1314,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" }, ] +[[package]] +name = "pytokens" +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4e/8d/a762be14dae1c3bf280202ba3172020b2b0b4c537f94427435f19c413b72/pytokens-0.3.0.tar.gz", hash = "sha256:2f932b14ed08de5fcf0b391ace2642f858f1394c0857202959000b68ed7a458a", size = 17644, upload-time = "2025-11-05T13:36:35.34Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/25/d9db8be44e205a124f6c98bc0324b2bb149b7431c53877fc6d1038dddaf5/pytokens-0.3.0-py3-none-any.whl", hash = "sha256:95b2b5eaf832e469d141a378872480ede3f251a5a5041b8ec6e581d3ac71bbf3", size = 12195, upload-time = "2025-11-05T13:36:33.183Z" }, +] + [[package]] name = "pyyaml" version = "6.0.3" @@ -1244,6 +1531,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d4/84/021bbeb7edb990dd6875cb6ab08d32faaa49fec63453d863730260a01f9e/typeapi-2.3.0-py3-none-any.whl", hash = "sha256:576b7dcb94412e91c5cae107a393674f8f99c10a24beb8be2302e3fed21d5cc2", size = 26858, upload-time = "2025-10-23T13:44:09.833Z" }, ] +[[package]] +name = "typeguard" +version = "4.4.4" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c7/68/71c1a15b5f65f40e91b65da23b8224dad41349894535a97f63a52e462196/typeguard-4.4.4.tar.gz", hash = "sha256:3a7fd2dffb705d4d0efaed4306a704c89b9dee850b688f060a8b1615a79e5f74", size = 75203, upload-time = "2025-06-18T09:56:07.624Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1b/a9/e3aee762739c1d7528da1c3e06d518503f8b6c439c35549b53735ba52ead/typeguard-4.4.4-py3-none-any.whl", hash = "sha256:b5f562281b6bfa1f5492470464730ef001646128b180769880468bd84b68b09e", size = 34874, upload-time = "2025-06-18T09:56:05.999Z" }, +] + [[package]] name = "types-colorama" version = "0.4.15.20250801" @@ -1262,6 +1561,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, ] +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + [[package]] name = "urllib3" version = "2.6.0" diff --git a/website/src/pages/home_page_example.py 
b/website/src/pages/home_page_example.py index 13236b83..6b8a3fc3 100644 --- a/website/src/pages/home_page_example.py +++ b/website/src/pages/home_page_example.py @@ -13,6 +13,6 @@ async def main() -> None: return # Fetch results from the Actor run's default dataset. - dataset_client = apify_client.dataset(call_result['defaultDatasetId']) + dataset_client = apify_client.dataset(call_result.default_dataset_id) list_items_result = await dataset_client.list_items() print(f'Dataset: {list_items_result}')