diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..2975313
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,18 @@
+# macOS system files
+.DS_Store
+
+# MkDocs build artifacts
+site/
+docs/SUMMARY.md
+docs/clients/
+**/docs/client/
+
+# Virtual environments
+.venv
+venv
+
+# Python bytecode cache
+__pycache__/
+
+# Dependency lock file for uv
+uv.lock
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index c4b6a1c..2f58725 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -56,4 +56,4 @@ If you discover a potential security issue in this project we ask that you notif
## Licensing
-See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
+See the [LICENSE](https://github.com/awslabs/aws-sdk-python/blob/develop/LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..b6b9c76
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,25 @@
+DOCS_PORT ?= 8000
+PYTHON_VERSION := 3.12
+
+.PHONY: docs docs-generate docs-serve docs-clean docs-install venv
+
+venv:
+ uv venv --python $(PYTHON_VERSION)
+
+docs-install: venv
+ uv pip install -r requirements-docs.in
+ uv pip install -e clients/*
+
+docs-clean:
+ rm -rf site docs/clients docs/SUMMARY.md
+
+docs-generate:
+ uv run python scripts/docs/generate_all_doc_stubs.py
+ uv run python scripts/docs/generate_nav.py
+
+docs: docs-generate
+ uv run mkdocs build
+
+docs-serve:
+ @[ -d site ] || $(MAKE) docs
+ uv run python -m http.server $(DOCS_PORT) --bind 127.0.0.1 --directory site
diff --git a/README.md b/README.md
index 7902617..f7337e7 100644
--- a/README.md
+++ b/README.md
@@ -31,7 +31,7 @@ This is the preferred mechanism to give feedback so that other users can engage
## Security
-See [CONTRIBUTING](CONTRIBUTING.md#security-issue-notifications) for more information.
+See [CONTRIBUTING](https://github.com/awslabs/aws-sdk-python/blob/develop/CONTRIBUTING.md#security-issue-notifications) for more information.
## License
diff --git a/clients/aws-sdk-bedrock-runtime/Makefile b/clients/aws-sdk-bedrock-runtime/Makefile
new file mode 100644
index 0000000..43adf73
--- /dev/null
+++ b/clients/aws-sdk-bedrock-runtime/Makefile
@@ -0,0 +1,25 @@
+DOCS_PORT ?= 8000
+CLIENT_DIR := src/aws_sdk_bedrock_runtime
+DOCS_OUTPUT_DIR := docs/client
+PYTHON_VERSION := 3.12
+
+.PHONY: docs docs-generate docs-serve docs-clean docs-install venv
+
+venv:
+ uv venv --python $(PYTHON_VERSION)
+
+docs-install: venv
+ uv sync --group docs
+
+docs-clean:
+ rm -rf site $(DOCS_OUTPUT_DIR)
+
+docs-generate:
+ uv run python scripts/docs/generate_doc_stubs.py -c $(CLIENT_DIR) -o $(DOCS_OUTPUT_DIR)
+
+docs: docs-generate
+ uv run mkdocs build
+
+docs-serve:
+ @[ -d site ] || $(MAKE) docs
+ uv run python -m http.server $(DOCS_PORT) --bind 127.0.0.1 --directory site
diff --git a/clients/aws-sdk-bedrock-runtime/docs/README.md b/clients/aws-sdk-bedrock-runtime/docs/README.md
new file mode 100644
index 0000000..c25ff76
--- /dev/null
+++ b/clients/aws-sdk-bedrock-runtime/docs/README.md
@@ -0,0 +1,18 @@
+## Generating Client Documentation
+
+Material for MkDocs is used for documentation. You can generate the documentation HTML
+for this client locally with the following:
+
+```bash
+# Install documentation dependencies
+make docs-install
+
+# Serve documentation locally
+make docs-serve
+
+# OR build static HTML documentation
+make docs
+
+# Clean docs artifacts
+make docs-clean
+```
diff --git a/clients/aws-sdk-bedrock-runtime/docs/hooks/copyright.py b/clients/aws-sdk-bedrock-runtime/docs/hooks/copyright.py
new file mode 100644
index 0000000..1260def
--- /dev/null
+++ b/clients/aws-sdk-bedrock-runtime/docs/hooks/copyright.py
@@ -0,0 +1,6 @@
+from datetime import datetime
+
+
+def on_config(config, **kwargs):
+ config.copyright = f"Copyright © {datetime.now().year}, Amazon Web Services, Inc"
+ return config
diff --git a/clients/aws-sdk-bedrock-runtime/docs/index.md b/clients/aws-sdk-bedrock-runtime/docs/index.md
new file mode 100644
index 0000000..612c7a5
--- /dev/null
+++ b/clients/aws-sdk-bedrock-runtime/docs/index.md
@@ -0,0 +1 @@
+--8<-- "README.md"
diff --git a/clients/aws-sdk-bedrock-runtime/docs/stylesheets/extra.css b/clients/aws-sdk-bedrock-runtime/docs/stylesheets/extra.css
new file mode 100644
index 0000000..21d1b09
--- /dev/null
+++ b/clients/aws-sdk-bedrock-runtime/docs/stylesheets/extra.css
@@ -0,0 +1,9 @@
+/* Custom breadcrumb styling */
+.breadcrumb {
+ font-size: 0.85em;
+ color: var(--md-default-fg-color--light);
+}
+
+p:has(span.breadcrumb) {
+margin-top: 0;
+}
diff --git a/clients/aws-sdk-bedrock-runtime/mkdocs.yml b/clients/aws-sdk-bedrock-runtime/mkdocs.yml
new file mode 100644
index 0000000..9e0024e
--- /dev/null
+++ b/clients/aws-sdk-bedrock-runtime/mkdocs.yml
@@ -0,0 +1,96 @@
+site_name: AWS SDK for Python - Bedrock Runtime
+site_description: Documentation for AWS Bedrock Runtime Client
+
+repo_name: awslabs/aws-sdk-python
+repo_url: https://github.com/awslabs/aws-sdk-python
+
+exclude_docs: |
+ README.md
+
+hooks:
+ - docs/hooks/copyright.py
+
+theme:
+ name: material
+ favicon: ""
+ palette:
+ # Palette toggle for automatic mode
+ - media: "(prefers-color-scheme)"
+ scheme: default
+ toggle:
+ icon: material/brightness-auto
+ name: Switch to light mode
+ primary: white
+ # Palette toggle for light mode
+ - media: "(prefers-color-scheme: light)"
+ scheme: default
+ toggle:
+ icon: material/brightness-7
+ name: Switch to dark mode
+ primary: white
+ # Palette toggle for dark mode
+ - media: "(prefers-color-scheme: dark)"
+ scheme: slate
+ toggle:
+ icon: material/brightness-4
+ name: Switch to system preference
+ primary: black
+ features:
+ - navigation.indexes
+ - navigation.instant
+ - navigation.top
+ - search.suggest
+ - search.highlight
+ - content.code.copy
+
+plugins:
+- search
+- mkdocstrings:
+ handlers:
+ python:
+ options:
+ show_source: false
+ show_signature: true
+ show_signature_annotations: true
+ show_root_heading: true
+ show_root_full_path: false
+ show_object_full_path: false
+ show_symbol_type_heading: true
+ show_symbol_type_toc: true
+ show_if_no_docstring: true
+ show_category_heading: true
+ group_by_category: true
+ separate_signature: true
+ signature_crossrefs: true
+ filters:
+ - "!^_"
+ - "!^deserialize"
+ - "!^serialize"
+
+markdown_extensions:
+ - pymdownx.highlight
+ - pymdownx.inlinehilite
+ - pymdownx.snippets:
+ check_paths: true
+ - pymdownx.superfences
+ - admonition
+ - def_list
+ - toc:
+ permalink: true
+ toc_depth: 3
+
+nav:
+ - Overview: index.md
+ - Client: client/index.md
+
+extra:
+ social:
+ - icon: fontawesome/brands/github
+ link: https://github.com/awslabs/aws-sdk-python
+
+extra_css:
+ - stylesheets/extra.css
+
+validation:
+ nav:
+ omitted_files: ignore
diff --git a/clients/aws-sdk-bedrock-runtime/pyproject.toml b/clients/aws-sdk-bedrock-runtime/pyproject.toml
index df2fcb4..3023696 100644
--- a/clients/aws-sdk-bedrock-runtime/pyproject.toml
+++ b/clients/aws-sdk-bedrock-runtime/pyproject.toml
@@ -34,6 +34,12 @@ test = [
"pytest-asyncio>=0.20.3,<0.21.0"
]
+docs = [
+ "mkdocs~=1.6.1",
+ "mkdocs-material==9.7.0",
+ "mkdocstrings[python]==1.0.0"
+]
+
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
diff --git a/clients/aws-sdk-bedrock-runtime/scripts/docs/generate_doc_stubs.py b/clients/aws-sdk-bedrock-runtime/scripts/docs/generate_doc_stubs.py
new file mode 100644
index 0000000..056708a
--- /dev/null
+++ b/clients/aws-sdk-bedrock-runtime/scripts/docs/generate_doc_stubs.py
@@ -0,0 +1,607 @@
+"""
+Generate markdown API Reference stubs for AWS SDK for Python clients.
+
+This script generates MkDocs markdown stub files for a single client package.
+It uses griffe to analyze the Python source and outputs mkdocstrings directives
+for the client, operations, models (structures, unions, enums), and errors.
+"""
+
+import argparse
+import logging
+import sys
+from collections.abc import Sequence
+from dataclasses import dataclass
+from enum import Enum
+from pathlib import Path
+from typing import TypeGuard
+
+import griffe
+from griffe import (
+ Alias,
+ Attribute,
+ Class,
+ Expr,
+ ExprBinOp,
+ ExprName,
+ ExprSubscript,
+ ExprTuple,
+ Function,
+ Module,
+ Object,
+)
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger("generate_doc_stubs")
+
+
+class StreamType(Enum):
+ """Type of event stream for operations."""
+
+ INPUT = "InputEventStream"
+ OUTPUT = "OutputEventStream"
+ DUPLEX = "DuplexEventStream"
+
+ @property
+ def description(self) -> str:
+ """Return a string description for documentation."""
+ descriptions = {
+ StreamType.INPUT: "an `InputEventStream` for client-to-server streaming",
+ StreamType.OUTPUT: "an `OutputEventStream` for server-to-client streaming",
+ StreamType.DUPLEX: "a `DuplexEventStream` for bidirectional streaming",
+ }
+ return descriptions[self]
+
+
+@dataclass(frozen=True)
+class TypeInfo:
+ """Information about a type (structure, enum, error, config, plugin)."""
+
+ name: str # e.g., "ConverseOperationOutput"
+ module_path: str # e.g., "aws_sdk_bedrock_runtime.models.ConverseOperationOutput"
+
+
+@dataclass
+class UnionInfo:
+ """Information about a union type."""
+
+ name: str
+ module_path: str
+ members: list[TypeInfo]
+
+
+@dataclass
+class OperationInfo:
+ """Information about a client operation."""
+
+ name: str
+ module_path: str
+ input: TypeInfo
+ output: TypeInfo
+ stream_type: StreamType | None
+ event_input_type: str | None # For input/duplex streams
+ event_output_type: str | None # For output/duplex streams
+
+
+@dataclass
+class ModelsInfo:
+ """Information about all modeled types."""
+
+ structures: list[TypeInfo]
+ unions: list[UnionInfo]
+ enums: list[TypeInfo]
+ errors: list[TypeInfo]
+
+
+@dataclass
+class ClientInfo:
+ """Complete information about a client package."""
+
+ name: str # e.g., "BedrockRuntimeClient"
+ module_path: str # e.g., "aws_sdk_bedrock_runtime.client.BedrockRuntimeClient"
+ package_name: str # e.g., "aws_sdk_bedrock_runtime"
+ config: TypeInfo
+ plugin: TypeInfo
+ operations: list[OperationInfo]
+ models: ModelsInfo
+
+
+class DocStubGenerator:
+ """Generate markdown API Reference stubs for AWS SDK for Python clients."""
+
+ def __init__(self, client_dir: Path, output_dir: Path) -> None:
+ """
+ Initialize the documentation generator.
+
+ Args:
+ client_dir: Path to the client source directory
+ output_dir: Path to the output directory for generated doc stubs
+ """
+ self.client_dir = client_dir
+ self.output_dir = output_dir
+ # Extract service name from package name
+ # (e.g., "aws_sdk_bedrock_runtime" -> "Bedrock Runtime")
+ self.service_name = client_dir.name.replace("aws_sdk_", "").replace("_", " ").title()
+
+ def generate(self) -> bool:
+ """
+ Generate the documentation stubs to the output directory.
+
+ Returns:
+ True if documentation was generated successfully, False otherwise.
+ """
+ logger.info(f"Generating doc stubs for {self.service_name}...")
+
+ package_name = self.client_dir.name
+ client_info = self._analyze_client_package(package_name)
+ if not self._generate_client_docs(client_info):
+ return False
+
+ logger.info(f"Finished generating doc stubs for {self.service_name}")
+ return True
+
+ def _analyze_client_package(self, package_name: str) -> ClientInfo:
+ """Analyze a client package using griffe."""
+ logger.info(f"Analyzing package: {package_name}")
+ package = griffe.load(package_name)
+
+ # Ensure required modules exist
+ required = ["client", "config", "models"]
+ missing = [name for name in required if not package.modules.get(name)]
+ if missing:
+ raise ValueError(f"Missing required modules in {package_name}: {', '.join(missing)}")
+
+ # Parse submodules
+ client_module = package.modules["client"]
+ config_module = package.modules["config"]
+ models_module = package.modules["models"]
+
+ client_class = self._find_class_with_suffix(client_module, "Client")
+ if not client_class:
+ raise ValueError(f"No class ending with 'Client' found in {package_name}.client")
+
+ config_class = config_module.members.get("Config")
+ plugin_alias = config_module.members.get("Plugin")
+ if not config_class or not plugin_alias:
+ raise ValueError(f"Missing Config or Plugin in {package_name}.config")
+
+ config = TypeInfo(name=config_class.name, module_path=config_class.path)
+ plugin = TypeInfo(name=plugin_alias.name, module_path=plugin_alias.path)
+
+ operations = self._extract_operations(client_class)
+ models = self._extract_models(models_module, operations)
+
+ logger.info(
+ f"Analyzed {client_class.name}: {len(operations)} operations, "
+ f"{len(models.structures)} structures, {len(models.errors)} errors, "
+ f"{len(models.unions)} unions, {len(models.enums)} enums"
+ )
+
+ return ClientInfo(
+ name=client_class.name,
+ module_path=client_class.path,
+ package_name=package_name,
+ config=config,
+ plugin=plugin,
+ operations=operations,
+ models=models,
+ )
+
+ def _find_class_with_suffix(self, module: Module, suffix: str) -> Class | None:
+ """Find the class in the module with a matching suffix."""
+ for cls in module.classes.values():
+ if cls.name.endswith(suffix):
+ return cls
+ return None
+
+ def _extract_operations(self, client_class: Class) -> list[OperationInfo]:
+ """Extract operation information from client class."""
+ operations = []
+ for op in client_class.functions.values():
+ if op.is_private or op.is_init_method:
+ continue
+ operations.append(self._analyze_operation(op))
+ return operations
+
+ def _analyze_operation(self, operation: Function) -> OperationInfo:
+ """Analyze an operation method to extract information."""
+ stream_type = None
+ event_input_type = None
+ event_output_type = None
+
+ input_param = operation.parameters["input"]
+ input_annotation = self._get_expr(
+ input_param.annotation, f"'{operation.name}' input annotation"
+ )
+ input_info = TypeInfo(
+ name=input_annotation.canonical_name,
+ module_path=input_annotation.canonical_path,
+ )
+
+ returns = self._get_expr(operation.returns, f"'{operation.name}' return type")
+ output_type = returns.canonical_name
+ stream_type_map = {s.value: s for s in StreamType}
+
+ if output_type in stream_type_map:
+ stream_type = stream_type_map[output_type]
+ stream_args = self._get_subscript_elements(returns, f"'{operation.name}' stream type")
+
+ if stream_type in (StreamType.INPUT, StreamType.DUPLEX):
+ event_input_type = stream_args[0].canonical_name
+ if stream_type in (StreamType.OUTPUT, StreamType.DUPLEX):
+ idx = 1 if stream_type == StreamType.DUPLEX else 0
+ event_output_type = stream_args[idx].canonical_name
+
+ output_info = TypeInfo(
+ name=stream_args[-1].canonical_name, module_path=stream_args[-1].canonical_path
+ )
+ else:
+ output_info = TypeInfo(name=output_type, module_path=returns.canonical_path)
+
+ return OperationInfo(
+ name=operation.name,
+ module_path=operation.path,
+ input=input_info,
+ output=output_info,
+ stream_type=stream_type,
+ event_input_type=event_input_type,
+ event_output_type=event_output_type,
+ )
+
+ def _get_expr(self, annotation: str | Expr | None, context: str) -> Expr:
+ """Extract and validate an Expr from an annotation."""
+ if not isinstance(annotation, Expr):
+ raise TypeError(f"{context}: expected Expr, got {type(annotation).__name__}")
+ return annotation
+
+ def _get_subscript_elements(self, expr: Expr, context: str) -> list[Expr]:
+ """Extract type arguments from a subscript expression like Generic[A, B, C]."""
+ if not isinstance(expr, ExprSubscript):
+ raise TypeError(f"{context}: expected subscript, got {type(expr).__name__}")
+ slice_expr = expr.slice
+ if isinstance(slice_expr, str):
+ raise TypeError(f"{context}: unexpected string slice '{slice_expr}'")
+ if isinstance(slice_expr, ExprTuple):
+ return [el for el in slice_expr.elements if isinstance(el, Expr)]
+ return [slice_expr]
+
+ def _extract_models(self, models_module: Module, operations: list[OperationInfo]) -> ModelsInfo:
+ """Extract structures, unions, enums, and errors from models module."""
+ structures, unions, enums, errors = [], [], [], []
+
+ for member in models_module.members.values():
+ # Skip imported and private members
+ if member.is_imported or member.is_private:
+ continue
+
+ if self._is_union(member):
+ unions.append(
+ UnionInfo(
+ name=member.name,
+ module_path=member.path,
+ members=self._extract_union_members(member, models_module),
+ )
+ )
+ elif self._is_enum(member):
+ enums.append(TypeInfo(name=member.name, module_path=member.path))
+ elif self._is_error(member):
+ errors.append(TypeInfo(name=member.name, module_path=member.path))
+ elif member.is_class:
+ structures.append(TypeInfo(name=member.name, module_path=member.path))
+
+ duplicates = set()
+ for structure in structures:
+ if self._is_operation_io_type(structure.name, operations) or self._is_union_member(
+ structure.name, unions
+ ):
+ duplicates.add(structure)
+
+ structures = [struct for struct in structures if struct not in duplicates]
+
+ return ModelsInfo(structures=structures, unions=unions, enums=enums, errors=errors)
+
+ def _is_union(self, member: Object | Alias) -> TypeGuard[Attribute]:
+ """Check if a module member is a union type."""
+ if not isinstance(member, Attribute):
+ return False
+
+ value = member.value
+ # Check for Union[...] syntax
+ if isinstance(value, ExprSubscript):
+ left = value.left
+ if isinstance(left, ExprName) and left.name == "Union":
+ return True
+
+ # Check for PEP 604 (X | Y) syntax
+ if isinstance(value, ExprBinOp):
+ return True
+
+ return False
+
+ def _extract_union_members(
+ self, union_attr: Attribute, models_module: Module
+ ) -> list[TypeInfo]:
+ """Extract member types from a union."""
+ members = []
+ value_str = str(union_attr.value)
+
+ # Clean up value_str for Union[X | Y | Z] syntax
+ if value_str.startswith("Union[") and value_str.endswith("]"):
+ value_str = value_str.removeprefix("Union[").removesuffix("]")
+
+ member_names = [member.strip() for member in value_str.split("|")]
+
+ for name in member_names:
+ if not (member_object := models_module.members.get(name)):
+ raise ValueError(f"Union member '{name}' not found in models module")
+ members.append(TypeInfo(name=member_object.name, module_path=member_object.path))
+
+ return members
+
+ def _is_enum(self, member: Object | Alias) -> TypeGuard[Class]:
+ """Check if a module member is an enum."""
+ if not isinstance(member, Class):
+ return False
+ return any(
+ isinstance(base, ExprName) and base.name in ("StrEnum", "IntEnum")
+ for base in member.bases
+ )
+
+ def _is_error(self, member: Object | Alias) -> TypeGuard[Class]:
+ """Check if a module member is an error."""
+ if not isinstance(member, Class):
+ return False
+ return any(
+ isinstance(base, ExprName) and base.name in ("ServiceError", "ModeledError")
+ for base in member.bases
+ )
+
+ def _is_operation_io_type(self, type_name: str, operations: list[OperationInfo]) -> bool:
+ """Check if a type is used as operation input/output."""
+ return any(type_name in (op.input.name, op.output.name) for op in operations)
+
+ def _is_union_member(self, type_name: str, unions: list[UnionInfo]) -> bool:
+ """Check if a type is used as union member."""
+ return any(type_name == m.name for u in unions for m in u.members)
+
+ def _generate_client_docs(self, client_info: ClientInfo) -> bool:
+ """Generate all documentation files for a client."""
+ logger.info(f"Writing doc stubs to {self.output_dir}...")
+
+ try:
+ self._generate_index(client_info)
+ self._generate_operation_stubs(client_info.operations)
+ self._generate_type_stubs(
+ client_info.models.structures, "structures", "Structure Class"
+ )
+ self._generate_type_stubs(client_info.models.errors, "errors", "Error Class")
+ self._generate_type_stubs(
+ client_info.models.enums, "enums", "Enum Class", ["members: true"]
+ )
+ self._generate_union_stubs(client_info.models.unions)
+ except OSError as e:
+ logger.error(f"Failed to write documentation files: {e}")
+ return False
+ return True
+
+ def _generate_index(self, client_info: ClientInfo) -> None:
+ """Generate the main index.md file."""
+ lines = []
+ lines.append(f"# {self.service_name}")
+ lines.append("")
+ lines.append("## Client")
+ lines.append("")
+ lines.append(f"::: {client_info.module_path}")
+ lines.append(" options:")
+ lines.append(" members: false")
+ lines.append(" heading_level: 3")
+ lines.append(" merge_init_into_class: true")
+ lines.append(" docstring_options:")
+ lines.append(" ignore_init_summary: true")
+ lines.append("")
+
+ # Operations section
+ if client_info.operations:
+ lines.append("## Operations")
+ lines.append("")
+ for op in sorted(client_info.operations, key=lambda x: x.name):
+ lines.append(f"- [`{op.name}`](operations/{op.name}.md)")
+ lines.append("")
+
+ # Configuration section
+ lines.append("## Configuration")
+ lines.append("")
+ lines.append(f"::: {client_info.config.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append(" merge_init_into_class: true")
+ lines.append(" docstring_options:")
+ lines.append(" ignore_init_summary: true")
+ lines.append("")
+ lines.append(f"::: {client_info.plugin.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+
+ models = client_info.models
+
+ # Model sections
+ sections: list[tuple[str, str, Sequence[TypeInfo | UnionInfo]]] = [
+ ("Structures", "structures", models.structures),
+ ("Errors", "errors", models.errors),
+ ("Unions", "unions", models.unions),
+ ("Enums", "enums", models.enums),
+ ]
+ for title, folder, items in sections:
+ if items:
+ lines.append("")
+ lines.append(f"## {title}")
+ lines.append("")
+ for item in sorted(items, key=lambda x: x.name):
+ lines.append(f"- [`{item.name}`]({folder}/{item.name}.md)")
+
+ output_path = self.output_dir / "index.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ content = "\n".join(lines)
+ output_path.write_text(content)
+
+ logger.info("Wrote client index file!")
+
+ def _generate_operation_stubs(self, operations: list[OperationInfo]) -> None:
+ """Generate operation documentation files."""
+ for op in operations:
+ lines = []
+ lines.append(f"# {op.name}")
+ lines.append("")
+
+ # Operation section
+ lines.append("## Operation")
+ lines.append("")
+ lines.append(f"::: {op.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append("")
+
+ # Input section
+ lines.append("## Input")
+ lines.append("")
+ lines.append(f"::: {op.input.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append("")
+
+ # Output section - handle all stream types
+ lines.append("## Output")
+ lines.append("")
+
+ if op.stream_type:
+ lines.append(f"This operation returns {op.stream_type.description}.")
+ lines.append("")
+ lines.append("### Event Stream Structure")
+ lines.append("")
+
+ if op.event_input_type:
+ lines.append("#### Input Event Type")
+ lines.append("")
+ lines.append(f"[`{op.event_input_type}`](../unions/{op.event_input_type}.md)")
+ lines.append("")
+ if op.event_output_type:
+ lines.append("#### Output Event Type")
+ lines.append("")
+ lines.append(f"[`{op.event_output_type}`](../unions/{op.event_output_type}.md)")
+ lines.append("")
+
+ lines.append("### Initial Response Structure")
+ lines.append("")
+ lines.append(f"::: {op.output.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 4")
+ else:
+ lines.append(f"::: {op.output.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+
+ output_path = self.output_dir / "operations" / f"{op.name}.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ output_path.write_text(self._breadcrumb("Operations", op.name) + "\n".join(lines))
+
+ logger.info(f"Wrote {len(operations)} operation files")
+
+ def _generate_type_stubs(
+ self,
+ items: list[TypeInfo],
+ category: str,
+ section_title: str,
+ extra_options: list[str] | None = None,
+ ) -> None:
+ """Generate documentation files for a category of types."""
+ for item in items:
+ lines = [
+ f"# {item.name}",
+ "",
+ f"## {section_title}",
+ f"::: {item.module_path}",
+ " options:",
+ " heading_level: 3",
+ ]
+ if extra_options:
+ lines.extend(f" {opt}" for opt in extra_options)
+
+ output_path = self.output_dir / category / f"{item.name}.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ output_path.write_text(self._breadcrumb(category.title(), item.name) + "\n".join(lines))
+
+ logger.info(f"Wrote {len(items)} {category} files")
+
+ def _generate_union_stubs(self, unions: list[UnionInfo]) -> None:
+ """Generate union documentation files."""
+ for union in unions:
+ lines = []
+ lines.append(f"# {union.name}")
+ lines.append("")
+ lines.append("## Union Type")
+ lines.append(f"::: {union.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append("")
+
+ # Add union members
+ if union.members:
+ lines.append("## Union Member Types")
+ for member in union.members:
+ lines.append("")
+ lines.append(f"::: {member.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+
+ output_path = self.output_dir / "unions" / f"{union.name}.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ output_path.write_text(self._breadcrumb("Unions", union.name) + "\n".join(lines))
+
+ logger.info(f"Wrote {len(unions)} union files")
+
+ def _breadcrumb(self, category: str, name: str) -> str:
+ """Generate a breadcrumb navigation element."""
+ separator = " > "
+ home = f"[{self.service_name}](../index.md)"
+ section = f"[{category}](../index.md#{category.lower()})"
+        return f'<span class="breadcrumb">{home}{separator}{section}{separator}{name}</span>\n'
+
+
+def main() -> int:
+ """Main entry point for the single-client documentation generator."""
+ parser = argparse.ArgumentParser(
+ description="Generate API documentation stubs for AWS SDK Python client."
+ )
+ parser.add_argument(
+ "-c", "--client-dir", type=Path, required=True, help="Path to the client source package"
+ )
+ parser.add_argument(
+ "-o",
+ "--output-dir",
+ type=Path,
+ required=True,
+ help="Output directory for generated doc stubs",
+ )
+
+ args = parser.parse_args()
+ client_dir = args.client_dir.resolve()
+ output_dir = args.output_dir.resolve()
+
+ if not client_dir.exists():
+ logger.error(f"Client directory not found: {client_dir}")
+ return 1
+
+ try:
+ generator = DocStubGenerator(client_dir, output_dir)
+ success = generator.generate()
+ return 0 if success else 1
+ except Exception as e:
+ logger.error(f"Unexpected error generating doc stubs: {e}", exc_info=True)
+ return 1
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/Makefile b/clients/aws-sdk-sagemaker-runtime-http2/Makefile
new file mode 100644
index 0000000..44f9df9
--- /dev/null
+++ b/clients/aws-sdk-sagemaker-runtime-http2/Makefile
@@ -0,0 +1,25 @@
+DOCS_PORT ?= 8000
+CLIENT_DIR := src/aws_sdk_sagemaker_runtime_http2
+DOCS_OUTPUT_DIR := docs/client
+PYTHON_VERSION := 3.12
+
+.PHONY: docs docs-generate docs-serve docs-clean docs-install venv
+
+venv:
+ uv venv --python $(PYTHON_VERSION)
+
+docs-install: venv
+ uv sync --group docs
+
+docs-clean:
+ rm -rf site $(DOCS_OUTPUT_DIR)
+
+docs-generate:
+ uv run python scripts/docs/generate_doc_stubs.py -c $(CLIENT_DIR) -o $(DOCS_OUTPUT_DIR)
+
+docs: docs-generate
+ uv run mkdocs build
+
+docs-serve:
+ @[ -d site ] || $(MAKE) docs
+ uv run python -m http.server $(DOCS_PORT) --bind 127.0.0.1 --directory site
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/README.md b/clients/aws-sdk-sagemaker-runtime-http2/README.md
index fa1cda1..8d7c485 100644
--- a/clients/aws-sdk-sagemaker-runtime-http2/README.md
+++ b/clients/aws-sdk-sagemaker-runtime-http2/README.md
@@ -9,6 +9,6 @@ Changes may result in breaking changes prior to the release of version
Documentation is available in the `/docs` directory of this package.
Pages can be built into portable HTML files for the time being. You can
-follow the instructions in the docs [README.md](https://github.com/awslabs/aws-sdk-python/blob/main/clients/aws-sdk-sagemaker-runtime-http/docs/README.md).
+follow the instructions in the docs [README.md](https://github.com/awslabs/aws-sdk-python/blob/main/clients/aws-sdk-sagemaker-runtime-http2/docs/README.md).
For high-level documentation, you can view the [`dev-guide`](https://github.com/awslabs/aws-sdk-python/tree/main/dev-guide) at the top level of this repo.
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/README.md b/clients/aws-sdk-sagemaker-runtime-http2/docs/README.md
new file mode 100644
index 0000000..c25ff76
--- /dev/null
+++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/README.md
@@ -0,0 +1,18 @@
+## Generating Client Documentation
+
+Material for MkDocs is used for documentation. You can generate the documentation HTML
+for this client locally with the following:
+
+```bash
+# Install documentation dependencies
+make docs-install
+
+# Serve documentation locally
+make docs-serve
+
+# OR build static HTML documentation
+make docs
+
+# Clean docs artifacts
+make docs-clean
+```
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/hooks/copyright.py b/clients/aws-sdk-sagemaker-runtime-http2/docs/hooks/copyright.py
new file mode 100644
index 0000000..1260def
--- /dev/null
+++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/hooks/copyright.py
@@ -0,0 +1,6 @@
+from datetime import datetime
+
+
+def on_config(config, **kwargs):
+ config.copyright = f"Copyright © {datetime.now().year}, Amazon Web Services, Inc"
+ return config
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/index.md b/clients/aws-sdk-sagemaker-runtime-http2/docs/index.md
new file mode 100644
index 0000000..612c7a5
--- /dev/null
+++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/index.md
@@ -0,0 +1 @@
+--8<-- "README.md"
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/stylesheets/extra.css b/clients/aws-sdk-sagemaker-runtime-http2/docs/stylesheets/extra.css
new file mode 100644
index 0000000..21d1b09
--- /dev/null
+++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/stylesheets/extra.css
@@ -0,0 +1,9 @@
+/* Custom breadcrumb styling */
+.breadcrumb {
+ font-size: 0.85em;
+ color: var(--md-default-fg-color--light);
+}
+
+p:has(span.breadcrumb) {
+margin-top: 0;
+}
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/mkdocs.yml b/clients/aws-sdk-sagemaker-runtime-http2/mkdocs.yml
new file mode 100644
index 0000000..aeb4186
--- /dev/null
+++ b/clients/aws-sdk-sagemaker-runtime-http2/mkdocs.yml
@@ -0,0 +1,96 @@
+site_name: AWS SDK for Python - Sagemaker Runtime Http2
+site_description: Documentation for AWS Sagemaker Runtime Http2 Client
+
+repo_name: awslabs/aws-sdk-python
+repo_url: https://github.com/awslabs/aws-sdk-python
+
+exclude_docs: |
+ README.md
+
+hooks:
+ - docs/hooks/copyright.py
+
+theme:
+ name: material
+ favicon: ""
+ palette:
+ # Palette toggle for automatic mode
+ - media: "(prefers-color-scheme)"
+ scheme: default
+ toggle:
+ icon: material/brightness-auto
+ name: Switch to light mode
+ primary: white
+ # Palette toggle for light mode
+ - media: "(prefers-color-scheme: light)"
+ scheme: default
+ toggle:
+ icon: material/brightness-7
+ name: Switch to dark mode
+ primary: white
+ # Palette toggle for dark mode
+ - media: "(prefers-color-scheme: dark)"
+ scheme: slate
+ toggle:
+ icon: material/brightness-4
+ name: Switch to system preference
+ primary: black
+ features:
+ - navigation.indexes
+ - navigation.instant
+ - navigation.top
+ - search.suggest
+ - search.highlight
+ - content.code.copy
+
+plugins:
+- search
+- mkdocstrings:
+ handlers:
+ python:
+ options:
+ show_source: false
+ show_signature: true
+ show_signature_annotations: true
+ show_root_heading: true
+ show_root_full_path: false
+ show_object_full_path: false
+ show_symbol_type_heading: true
+ show_symbol_type_toc: true
+ show_if_no_docstring: true
+ show_category_heading: true
+ group_by_category: true
+ separate_signature: true
+ signature_crossrefs: true
+ filters:
+ - "!^_"
+ - "!^deserialize"
+ - "!^serialize"
+
+markdown_extensions:
+ - pymdownx.highlight
+ - pymdownx.inlinehilite
+ - pymdownx.snippets:
+ check_paths: true
+ - pymdownx.superfences
+ - admonition
+ - def_list
+ - toc:
+ permalink: true
+ toc_depth: 3
+
+nav:
+ - Overview: index.md
+ - Client: client/index.md
+
+extra:
+ social:
+ - icon: fontawesome/brands/github
+ link: https://github.com/awslabs/aws-sdk-python
+
+extra_css:
+ - stylesheets/extra.css
+
+validation:
+ nav:
+ omitted_files: ignore
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/pyproject.toml b/clients/aws-sdk-sagemaker-runtime-http2/pyproject.toml
index 3b1e6e4..f0ba4c0 100644
--- a/clients/aws-sdk-sagemaker-runtime-http2/pyproject.toml
+++ b/clients/aws-sdk-sagemaker-runtime-http2/pyproject.toml
@@ -33,6 +33,11 @@ test = [
"pytest>=7.2.0,<8.0.0",
"pytest-asyncio>=0.20.3,<0.21.0"
]
+docs = [
+ "mkdocs~=1.6.1",
+ "mkdocs-material==9.7.0",
+ "mkdocstrings[python]==1.0.0"
+]
[build-system]
requires = ["hatchling"]
@@ -57,5 +62,4 @@ ignore = ["F841"]
skip-magic-trailing-comma = true
[tool.pytest.ini_options]
-python_classes = ["!Test"]
-asyncio_mode = "auto"
+# python_classes = ["!Test"]
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/scripts/docs/generate_doc_stubs.py b/clients/aws-sdk-sagemaker-runtime-http2/scripts/docs/generate_doc_stubs.py
new file mode 100644
index 0000000..056708a
--- /dev/null
+++ b/clients/aws-sdk-sagemaker-runtime-http2/scripts/docs/generate_doc_stubs.py
@@ -0,0 +1,607 @@
+"""
+Generate markdown API Reference stubs for AWS SDK for Python clients.
+
+This script generates MkDocs markdown stub files for a single client package.
+It uses griffe to analyze the Python source and outputs mkdocstrings directives
+for the client, operations, models (structures, unions, enums), and errors.
+"""
+
+import argparse
+import logging
+import sys
+from collections.abc import Sequence
+from dataclasses import dataclass
+from enum import Enum
+from pathlib import Path
+from typing import TypeGuard
+
+import griffe
+from griffe import (
+ Alias,
+ Attribute,
+ Class,
+ Expr,
+ ExprBinOp,
+ ExprName,
+ ExprSubscript,
+ ExprTuple,
+ Function,
+ Module,
+ Object,
+)
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger("generate_doc_stubs")
+
+
+class StreamType(Enum):
+ """Type of event stream for operations."""
+
+ INPUT = "InputEventStream"
+ OUTPUT = "OutputEventStream"
+ DUPLEX = "DuplexEventStream"
+
+ @property
+ def description(self) -> str:
+ """Return a string description for documentation."""
+ descriptions = {
+ StreamType.INPUT: "an `InputEventStream` for client-to-server streaming",
+ StreamType.OUTPUT: "an `OutputEventStream` for server-to-client streaming",
+ StreamType.DUPLEX: "a `DuplexEventStream` for bidirectional streaming",
+ }
+ return descriptions[self]
+
+
+@dataclass(frozen=True)
+class TypeInfo:
+ """Information about a type (structure, enum, error, config, plugin)."""
+
+ name: str # e.g., "ConverseOperationOutput"
+ module_path: str # e.g., "aws_sdk_bedrock_runtime.models.ConverseOperationOutput"
+
+
+@dataclass
+class UnionInfo:
+ """Information about a union type."""
+
+ name: str
+ module_path: str
+ members: list[TypeInfo]
+
+
+@dataclass
+class OperationInfo:
+ """Information about a client operation."""
+
+ name: str
+ module_path: str
+ input: TypeInfo
+ output: TypeInfo
+ stream_type: StreamType | None
+ event_input_type: str | None # For input/duplex streams
+ event_output_type: str | None # For output/duplex streams
+
+
+@dataclass
+class ModelsInfo:
+ """Information about all modeled types."""
+
+ structures: list[TypeInfo]
+ unions: list[UnionInfo]
+ enums: list[TypeInfo]
+ errors: list[TypeInfo]
+
+
+@dataclass
+class ClientInfo:
+ """Complete information about a client package."""
+
+ name: str # e.g., "BedrockRuntimeClient"
+ module_path: str # e.g., "aws_sdk_bedrock_runtime.client.BedrockRuntimeClient"
+ package_name: str # e.g., "aws_sdk_bedrock_runtime"
+ config: TypeInfo
+ plugin: TypeInfo
+ operations: list[OperationInfo]
+ models: ModelsInfo
+
+
+class DocStubGenerator:
+ """Generate markdown API Reference stubs for AWS SDK for Python clients."""
+
+ def __init__(self, client_dir: Path, output_dir: Path) -> None:
+ """
+ Initialize the documentation generator.
+
+ Args:
+ client_dir: Path to the client source directory
+ output_dir: Path to the output directory for generated doc stubs
+ """
+ self.client_dir = client_dir
+ self.output_dir = output_dir
+ # Extract service name from package name
+ # (e.g., "aws_sdk_bedrock_runtime" -> "Bedrock Runtime")
+ self.service_name = client_dir.name.replace("aws_sdk_", "").replace("_", " ").title()
+
+ def generate(self) -> bool:
+ """
+ Generate the documentation stubs to the output directory.
+
+ Returns:
+ True if documentation was generated successfully, False otherwise.
+ """
+ logger.info(f"Generating doc stubs for {self.service_name}...")
+
+ package_name = self.client_dir.name
+ client_info = self._analyze_client_package(package_name)
+ if not self._generate_client_docs(client_info):
+ return False
+
+ logger.info(f"Finished generating doc stubs for {self.service_name}")
+ return True
+
+ def _analyze_client_package(self, package_name: str) -> ClientInfo:
+ """Analyze a client package using griffe."""
+ logger.info(f"Analyzing package: {package_name}")
+ package = griffe.load(package_name)
+
+ # Ensure required modules exist
+ required = ["client", "config", "models"]
+ missing = [name for name in required if not package.modules.get(name)]
+ if missing:
+ raise ValueError(f"Missing required modules in {package_name}: {', '.join(missing)}")
+
+ # Parse submodules
+ client_module = package.modules["client"]
+ config_module = package.modules["config"]
+ models_module = package.modules["models"]
+
+ client_class = self._find_class_with_suffix(client_module, "Client")
+ if not client_class:
+ raise ValueError(f"No class ending with 'Client' found in {package_name}.client")
+
+ config_class = config_module.members.get("Config")
+ plugin_alias = config_module.members.get("Plugin")
+ if not config_class or not plugin_alias:
+ raise ValueError(f"Missing Config or Plugin in {package_name}.config")
+
+ config = TypeInfo(name=config_class.name, module_path=config_class.path)
+ plugin = TypeInfo(name=plugin_alias.name, module_path=plugin_alias.path)
+
+ operations = self._extract_operations(client_class)
+ models = self._extract_models(models_module, operations)
+
+ logger.info(
+ f"Analyzed {client_class.name}: {len(operations)} operations, "
+ f"{len(models.structures)} structures, {len(models.errors)} errors, "
+ f"{len(models.unions)} unions, {len(models.enums)} enums"
+ )
+
+ return ClientInfo(
+ name=client_class.name,
+ module_path=client_class.path,
+ package_name=package_name,
+ config=config,
+ plugin=plugin,
+ operations=operations,
+ models=models,
+ )
+
+ def _find_class_with_suffix(self, module: Module, suffix: str) -> Class | None:
+ """Find the class in the module with a matching suffix."""
+ for cls in module.classes.values():
+ if cls.name.endswith(suffix):
+ return cls
+ return None
+
+ def _extract_operations(self, client_class: Class) -> list[OperationInfo]:
+ """Extract operation information from client class."""
+ operations = []
+ for op in client_class.functions.values():
+ if op.is_private or op.is_init_method:
+ continue
+ operations.append(self._analyze_operation(op))
+ return operations
+
+ def _analyze_operation(self, operation: Function) -> OperationInfo:
+ """Analyze an operation method to extract information."""
+ stream_type = None
+ event_input_type = None
+ event_output_type = None
+
+ input_param = operation.parameters["input"]
+ input_annotation = self._get_expr(
+ input_param.annotation, f"'{operation.name}' input annotation"
+ )
+ input_info = TypeInfo(
+ name=input_annotation.canonical_name,
+ module_path=input_annotation.canonical_path,
+ )
+
+ returns = self._get_expr(operation.returns, f"'{operation.name}' return type")
+ output_type = returns.canonical_name
+ stream_type_map = {s.value: s for s in StreamType}
+
+ if output_type in stream_type_map:
+ stream_type = stream_type_map[output_type]
+ stream_args = self._get_subscript_elements(returns, f"'{operation.name}' stream type")
+
+ if stream_type in (StreamType.INPUT, StreamType.DUPLEX):
+ event_input_type = stream_args[0].canonical_name
+ if stream_type in (StreamType.OUTPUT, StreamType.DUPLEX):
+ idx = 1 if stream_type == StreamType.DUPLEX else 0
+ event_output_type = stream_args[idx].canonical_name
+
+ output_info = TypeInfo(
+ name=stream_args[-1].canonical_name, module_path=stream_args[-1].canonical_path
+ )
+ else:
+ output_info = TypeInfo(name=output_type, module_path=returns.canonical_path)
+
+ return OperationInfo(
+ name=operation.name,
+ module_path=operation.path,
+ input=input_info,
+ output=output_info,
+ stream_type=stream_type,
+ event_input_type=event_input_type,
+ event_output_type=event_output_type,
+ )
+
+ def _get_expr(self, annotation: str | Expr | None, context: str) -> Expr:
+ """Extract and validate an Expr from an annotation."""
+ if not isinstance(annotation, Expr):
+ raise TypeError(f"{context}: expected Expr, got {type(annotation).__name__}")
+ return annotation
+
+ def _get_subscript_elements(self, expr: Expr, context: str) -> list[Expr]:
+ """Extract type arguments from a subscript expression like Generic[A, B, C]."""
+ if not isinstance(expr, ExprSubscript):
+ raise TypeError(f"{context}: expected subscript, got {type(expr).__name__}")
+ slice_expr = expr.slice
+ if isinstance(slice_expr, str):
+ raise TypeError(f"{context}: unexpected string slice '{slice_expr}'")
+ if isinstance(slice_expr, ExprTuple):
+ return [el for el in slice_expr.elements if isinstance(el, Expr)]
+ return [slice_expr]
+
+ def _extract_models(self, models_module: Module, operations: list[OperationInfo]) -> ModelsInfo:
+ """Extract structures, unions, enums, and errors from models module."""
+ structures, unions, enums, errors = [], [], [], []
+
+ for member in models_module.members.values():
+ # Skip imported and private members
+ if member.is_imported or member.is_private:
+ continue
+
+ if self._is_union(member):
+ unions.append(
+ UnionInfo(
+ name=member.name,
+ module_path=member.path,
+ members=self._extract_union_members(member, models_module),
+ )
+ )
+ elif self._is_enum(member):
+ enums.append(TypeInfo(name=member.name, module_path=member.path))
+ elif self._is_error(member):
+ errors.append(TypeInfo(name=member.name, module_path=member.path))
+ elif member.is_class:
+ structures.append(TypeInfo(name=member.name, module_path=member.path))
+
+ duplicates = set()
+ for structure in structures:
+ if self._is_operation_io_type(structure.name, operations) or self._is_union_member(
+ structure.name, unions
+ ):
+ duplicates.add(structure)
+
+ structures = [struct for struct in structures if struct not in duplicates]
+
+ return ModelsInfo(structures=structures, unions=unions, enums=enums, errors=errors)
+
+ def _is_union(self, member: Object | Alias) -> TypeGuard[Attribute]:
+ """Check if a module member is a union type."""
+ if not isinstance(member, Attribute):
+ return False
+
+ value = member.value
+ # Check for Union[...] syntax
+ if isinstance(value, ExprSubscript):
+ left = value.left
+ if isinstance(left, ExprName) and left.name == "Union":
+ return True
+
+ # Check for PEP 604 (X | Y) syntax
+ if isinstance(value, ExprBinOp):
+ return True
+
+ return False
+
+ def _extract_union_members(
+ self, union_attr: Attribute, models_module: Module
+ ) -> list[TypeInfo]:
+ """Extract member types from a union."""
+ members = []
+ value_str = str(union_attr.value)
+
+ # Clean up value_str for Union[X | Y | Z] syntax
+ if value_str.startswith("Union[") and value_str.endswith("]"):
+ value_str = value_str.removeprefix("Union[").removesuffix("]")
+
+ member_names = [member.strip() for member in value_str.split("|")]
+
+ for name in member_names:
+ if not (member_object := models_module.members.get(name)):
+ raise ValueError(f"Union member '{name}' not found in models module")
+ members.append(TypeInfo(name=member_object.name, module_path=member_object.path))
+
+ return members
+
+ def _is_enum(self, member: Object | Alias) -> TypeGuard[Class]:
+ """Check if a module member is an enum."""
+ if not isinstance(member, Class):
+ return False
+ return any(
+ isinstance(base, ExprName) and base.name in ("StrEnum", "IntEnum")
+ for base in member.bases
+ )
+
+ def _is_error(self, member: Object | Alias) -> TypeGuard[Class]:
+ """Check if a module member is an error."""
+ if not isinstance(member, Class):
+ return False
+ return any(
+ isinstance(base, ExprName) and base.name in ("ServiceError", "ModeledError")
+ for base in member.bases
+ )
+
+ def _is_operation_io_type(self, type_name: str, operations: list[OperationInfo]) -> bool:
+ """Check if a type is used as operation input/output."""
+ return any(type_name in (op.input.name, op.output.name) for op in operations)
+
+ def _is_union_member(self, type_name: str, unions: list[UnionInfo]) -> bool:
+ """Check if a type is used as union member."""
+ return any(type_name == m.name for u in unions for m in u.members)
+
+ def _generate_client_docs(self, client_info: ClientInfo) -> bool:
+ """Generate all documentation files for a client."""
+ logger.info(f"Writing doc stubs to {self.output_dir}...")
+
+ try:
+ self._generate_index(client_info)
+ self._generate_operation_stubs(client_info.operations)
+ self._generate_type_stubs(
+ client_info.models.structures, "structures", "Structure Class"
+ )
+ self._generate_type_stubs(client_info.models.errors, "errors", "Error Class")
+ self._generate_type_stubs(
+ client_info.models.enums, "enums", "Enum Class", ["members: true"]
+ )
+ self._generate_union_stubs(client_info.models.unions)
+ except OSError as e:
+ logger.error(f"Failed to write documentation files: {e}")
+ return False
+ return True
+
+ def _generate_index(self, client_info: ClientInfo) -> None:
+ """Generate the main index.md file."""
+ lines = []
+ lines.append(f"# {self.service_name}")
+ lines.append("")
+ lines.append("## Client")
+ lines.append("")
+ lines.append(f"::: {client_info.module_path}")
+ lines.append(" options:")
+ lines.append(" members: false")
+ lines.append(" heading_level: 3")
+ lines.append(" merge_init_into_class: true")
+ lines.append(" docstring_options:")
+ lines.append(" ignore_init_summary: true")
+ lines.append("")
+
+ # Operations section
+ if client_info.operations:
+ lines.append("## Operations")
+ lines.append("")
+ for op in sorted(client_info.operations, key=lambda x: x.name):
+ lines.append(f"- [`{op.name}`](operations/{op.name}.md)")
+ lines.append("")
+
+ # Configuration section
+ lines.append("## Configuration")
+ lines.append("")
+ lines.append(f"::: {client_info.config.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append(" merge_init_into_class: true")
+ lines.append(" docstring_options:")
+ lines.append(" ignore_init_summary: true")
+ lines.append("")
+ lines.append(f"::: {client_info.plugin.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+
+ models = client_info.models
+
+ # Model sections
+ sections: list[tuple[str, str, Sequence[TypeInfo | UnionInfo]]] = [
+ ("Structures", "structures", models.structures),
+ ("Errors", "errors", models.errors),
+ ("Unions", "unions", models.unions),
+ ("Enums", "enums", models.enums),
+ ]
+ for title, folder, items in sections:
+ if items:
+ lines.append("")
+ lines.append(f"## {title}")
+ lines.append("")
+ for item in sorted(items, key=lambda x: x.name):
+ lines.append(f"- [`{item.name}`]({folder}/{item.name}.md)")
+
+ output_path = self.output_dir / "index.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ content = "\n".join(lines)
+ output_path.write_text(content)
+
+ logger.info("Wrote client index file!")
+
+ def _generate_operation_stubs(self, operations: list[OperationInfo]) -> None:
+ """Generate operation documentation files."""
+ for op in operations:
+ lines = []
+ lines.append(f"# {op.name}")
+ lines.append("")
+
+ # Operation section
+ lines.append("## Operation")
+ lines.append("")
+ lines.append(f"::: {op.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append("")
+
+ # Input section
+ lines.append("## Input")
+ lines.append("")
+ lines.append(f"::: {op.input.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append("")
+
+ # Output section - handle all stream types
+ lines.append("## Output")
+ lines.append("")
+
+ if op.stream_type:
+ lines.append(f"This operation returns {op.stream_type.description}.")
+ lines.append("")
+ lines.append("### Event Stream Structure")
+ lines.append("")
+
+ if op.event_input_type:
+ lines.append("#### Input Event Type")
+ lines.append("")
+ lines.append(f"[`{op.event_input_type}`](../unions/{op.event_input_type}.md)")
+ lines.append("")
+ if op.event_output_type:
+ lines.append("#### Output Event Type")
+ lines.append("")
+ lines.append(f"[`{op.event_output_type}`](../unions/{op.event_output_type}.md)")
+ lines.append("")
+
+ lines.append("### Initial Response Structure")
+ lines.append("")
+ lines.append(f"::: {op.output.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 4")
+ else:
+ lines.append(f"::: {op.output.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+
+ output_path = self.output_dir / "operations" / f"{op.name}.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ output_path.write_text(self._breadcrumb("Operations", op.name) + "\n".join(lines))
+
+ logger.info(f"Wrote {len(operations)} operation files")
+
+ def _generate_type_stubs(
+ self,
+ items: list[TypeInfo],
+ category: str,
+ section_title: str,
+ extra_options: list[str] | None = None,
+ ) -> None:
+ """Generate documentation files for a category of types."""
+ for item in items:
+ lines = [
+ f"# {item.name}",
+ "",
+ f"## {section_title}",
+ f"::: {item.module_path}",
+ " options:",
+ " heading_level: 3",
+ ]
+ if extra_options:
+ lines.extend(f" {opt}" for opt in extra_options)
+
+ output_path = self.output_dir / category / f"{item.name}.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ output_path.write_text(self._breadcrumb(category.title(), item.name) + "\n".join(lines))
+
+ logger.info(f"Wrote {len(items)} {category} files")
+
+ def _generate_union_stubs(self, unions: list[UnionInfo]) -> None:
+ """Generate union documentation files."""
+ for union in unions:
+ lines = []
+ lines.append(f"# {union.name}")
+ lines.append("")
+ lines.append("## Union Type")
+ lines.append(f"::: {union.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append("")
+
+ # Add union members
+ if union.members:
+ lines.append("## Union Member Types")
+ for member in union.members:
+ lines.append("")
+ lines.append(f"::: {member.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+
+ output_path = self.output_dir / "unions" / f"{union.name}.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ output_path.write_text(self._breadcrumb("Unions", union.name) + "\n".join(lines))
+
+ logger.info(f"Wrote {len(unions)} union files")
+
+ def _breadcrumb(self, category: str, name: str) -> str:
+ """Generate a breadcrumb navigation element."""
+ separator = " > "
+ home = f"[{self.service_name}](../index.md)"
+ section = f"[{category}](../index.md#{category.lower()})"
+ return f'{home}{separator}{section}{separator}{name}\n'
+
+
+def main() -> int:
+ """Main entry point for the single-client documentation generator."""
+ parser = argparse.ArgumentParser(
+ description="Generate API documentation stubs for AWS SDK Python client."
+ )
+ parser.add_argument(
+ "-c", "--client-dir", type=Path, required=True, help="Path to the client source package"
+ )
+ parser.add_argument(
+ "-o",
+ "--output-dir",
+ type=Path,
+ required=True,
+ help="Output directory for generated doc stubs",
+ )
+
+ args = parser.parse_args()
+ client_dir = args.client_dir.resolve()
+ output_dir = args.output_dir.resolve()
+
+ if not client_dir.exists():
+ logger.error(f"Client directory not found: {client_dir}")
+ return 1
+
+ try:
+ generator = DocStubGenerator(client_dir, output_dir)
+ success = generator.generate()
+ return 0 if success else 1
+ except Exception as e:
+ logger.error(f"Unexpected error generating doc stubs: {e}", exc_info=True)
+ return 1
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/clients/aws-sdk-transcribe-streaming/Makefile b/clients/aws-sdk-transcribe-streaming/Makefile
new file mode 100644
index 0000000..016d87e
--- /dev/null
+++ b/clients/aws-sdk-transcribe-streaming/Makefile
@@ -0,0 +1,25 @@
+DOCS_PORT ?= 8000
+CLIENT_DIR := src/aws_sdk_transcribe_streaming
+DOCS_OUTPUT_DIR := docs/client
+PYTHON_VERSION := 3.12
+
+# docs-generate is a command, not a file: it must be .PHONY too
+.PHONY: docs docs-serve docs-clean docs-generate docs-install venv
+
+venv:
+	uv venv --python $(PYTHON_VERSION)
+
+docs-install: venv
+	uv sync --group docs
+
+docs-clean:
+	rm -rf site $(DOCS_OUTPUT_DIR)
+
+docs-generate:
+	uv run python scripts/docs/generate_doc_stubs.py -c $(CLIENT_DIR) -o $(DOCS_OUTPUT_DIR)
+
+docs: docs-generate
+	uv run mkdocs build
+
+docs-serve:
+	@[ -d site ] || $(MAKE) docs
+	uv run python -m http.server $(DOCS_PORT) --bind 127.0.0.1 --directory site
diff --git a/clients/aws-sdk-transcribe-streaming/docs/README.md b/clients/aws-sdk-transcribe-streaming/docs/README.md
new file mode 100644
index 0000000..c25ff76
--- /dev/null
+++ b/clients/aws-sdk-transcribe-streaming/docs/README.md
@@ -0,0 +1,18 @@
+## Generating Client Documentation
+
+Material for MkDocs is used for documentation. You can generate the documentation HTML
+for this client locally with the following:
+
+```bash
+# Install documentation dependencies
+make docs-install
+
+# Serve documentation locally
+make docs-serve
+
+# OR build static HTML documentation
+make docs
+
+# Clean docs artifacts
+make docs-clean
+```
diff --git a/clients/aws-sdk-transcribe-streaming/docs/hooks/copyright.py b/clients/aws-sdk-transcribe-streaming/docs/hooks/copyright.py
new file mode 100644
index 0000000..1260def
--- /dev/null
+++ b/clients/aws-sdk-transcribe-streaming/docs/hooks/copyright.py
@@ -0,0 +1,6 @@
+from datetime import datetime
+
+
+def on_config(config, **kwargs):
+ config.copyright = f"Copyright © {datetime.now().year}, Amazon Web Services, Inc"
+ return config
diff --git a/clients/aws-sdk-transcribe-streaming/docs/index.md b/clients/aws-sdk-transcribe-streaming/docs/index.md
new file mode 100644
index 0000000..612c7a5
--- /dev/null
+++ b/clients/aws-sdk-transcribe-streaming/docs/index.md
@@ -0,0 +1 @@
+--8<-- "README.md"
diff --git a/clients/aws-sdk-transcribe-streaming/docs/stylesheets/extra.css b/clients/aws-sdk-transcribe-streaming/docs/stylesheets/extra.css
new file mode 100644
index 0000000..21d1b09
--- /dev/null
+++ b/clients/aws-sdk-transcribe-streaming/docs/stylesheets/extra.css
@@ -0,0 +1,9 @@
+/* Custom breadcrumb styling */
+.breadcrumb {
+ font-size: 0.85em;
+ color: var(--md-default-fg-color--light);
+}
+
+p:has(span.breadcrumb) {
+margin-top: 0;
+}
diff --git a/clients/aws-sdk-transcribe-streaming/mkdocs.yml b/clients/aws-sdk-transcribe-streaming/mkdocs.yml
new file mode 100644
index 0000000..3787400
--- /dev/null
+++ b/clients/aws-sdk-transcribe-streaming/mkdocs.yml
@@ -0,0 +1,96 @@
+site_name: AWS SDK for Python - Transcribe Streaming
+site_description: Documentation for AWS Transcribe Streaming Client
+
+repo_name: awslabs/aws-sdk-python
+repo_url: https://github.com/awslabs/aws-sdk-python
+
+exclude_docs: |
+ README.md
+
+hooks:
+ - docs/hooks/copyright.py
+
+theme:
+ name: material
+ favicon: ""
+ palette:
+ # Palette toggle for automatic mode
+ - media: "(prefers-color-scheme)"
+ scheme: default
+ toggle:
+ icon: material/brightness-auto
+ name: Switch to light mode
+ primary: white
+ # Palette toggle for light mode
+ - media: "(prefers-color-scheme: light)"
+ scheme: default
+ toggle:
+ icon: material/brightness-7
+ name: Switch to dark mode
+ primary: white
+ # Palette toggle for dark mode
+ - media: "(prefers-color-scheme: dark)"
+ scheme: slate
+ toggle:
+ icon: material/brightness-4
+ name: Switch to system preference
+ primary: black
+ features:
+ - navigation.indexes
+ - navigation.instant
+ - navigation.top
+ - search.suggest
+ - search.highlight
+ - content.code.copy
+
+plugins:
+- search
+- mkdocstrings:
+ handlers:
+ python:
+ options:
+ show_source: false
+ show_signature: true
+ show_signature_annotations: true
+ show_root_heading: true
+ show_root_full_path: false
+ show_object_full_path: false
+ show_symbol_type_heading: true
+ show_symbol_type_toc: true
+ show_if_no_docstring: true
+ show_category_heading: true
+ group_by_category: true
+ separate_signature: true
+ signature_crossrefs: true
+ filters:
+ - "!^_"
+ - "!^deserialize"
+ - "!^serialize"
+
+markdown_extensions:
+ - pymdownx.highlight
+ - pymdownx.inlinehilite
+ - pymdownx.snippets:
+ check_paths: true
+ - pymdownx.superfences
+ - admonition
+ - def_list
+ - toc:
+ permalink: true
+ toc_depth: 3
+
+nav:
+ - Overview: index.md
+ - Client: client/index.md
+
+extra:
+ social:
+ - icon: fontawesome/brands/github
+ link: https://github.com/awslabs/aws-sdk-python
+
+extra_css:
+ - stylesheets/extra.css
+
+validation:
+ nav:
+ omitted_files: ignore
diff --git a/clients/aws-sdk-transcribe-streaming/pyproject.toml b/clients/aws-sdk-transcribe-streaming/pyproject.toml
index ed871d6..71b62f6 100644
--- a/clients/aws-sdk-transcribe-streaming/pyproject.toml
+++ b/clients/aws-sdk-transcribe-streaming/pyproject.toml
@@ -34,6 +34,12 @@ test = [
"pytest-asyncio>=0.20.3,<0.21.0"
]
+docs = [
+ "mkdocs~=1.6.1",
+ "mkdocs-material==9.7.0",
+ "mkdocstrings[python]==1.0.0"
+]
+
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
diff --git a/clients/aws-sdk-transcribe-streaming/scripts/docs/generate_doc_stubs.py b/clients/aws-sdk-transcribe-streaming/scripts/docs/generate_doc_stubs.py
new file mode 100644
index 0000000..056708a
--- /dev/null
+++ b/clients/aws-sdk-transcribe-streaming/scripts/docs/generate_doc_stubs.py
@@ -0,0 +1,607 @@
+"""
+Generate markdown API Reference stubs for AWS SDK for Python clients.
+
+This script generates MkDocs markdown stub files for a single client package.
+It uses griffe to analyze the Python source and outputs mkdocstrings directives
+for the client, operations, models (structures, unions, enums), and errors.
+"""
+
+import argparse
+import logging
+import sys
+from collections.abc import Sequence
+from dataclasses import dataclass
+from enum import Enum
+from pathlib import Path
+from typing import TypeGuard
+
+import griffe
+from griffe import (
+ Alias,
+ Attribute,
+ Class,
+ Expr,
+ ExprBinOp,
+ ExprName,
+ ExprSubscript,
+ ExprTuple,
+ Function,
+ Module,
+ Object,
+)
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger("generate_doc_stubs")
+
+
+class StreamType(Enum):
+ """Type of event stream for operations."""
+
+ INPUT = "InputEventStream"
+ OUTPUT = "OutputEventStream"
+ DUPLEX = "DuplexEventStream"
+
+ @property
+ def description(self) -> str:
+ """Return a string description for documentation."""
+ descriptions = {
+ StreamType.INPUT: "an `InputEventStream` for client-to-server streaming",
+ StreamType.OUTPUT: "an `OutputEventStream` for server-to-client streaming",
+ StreamType.DUPLEX: "a `DuplexEventStream` for bidirectional streaming",
+ }
+ return descriptions[self]
+
+
+@dataclass(frozen=True)
+class TypeInfo:
+ """Information about a type (structure, enum, error, config, plugin)."""
+
+ name: str # e.g., "ConverseOperationOutput"
+ module_path: str # e.g., "aws_sdk_bedrock_runtime.models.ConverseOperationOutput"
+
+
+@dataclass
+class UnionInfo:
+ """Information about a union type."""
+
+ name: str
+ module_path: str
+ members: list[TypeInfo]
+
+
+@dataclass
+class OperationInfo:
+ """Information about a client operation."""
+
+ name: str
+ module_path: str
+ input: TypeInfo
+ output: TypeInfo
+ stream_type: StreamType | None
+ event_input_type: str | None # For input/duplex streams
+ event_output_type: str | None # For output/duplex streams
+
+
+@dataclass
+class ModelsInfo:
+ """Information about all modeled types."""
+
+ structures: list[TypeInfo]
+ unions: list[UnionInfo]
+ enums: list[TypeInfo]
+ errors: list[TypeInfo]
+
+
+@dataclass
+class ClientInfo:
+ """Complete information about a client package."""
+
+ name: str # e.g., "BedrockRuntimeClient"
+ module_path: str # e.g., "aws_sdk_bedrock_runtime.client.BedrockRuntimeClient"
+ package_name: str # e.g., "aws_sdk_bedrock_runtime"
+ config: TypeInfo
+ plugin: TypeInfo
+ operations: list[OperationInfo]
+ models: ModelsInfo
+
+
+class DocStubGenerator:
+ """Generate markdown API Reference stubs for AWS SDK for Python clients."""
+
+ def __init__(self, client_dir: Path, output_dir: Path) -> None:
+ """
+ Initialize the documentation generator.
+
+ Args:
+ client_dir: Path to the client source directory
+ output_dir: Path to the output directory for generated doc stubs
+ """
+ self.client_dir = client_dir
+ self.output_dir = output_dir
+ # Extract service name from package name
+ # (e.g., "aws_sdk_bedrock_runtime" -> "Bedrock Runtime")
+ self.service_name = client_dir.name.replace("aws_sdk_", "").replace("_", " ").title()
+
+ def generate(self) -> bool:
+ """
+ Generate the documentation stubs to the output directory.
+
+ Returns:
+ True if documentation was generated successfully, False otherwise.
+ """
+ logger.info(f"Generating doc stubs for {self.service_name}...")
+
+ package_name = self.client_dir.name
+ client_info = self._analyze_client_package(package_name)
+ if not self._generate_client_docs(client_info):
+ return False
+
+ logger.info(f"Finished generating doc stubs for {self.service_name}")
+ return True
+
+ def _analyze_client_package(self, package_name: str) -> ClientInfo:
+ """Analyze a client package using griffe."""
+ logger.info(f"Analyzing package: {package_name}")
+ package = griffe.load(package_name)
+
+ # Ensure required modules exist
+ required = ["client", "config", "models"]
+ missing = [name for name in required if not package.modules.get(name)]
+ if missing:
+ raise ValueError(f"Missing required modules in {package_name}: {', '.join(missing)}")
+
+ # Parse submodules
+ client_module = package.modules["client"]
+ config_module = package.modules["config"]
+ models_module = package.modules["models"]
+
+ client_class = self._find_class_with_suffix(client_module, "Client")
+ if not client_class:
+ raise ValueError(f"No class ending with 'Client' found in {package_name}.client")
+
+ config_class = config_module.members.get("Config")
+ plugin_alias = config_module.members.get("Plugin")
+ if not config_class or not plugin_alias:
+ raise ValueError(f"Missing Config or Plugin in {package_name}.config")
+
+ config = TypeInfo(name=config_class.name, module_path=config_class.path)
+ plugin = TypeInfo(name=plugin_alias.name, module_path=plugin_alias.path)
+
+ operations = self._extract_operations(client_class)
+ models = self._extract_models(models_module, operations)
+
+ logger.info(
+ f"Analyzed {client_class.name}: {len(operations)} operations, "
+ f"{len(models.structures)} structures, {len(models.errors)} errors, "
+ f"{len(models.unions)} unions, {len(models.enums)} enums"
+ )
+
+ return ClientInfo(
+ name=client_class.name,
+ module_path=client_class.path,
+ package_name=package_name,
+ config=config,
+ plugin=plugin,
+ operations=operations,
+ models=models,
+ )
+
+ def _find_class_with_suffix(self, module: Module, suffix: str) -> Class | None:
+ """Find the class in the module with a matching suffix."""
+ for cls in module.classes.values():
+ if cls.name.endswith(suffix):
+ return cls
+ return None
+
+ def _extract_operations(self, client_class: Class) -> list[OperationInfo]:
+ """Extract operation information from client class."""
+ operations = []
+ for op in client_class.functions.values():
+ if op.is_private or op.is_init_method:
+ continue
+ operations.append(self._analyze_operation(op))
+ return operations
+
+ def _analyze_operation(self, operation: Function) -> OperationInfo:
+ """Analyze an operation method to extract information."""
+ stream_type = None
+ event_input_type = None
+ event_output_type = None
+
+ input_param = operation.parameters["input"]
+ input_annotation = self._get_expr(
+ input_param.annotation, f"'{operation.name}' input annotation"
+ )
+ input_info = TypeInfo(
+ name=input_annotation.canonical_name,
+ module_path=input_annotation.canonical_path,
+ )
+
+ returns = self._get_expr(operation.returns, f"'{operation.name}' return type")
+ output_type = returns.canonical_name
+ stream_type_map = {s.value: s for s in StreamType}
+
+ if output_type in stream_type_map:
+ stream_type = stream_type_map[output_type]
+ stream_args = self._get_subscript_elements(returns, f"'{operation.name}' stream type")
+
+ if stream_type in (StreamType.INPUT, StreamType.DUPLEX):
+ event_input_type = stream_args[0].canonical_name
+ if stream_type in (StreamType.OUTPUT, StreamType.DUPLEX):
+ idx = 1 if stream_type == StreamType.DUPLEX else 0
+ event_output_type = stream_args[idx].canonical_name
+
+ output_info = TypeInfo(
+ name=stream_args[-1].canonical_name, module_path=stream_args[-1].canonical_path
+ )
+ else:
+ output_info = TypeInfo(name=output_type, module_path=returns.canonical_path)
+
+ return OperationInfo(
+ name=operation.name,
+ module_path=operation.path,
+ input=input_info,
+ output=output_info,
+ stream_type=stream_type,
+ event_input_type=event_input_type,
+ event_output_type=event_output_type,
+ )
+
+ def _get_expr(self, annotation: str | Expr | None, context: str) -> Expr:
+ """Extract and validate an Expr from an annotation."""
+ if not isinstance(annotation, Expr):
+ raise TypeError(f"{context}: expected Expr, got {type(annotation).__name__}")
+ return annotation
+
+ def _get_subscript_elements(self, expr: Expr, context: str) -> list[Expr]:
+ """Extract type arguments from a subscript expression like Generic[A, B, C]."""
+ if not isinstance(expr, ExprSubscript):
+ raise TypeError(f"{context}: expected subscript, got {type(expr).__name__}")
+ slice_expr = expr.slice
+ if isinstance(slice_expr, str):
+ raise TypeError(f"{context}: unexpected string slice '{slice_expr}'")
+ if isinstance(slice_expr, ExprTuple):
+ return [el for el in slice_expr.elements if isinstance(el, Expr)]
+ return [slice_expr]
+
+ def _extract_models(self, models_module: Module, operations: list[OperationInfo]) -> ModelsInfo:
+ """Extract structures, unions, enums, and errors from models module."""
+ structures, unions, enums, errors = [], [], [], []
+
+ for member in models_module.members.values():
+ # Skip imported and private members
+ if member.is_imported or member.is_private:
+ continue
+
+ if self._is_union(member):
+ unions.append(
+ UnionInfo(
+ name=member.name,
+ module_path=member.path,
+ members=self._extract_union_members(member, models_module),
+ )
+ )
+ elif self._is_enum(member):
+ enums.append(TypeInfo(name=member.name, module_path=member.path))
+ elif self._is_error(member):
+ errors.append(TypeInfo(name=member.name, module_path=member.path))
+ elif member.is_class:
+ structures.append(TypeInfo(name=member.name, module_path=member.path))
+
+ duplicates = set()
+ for structure in structures:
+ if self._is_operation_io_type(structure.name, operations) or self._is_union_member(
+ structure.name, unions
+ ):
+ duplicates.add(structure)
+
+ structures = [struct for struct in structures if struct not in duplicates]
+
+ return ModelsInfo(structures=structures, unions=unions, enums=enums, errors=errors)
+
+ def _is_union(self, member: Object | Alias) -> TypeGuard[Attribute]:
+ """Check if a module member is a union type."""
+ if not isinstance(member, Attribute):
+ return False
+
+ value = member.value
+ # Check for Union[...] syntax
+ if isinstance(value, ExprSubscript):
+ left = value.left
+ if isinstance(left, ExprName) and left.name == "Union":
+ return True
+
+ # Check for PEP 604 (X | Y) syntax
+ if isinstance(value, ExprBinOp):
+ return True
+
+ return False
+
+ def _extract_union_members(
+ self, union_attr: Attribute, models_module: Module
+ ) -> list[TypeInfo]:
+ """Extract member types from a union."""
+ members = []
+ value_str = str(union_attr.value)
+
+ # Clean up value_str for Union[X | Y | Z] syntax
+ if value_str.startswith("Union[") and value_str.endswith("]"):
+ value_str = value_str.removeprefix("Union[").removesuffix("]")
+
+        member_names = [m.strip() for part in value_str.split(",") for m in part.split("|")]
+
+ for name in member_names:
+ if not (member_object := models_module.members.get(name)):
+ raise ValueError(f"Union member '{name}' not found in models module")
+ members.append(TypeInfo(name=member_object.name, module_path=member_object.path))
+
+ return members
+
+ def _is_enum(self, member: Object | Alias) -> TypeGuard[Class]:
+ """Check if a module member is an enum."""
+ if not isinstance(member, Class):
+ return False
+ return any(
+ isinstance(base, ExprName) and base.name in ("StrEnum", "IntEnum")
+ for base in member.bases
+ )
+
+ def _is_error(self, member: Object | Alias) -> TypeGuard[Class]:
+ """Check if a module member is an error."""
+ if not isinstance(member, Class):
+ return False
+ return any(
+ isinstance(base, ExprName) and base.name in ("ServiceError", "ModeledError")
+ for base in member.bases
+ )
+
+ def _is_operation_io_type(self, type_name: str, operations: list[OperationInfo]) -> bool:
+ """Check if a type is used as operation input/output."""
+ return any(type_name in (op.input.name, op.output.name) for op in operations)
+
+ def _is_union_member(self, type_name: str, unions: list[UnionInfo]) -> bool:
+ """Check if a type is used as union member."""
+ return any(type_name == m.name for u in unions for m in u.members)
+
+ def _generate_client_docs(self, client_info: ClientInfo) -> bool:
+ """Generate all documentation files for a client."""
+ logger.info(f"Writing doc stubs to {self.output_dir}...")
+
+ try:
+ self._generate_index(client_info)
+ self._generate_operation_stubs(client_info.operations)
+ self._generate_type_stubs(
+ client_info.models.structures, "structures", "Structure Class"
+ )
+ self._generate_type_stubs(client_info.models.errors, "errors", "Error Class")
+ self._generate_type_stubs(
+ client_info.models.enums, "enums", "Enum Class", ["members: true"]
+ )
+ self._generate_union_stubs(client_info.models.unions)
+ except OSError as e:
+ logger.error(f"Failed to write documentation files: {e}")
+ return False
+ return True
+
+ def _generate_index(self, client_info: ClientInfo) -> None:
+ """Generate the main index.md file."""
+ lines = []
+ lines.append(f"# {self.service_name}")
+ lines.append("")
+ lines.append("## Client")
+ lines.append("")
+ lines.append(f"::: {client_info.module_path}")
+ lines.append(" options:")
+ lines.append(" members: false")
+ lines.append(" heading_level: 3")
+ lines.append(" merge_init_into_class: true")
+ lines.append(" docstring_options:")
+ lines.append(" ignore_init_summary: true")
+ lines.append("")
+
+ # Operations section
+ if client_info.operations:
+ lines.append("## Operations")
+ lines.append("")
+ for op in sorted(client_info.operations, key=lambda x: x.name):
+ lines.append(f"- [`{op.name}`](operations/{op.name}.md)")
+ lines.append("")
+
+ # Configuration section
+ lines.append("## Configuration")
+ lines.append("")
+ lines.append(f"::: {client_info.config.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append(" merge_init_into_class: true")
+ lines.append(" docstring_options:")
+ lines.append(" ignore_init_summary: true")
+ lines.append("")
+ lines.append(f"::: {client_info.plugin.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+
+ models = client_info.models
+
+ # Model sections
+ sections: list[tuple[str, str, Sequence[TypeInfo | UnionInfo]]] = [
+ ("Structures", "structures", models.structures),
+ ("Errors", "errors", models.errors),
+ ("Unions", "unions", models.unions),
+ ("Enums", "enums", models.enums),
+ ]
+ for title, folder, items in sections:
+ if items:
+ lines.append("")
+ lines.append(f"## {title}")
+ lines.append("")
+ for item in sorted(items, key=lambda x: x.name):
+ lines.append(f"- [`{item.name}`]({folder}/{item.name}.md)")
+
+ output_path = self.output_dir / "index.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ content = "\n".join(lines)
+ output_path.write_text(content)
+
+ logger.info("Wrote client index file!")
+
+ def _generate_operation_stubs(self, operations: list[OperationInfo]) -> None:
+ """Generate operation documentation files."""
+ for op in operations:
+ lines = []
+ lines.append(f"# {op.name}")
+ lines.append("")
+
+ # Operation section
+ lines.append("## Operation")
+ lines.append("")
+ lines.append(f"::: {op.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append("")
+
+ # Input section
+ lines.append("## Input")
+ lines.append("")
+ lines.append(f"::: {op.input.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append("")
+
+ # Output section - handle all stream types
+ lines.append("## Output")
+ lines.append("")
+
+ if op.stream_type:
+ lines.append(f"This operation returns {op.stream_type.description}.")
+ lines.append("")
+ lines.append("### Event Stream Structure")
+ lines.append("")
+
+ if op.event_input_type:
+ lines.append("#### Input Event Type")
+ lines.append("")
+ lines.append(f"[`{op.event_input_type}`](../unions/{op.event_input_type}.md)")
+ lines.append("")
+ if op.event_output_type:
+ lines.append("#### Output Event Type")
+ lines.append("")
+ lines.append(f"[`{op.event_output_type}`](../unions/{op.event_output_type}.md)")
+ lines.append("")
+
+ lines.append("### Initial Response Structure")
+ lines.append("")
+ lines.append(f"::: {op.output.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 4")
+ else:
+ lines.append(f"::: {op.output.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+
+ output_path = self.output_dir / "operations" / f"{op.name}.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ output_path.write_text(self._breadcrumb("Operations", op.name) + "\n".join(lines))
+
+ logger.info(f"Wrote {len(operations)} operation files")
+
+ def _generate_type_stubs(
+ self,
+ items: list[TypeInfo],
+ category: str,
+ section_title: str,
+ extra_options: list[str] | None = None,
+ ) -> None:
+ """Generate documentation files for a category of types."""
+ for item in items:
+ lines = [
+ f"# {item.name}",
+ "",
+ f"## {section_title}",
+ f"::: {item.module_path}",
+ " options:",
+ " heading_level: 3",
+ ]
+ if extra_options:
+ lines.extend(f" {opt}" for opt in extra_options)
+
+ output_path = self.output_dir / category / f"{item.name}.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ output_path.write_text(self._breadcrumb(category.title(), item.name) + "\n".join(lines))
+
+ logger.info(f"Wrote {len(items)} {category} files")
+
+ def _generate_union_stubs(self, unions: list[UnionInfo]) -> None:
+ """Generate union documentation files."""
+ for union in unions:
+ lines = []
+ lines.append(f"# {union.name}")
+ lines.append("")
+ lines.append("## Union Type")
+ lines.append(f"::: {union.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append("")
+
+ # Add union members
+ if union.members:
+ lines.append("## Union Member Types")
+ for member in union.members:
+ lines.append("")
+ lines.append(f"::: {member.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+
+ output_path = self.output_dir / "unions" / f"{union.name}.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ output_path.write_text(self._breadcrumb("Unions", union.name) + "\n".join(lines))
+
+ logger.info(f"Wrote {len(unions)} union files")
+
+ def _breadcrumb(self, category: str, name: str) -> str:
+ """Generate a breadcrumb navigation element."""
+ separator = " > "
+ home = f"[{self.service_name}](../index.md)"
+ section = f"[{category}](../index.md#{category.lower()})"
+        return f'<span class="breadcrumb">{home}{separator}{section}{separator}{name}</span>\n\n'
+
+
+def main() -> int:
+ """Main entry point for the single-client documentation generator."""
+ parser = argparse.ArgumentParser(
+ description="Generate API documentation stubs for AWS SDK Python client."
+ )
+ parser.add_argument(
+ "-c", "--client-dir", type=Path, required=True, help="Path to the client source package"
+ )
+ parser.add_argument(
+ "-o",
+ "--output-dir",
+ type=Path,
+ required=True,
+ help="Output directory for generated doc stubs",
+ )
+
+ args = parser.parse_args()
+ client_dir = args.client_dir.resolve()
+ output_dir = args.output_dir.resolve()
+
+ if not client_dir.exists():
+ logger.error(f"Client directory not found: {client_dir}")
+ return 1
+
+ try:
+ generator = DocStubGenerator(client_dir, output_dir)
+ success = generator.generate()
+ return 0 if success else 1
+ except Exception as e:
+ logger.error(f"Unexpected error generating doc stubs: {e}", exc_info=True)
+ return 1
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/docs/assets/aws-logo-dark.svg b/docs/assets/aws-logo-dark.svg
new file mode 100644
index 0000000..70619b8
--- /dev/null
+++ b/docs/assets/aws-logo-dark.svg
@@ -0,0 +1,35 @@
+
+
+
diff --git a/docs/assets/aws-logo-white.svg b/docs/assets/aws-logo-white.svg
new file mode 100644
index 0000000..982571b
--- /dev/null
+++ b/docs/assets/aws-logo-white.svg
@@ -0,0 +1,38 @@
+
+
+
diff --git a/docs/contributing.md b/docs/contributing.md
new file mode 100644
index 0000000..e079654
--- /dev/null
+++ b/docs/contributing.md
@@ -0,0 +1 @@
+--8<-- "CONTRIBUTING.md"
\ No newline at end of file
diff --git a/docs/hooks/copyright.py b/docs/hooks/copyright.py
new file mode 100644
index 0000000..1260def
--- /dev/null
+++ b/docs/hooks/copyright.py
@@ -0,0 +1,6 @@
+from datetime import datetime
+
+
+def on_config(config, **kwargs):
+ config.copyright = f"Copyright © {datetime.now().year}, Amazon Web Services, Inc"
+ return config
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 0000000..0f88098
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,3 @@
+# AWS SDK for Python
+
+--8<-- "README.md:2"
\ No newline at end of file
diff --git a/docs/javascript/nav-expand.js b/docs/javascript/nav-expand.js
new file mode 100644
index 0000000..1984a62
--- /dev/null
+++ b/docs/javascript/nav-expand.js
@@ -0,0 +1,29 @@
+/**
+ * Keep API Reference nav expanded on /clients/ pages and highlight active client.
+ * Uses Material for MkDocs document$ observable for instant navigation compatibility.
+ */
+function expandClientsNav() {
+ if (!location.pathname.includes("/clients/")) return;
+ document.querySelectorAll(".md-nav__item--nested").forEach(function (item) {
+ var link = item.querySelector(":scope > .md-nav__link");
+ if (link && link.textContent.trim().includes("Available Clients")) {
+ // Expand "All Available Clients" drop down
+ var toggle = item.querySelector(":scope > .md-nav__toggle");
+ if (toggle) toggle.checked = true;
+ item.setAttribute("data-md-state", "expanded");
+
+ // Highlight active client
+ var navItems = item.querySelectorAll(".md-nav__item .md-nav__link");
+ navItems.forEach(function (navLink) {
+ if (navLink.href && location.pathname.includes(navLink.pathname)) {
+ navLink.classList.add("md-nav__link--active");
+ }
+ });
+ }
+ });
+}
+
+// Subscribe to Material's document$ observable for instant navigation support
+document$.subscribe(expandClientsNav);
+// Also run on initial page load
+expandClientsNav();
\ No newline at end of file
diff --git a/docs/stylesheets/extra.css b/docs/stylesheets/extra.css
new file mode 100644
index 0000000..3df1023
--- /dev/null
+++ b/docs/stylesheets/extra.css
@@ -0,0 +1,14 @@
+/* Custom breadcrumb styling */
+.breadcrumb {
+ font-size: 0.85em;
+ color: var(--md-default-fg-color--light);
+}
+
+p:has(span.breadcrumb) {
+ margin-top: 0;
+}
+
+/* Light mode - use dark logo */
+[data-md-color-scheme="default"] .md-header__button.md-logo img {
+ content: url('../assets/aws-logo-dark.svg');
+}
\ No newline at end of file
diff --git a/mkdocs.yml b/mkdocs.yml
new file mode 100644
index 0000000..05784d2
--- /dev/null
+++ b/mkdocs.yml
@@ -0,0 +1,101 @@
+site_name: AWS SDK for Python
+site_description: Documentation for AWS SDK for Python Clients
+
+repo_name: awslabs/aws-sdk-python
+repo_url: https://github.com/awslabs/aws-sdk-python
+
+exclude_docs: |
+ README.md
+
+hooks:
+ - docs/hooks/copyright.py
+
+theme:
+ name: material
+ logo: assets/aws-logo-white.svg
+ favicon: ""
+ palette:
+ # Palette toggle for automatic mode
+ - media: "(prefers-color-scheme)"
+ scheme: default
+ toggle:
+ icon: material/brightness-auto
+ name: Switch to light mode
+ primary: white
+ # Palette toggle for light mode
+ - media: "(prefers-color-scheme: light)"
+ scheme: default
+ toggle:
+ icon: material/brightness-7
+ name: Switch to dark mode
+ primary: white
+ # Palette toggle for dark mode
+ - media: "(prefers-color-scheme: dark)"
+ scheme: slate
+ toggle:
+ icon: material/brightness-4
+ name: Switch to system preference
+ primary: black
+ features:
+ - navigation.indexes
+ - navigation.instant
+ - navigation.top
+ - search.suggest
+ - search.highlight
+ - content.code.copy
+
+plugins:
+ - search
+ - literate-nav:
+ nav_file: SUMMARY.md
+ - mkdocstrings:
+ handlers:
+ python:
+ options:
+ show_source: false
+ show_signature: true
+ show_signature_annotations: true
+ show_root_heading: true
+ show_root_full_path: false
+ show_object_full_path: false
+ show_symbol_type_heading: true
+ show_symbol_type_toc: true
+ show_if_no_docstring: true
+ show_category_heading: true
+ group_by_category: true
+ separate_signature: true
+ signature_crossrefs: true
+ filters:
+ - "!^_"
+ - "!^deserialize"
+ - "!^serialize"
+
+markdown_extensions:
+ - pymdownx.highlight
+ - pymdownx.inlinehilite
+ - pymdownx.snippets:
+ check_paths: true
+ - pymdownx.superfences
+ - pymdownx.tabbed:
+ alternate_style: true
+ - admonition
+ - def_list
+ - toc:
+ permalink: true
+ toc_depth: 3
+
+extra:
+ social:
+ - icon: fontawesome/brands/github
+ link: https://github.com/awslabs/aws-sdk-python
+
+extra_javascript:
+ - path: javascript/nav-expand.js
+ defer: true
+
+extra_css:
+ - stylesheets/extra.css
+
+validation:
+ nav:
+ omitted_files: ignore
\ No newline at end of file
diff --git a/requirements-docs.in b/requirements-docs.in
new file mode 100644
index 0000000..9d00568
--- /dev/null
+++ b/requirements-docs.in
@@ -0,0 +1,4 @@
+mkdocs==1.6.1
+mkdocstrings[python]==1.0.0
+mkdocs-material==9.7.0
+mkdocs-literate-nav==0.6.1
\ No newline at end of file
diff --git a/scripts/docs/generate_all_doc_stubs.py b/scripts/docs/generate_all_doc_stubs.py
new file mode 100644
index 0000000..d16e628
--- /dev/null
+++ b/scripts/docs/generate_all_doc_stubs.py
@@ -0,0 +1,209 @@
+"""
+Generate documentation stubs for all AWS SDK Python clients.
+
+This script iterates through each client directory and runs the
+generate_doc_stubs.py script with output directed to the top-level docs folder.
+It also generates the clients index page.
+"""
+
+import logging
+import os
+import subprocess
+import sys
+from collections import defaultdict
+from concurrent.futures import ProcessPoolExecutor, as_completed
+from dataclasses import dataclass
+from pathlib import Path
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger("generate_all_doc_stubs")
+
+DEFAULT_CPU_COUNT = 1
+
+@dataclass
+class ClientInfo:
+ """Information about a client for documentation generation."""
+
+ dir: Path
+ service_name: str
+ package_name: str
+ path_name: str
+
+
+def discover_clients(clients_dir: Path) -> list[ClientInfo]:
+ """
+ Discover all clients that have a generate_doc_stubs.py script.
+
+ Args:
+ clients_dir: Path to the clients directory.
+
+ Returns:
+ List of ClientInfo objects.
+ """
+ if not clients_dir.exists():
+ raise FileNotFoundError(f"Clients directory not found: {clients_dir}")
+
+ clients = []
+ for client_dir in sorted(clients_dir.iterdir()):
+ script_path = client_dir / "scripts" / "docs" / "generate_doc_stubs.py"
+ if not script_path.exists():
+ continue
+
+ # Convert "aws-sdk-bedrock-runtime" -> "Bedrock Runtime" / "bedrock-runtime"
+ package_name = client_dir.name
+ path_name = package_name.replace("aws-sdk-", "")
+ service_name = path_name.replace("-", " ").title()
+ clients.append(ClientInfo(client_dir, service_name, package_name, path_name))
+
+ return clients
+
+
+def generate_all_doc_stubs(clients: list[ClientInfo], docs_dir: Path) -> bool:
+ """
+ Generate doc stubs for all clients by running each client's generate_doc_stubs.py.
+
+ Args:
+ clients: List of ClientInfo objects.
+ docs_dir: Path to the docs directory.
+
+ Returns:
+ True if all doc stubs were generated successfully, False otherwise.
+ """
+ top_level_docs = docs_dir / "clients"
+ max_workers = os.cpu_count() or DEFAULT_CPU_COUNT
+
+ logger.info(f"Generating doc stubs for {len(clients)} clients using {max_workers} workers...")
+
+ with ProcessPoolExecutor(max_workers=max_workers) as executor:
+ futures = {
+ executor.submit(
+ _generate_doc_stub,
+ client.dir,
+ client.service_name,
+ top_level_docs / client.path_name,
+ ): client
+ for client in clients
+ }
+
+ failed = []
+ for future in as_completed(futures):
+ service_name, success = future.result()
+ if success:
+ logger.info(f"✅ Generated docs stubs for {service_name}")
+ else:
+ logger.error(f"❌ Failed to generate docs stubs for {service_name}")
+ failed.append(service_name)
+
+ if failed:
+ logger.error(f"Failed to generate doc stubs for: {', '.join(failed)}")
+ return False
+
+ return True
+
+
+def _generate_doc_stub(client_dir: Path, service_name: str, output_dir: Path) -> tuple[str, bool]:
+ """
+ Generate doc stubs for a single client.
+
+ Args:
+ client_dir: Path to the client directory.
+ service_name: Name of the service.
+ output_dir: Path to the output directory.
+
+ Returns:
+ Tuple of (service_name, success).
+ """
+ script_path = client_dir / "scripts" / "docs" / "generate_doc_stubs.py"
+
+ result = subprocess.run(
+ [
+ sys.executable,
+ str(script_path),
+ "--client-dir",
+ str(client_dir / "src" / client_dir.name.replace("-", "_")),
+ "--output-dir",
+ str(output_dir),
+ ],
+ cwd=client_dir,
+ )
+
+ return service_name, result.returncode == 0
+
+
+def generate_clients_index(clients: list[ClientInfo], docs_dir: Path) -> bool:
+ """
+ Generate clients/index.md (with alphabetical tabs).
+
+ Args:
+ clients: List of ClientInfo objects.
+ docs_dir: Path to the docs directory.
+
+ Returns:
+ True if the index was generated successfully, False otherwise.
+ """
+ lines = ["# Available Clients", ""]
+
+ # Group by first letter
+ grouped: defaultdict[str, list[ClientInfo]] = defaultdict(list)
+ for client in clients:
+ letter = client.service_name[0].upper()
+ grouped[letter].append(client)
+
+ # Tab for all services
+ lines.append("=== \"All\"")
+ lines.append("")
+ lines.append(" | Service | Package Name |")
+ lines.append(" |----------|--------------|")
+ for client in clients:
+ lines.append(f" | **[{client.service_name}]({client.path_name}/index.md)** | `{client.package_name}` |")
+ lines.append("")
+
+ # Individual letter tabs
+ for letter in sorted(grouped.keys()):
+ lines.append(f"=== \"{letter}\"")
+ lines.append("")
+ lines.append(" | Service | Package Name |")
+ lines.append(" |----------|--------------|")
+ for client in grouped[letter]:
+ lines.append(f" | **[{client.service_name}]({client.path_name}/index.md)** | `{client.package_name}` |")
+ lines.append("")
+
+ index_path = docs_dir / "clients" / "index.md"
+ try:
+ index_path.write_text("\n".join(lines) + "\n")
+ except OSError as e:
+ logger.error(f"Failed to write clients index: {e}")
+ return False
+
+ logger.info(f"✅ Generated clients index page with {len(grouped)} letter tabs")
+ return True
+
+
+def main() -> int:
+ """Main entry point for generating doc stubs for all clients."""
+ repo_root = Path(__file__).parent.parent.parent
+ clients_dir = repo_root / "clients"
+ docs_dir = repo_root / "docs"
+
+ try:
+ clients = discover_clients(clients_dir)
+
+ if not generate_all_doc_stubs(clients, docs_dir):
+ return 1
+
+ if not generate_clients_index(clients, docs_dir):
+ return 1
+
+ except Exception as e:
+ logger.error(f"Unexpected error: {e}")
+ return 1
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/docs/generate_nav.py b/scripts/docs/generate_nav.py
new file mode 100644
index 0000000..3cd600b
--- /dev/null
+++ b/scripts/docs/generate_nav.py
@@ -0,0 +1,91 @@
+# scripts/docs/generate_nav.py
+"""
+Generate the client documentation navigation for mkdocs.
+
+Run before `mkdocs build` to (re)generate docs/SUMMARY.md,
+the navigation file consumed by the mkdocs literate-nav plugin.
+"""
+
+import logging
+import sys
+
+from pathlib import Path
+
+
+logging.basicConfig(
+    level=logging.INFO,
+    format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
+    datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger("generate_nav")
+
+
+def generate_nav(repo_root: Path) -> bool:
+    """
+    Generate docs/SUMMARY.md, the literate-nav navigation file for clients.
+
+    Args:
+        repo_root: Path to the repository root.
+
+    Returns:
+        True if SUMMARY.md was written successfully, False otherwise.
+    """
+    logger.info("Generating navigation structure...")
+
+    clients_dir = repo_root / "clients"
+    if not clients_dir.exists():
+        logger.error(f"Clients directory not found: {clients_dir}")
+        return False
+
+    # Static top-level entries; client pages are appended beneath them.
+    lines = [
+        "* [Overview](index.md)",
+        "* [Contributing](contributing.md)",
+        "* [Available Clients](clients/index.md)",
+    ]
+
+    # Only directories shipping a doc-stub generator count as clients here;
+    client_count = 0
+    for client_path in sorted(clients_dir.iterdir()):
+        if not (client_path / "scripts" / "docs" / "generate_doc_stubs.py").exists():
+            continue
+
+        # Derive the docs path and display name from the package directory,
+        # e.g. "aws-sdk-bedrock-runtime" -> "bedrock-runtime" / "Bedrock Runtime".
+        path_name = client_path.name.replace("aws-sdk-", "")
+        display_name = path_name.replace("-", " ").title()
+
+        lines.append(f"    * [{display_name}](clients/{path_name}/index.md)")
+        logger.info(f"Discovered client: {display_name}")
+        client_count += 1
+
+    logger.info(f"Found {client_count} total clients")
+
+    # Write SUMMARY.md into docs/ where the literate-nav plugin expects it.
+    summary_path = repo_root / "docs" / "SUMMARY.md"
+    try:
+        summary_path.write_text("\n".join(lines) + "\n")
+    except OSError as e:
+        logger.error(f"Failed to write SUMMARY.md: {e}")
+        return False
+
+    logger.info(f"✅ Generated SUMMARY.md navigation for {client_count} clients")
+    return True
+
+
+def main() -> int:
+    """Generate the SUMMARY.md navigation; return 0 on success, 1 on failure."""
+    repo_root = Path(__file__).parent.parent.parent
+
+    try:
+        if not generate_nav(repo_root):
+            return 1
+    except Exception as e:
+        logger.error(f"Unexpected error: {e}")
+        return 1
+
+    return 0
+
+if __name__ == "__main__":
+    sys.exit(main())