From 1d903300314af8563469566680908379845743f5 Mon Sep 17 00:00:00 2001
From: Antonio Aranda <102337110+arandito@users.noreply.github.com>
Date: Mon, 24 Nov 2025 12:53:28 -0500
Subject: [PATCH 1/4] Implement initial top-level doc gen
---
CONTRIBUTING.md | 8 +-
README.md | 2 +
docs/contributing/index.md | 4 +
docs/index.md | 3 +
docs/stylesheets/extra.css | 3 +
mkdocs.yml | 82 +++++
pyproject.toml | 20 ++
scripts/generate_doc_stubs.py | 590 ++++++++++++++++++++++++++++++++++
scripts/generate_nav.py | 141 ++++++++
9 files changed, 847 insertions(+), 6 deletions(-)
create mode 100644 docs/contributing/index.md
create mode 100644 docs/index.md
create mode 100644 docs/stylesheets/extra.css
create mode 100644 mkdocs.yml
create mode 100644 pyproject.toml
create mode 100644 scripts/generate_doc_stubs.py
create mode 100644 scripts/generate_nav.py
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index c4b6a1c..3a533cb 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,3 +1,4 @@
+
# Contributing Guidelines
Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
@@ -51,9 +52,4 @@ opensource-codeofconduct@amazon.com with any additional questions or comments.
## Security issue notifications
-If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue.
-
-
-## Licensing
-
-See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
+If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a
\ No newline at end of file
diff --git a/README.md b/README.md
index 7902617..b6bf9e8 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,5 @@
## AWS SDK for Python
+
[![Apache 2 licensed][apache-badge]][apache-url]
[apache-badge]: https://img.shields.io/badge/license-APACHE2-blue.svg
@@ -29,6 +30,7 @@ features we are actively working on.
You can provide feedback or report a bug by submitting an [issue](https://github.com/awslabs/aws-sdk-python/issues/new/choose).
This is the preferred mechanism to give feedback so that other users can engage in the conversation, +1 issues, etc.
+
## Security
See [CONTRIBUTING](CONTRIBUTING.md#security-issue-notifications) for more information.
diff --git a/docs/contributing/index.md b/docs/contributing/index.md
new file mode 100644
index 0000000..1331899
--- /dev/null
+++ b/docs/contributing/index.md
@@ -0,0 +1,4 @@
+--8<-- "CONTRIBUTING.md:docs"
+## Licensing
+
+See the [LICENSE](https://github.com/awslabs/aws-sdk-python/blob/develop/LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
\ No newline at end of file
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 0000000..9123f93
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,3 @@
+# AWS SDK for Python
+
+--8<-- "README.md:docs"
\ No newline at end of file
diff --git a/docs/stylesheets/extra.css b/docs/stylesheets/extra.css
new file mode 100644
index 0000000..32aa2b6
--- /dev/null
+++ b/docs/stylesheets/extra.css
@@ -0,0 +1,3 @@
+.md-grid {
+ max-width: 70rem;
+}
\ No newline at end of file
diff --git a/mkdocs.yml b/mkdocs.yml
new file mode 100644
index 0000000..71675cf
--- /dev/null
+++ b/mkdocs.yml
@@ -0,0 +1,82 @@
+site_name: AWS SDK for Python
+site_description: Documentation for AWS SDK for Python Clients
+
+copyright: Copyright © 2025, Amazon Web Services, Inc
+repo_name: awslabs/aws-sdk-python
+repo_url: https://github.com/awslabs/aws-sdk-python
+
+theme:
+ name: material
+ palette:
+ - scheme: default
+ primary: white
+ accent: light blue
+ toggle:
+ icon: material/brightness-7
+ name: Switch to dark mode
+ - scheme: slate
+ primary: black
+ accent: light blue
+ toggle:
+ icon: material/brightness-4
+ name: Switch to light mode
+ features:
+ - navigation.tabs
+ - navigation.tabs.sticky
+ - navigation.top
+ - search.suggest
+ - search.highlight
+ - content.code.copy
+
+plugins:
+ - search
+ - gen-files:
+ scripts:
+ - scripts/generate_doc_stubs.py
+ - scripts/generate_nav.py
+ - mkdocstrings:
+ handlers:
+ python:
+ options:
+ show_source: false
+ show_signature: true
+ show_signature_annotations: true
+ show_root_heading: true
+ show_root_full_path: false
+ show_object_full_path: false
+ show_symbol_type_heading: true
+ show_symbol_type_toc: true
+ show_category_heading: true
+ group_by_category: true
+ separate_signature: true
+ signature_crossrefs: true
+ filters:
+ - "!^_"
+ - "!^deserialize"
+ - "!^serialize"
+
+markdown_extensions:
+ - pymdownx.highlight
+ - pymdownx.inlinehilite
+ - pymdownx.snippets:
+ base_path: ['.']
+ - pymdownx.superfences
+ - pymdownx.tabbed:
+ alternate_style: true
+ - admonition
+ - def_list
+ - toc:
+ permalink: true
+ toc_depth: 3
+
+extra:
+ social:
+ - icon: fontawesome/brands/github
+ link: https://github.com/awslabs/aws-sdk-python
+
+extra_css:
+ - stylesheets/extra.css
+
+validation:
+ nav:
+ omitted_files: ignore
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..501b27e
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,20 @@
+[project]
+name = "aws-sdk-python"
+version = "0.1.0"
+description = "Add your description here"
+readme = "README.md"
+requires-python = ">=3.12"
+dependencies = [
+ "mkdocs-awesome-pages-plugin>=2.10.1",
+]
+
+[dependency-groups]
+dev = [
+ "mkdocs-awesome-pages-plugin>=2.10.1",
+]
+docs = [
+ "mkdocs==1.6.1",
+ "mkdocstrings[python]==0.30.1",
+ "mkdocs-material==9.7.0",
+ "mkdocs-gen-files>=0.5.0",
+]
diff --git a/scripts/generate_doc_stubs.py b/scripts/generate_doc_stubs.py
new file mode 100644
index 0000000..928eca3
--- /dev/null
+++ b/scripts/generate_doc_stubs.py
@@ -0,0 +1,590 @@
+"""
+Generate API documentation for AWS SDK for Python clients.
+
+This script generates MkDocs Material documentation that matches the reference
+documentation format. It analyzes Python client packages using griffe and emits
+markdown stubs containing mkdocstrings directives for clients, operations, and models.
+"""
+
+import logging
+import sys
+from dataclasses import dataclass
+from enum import Enum
+from pathlib import Path
+
+import griffe
+import mkdocs_gen_files
+from griffe import Alias, Class, Function, Module, Object, ExprBinOp, ExprSubscript, TypeAlias
+
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger("generate_doc_stubs")
+
+EVENT_STREAM_TYPES = ["InputEventStream", "OutputEventStream", "DuplexEventStream"]
+
+class StreamType(Enum):
+ """Type of event stream for operations."""
+
+ NONE = "none"
+ INPUT = "InputEventStream" # Client-to-server streaming
+ OUTPUT = "OutputEventStream" # Server-to-client streaming
+ DUPLEX = "DuplexEventStream" # Bidirectional streaming
+
+
+@dataclass(frozen=True)
+class StructureInfo:
+ """Information about a structure (dataclass)."""
+
+ name: str
+ module_path: str # e.g., "aws_sdk_bedrock_runtime.models"
+
+
+@dataclass
+class UnionInfo:
+ """Information about a union type."""
+
+ name: str
+ module_path: str
+ members: list[StructureInfo]
+
+
+@dataclass
+class EnumInfo:
+ """Information about an enum."""
+
+ name: str
+ module_path: str
+
+
+@dataclass
+class ErrorInfo:
+ """Information about an error/exception."""
+
+ name: str
+ module_path: str
+
+
+@dataclass
+class ConfigInfo:
+ """Information about a configuration class."""
+
+ name: str
+ module_path: str
+
+
+@dataclass
+class PluginInfo:
+ """Information about a plugin type alias."""
+
+ name: str
+ module_path: str
+
+
+@dataclass
+class OperationInfo:
+ """Information about a client operation."""
+
+ name: str # e.g., "converse"
+ module_path: str # e.g., "aws_sdk_bedrock_runtime.client.BedrockRuntimeClient.converse"
+ input: StructureInfo # e.g., "ConverseInput"
+ output: StructureInfo # e.g., "ConverseOperationOutput"
+ stream_type: StreamType | None
+ event_input_type: str | None # For input/duplex streams
+ event_output_type: str | None # For output/duplex streams
+
+
+@dataclass
+class ModelsInfo:
+ """Information about all models."""
+
+ structures: list[StructureInfo]
+ unions: list[UnionInfo]
+ enums: list[EnumInfo]
+ errors: list[ErrorInfo]
+
+
+@dataclass
+class ClientInfo:
+ """Complete information about a client package."""
+
+ name: str # e.g., "BedrockRuntimeClient"
+ module_path: str # e.g., "aws_sdk_bedrock_runtime.client.BedrockRuntimeClient"
+ package_name: str # e.g., "aws_sdk_bedrock_runtime"
+ config: ConfigInfo
+ plugin: PluginInfo
+ operations: list[OperationInfo]
+ models: ModelsInfo
+
+
+Member = Object | Alias
+
+class DocStubGenerator:
+ """Generates MkDocs stubs for AWS SDK client documentation."""
+
+ def __init__(self, client_dir: Path, docs_dir: Path, service_name: str) -> None:
+ """Initialize the documentation generator."""
+ self.client_dir = client_dir
+ self.service_name = service_name
+
+ def generate(self) -> None:
+ """Generate the documentation stubs to the output directory."""
+ client_name = self.client_dir.name
+ package_name = client_name.replace("-", "_")
+ client_info = self._analyze_client_package(package_name)
+ self._generate_client_docs(client_info)
+
+ def _analyze_client_package(self, package_name: str) -> ClientInfo:
+ """Analyze a client package using griffe."""
+ logger.info(f"Analyzing package: {package_name}")
+ package = griffe.load(package_name)
+
+ # Parse submodules
+ client_module = package.modules.get("client")
+ config_module = package.modules.get("config")
+ models_module = package.modules.get("models")
+
+ client_class = self._find_class_with_suffix(client_module, "Client")
+ config_class = config_module.members.get("Config")
+ plugin_alias = config_module.members.get("Plugin")
+ config = ConfigInfo(
+ name=config_class.name,
+ module_path=config_class.path
+ )
+ plugin = PluginInfo(
+ name=plugin_alias.name,
+ module_path=plugin_alias.path
+ )
+ operations = self._extract_operations(client_class)
+ models = self._extract_models(models_module, operations)
+
+ return ClientInfo(
+ name=client_class.name,
+ module_path=client_class.path,
+ package_name=package_name,
+ config=config,
+ plugin=plugin,
+ operations=operations,
+ models=models
+ )
+
+ def _find_class_with_suffix(self, module: Module, suffix: str) -> Class:
+ """Find the class in the module with a matching suffix."""
+ for cls in module.classes.values():
+ if cls.name.endswith(suffix):
+ return cls
+
+ def _extract_operations(self, client_class: Class) -> list[OperationInfo]:
+ """Extract operation information from client class."""
+ operations = []
+ for op in client_class.functions.values():
+ if op.is_private or op.is_init_method:
+ continue
+ operations.append(self._analyze_operation(op))
+ return operations
+
+ def _analyze_operation(self, operation: Function) -> OperationInfo:
+ """Analyze an operation method to extract information."""
+ stream_type = None
+ event_input_type = None
+ event_output_type = None
+
+ input_param = operation.parameters["input"]
+ input_info = StructureInfo(
+ name=input_param.annotation.canonical_name,
+ module_path=input_param.annotation.canonical_path
+ )
+
+ output_type = operation.returns.canonical_name
+ if any(stream_name in output_type for stream_name in EVENT_STREAM_TYPES):
+ stream_args = operation.returns.slice.elements
+
+ if output_type == "InputEventStream":
+ stream_type = StreamType.INPUT
+ event_input_type = stream_args[0].canonical_name
+ elif output_type == "OutputEventStream":
+ stream_type = StreamType.OUTPUT
+ event_output_type = stream_args[0].canonical_name
+ elif output_type == "DuplexEventStream":
+ stream_type = StreamType.DUPLEX
+ event_input_type = stream_args[0].canonical_name
+ event_output_type = stream_args[1].canonical_name
+
+ output_info = StructureInfo(
+ name=stream_args[-1].canonical_name,
+ module_path=stream_args[-1].canonical_path
+ )
+ else:
+ output_info = StructureInfo(
+ name=output_type,
+ module_path=operation.returns.canonical_path
+ )
+
+ return OperationInfo(
+ name=operation.name,
+ module_path=operation.path,
+ input=input_info,
+ output=output_info,
+ stream_type=stream_type,
+ event_input_type=event_input_type,
+ event_output_type=event_output_type,
+ )
+
+ def _extract_models(self, models_module: Module, operations: list[OperationInfo]) -> ModelsInfo:
+ """Extract structures, unions, enums, and errors from the models module."""
+ structures, unions, enums, errors = [], [], [], []
+
+ for member in models_module.members.values():
+ # Skip imported and private members
+ if member.is_imported or member.is_private:
+ continue
+
+ if self._is_union(member):
+ unions.append(UnionInfo(
+ name=member.name,
+ module_path=member.path,
+ members=self._extract_union_members(member, models_module)
+ ))
+ elif self._is_enum(member):
+ enums.append(EnumInfo(
+ name=member.name,
+ module_path=member.path
+ ))
+ elif self._is_error(member):
+ errors.append(ErrorInfo(
+ name=member.name,
+ module_path=member.path
+ ))
+ else:
+ if member.is_class:
+ structures.append(StructureInfo(
+ name=member.name,
+ module_path=member.path
+ ))
+
+ duplicates = set()
+ for structure in structures:
+ if (self._is_operation_io_type(structure.name, operations) or
+ self._is_union_member(structure.name, unions)):
+ duplicates.add(structure)
+
+ structures = [struct for struct in structures if struct not in duplicates]
+
+ return ModelsInfo(
+ structures=structures,
+ unions=unions,
+ enums=enums,
+ errors=errors
+ )
+
+ def _is_union(self, member: Member) -> bool:
+ """Check if a type is a union type."""
+ if member.is_attribute:
+ # Check for Union[...] syntax
+ if isinstance(member.value, ExprSubscript):
+ if member.value.left.name == "Union":
+ return True
+
+ # Check for PEP 604 (X | Y) syntax
+ if isinstance(member.value, ExprBinOp):
+ return True
+
+ return False
+
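+ # Illustrative union attribute shapes handled by _is_union and
+ # _extract_union_members (member type names are hypothetical):
+ #   SomeEventUnion = Union[MemberA, MemberB]
+ #   SomeEventUnion = MemberA | MemberB # PEP 604 syntax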
+ def _extract_union_members(self, union_class: TypeAlias, models_module: Module) -> list[StructureInfo]:
+ """Extract member types from a union."""
+ members = []
+ value_str = str(union_class.value)
+
+ # Handle Union[X | Y | Z] syntax
+ if value_str.startswith("Union[") and value_str.endswith("]"):
+ value_str = value_str.removeprefix("Union[").removesuffix("]")
+
+ member_names = [member.strip() for member in value_str.split("|")]
+
+ for name in member_names:
+ member_object = models_module.members.get(name)
+ members.append(StructureInfo(
+ name=member_object.name,
+ module_path=member_object.path
+ ))
+
+ return members
+
+ def _is_enum(self, member: Member) -> bool:
+ """Check if a module member is an enum."""
+ if not member.is_class:
+ return False
+
+ for base in member.bases:
+ if base.name in ('StrEnum', 'IntEnum'):
+ return True
+
+ return False
+
+ def _is_error(self, member: Member) -> bool:
+ """Check if a module member is an error."""
+ if not member.is_class:
+ return False
+
+ for base in member.bases:
+ if base.name in ('ServiceError', 'ModeledError'):
+ return True
+
+ return False
+
+ def _is_operation_io_type(self, type_name: str, operations: list[OperationInfo]) -> bool:
+ """Check if a type is used as operation input/output."""
+ for op in operations:
+ if type_name == op.input.name or type_name == op.output.name:
+ return True
+ return False
+
+ def _is_union_member(self, type_name: str, unions: list[UnionInfo]) -> bool:
+ """Check if a type is used as union member."""
+ for union in unions:
+ for member in union.members:
+ if type_name == member.name:
+ return True
+ return False
+
+ def _generate_client_docs(self, client_info: ClientInfo) -> None:
+ """Generate all documentation files for a client."""
+ logger.info(f"Writing files to mkdocs virtual filesystem for {self.service_name}")
+
+ self._generate_index(client_info)
+ self._generate_operations(client_info.operations)
+ self._generate_structures(client_info.models.structures)
+ self._generate_errors(client_info.models.errors)
+ self._generate_unions(client_info.models.unions)
+ self._generate_enums(client_info.models.enums)
+
+ def _generate_index(self, client_info: ClientInfo) -> None:
+ """Generate the main index.md file."""
+ content = f"# {self.service_name}\n\n"
+
+ # Client section
+ content += "## Client\n\n"
+ content += f"::: {client_info.module_path}\n"
+ content += " options:\n"
+ content += " merge_init_into_class: true\n"
+ content += " docstring_options:\n"
+ content += " ignore_init_summary: true\n"
+ content += " members: false\n"
+ content += " heading_level: 3\n\n"
+
+ # Operations section
+ if client_info.operations:
+ content += "## Available Operations\n\n"
+ for op in sorted(client_info.operations, key=lambda x: x.name):
+ content += f"- [`{op.name}`](operations/{op.name}.md)\n\n"
+
+ # Configuration section
+ content += "## Configuration\n\n"
+ content += f"::: {client_info.config.module_path}\n"
+ content += " options:\n"
+ content += " merge_init_into_class: true\n"
+ content += " docstring_options:\n"
+ content += " ignore_init_summary: true\n"
+ content += " heading_level: 3\n\n"
+ content += f"::: {client_info.plugin.module_path}\n"
+ content += " options:\n"
+ content += " heading_level: 3\n\n"
+
+ models = client_info.models
+
+ # Structures section
+ if models.structures:
+ content += "## Structures\n\n"
+ for struct in sorted(models.structures, key=lambda x: x.name):
+ content += f"- [`{struct.name}`](structures/{struct.name}.md)\n\n"
+
+ # Errors section
+ if models.errors:
+ content += "## Errors\n\n"
+ for error in sorted(models.errors, key=lambda x: x.name):
+ content += f"- [`{error.name}`](errors/{error.name}.md)\n\n"
+
+ # Unions section
+ if models.unions:
+ content += "## Unions\n\n"
+ for union in sorted(models.unions, key=lambda x: x.name):
+ content += f"- [`{union.name}`](unions/{union.name}.md)\n\n"
+
+ # Enums section
+ if models.enums:
+ content += "## Enums\n\n"
+ for enum in sorted(models.enums, key=lambda x: x.name):
+ content += f"- [`{enum.name}`](enums/{enum.name}.md)\n\n"
+
+ docs_path = f"clients/{self.service_name}/index.md"
+ with mkdocs_gen_files.open(docs_path, "w") as f:
+ f.write(content)
+
+ logger.info(f"Generated index.md")
+
+ def _generate_operations(self, operations: list[OperationInfo]) -> None:
+ """Generate operation documentation files."""
+ for op in operations:
+ content = f"# {op.name}\n\n"
+
+ # Operation section
+ content += "## Operation\n\n"
+ content += f"::: {op.module_path}\n"
+ content += " options:\n"
+ content += " heading_level: 3\n\n"
+
+ # Input section
+ content += "## Input\n\n"
+ content += f"::: {op.input.module_path}\n"
+ content += " options:\n"
+ content += " heading_level: 3\n\n"
+
+ # Output section - handle all stream types
+ content += "## Output\n\n"
+
+ if op.stream_type == StreamType.INPUT:
+ content += "This operation returns an `InputEventStream` for client-to-server streaming.\n\n"
+ content += "### Event Stream Structure\n\n"
+ content += "#### Input Event Type\n\n"
+ content += f"[`{op.event_input_type}`](../unions/{op.event_input_type}.md)\n\n"
+ content += "### Initial Response Structure\n\n"
+ content += f"::: {op.output.module_path}\n"
+ content += " options:\n"
+ content += " heading_level: 4\n\n"
+ elif op.stream_type == StreamType.OUTPUT:
+ content += "This operation returns an `OutputEventStream` for server-to-client streaming.\n\n"
+ content += "### Event Stream Structure\n\n"
+ content += "#### Output Event Type\n\n"
+ if op.event_output_type:
+ content += f"[`{op.event_output_type}`](../unions/{op.event_output_type}.md)\n\n"
+ content += "### Initial Response Structure\n\n"
+ content += f"::: {op.output.module_path}\n"
+ content += " options:\n"
+ content += " heading_level: 4\n\n"
+ elif op.stream_type == StreamType.DUPLEX:
+ content += "This operation returns a `DuplexEventStream` for bidirectional streaming.\n\n"
+ content += "### Event Stream Structure\n\n"
+ content += "#### Input Event Type\n\n"
+ if op.event_input_type:
+ content += f"[`{op.event_input_type}`](../unions/{op.event_input_type}.md)\n\n"
+ content += "#### Output Event Type\n\n"
+ if op.event_output_type:
+ content += f"[`{op.event_output_type}`](../unions/{op.event_output_type}.md)\n\n"
+ content += "### Initial Response Structure\n\n"
+ content += f"::: {op.output.module_path}\n"
+ content += " options:\n"
+ content += " heading_level: 4\n\n"
+ else:
+ # No streaming
+ content += f"::: {op.output.module_path}\n"
+ content += " options:\n"
+ content += " heading_level: 3\n\n"
+
+ docs_path = f"clients/{self.service_name}/operations/{op.name}.md"
+ with mkdocs_gen_files.open(docs_path, "w") as f:
+ f.write(content)
+
+ logger.info(f"Generated {len(operations)} operation files")
+
+ def _generate_structures(self, structures: list[StructureInfo]) -> None:
+ """Generate structure documentation files."""
+ for struct in structures:
+ content = f"::: {struct.module_path}\n"
+ content += " options:\n"
+ content += " heading_level: 1\n"
+
+ docs_path = f"clients/{self.service_name}/structures/{struct.name}.md"
+ with mkdocs_gen_files.open(docs_path, "w") as f:
+ f.write(content)
+
+ logger.info(f"Generated {len(structures)} structure files")
+
+ def _generate_unions(self, unions: list[UnionInfo]) -> None:
+ """Generate union documentation files."""
+ for union in unions:
+ content = f"::: {union.module_path}\n"
+ content += " options:\n"
+ content += " heading_level: 1\n\n"
+
+ # Add union members
+ if union.members:
+ content += "## Union Members\n\n"
+ for member in union.members:
+ content += f"::: {member.module_path}\n"
+ content += " options:\n"
+ content += " heading_level: 3\n\n"
+
+ docs_path = f"clients/{self.service_name}/unions/{union.name}.md"
+ with mkdocs_gen_files.open(docs_path, "w") as f:
+ f.write(content)
+
+ logger.info(f"Generated {len(unions)} union files")
+
+ def _generate_enums(self, enums: list[EnumInfo]) -> None:
+ """Generate enum documentation files."""
+ for enum in enums:
+ content = f"::: {enum.module_path}\n"
+ content += " options:\n"
+ content += " heading_level: 1\n"
+ content += " members: true\n"
+
+ docs_path = f"clients/{self.service_name}/enums/{enum.name}.md"
+ with mkdocs_gen_files.open(docs_path, "w") as f:
+ f.write(content)
+
+ logger.info(f"Generated {len(enums)} enum files")
+
+ def _generate_errors(self, errors: list[ErrorInfo]) -> None:
+ """Generate error documentation files."""
+ for error in errors:
+ content = f"::: {error.module_path}\n"
+ content += " options:\n"
+ content += " heading_level: 1\n"
+ content += " members: true\n"
+
+ docs_path = f"clients/{self.service_name}/errors/{error.name}.md"
+ with mkdocs_gen_files.open(docs_path, "w") as f:
+ f.write(content)
+
+ logger.info(f"Generated {len(errors)} error files")
+
+
+def extract_service_name(package_name: str) -> str:
+ """Extract service name from client package name."""
+ return (
+ package_name
+ .replace("aws-sdk-", "")
+ .replace("-", " ")
+ .title()
+ )
+
+
+def main() -> int:
+ """Main entry point for the documentation generator."""
+ repo_root = Path(__file__).parent.parent.absolute()
+ output_dir = repo_root / "docs" / "clients"
+ clients_dir = repo_root / "clients"
+
+ try:
+ for client_dir in clients_dir.iterdir():
+ if client_dir.is_dir() and client_dir.name != "aws-sdk-python":
+ service_name = extract_service_name(client_dir.name)
+ logger.info(f"Generating docs for {service_name}")
+ generator = DocStubGenerator(client_dir, output_dir / service_name, service_name)
+ generator.generate()
+
+ return 0
+ except Exception as e:
+ logger.error(f"Error generating doc stubs: {e}", exc_info=True)
+ return 1
+
+
+if __name__ == "__main__":
+ sys.exit(main())
+else:
+ # When imported by mkdocs-gen-files, run the generation
+ main()
diff --git a/scripts/generate_nav.py b/scripts/generate_nav.py
new file mode 100644
index 0000000..cc0f194
--- /dev/null
+++ b/scripts/generate_nav.py
@@ -0,0 +1,141 @@
+# scripts/generate_nav.py
+"""
+Generate client documentation navigation dynamically.
+
+Executed by mkdocs-gen-files during the build, after generate_doc_stubs.py has written
+the per-client documentation. It discovers the client packages under clients/, generates
+a dynamic index (clients/index.md) that groups clients alphabetically, and builds the nav.
+"""
+
+import logging
+import sys
+
+from collections import defaultdict
+from dataclasses import dataclass
+from pathlib import Path
+
+import mkdocs_gen_files
+
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger("generate_nav")
+
+
+@dataclass
+class ClientDocInfo:
+ """Information about a client's documentation directory."""
+
+ name: str
+ package_name: str
+ docs_path: Path
+
+
+def generate_nav(repo_root: Path) -> int:
+ """Generate navigation for clients."""
+ try:
+ clients = discover_clients(repo_root)
+ generate_clients_index(clients)
+ build_nav_structure(clients)
+ except Exception as e:
+ logger.error(f"Error generating navigation: {e}")
+ return 1
+
+ return 0
+
+
+def discover_clients(repo_root: Path) -> list[ClientDocInfo]:
+ """Discover clients from clients packages."""
+ clients = []
+ clients_dir = repo_root / "clients"
+
+ if not clients_dir.exists():
+ raise FileNotFoundError(f"No clients directory found at {clients_dir}")
+
+ for client_path in sorted(clients_dir.iterdir()):
+ if not client_path.is_dir() or client_path.name == "aws-sdk-python":
+ continue
+
+ # Extract service name from package name (e.g., "aws-sdk-bedrock-runtime" -> "Bedrock Runtime")
+ service_name = client_path.name.replace("aws-sdk-", "").replace("-", " ").title()
+ package_name = client_path.name
+
+ clients.append(ClientDocInfo(
+ name=service_name,
+ package_name=package_name,
+ docs_path=client_path,
+ ))
+
+ logger.info(f"✅ Discovered client: {service_name}")
+
+ return clients
+
+
+def generate_clients_index(clients: list[ClientDocInfo]) -> None:
+ """Generate clients/index.md (with alphabetical tabs)."""
+ content = "# All Available Clients\n\n"
+
+ # Group by first letter
+ grouped = defaultdict(list)
+ for client in clients:
+ letter = client.name[0].upper()
+ grouped[letter].append(client)
+
+ # Tab for all services
+ content += "=== \"All\"\n\n"
+ content += " | Service | Package Name |\n"
+ content += " |----------|--------------|\n"
+ for client in sorted(clients, key=lambda x: x.name):
+ content += f" | **[{client.name}]({client.name}/index.md)** | `{client.package_name}` |\n"
+ content += "\n"
+
+ # Individual letter tabs
+ for letter in sorted(grouped.keys()):
+ content += f"=== \"{letter}\"\n\n"
+ content += " | Service | Package Name |\n"
+ content += " |----------|--------------|\n"
+ for client in sorted(grouped[letter], key=lambda x: x.name):
+ content += f" | **[{client.name}]({client.name}/index.md)** | `{client.package_name}` |\n"
+ content += "\n"
+
+ with mkdocs_gen_files.open("clients/index.md", "w") as f:
+ f.write(content)
+
+ logger.info(f"✅ Generated clients index page with {len(clients)} letter tabs")
+
+
+def build_nav_structure(clients: list[ClientDocInfo]) -> None:
+ """Build navigation structure for clients."""
+ nav_structure = [
+ {
+ "Getting Started": [
+ {"Overview": "index.md"},
+ {"Contributing": "contributing/index.md"},
+ ]
+ },
+ {
+ "Clients API Reference": [
+ "clients/index.md",
+ *[f"clients/{client.name}/index.md" for client in sorted(clients, key=lambda x: x.name)]
+ ]
+ }
+ ]
+ mkdocs_gen_files.config["nav"] = nav_structure
+ logger.info(f"✅ Generated navigation structure for {len(clients)} clients")
+
+
+def main() -> int:
+ """Main entry point to generate navigation."""
+ repo_root = Path(__file__).parent.parent
+
+ return generate_nav(repo_root)
+
+
+if __name__ == "__main__":
+ sys.exit(main())
+else:
+ # When imported by mkdocs-gen-files, run the generation
+ main()
\ No newline at end of file
From a447c2bbb2b01bc031ddaa4b694a5a9b53a51aa8 Mon Sep 17 00:00:00 2001
From: Antonio Aranda <102337110+arandito@users.noreply.github.com>
Date: Mon, 24 Nov 2025 13:04:43 -0500
Subject: [PATCH 2/4] Fix CONTRIBUTING.md
---
CONTRIBUTING.md | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 3a533cb..d1fadc3 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -52,4 +52,9 @@ opensource-codeofconduct@amazon.com with any additional questions or comments.
## Security issue notifications
-If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a
\ No newline at end of file
+If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue.
+
+
+## Licensing
+
+See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
From 15bb9cdae1340efb5b1c353b50b64bbf714c6107 Mon Sep 17 00:00:00 2001
From: Antonio Aranda <102337110+arandito@users.noreply.github.com>
Date: Tue, 16 Dec 2025 14:05:01 -0500
Subject: [PATCH 3/4] Add per-client documentation
---
.gitignore | 18 +
CONTRIBUTING.md | 5 +-
Makefile | 25 +
README.md | 4 +-
clients/aws-sdk-bedrock-runtime/Makefile | 25 +
.../aws-sdk-bedrock-runtime/docs/README.md | 18 +
.../docs/hooks/copyright.py | 6 +
clients/aws-sdk-bedrock-runtime/docs/index.md | 1 +
.../docs/stylesheets/extra.css | 9 +
clients/aws-sdk-bedrock-runtime/mkdocs.yml | 96 +++
.../aws-sdk-bedrock-runtime/pyproject.toml | 6 +
.../scripts/docs/generate_doc_stubs.py | 607 ++++++++++++++++++
.../aws-sdk-sagemaker-runtime-http2/Makefile | 25 +
.../aws-sdk-sagemaker-runtime-http2/README.md | 2 +-
.../docs/README.md | 18 +
.../docs/hooks/copyright.py | 6 +
.../docs/index.md | 1 +
.../docs/stylesheets/extra.css | 9 +
.../mkdocs.yml | 96 +++
.../pyproject.toml | 8 +-
.../scripts/docs/generate_doc_stubs.py | 607 ++++++++++++++++++
clients/aws-sdk-transcribe-streaming/Makefile | 25 +
.../docs/README.md | 18 +
.../docs/hooks/copyright.py | 6 +
.../docs/index.md | 1 +
.../docs/stylesheets/extra.css | 9 +
.../aws-sdk-transcribe-streaming/mkdocs.yml | 96 +++
.../pyproject.toml | 6 +
.../scripts/docs/generate_doc_stubs.py | 607 ++++++++++++++++++
docs/assets/aws-logo-dark.svg | 35 +
docs/assets/aws-logo-white.svg | 38 ++
docs/contributing.md | 1 +
docs/contributing/index.md | 4 -
docs/hooks/copyright.py | 6 +
docs/index.md | 2 +-
docs/javascript/nav-expand.js | 29 +
docs/stylesheets/extra.css | 15 +-
mkdocs.yml | 59 +-
pyproject.toml | 20 -
requirements-docs.in | 4 +
scripts/docs/generate_all_doc_stubs.py | 209 ++++++
scripts/docs/generate_nav.py | 91 +++
scripts/generate_doc_stubs.py | 590 -----------------
scripts/generate_nav.py | 141 ----
44 files changed, 2817 insertions(+), 787 deletions(-)
create mode 100644 .gitignore
create mode 100644 Makefile
create mode 100644 clients/aws-sdk-bedrock-runtime/Makefile
create mode 100644 clients/aws-sdk-bedrock-runtime/docs/README.md
create mode 100644 clients/aws-sdk-bedrock-runtime/docs/hooks/copyright.py
create mode 100644 clients/aws-sdk-bedrock-runtime/docs/index.md
create mode 100644 clients/aws-sdk-bedrock-runtime/docs/stylesheets/extra.css
create mode 100644 clients/aws-sdk-bedrock-runtime/mkdocs.yml
create mode 100644 clients/aws-sdk-bedrock-runtime/scripts/docs/generate_doc_stubs.py
create mode 100644 clients/aws-sdk-sagemaker-runtime-http2/Makefile
create mode 100644 clients/aws-sdk-sagemaker-runtime-http2/docs/README.md
create mode 100644 clients/aws-sdk-sagemaker-runtime-http2/docs/hooks/copyright.py
create mode 100644 clients/aws-sdk-sagemaker-runtime-http2/docs/index.md
create mode 100644 clients/aws-sdk-sagemaker-runtime-http2/docs/stylesheets/extra.css
create mode 100644 clients/aws-sdk-sagemaker-runtime-http2/mkdocs.yml
create mode 100644 clients/aws-sdk-sagemaker-runtime-http2/scripts/docs/generate_doc_stubs.py
create mode 100644 clients/aws-sdk-transcribe-streaming/Makefile
create mode 100644 clients/aws-sdk-transcribe-streaming/docs/README.md
create mode 100644 clients/aws-sdk-transcribe-streaming/docs/hooks/copyright.py
create mode 100644 clients/aws-sdk-transcribe-streaming/docs/index.md
create mode 100644 clients/aws-sdk-transcribe-streaming/docs/stylesheets/extra.css
create mode 100644 clients/aws-sdk-transcribe-streaming/mkdocs.yml
create mode 100644 clients/aws-sdk-transcribe-streaming/scripts/docs/generate_doc_stubs.py
create mode 100644 docs/assets/aws-logo-dark.svg
create mode 100644 docs/assets/aws-logo-white.svg
create mode 100644 docs/contributing.md
delete mode 100644 docs/contributing/index.md
create mode 100644 docs/hooks/copyright.py
create mode 100644 docs/javascript/nav-expand.js
delete mode 100644 pyproject.toml
create mode 100644 requirements-docs.in
create mode 100644 scripts/docs/generate_all_doc_stubs.py
create mode 100644 scripts/docs/generate_nav.py
delete mode 100644 scripts/generate_doc_stubs.py
delete mode 100644 scripts/generate_nav.py
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..2975313
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,18 @@
+# macOS system files
+.DS_Store
+
+# MkDocs build artifacts
+site/
+docs/SUMMARY.md
+docs/clients/
+**/docs/client/
+
+# Virtual environments
+.venv
+venv
+
+# Python bytecode cache
+__pycache__/
+
+# Dependency lock file for uv
+uv.lock
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index d1fadc3..2f58725 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,4 +1,3 @@
-
# Contributing Guidelines
Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
@@ -54,7 +53,7 @@ opensource-codeofconduct@amazon.com with any additional questions or comments.
## Security issue notifications
If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue.
-
+
## Licensing
-See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
+See the [LICENSE](https://github.com/awslabs/aws-sdk-python/blob/develop/LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..b6b9c76
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,25 @@
+DOCS_PORT ?= 8000
+PYTHON_VERSION := 3.12
+
+.PHONY: docs docs-serve docs-clean docs-install venv
+
+venv:
+ uv venv --python $(PYTHON_VERSION)
+
+docs-install: venv
+ uv pip install -r requirements-docs.in
+ uv pip install -e clients/*
+
+docs-clean:
+ rm -rf site docs/clients docs/SUMMARY.md
+
+docs-generate:
+ uv run python scripts/docs/generate_all_doc_stubs.py
+ uv run python scripts/docs/generate_nav.py
+
+docs: docs-generate
+ uv run mkdocs build
+
+docs-serve:
+ @[ -d site ] || $(MAKE) docs
+ uv run python -m http.server $(DOCS_PORT) --bind 127.0.0.1 --directory site
\ No newline at end of file
diff --git a/README.md b/README.md
index b6bf9e8..f7337e7 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,4 @@
## AWS SDK for Python
-
[![Apache 2 licensed][apache-badge]][apache-url]
[apache-badge]: https://img.shields.io/badge/license-APACHE2-blue.svg
@@ -30,10 +29,9 @@ features we are actively working on.
You can provide feedback or report a bug by submitting an [issue](https://github.com/awslabs/aws-sdk-python/issues/new/choose).
This is the preferred mechanism to give feedback so that other users can engage in the conversation, +1 issues, etc.
-
## Security
-See [CONTRIBUTING](CONTRIBUTING.md#security-issue-notifications) for more information.
+See [CONTRIBUTING](https://github.com/awslabs/aws-sdk-python/blob/develop/CONTRIBUTING.md#security-issue-notifications) for more information.
## License
diff --git a/clients/aws-sdk-bedrock-runtime/Makefile b/clients/aws-sdk-bedrock-runtime/Makefile
new file mode 100644
index 0000000..43adf73
--- /dev/null
+++ b/clients/aws-sdk-bedrock-runtime/Makefile
@@ -0,0 +1,25 @@
+DOCS_PORT ?= 8000
+CLIENT_DIR := src/aws_sdk_bedrock_runtime
+DOCS_OUTPUT_DIR := docs/client
+PYTHON_VERSION := 3.12
+
+.PHONY: docs docs-serve docs-clean docs-install venv
+
+venv:
+ uv venv --python $(PYTHON_VERSION)
+
+docs-install: venv
+ uv sync --group docs
+
+docs-clean:
+ rm -rf site $(DOCS_OUTPUT_DIR)
+
+docs-generate:
+ uv run python scripts/docs/generate_doc_stubs.py -c $(CLIENT_DIR) -o $(DOCS_OUTPUT_DIR)
+
+docs: docs-generate
+ uv run mkdocs build
+
+docs-serve:
+ @[ -d site ] || $(MAKE) docs
+ uv run python -m http.server $(DOCS_PORT) --bind 127.0.0.1 --directory site
\ No newline at end of file
diff --git a/clients/aws-sdk-bedrock-runtime/docs/README.md b/clients/aws-sdk-bedrock-runtime/docs/README.md
new file mode 100644
index 0000000..c25ff76
--- /dev/null
+++ b/clients/aws-sdk-bedrock-runtime/docs/README.md
@@ -0,0 +1,18 @@
+## Generating Client Documentation
+
+Material for MkDocs is used for documentation. You can generate the documentation HTML
+for this client locally with the following:
+
+```bash
+# Install documentation dependencies
+make docs-install
+
+# Serve documentation locally
+make docs-serve
+
+# OR build static HTML documentation
+make docs
+
+# Clean docs artifacts
+make docs-clean
+```
diff --git a/clients/aws-sdk-bedrock-runtime/docs/hooks/copyright.py b/clients/aws-sdk-bedrock-runtime/docs/hooks/copyright.py
new file mode 100644
index 0000000..1260def
--- /dev/null
+++ b/clients/aws-sdk-bedrock-runtime/docs/hooks/copyright.py
@@ -0,0 +1,6 @@
+from datetime import datetime
+
+
+def on_config(config, **kwargs):
+ config.copyright = f"Copyright © {datetime.now().year}, Amazon Web Services, Inc"
+ return config
diff --git a/clients/aws-sdk-bedrock-runtime/docs/index.md b/clients/aws-sdk-bedrock-runtime/docs/index.md
new file mode 100644
index 0000000..612c7a5
--- /dev/null
+++ b/clients/aws-sdk-bedrock-runtime/docs/index.md
@@ -0,0 +1 @@
+--8<-- "README.md"
diff --git a/clients/aws-sdk-bedrock-runtime/docs/stylesheets/extra.css b/clients/aws-sdk-bedrock-runtime/docs/stylesheets/extra.css
new file mode 100644
index 0000000..21d1b09
--- /dev/null
+++ b/clients/aws-sdk-bedrock-runtime/docs/stylesheets/extra.css
@@ -0,0 +1,9 @@
+/* Custom breadcrumb styling */
+.breadcrumb {
+ font-size: 0.85em;
+ color: var(--md-default-fg-color--light);
+}
+
+p:has(span.breadcrumb) {
+margin-top: 0;
+}
diff --git a/clients/aws-sdk-bedrock-runtime/mkdocs.yml b/clients/aws-sdk-bedrock-runtime/mkdocs.yml
new file mode 100644
index 0000000..9e0024e
--- /dev/null
+++ b/clients/aws-sdk-bedrock-runtime/mkdocs.yml
@@ -0,0 +1,96 @@
+site_name: AWS SDK for Python - Bedrock Runtime
+site_description: Documentation for AWS Bedrock Runtime Client
+
+repo_name: awslabs/aws-sdk-python
+repo_url: https://github.com/awslabs/aws-sdk-python
+
+exclude_docs: |
+ README.md
+
+hooks:
+ - docs/hooks/copyright.py
+
+theme:
+ name: material
+ favicon: ""
+ palette:
+ # Palette toggle for automatic mode
+ - media: "(prefers-color-scheme)"
+ scheme: default
+ toggle:
+ icon: material/brightness-auto
+ name: Switch to light mode
+ primary: white
+ # Palette toggle for light mode
+ - media: "(prefers-color-scheme: light)"
+ scheme: default
+ toggle:
+ icon: material/brightness-7
+ name: Switch to dark mode
+ primary: white
+ # Palette toggle for dark mode
+ - media: "(prefers-color-scheme: dark)"
+ scheme: slate
+ toggle:
+ icon: material/brightness-4
+ name: Switch to system preference
+ primary: black
+ features:
+ - navigation.indexes
+ - navigation.instant
+ - navigation.top
+ - search.suggest
+ - search.highlight
+ - content.code.copy
+
+plugins:
+- search
+- mkdocstrings:
+ handlers:
+ python:
+ options:
+ show_source: false
+ show_signature: true
+ show_signature_annotations: true
+ show_root_heading: true
+ show_root_full_path: false
+ show_object_full_path: false
+ show_symbol_type_heading: true
+ show_symbol_type_toc: true
+ show_if_no_docstring: true
+ show_category_heading: true
+ group_by_category: true
+ separate_signature: true
+ signature_crossrefs: true
+ filters:
+ - "!^_"
+ - "!^deserialize"
+ - "!^serialize"
+
+markdown_extensions:
+ - pymdownx.highlight
+ - pymdownx.inlinehilite
+ - pymdownx.snippets:
+ check_paths: true
+ - pymdownx.superfences
+ - admonition
+ - def_list
+ - toc:
+ permalink: true
+ toc_depth: 3
+
+nav:
+ - Overview: index.md
+ - Client: client/index.md
+
+extra:
+ social:
+ - icon: fontawesome/brands/github
+ link: https://github.com/awslabs/aws-sdk-python
+
+extra_css:
+ - stylesheets/extra.css
+
+validation:
+ nav:
+ omitted_files: ignore
diff --git a/clients/aws-sdk-bedrock-runtime/pyproject.toml b/clients/aws-sdk-bedrock-runtime/pyproject.toml
index df2fcb4..3023696 100644
--- a/clients/aws-sdk-bedrock-runtime/pyproject.toml
+++ b/clients/aws-sdk-bedrock-runtime/pyproject.toml
@@ -34,6 +34,12 @@ test = [
"pytest-asyncio>=0.20.3,<0.21.0"
]
+docs = [
+ "mkdocs~=1.6.1",
+ "mkdocs-material==9.7.0",
+ "mkdocstrings[python]==1.0.0"
+]
+
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
diff --git a/clients/aws-sdk-bedrock-runtime/scripts/docs/generate_doc_stubs.py b/clients/aws-sdk-bedrock-runtime/scripts/docs/generate_doc_stubs.py
new file mode 100644
index 0000000..056708a
--- /dev/null
+++ b/clients/aws-sdk-bedrock-runtime/scripts/docs/generate_doc_stubs.py
@@ -0,0 +1,607 @@
+"""
+Generate markdown API Reference stubs for AWS SDK for Python clients.
+
+This script generates MkDocs markdown stub files for a single client package.
+It uses griffe to analyze the Python source and outputs mkdocstrings directives
+for the client, operations, models (structures, unions, enums), and errors.
+"""
+
+import argparse
+import logging
+import sys
+from collections.abc import Sequence
+from dataclasses import dataclass
+from enum import Enum
+from pathlib import Path
+from typing import TypeGuard
+
+import griffe
+from griffe import (
+ Alias,
+ Attribute,
+ Class,
+ Expr,
+ ExprBinOp,
+ ExprName,
+ ExprSubscript,
+ ExprTuple,
+ Function,
+ Module,
+ Object,
+)
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger("generate_doc_stubs")
+
+
+class StreamType(Enum):
+ """Type of event stream for operations."""
+
+ INPUT = "InputEventStream"
+ OUTPUT = "OutputEventStream"
+ DUPLEX = "DuplexEventStream"
+
+ @property
+ def description(self) -> str:
+ """Return a string description for documentation."""
+ descriptions = {
+ StreamType.INPUT: "an `InputEventStream` for client-to-server streaming",
+ StreamType.OUTPUT: "an `OutputEventStream` for server-to-client streaming",
+ StreamType.DUPLEX: "a `DuplexEventStream` for bidirectional streaming",
+ }
+ return descriptions[self]
+
+
+@dataclass(frozen=True)
+class TypeInfo:
+ """Information about a type (structure, enum, error, config, plugin)."""
+
+ name: str # e.g., "ConverseOperationOutput"
+ module_path: str # e.g., "aws_sdk_bedrock_runtime.models.ConverseOperationOutput"
+
+
+@dataclass
+class UnionInfo:
+ """Information about a union type."""
+
+ name: str
+ module_path: str
+ members: list[TypeInfo]
+
+
+@dataclass
+class OperationInfo:
+ """Information about a client operation."""
+
+ name: str
+ module_path: str
+ input: TypeInfo
+ output: TypeInfo
+ stream_type: StreamType | None
+ event_input_type: str | None # For input/duplex streams
+ event_output_type: str | None # For output/duplex streams
+
+
+@dataclass
+class ModelsInfo:
+ """Information about all modeled types."""
+
+ structures: list[TypeInfo]
+ unions: list[UnionInfo]
+ enums: list[TypeInfo]
+ errors: list[TypeInfo]
+
+
+@dataclass
+class ClientInfo:
+ """Complete information about a client package."""
+
+ name: str # e.g., "BedrockRuntimeClient"
+ module_path: str # e.g., "aws_sdk_bedrock_runtime.client.BedrockRuntimeClient"
+ package_name: str # e.g., "aws_sdk_bedrock_runtime"
+ config: TypeInfo
+ plugin: TypeInfo
+ operations: list[OperationInfo]
+ models: ModelsInfo
+
+
+class DocStubGenerator:
+ """Generate markdown API Reference stubs for AWS SDK for Python clients."""
+
+ def __init__(self, client_dir: Path, output_dir: Path) -> None:
+ """
+ Initialize the documentation generator.
+
+ Args:
+ client_dir: Path to the client source directory
+ output_dir: Path to the output directory for generated doc stubs
+ """
+ self.client_dir = client_dir
+ self.output_dir = output_dir
+ # Extract service name from package name
+ # (e.g., "aws_sdk_bedrock_runtime" -> "Bedrock Runtime")
+ self.service_name = client_dir.name.replace("aws_sdk_", "").replace("_", " ").title()
+
+ def generate(self) -> bool:
+ """
+ Generate the documentation stubs to the output directory.
+
+ Returns:
+ True if documentation was generated successfully, False otherwise.
+ """
+ logger.info(f"Generating doc stubs for {self.service_name}...")
+
+ package_name = self.client_dir.name
+ client_info = self._analyze_client_package(package_name)
+ if not self._generate_client_docs(client_info):
+ return False
+
+ logger.info(f"Finished generating doc stubs for {self.service_name}")
+ return True
+
+ def _analyze_client_package(self, package_name: str) -> ClientInfo:
+ """Analyze a client package using griffe."""
+ logger.info(f"Analyzing package: {package_name}")
+ package = griffe.load(package_name)
+
+ # Ensure required modules exist
+ required = ["client", "config", "models"]
+ missing = [name for name in required if not package.modules.get(name)]
+ if missing:
+ raise ValueError(f"Missing required modules in {package_name}: {', '.join(missing)}")
+
+ # Parse submodules
+ client_module = package.modules["client"]
+ config_module = package.modules["config"]
+ models_module = package.modules["models"]
+
+ client_class = self._find_class_with_suffix(client_module, "Client")
+ if not client_class:
+ raise ValueError(f"No class ending with 'Client' found in {package_name}.client")
+
+ config_class = config_module.members.get("Config")
+ plugin_alias = config_module.members.get("Plugin")
+ if not config_class or not plugin_alias:
+ raise ValueError(f"Missing Config or Plugin in {package_name}.config")
+
+ config = TypeInfo(name=config_class.name, module_path=config_class.path)
+ plugin = TypeInfo(name=plugin_alias.name, module_path=plugin_alias.path)
+
+ operations = self._extract_operations(client_class)
+ models = self._extract_models(models_module, operations)
+
+ logger.info(
+ f"Analyzed {client_class.name}: {len(operations)} operations, "
+ f"{len(models.structures)} structures, {len(models.errors)} errors, "
+ f"{len(models.unions)} unions, {len(models.enums)} enums"
+ )
+
+ return ClientInfo(
+ name=client_class.name,
+ module_path=client_class.path,
+ package_name=package_name,
+ config=config,
+ plugin=plugin,
+ operations=operations,
+ models=models,
+ )
+
+ def _find_class_with_suffix(self, module: Module, suffix: str) -> Class | None:
+ """Find the class in the module with a matching suffix."""
+ for cls in module.classes.values():
+ if cls.name.endswith(suffix):
+ return cls
+ return None
+
+ def _extract_operations(self, client_class: Class) -> list[OperationInfo]:
+ """Extract operation information from client class."""
+ operations = []
+ for op in client_class.functions.values():
+ if op.is_private or op.is_init_method:
+ continue
+ operations.append(self._analyze_operation(op))
+ return operations
+
+ def _analyze_operation(self, operation: Function) -> OperationInfo:
+ """Analyze an operation method to extract information."""
+ stream_type = None
+ event_input_type = None
+ event_output_type = None
+
+ input_param = operation.parameters["input"]
+ input_annotation = self._get_expr(
+ input_param.annotation, f"'{operation.name}' input annotation"
+ )
+ input_info = TypeInfo(
+ name=input_annotation.canonical_name,
+ module_path=input_annotation.canonical_path,
+ )
+
+ returns = self._get_expr(operation.returns, f"'{operation.name}' return type")
+ output_type = returns.canonical_name
+ stream_type_map = {s.value: s for s in StreamType}
+
+ if output_type in stream_type_map:
+ stream_type = stream_type_map[output_type]
+ stream_args = self._get_subscript_elements(returns, f"'{operation.name}' stream type")
+
+ if stream_type in (StreamType.INPUT, StreamType.DUPLEX):
+ event_input_type = stream_args[0].canonical_name
+ if stream_type in (StreamType.OUTPUT, StreamType.DUPLEX):
+ idx = 1 if stream_type == StreamType.DUPLEX else 0
+ event_output_type = stream_args[idx].canonical_name
+
+ output_info = TypeInfo(
+ name=stream_args[-1].canonical_name, module_path=stream_args[-1].canonical_path
+ )
+ else:
+ output_info = TypeInfo(name=output_type, module_path=returns.canonical_path)
+
+ return OperationInfo(
+ name=operation.name,
+ module_path=operation.path,
+ input=input_info,
+ output=output_info,
+ stream_type=stream_type,
+ event_input_type=event_input_type,
+ event_output_type=event_output_type,
+ )
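+ # Illustrative mapping, assuming hypothetical type names: an annotation of
+ # DuplexEventStream[InputEvents, OutputEvents, FinalResponse] yields
+ # event_input_type="InputEvents", event_output_type="OutputEvents", and an
+ # output structure of FinalResponse (the last subscript argument).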
+
+ def _get_expr(self, annotation: str | Expr | None, context: str) -> Expr:
+ """Extract and validate an Expr from an annotation."""
+ if not isinstance(annotation, Expr):
+ raise TypeError(f"{context}: expected Expr, got {type(annotation).__name__}")
+ return annotation
+
+ def _get_subscript_elements(self, expr: Expr, context: str) -> list[Expr]:
+ """Extract type arguments from a subscript expression like Generic[A, B, C]."""
+ if not isinstance(expr, ExprSubscript):
+ raise TypeError(f"{context}: expected subscript, got {type(expr).__name__}")
+ slice_expr = expr.slice
+ if isinstance(slice_expr, str):
+ raise TypeError(f"{context}: unexpected string slice '{slice_expr}'")
+ if isinstance(slice_expr, ExprTuple):
+ return [el for el in slice_expr.elements if isinstance(el, Expr)]
+ return [slice_expr]
+
+ def _extract_models(self, models_module: Module, operations: list[OperationInfo]) -> ModelsInfo:
+ """Extract structures, unions, enums, and errors from models module."""
+ structures, unions, enums, errors = [], [], [], []
+
+ for member in models_module.members.values():
+ # Skip imported and private members
+ if member.is_imported or member.is_private:
+ continue
+
+ if self._is_union(member):
+ unions.append(
+ UnionInfo(
+ name=member.name,
+ module_path=member.path,
+ members=self._extract_union_members(member, models_module),
+ )
+ )
+ elif self._is_enum(member):
+ enums.append(TypeInfo(name=member.name, module_path=member.path))
+ elif self._is_error(member):
+ errors.append(TypeInfo(name=member.name, module_path=member.path))
+ elif member.is_class:
+ structures.append(TypeInfo(name=member.name, module_path=member.path))
+
+ duplicates = set()
+ for structure in structures:
+ if self._is_operation_io_type(structure.name, operations) or self._is_union_member(
+ structure.name, unions
+ ):
+ duplicates.add(structure)
+
+ structures = [struct for struct in structures if struct not in duplicates]
+
+ return ModelsInfo(structures=structures, unions=unions, enums=enums, errors=errors)
+
+ def _is_union(self, member: Object | Alias) -> TypeGuard[Attribute]:
+ """Check if a module member is a union type."""
+ if not isinstance(member, Attribute):
+ return False
+
+ value = member.value
+ # Check for Union[...] syntax
+ if isinstance(value, ExprSubscript):
+ left = value.left
+ if isinstance(left, ExprName) and left.name == "Union":
+ return True
+
+ # Check for PEP 604 (X | Y) syntax
+ if isinstance(value, ExprBinOp):
+ return True
+
+ return False
+
+ def _extract_union_members(
+ self, union_attr: Attribute, models_module: Module
+ ) -> list[TypeInfo]:
+ """Extract member types from a union."""
+ members = []
+ value_str = str(union_attr.value)
+
+ # Clean up value_str for Union[X | Y | Z] syntax
+ if value_str.startswith("Union[") and value_str.endswith("]"):
+ value_str = value_str.removeprefix("Union[").removesuffix("]")
+
+ member_names = [member.strip() for member in value_str.split("|")]
+
+ for name in member_names:
+ if not (member_object := models_module.members.get(name)):
+ raise ValueError(f"Union member '{name}' not found in models module")
+ members.append(TypeInfo(name=member_object.name, module_path=member_object.path))
+
+ return members
+
+ def _is_enum(self, member: Object | Alias) -> TypeGuard[Class]:
+ """Check if a module member is an enum."""
+ if not isinstance(member, Class):
+ return False
+ return any(
+ isinstance(base, ExprName) and base.name in ("StrEnum", "IntEnum")
+ for base in member.bases
+ )
+
+ def _is_error(self, member: Object | Alias) -> TypeGuard[Class]:
+ """Check if a module member is an error."""
+ if not isinstance(member, Class):
+ return False
+ return any(
+ isinstance(base, ExprName) and base.name in ("ServiceError", "ModeledError")
+ for base in member.bases
+ )
+
+ def _is_operation_io_type(self, type_name: str, operations: list[OperationInfo]) -> bool:
+ """Check if a type is used as operation input/output."""
+ return any(type_name in (op.input.name, op.output.name) for op in operations)
+
+ def _is_union_member(self, type_name: str, unions: list[UnionInfo]) -> bool:
+ """Check if a type is used as union member."""
+ return any(type_name == m.name for u in unions for m in u.members)
+
+ def _generate_client_docs(self, client_info: ClientInfo) -> bool:
+ """Generate all documentation files for a client."""
+ logger.info(f"Writing doc stubs to {self.output_dir}...")
+
+ try:
+ self._generate_index(client_info)
+ self._generate_operation_stubs(client_info.operations)
+ self._generate_type_stubs(
+ client_info.models.structures, "structures", "Structure Class"
+ )
+ self._generate_type_stubs(client_info.models.errors, "errors", "Error Class")
+ self._generate_type_stubs(
+ client_info.models.enums, "enums", "Enum Class", ["members: true"]
+ )
+ self._generate_union_stubs(client_info.models.unions)
+ except OSError as e:
+ logger.error(f"Failed to write documentation files: {e}")
+ return False
+ return True
+
+ def _generate_index(self, client_info: ClientInfo) -> None:
+ """Generate the main index.md file."""
+ lines = []
+ lines.append(f"# {self.service_name}")
+ lines.append("")
+ lines.append("## Client")
+ lines.append("")
+ lines.append(f"::: {client_info.module_path}")
+ lines.append(" options:")
+ lines.append(" members: false")
+ lines.append(" heading_level: 3")
+ lines.append(" merge_init_into_class: true")
+ lines.append(" docstring_options:")
+ lines.append(" ignore_init_summary: true")
+ lines.append("")
+
+ # Operations section
+ if client_info.operations:
+ lines.append("## Operations")
+ lines.append("")
+ for op in sorted(client_info.operations, key=lambda x: x.name):
+ lines.append(f"- [`{op.name}`](operations/{op.name}.md)")
+ lines.append("")
+
+ # Configuration section
+ lines.append("## Configuration")
+ lines.append("")
+ lines.append(f"::: {client_info.config.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append(" merge_init_into_class: true")
+ lines.append(" docstring_options:")
+ lines.append(" ignore_init_summary: true")
+ lines.append("")
+ lines.append(f"::: {client_info.plugin.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+
+ models = client_info.models
+
+ # Model sections
+ sections: list[tuple[str, str, Sequence[TypeInfo | UnionInfo]]] = [
+ ("Structures", "structures", models.structures),
+ ("Errors", "errors", models.errors),
+ ("Unions", "unions", models.unions),
+ ("Enums", "enums", models.enums),
+ ]
+ for title, folder, items in sections:
+ if items:
+ lines.append("")
+ lines.append(f"## {title}")
+ lines.append("")
+ for item in sorted(items, key=lambda x: x.name):
+ lines.append(f"- [`{item.name}`]({folder}/{item.name}.md)")
+
+ output_path = self.output_dir / "index.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ content = "\n".join(lines)
+ output_path.write_text(content)
+
+ logger.info("Wrote client index file!")
+
+ def _generate_operation_stubs(self, operations: list[OperationInfo]) -> None:
+ """Generate operation documentation files."""
+ for op in operations:
+ lines = []
+ lines.append(f"# {op.name}")
+ lines.append("")
+
+ # Operation section
+ lines.append("## Operation")
+ lines.append("")
+ lines.append(f"::: {op.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append("")
+
+ # Input section
+ lines.append("## Input")
+ lines.append("")
+ lines.append(f"::: {op.input.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append("")
+
+ # Output section - handle all stream types
+ lines.append("## Output")
+ lines.append("")
+
+ if op.stream_type:
+ lines.append(f"This operation returns {op.stream_type.description}.")
+ lines.append("")
+ lines.append("### Event Stream Structure")
+ lines.append("")
+
+ if op.event_input_type:
+ lines.append("#### Input Event Type")
+ lines.append("")
+ lines.append(f"[`{op.event_input_type}`](../unions/{op.event_input_type}.md)")
+ lines.append("")
+ if op.event_output_type:
+ lines.append("#### Output Event Type")
+ lines.append("")
+ lines.append(f"[`{op.event_output_type}`](../unions/{op.event_output_type}.md)")
+ lines.append("")
+
+ lines.append("### Initial Response Structure")
+ lines.append("")
+ lines.append(f"::: {op.output.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 4")
+ else:
+ lines.append(f"::: {op.output.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+
+ output_path = self.output_dir / "operations" / f"{op.name}.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ output_path.write_text(self._breadcrumb("Operations", op.name) + "\n".join(lines))
+
+ logger.info(f"Wrote {len(operations)} operation files")
+
+ def _generate_type_stubs(
+ self,
+ items: list[TypeInfo],
+ category: str,
+ section_title: str,
+ extra_options: list[str] | None = None,
+ ) -> None:
+ """Generate documentation files for a category of types."""
+ for item in items:
+ lines = [
+ f"# {item.name}",
+ "",
+ f"## {section_title}",
+ f"::: {item.module_path}",
+ " options:",
+ " heading_level: 3",
+ ]
+ if extra_options:
+ lines.extend(f" {opt}" for opt in extra_options)
+
+ output_path = self.output_dir / category / f"{item.name}.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ output_path.write_text(self._breadcrumb(category.title(), item.name) + "\n".join(lines))
+
+ logger.info(f"Wrote {len(items)} {category} files")
+
+ def _generate_union_stubs(self, unions: list[UnionInfo]) -> None:
+ """Generate union documentation files."""
+ for union in unions:
+ lines = []
+ lines.append(f"# {union.name}")
+ lines.append("")
+ lines.append("## Union Type")
+ lines.append(f"::: {union.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append("")
+
+ # Add union members
+ if union.members:
+ lines.append("## Union Member Types")
+ for member in union.members:
+ lines.append("")
+ lines.append(f"::: {member.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+
+ output_path = self.output_dir / "unions" / f"{union.name}.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ output_path.write_text(self._breadcrumb("Unions", union.name) + "\n".join(lines))
+
+ logger.info(f"Wrote {len(unions)} union files")
+
+ def _breadcrumb(self, category: str, name: str) -> str:
+ """Generate a breadcrumb navigation element."""
+ separator = " > "
+ home = f"[{self.service_name}](../index.md)"
+ section = f"[{category}](../index.md#{category.lower()})"
+        return f'<span class="breadcrumb">{home}{separator}{section}{separator}{name}</span>\n\n'
+
+
+def main() -> int:
+ """Main entry point for the single-client documentation generator."""
+ parser = argparse.ArgumentParser(
+ description="Generate API documentation stubs for AWS SDK Python client."
+ )
+ parser.add_argument(
+ "-c", "--client-dir", type=Path, required=True, help="Path to the client source package"
+ )
+ parser.add_argument(
+ "-o",
+ "--output-dir",
+ type=Path,
+ required=True,
+ help="Output directory for generated doc stubs",
+ )
+
+ args = parser.parse_args()
+ client_dir = args.client_dir.resolve()
+ output_dir = args.output_dir.resolve()
+
+ if not client_dir.exists():
+ logger.error(f"Client directory not found: {client_dir}")
+ return 1
+
+ try:
+ generator = DocStubGenerator(client_dir, output_dir)
+ success = generator.generate()
+ return 0 if success else 1
+ except Exception as e:
+ logger.error(f"Unexpected error generating doc stubs: {e}", exc_info=True)
+ return 1
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/Makefile b/clients/aws-sdk-sagemaker-runtime-http2/Makefile
new file mode 100644
index 0000000..44f9df9
--- /dev/null
+++ b/clients/aws-sdk-sagemaker-runtime-http2/Makefile
@@ -0,0 +1,25 @@
+DOCS_PORT ?= 8000
+CLIENT_DIR := src/aws_sdk_sagemaker_runtime_http2
+DOCS_OUTPUT_DIR := docs/client
+PYTHON_VERSION := 3.12
+
+.PHONY: docs docs-generate docs-serve docs-clean docs-install venv
+
+venv:
+ uv venv --python $(PYTHON_VERSION)
+
+docs-install: venv
+ uv sync --group docs
+
+docs-clean:
+ rm -rf site $(DOCS_OUTPUT_DIR)
+
+docs-generate:
+ uv run python scripts/docs/generate_doc_stubs.py -c $(CLIENT_DIR) -o $(DOCS_OUTPUT_DIR)
+
+docs: docs-generate
+ uv run mkdocs build
+
+docs-serve:
+ @[ -d site ] || $(MAKE) docs
+ uv run python -m http.server $(DOCS_PORT) --bind 127.0.0.1 --directory site
\ No newline at end of file
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/README.md b/clients/aws-sdk-sagemaker-runtime-http2/README.md
index fa1cda1..8d7c485 100644
--- a/clients/aws-sdk-sagemaker-runtime-http2/README.md
+++ b/clients/aws-sdk-sagemaker-runtime-http2/README.md
@@ -9,6 +9,6 @@ Changes may result in breaking changes prior to the release of version
Documentation is available in the `/docs` directory of this package.
Pages can be built into portable HTML files for the time being. You can
-follow the instructions in the docs [README.md](https://github.com/awslabs/aws-sdk-python/blob/main/clients/aws-sdk-sagemaker-runtime-http/docs/README.md).
+follow the instructions in the docs [README.md](https://github.com/awslabs/aws-sdk-python/blob/main/clients/aws-sdk-sagemaker-runtime-http2/docs/README.md).
For high-level documentation, you can view the [`dev-guide`](https://github.com/awslabs/aws-sdk-python/tree/main/dev-guide) at the top level of this repo.
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/README.md b/clients/aws-sdk-sagemaker-runtime-http2/docs/README.md
new file mode 100644
index 0000000..c25ff76
--- /dev/null
+++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/README.md
@@ -0,0 +1,18 @@
+## Generating Client Documentation
+
+This client's documentation is built with Material for MkDocs. You can generate the
+HTML documentation for this client locally with the following commands:
+
+```bash
+# Install documentation dependencies
+make docs-install
+
+# Serve documentation locally
+make docs-serve
+
+# OR build static HTML documentation
+make docs
+
+# Clean docs artifacts
+make docs-clean
+```
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/hooks/copyright.py b/clients/aws-sdk-sagemaker-runtime-http2/docs/hooks/copyright.py
new file mode 100644
index 0000000..1260def
--- /dev/null
+++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/hooks/copyright.py
@@ -0,0 +1,6 @@
+from datetime import datetime
+
+
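+# MkDocs "hooks" entry point: on_config runs after the configuration is loaded,
+# so the footer copyright year always matches the build year instead of being
+# hard-coded in mkdocs.yml.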
+def on_config(config, **kwargs):
+ config.copyright = f"Copyright © {datetime.now().year}, Amazon Web Services, Inc"
+ return config
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/index.md b/clients/aws-sdk-sagemaker-runtime-http2/docs/index.md
new file mode 100644
index 0000000..612c7a5
--- /dev/null
+++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/index.md
@@ -0,0 +1 @@
+--8<-- "README.md"
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/stylesheets/extra.css b/clients/aws-sdk-sagemaker-runtime-http2/docs/stylesheets/extra.css
new file mode 100644
index 0000000..21d1b09
--- /dev/null
+++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/stylesheets/extra.css
@@ -0,0 +1,9 @@
+/* Custom breadcrumb styling */
+.breadcrumb {
+ font-size: 0.85em;
+ color: var(--md-default-fg-color--light);
+}
+
+p:has(span.breadcrumb) {
+  margin-top: 0;
+}
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/mkdocs.yml b/clients/aws-sdk-sagemaker-runtime-http2/mkdocs.yml
new file mode 100644
index 0000000..aeb4186
--- /dev/null
+++ b/clients/aws-sdk-sagemaker-runtime-http2/mkdocs.yml
@@ -0,0 +1,96 @@
+site_name: AWS SDK for Python - Sagemaker Runtime Http2
+site_description: Documentation for AWS Sagemaker Runtime Http2 Client
+
+repo_name: awslabs/aws-sdk-python
+repo_url: https://github.com/awslabs/aws-sdk-python
+
+exclude_docs: |
+ README.md
+
+hooks:
+ - docs/hooks/copyright.py
+
+theme:
+ name: material
+ favicon: ""
+ palette:
+ # Palette toggle for automatic mode
+ - media: "(prefers-color-scheme)"
+ scheme: default
+ toggle:
+ icon: material/brightness-auto
+ name: Switch to light mode
+ primary: white
+ # Palette toggle for light mode
+ - media: "(prefers-color-scheme: light)"
+ scheme: default
+ toggle:
+ icon: material/brightness-7
+ name: Switch to dark mode
+ primary: white
+ # Palette toggle for dark mode
+ - media: "(prefers-color-scheme: dark)"
+ scheme: slate
+ toggle:
+ icon: material/brightness-4
+ name: Switch to system preference
+ primary: black
+ features:
+ - navigation.indexes
+ - navigation.instant
+ - navigation.top
+ - search.suggest
+ - search.highlight
+ - content.code.copy
+
+plugins:
+- search
+- mkdocstrings:
+ handlers:
+ python:
+ options:
+ show_source: false
+ show_signature: true
+ show_signature_annotations: true
+ show_root_heading: true
+ show_root_full_path: false
+ show_object_full_path: false
+ show_symbol_type_heading: true
+ show_symbol_type_toc: true
+ show_if_no_docstring: true
+ show_category_heading: true
+ group_by_category: true
+ separate_signature: true
+ signature_crossrefs: true
+ filters:
+ - "!^_"
+ - "!^deserialize"
+ - "!^serialize"
+
+markdown_extensions:
+ - pymdownx.highlight
+ - pymdownx.inlinehilite
+ - pymdownx.snippets:
+ check_paths: true
+ - pymdownx.superfences
+ - admonition
+ - def_list
+ - toc:
+ permalink: true
+ toc_depth: 3
+
+nav:
+ - Overview: index.md
+ - Client: client/index.md
+
+extra:
+ social:
+ - icon: fontawesome/brands/github
+ link: https://github.com/awslabs/aws-sdk-python
+
+extra_css:
+ - stylesheets/extra.css
+
+validation:
+ nav:
+ omitted_files: ignore
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/pyproject.toml b/clients/aws-sdk-sagemaker-runtime-http2/pyproject.toml
index 3b1e6e4..f0ba4c0 100644
--- a/clients/aws-sdk-sagemaker-runtime-http2/pyproject.toml
+++ b/clients/aws-sdk-sagemaker-runtime-http2/pyproject.toml
@@ -33,6 +33,11 @@ test = [
"pytest>=7.2.0,<8.0.0",
"pytest-asyncio>=0.20.3,<0.21.0"
]
+docs = [
+ "mkdocs~=1.6.1",
+ "mkdocs-material==9.7.0",
+ "mkdocstrings[python]==1.0.0"
+]
[build-system]
requires = ["hatchling"]
@@ -57,5 +62,4 @@ ignore = ["F841"]
skip-magic-trailing-comma = true
[tool.pytest.ini_options]
-python_classes = ["!Test"]
-asyncio_mode = "auto"
+# python_classes = ["!Test"]
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/scripts/docs/generate_doc_stubs.py b/clients/aws-sdk-sagemaker-runtime-http2/scripts/docs/generate_doc_stubs.py
new file mode 100644
index 0000000..056708a
--- /dev/null
+++ b/clients/aws-sdk-sagemaker-runtime-http2/scripts/docs/generate_doc_stubs.py
@@ -0,0 +1,607 @@
+"""
+Generate markdown API Reference stubs for AWS SDK for Python clients.
+
+This script generates MkDocs markdown stub files for a single client package.
+It uses griffe to analyze the Python source and outputs mkdocstrings directives
+for the client, operations, models (structures, unions, enums), and errors.
+"""
+
+import argparse
+import logging
+import sys
+from collections.abc import Sequence
+from dataclasses import dataclass
+from enum import Enum
+from pathlib import Path
+from typing import TypeGuard
+
+import griffe
+from griffe import (
+ Alias,
+ Attribute,
+ Class,
+ Expr,
+ ExprBinOp,
+ ExprName,
+ ExprSubscript,
+ ExprTuple,
+ Function,
+ Module,
+ Object,
+)
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger("generate_doc_stubs")
+
+
+class StreamType(Enum):
+ """Type of event stream for operations."""
+
+ INPUT = "InputEventStream"
+ OUTPUT = "OutputEventStream"
+ DUPLEX = "DuplexEventStream"
+
+ @property
+ def description(self) -> str:
+ """Return a string description for documentation."""
+ descriptions = {
+ StreamType.INPUT: "an `InputEventStream` for client-to-server streaming",
+ StreamType.OUTPUT: "an `OutputEventStream` for server-to-client streaming",
+ StreamType.DUPLEX: "a `DuplexEventStream` for bidirectional streaming",
+ }
+ return descriptions[self]
+
+
+@dataclass(frozen=True)
+class TypeInfo:
+ """Information about a type (structure, enum, error, config, plugin)."""
+
+ name: str # e.g., "ConverseOperationOutput"
+ module_path: str # e.g., "aws_sdk_bedrock_runtime.models.ConverseOperationOutput"
+
+
+@dataclass
+class UnionInfo:
+ """Information about a union type."""
+
+ name: str
+ module_path: str
+ members: list[TypeInfo]
+
+
+@dataclass
+class OperationInfo:
+ """Information about a client operation."""
+
+ name: str
+ module_path: str
+ input: TypeInfo
+ output: TypeInfo
+ stream_type: StreamType | None
+ event_input_type: str | None # For input/duplex streams
+ event_output_type: str | None # For output/duplex streams
+
+
+@dataclass
+class ModelsInfo:
+ """Information about all modeled types."""
+
+ structures: list[TypeInfo]
+ unions: list[UnionInfo]
+ enums: list[TypeInfo]
+ errors: list[TypeInfo]
+
+
+@dataclass
+class ClientInfo:
+ """Complete information about a client package."""
+
+ name: str # e.g., "BedrockRuntimeClient"
+ module_path: str # e.g., "aws_sdk_bedrock_runtime.client.BedrockRuntimeClient"
+ package_name: str # e.g., "aws_sdk_bedrock_runtime"
+ config: TypeInfo
+ plugin: TypeInfo
+ operations: list[OperationInfo]
+ models: ModelsInfo
+
+
+class DocStubGenerator:
+ """Generate markdown API Reference stubs for AWS SDK for Python clients."""
+
+ def __init__(self, client_dir: Path, output_dir: Path) -> None:
+ """
+ Initialize the documentation generator.
+
+ Args:
+ client_dir: Path to the client source directory
+ output_dir: Path to the output directory for generated doc stubs
+ """
+ self.client_dir = client_dir
+ self.output_dir = output_dir
+ # Extract service name from package name
+ # (e.g., "aws_sdk_bedrock_runtime" -> "Bedrock Runtime")
+ self.service_name = client_dir.name.replace("aws_sdk_", "").replace("_", " ").title()
+
+ def generate(self) -> bool:
+ """
+ Generate the documentation stubs to the output directory.
+
+ Returns:
+ True if documentation was generated successfully, False otherwise.
+ """
+ logger.info(f"Generating doc stubs for {self.service_name}...")
+
+ package_name = self.client_dir.name
+ client_info = self._analyze_client_package(package_name)
+ if not self._generate_client_docs(client_info):
+ return False
+
+ logger.info(f"Finished generating doc stubs for {self.service_name}")
+ return True
+
+ def _analyze_client_package(self, package_name: str) -> ClientInfo:
+ """Analyze a client package using griffe."""
+ logger.info(f"Analyzing package: {package_name}")
+ package = griffe.load(package_name)
+
+ # Ensure required modules exist
+ required = ["client", "config", "models"]
+ missing = [name for name in required if not package.modules.get(name)]
+ if missing:
+ raise ValueError(f"Missing required modules in {package_name}: {', '.join(missing)}")
+
+ # Parse submodules
+ client_module = package.modules["client"]
+ config_module = package.modules["config"]
+ models_module = package.modules["models"]
+
+ client_class = self._find_class_with_suffix(client_module, "Client")
+ if not client_class:
+ raise ValueError(f"No class ending with 'Client' found in {package_name}.client")
+
+ config_class = config_module.members.get("Config")
+ plugin_alias = config_module.members.get("Plugin")
+ if not config_class or not plugin_alias:
+ raise ValueError(f"Missing Config or Plugin in {package_name}.config")
+
+ config = TypeInfo(name=config_class.name, module_path=config_class.path)
+ plugin = TypeInfo(name=plugin_alias.name, module_path=plugin_alias.path)
+
+ operations = self._extract_operations(client_class)
+ models = self._extract_models(models_module, operations)
+
+ logger.info(
+ f"Analyzed {client_class.name}: {len(operations)} operations, "
+ f"{len(models.structures)} structures, {len(models.errors)} errors, "
+ f"{len(models.unions)} unions, {len(models.enums)} enums"
+ )
+
+ return ClientInfo(
+ name=client_class.name,
+ module_path=client_class.path,
+ package_name=package_name,
+ config=config,
+ plugin=plugin,
+ operations=operations,
+ models=models,
+ )
+
+ def _find_class_with_suffix(self, module: Module, suffix: str) -> Class | None:
+ """Find the class in the module with a matching suffix."""
+ for cls in module.classes.values():
+ if cls.name.endswith(suffix):
+ return cls
+ return None
+
+ def _extract_operations(self, client_class: Class) -> list[OperationInfo]:
+ """Extract operation information from client class."""
+ operations = []
+ for op in client_class.functions.values():
+ if op.is_private or op.is_init_method:
+ continue
+ operations.append(self._analyze_operation(op))
+ return operations
+
+ def _analyze_operation(self, operation: Function) -> OperationInfo:
+ """Analyze an operation method to extract information."""
+ stream_type = None
+ event_input_type = None
+ event_output_type = None
+
+ input_param = operation.parameters["input"]
+ input_annotation = self._get_expr(
+ input_param.annotation, f"'{operation.name}' input annotation"
+ )
+ input_info = TypeInfo(
+ name=input_annotation.canonical_name,
+ module_path=input_annotation.canonical_path,
+ )
+
+ returns = self._get_expr(operation.returns, f"'{operation.name}' return type")
+ output_type = returns.canonical_name
+ stream_type_map = {s.value: s for s in StreamType}
+
+ if output_type in stream_type_map:
+ stream_type = stream_type_map[output_type]
+ stream_args = self._get_subscript_elements(returns, f"'{operation.name}' stream type")
+
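+            # The event stream generics are assumed to be parameterized as
+            #   InputEventStream[InputEvents, InitialResponse]
+            #   OutputEventStream[OutputEvents, InitialResponse]
+            #   DuplexEventStream[InputEvents, OutputEvents, InitialResponse]
+            # which is what the index arithmetic below relies on.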
+ if stream_type in (StreamType.INPUT, StreamType.DUPLEX):
+ event_input_type = stream_args[0].canonical_name
+ if stream_type in (StreamType.OUTPUT, StreamType.DUPLEX):
+ idx = 1 if stream_type == StreamType.DUPLEX else 0
+ event_output_type = stream_args[idx].canonical_name
+
+ output_info = TypeInfo(
+ name=stream_args[-1].canonical_name, module_path=stream_args[-1].canonical_path
+ )
+ else:
+ output_info = TypeInfo(name=output_type, module_path=returns.canonical_path)
+
+ return OperationInfo(
+ name=operation.name,
+ module_path=operation.path,
+ input=input_info,
+ output=output_info,
+ stream_type=stream_type,
+ event_input_type=event_input_type,
+ event_output_type=event_output_type,
+ )
+
+ def _get_expr(self, annotation: str | Expr | None, context: str) -> Expr:
+ """Extract and validate an Expr from an annotation."""
+ if not isinstance(annotation, Expr):
+ raise TypeError(f"{context}: expected Expr, got {type(annotation).__name__}")
+ return annotation
+
+ def _get_subscript_elements(self, expr: Expr, context: str) -> list[Expr]:
+ """Extract type arguments from a subscript expression like Generic[A, B, C]."""
+ if not isinstance(expr, ExprSubscript):
+ raise TypeError(f"{context}: expected subscript, got {type(expr).__name__}")
+ slice_expr = expr.slice
+ if isinstance(slice_expr, str):
+ raise TypeError(f"{context}: unexpected string slice '{slice_expr}'")
+ if isinstance(slice_expr, ExprTuple):
+ return [el for el in slice_expr.elements if isinstance(el, Expr)]
+ return [slice_expr]
+
+ def _extract_models(self, models_module: Module, operations: list[OperationInfo]) -> ModelsInfo:
+ """Extract structures, unions, enums, and errors from models module."""
+ structures, unions, enums, errors = [], [], [], []
+
+ for member in models_module.members.values():
+ # Skip imported and private members
+ if member.is_imported or member.is_private:
+ continue
+
+ if self._is_union(member):
+ unions.append(
+ UnionInfo(
+ name=member.name,
+ module_path=member.path,
+ members=self._extract_union_members(member, models_module),
+ )
+ )
+ elif self._is_enum(member):
+ enums.append(TypeInfo(name=member.name, module_path=member.path))
+ elif self._is_error(member):
+ errors.append(TypeInfo(name=member.name, module_path=member.path))
+ elif member.is_class:
+ structures.append(TypeInfo(name=member.name, module_path=member.path))
+
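+        # Structures that already appear as operation inputs/outputs or as union
+        # members are documented on those pages, so drop them here to avoid
+        # writing duplicate stub files.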
+ duplicates = set()
+ for structure in structures:
+ if self._is_operation_io_type(structure.name, operations) or self._is_union_member(
+ structure.name, unions
+ ):
+ duplicates.add(structure)
+
+ structures = [struct for struct in structures if struct not in duplicates]
+
+ return ModelsInfo(structures=structures, unions=unions, enums=enums, errors=errors)
+
+ def _is_union(self, member: Object | Alias) -> TypeGuard[Attribute]:
+ """Check if a module member is a union type."""
+ if not isinstance(member, Attribute):
+ return False
+
+ value = member.value
+ # Check for Union[...] syntax
+ if isinstance(value, ExprSubscript):
+ left = value.left
+ if isinstance(left, ExprName) and left.name == "Union":
+ return True
+
+ # Check for PEP 604 (X | Y) syntax
+ if isinstance(value, ExprBinOp):
+ return True
+
+ return False
+
+ def _extract_union_members(
+ self, union_attr: Attribute, models_module: Module
+ ) -> list[TypeInfo]:
+ """Extract member types from a union."""
+ members = []
+ value_str = str(union_attr.value)
+
+ # Clean up value_str for Union[X | Y | Z] syntax
+ if value_str.startswith("Union[") and value_str.endswith("]"):
+ value_str = value_str.removeprefix("Union[").removesuffix("]")
+
+ member_names = [member.strip() for member in value_str.split("|")]
+
+ for name in member_names:
+ if not (member_object := models_module.members.get(name)):
+ raise ValueError(f"Union member '{name}' not found in models module")
+ members.append(TypeInfo(name=member_object.name, module_path=member_object.path))
+
+ return members
+
+ def _is_enum(self, member: Object | Alias) -> TypeGuard[Class]:
+ """Check if a module member is an enum."""
+ if not isinstance(member, Class):
+ return False
+ return any(
+ isinstance(base, ExprName) and base.name in ("StrEnum", "IntEnum")
+ for base in member.bases
+ )
+
+ def _is_error(self, member: Object | Alias) -> TypeGuard[Class]:
+ """Check if a module member is an error."""
+ if not isinstance(member, Class):
+ return False
+ return any(
+ isinstance(base, ExprName) and base.name in ("ServiceError", "ModeledError")
+ for base in member.bases
+ )
+
+ def _is_operation_io_type(self, type_name: str, operations: list[OperationInfo]) -> bool:
+ """Check if a type is used as operation input/output."""
+ return any(type_name in (op.input.name, op.output.name) for op in operations)
+
+ def _is_union_member(self, type_name: str, unions: list[UnionInfo]) -> bool:
+ """Check if a type is used as union member."""
+ return any(type_name == m.name for u in unions for m in u.members)
+
+ def _generate_client_docs(self, client_info: ClientInfo) -> bool:
+ """Generate all documentation files for a client."""
+ logger.info(f"Writing doc stubs to {self.output_dir}...")
+
+ try:
+ self._generate_index(client_info)
+ self._generate_operation_stubs(client_info.operations)
+ self._generate_type_stubs(
+ client_info.models.structures, "structures", "Structure Class"
+ )
+ self._generate_type_stubs(client_info.models.errors, "errors", "Error Class")
+ self._generate_type_stubs(
+ client_info.models.enums, "enums", "Enum Class", ["members: true"]
+ )
+ self._generate_union_stubs(client_info.models.unions)
+ except OSError as e:
+ logger.error(f"Failed to write documentation files: {e}")
+ return False
+ return True
+
+ def _generate_index(self, client_info: ClientInfo) -> None:
+ """Generate the main index.md file."""
+ lines = []
+ lines.append(f"# {self.service_name}")
+ lines.append("")
+ lines.append("## Client")
+ lines.append("")
+ lines.append(f"::: {client_info.module_path}")
+ lines.append(" options:")
+ lines.append(" members: false")
+ lines.append(" heading_level: 3")
+ lines.append(" merge_init_into_class: true")
+ lines.append(" docstring_options:")
+ lines.append(" ignore_init_summary: true")
+ lines.append("")
+
+ # Operations section
+ if client_info.operations:
+ lines.append("## Operations")
+ lines.append("")
+ for op in sorted(client_info.operations, key=lambda x: x.name):
+ lines.append(f"- [`{op.name}`](operations/{op.name}.md)")
+ lines.append("")
+
+ # Configuration section
+ lines.append("## Configuration")
+ lines.append("")
+ lines.append(f"::: {client_info.config.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append(" merge_init_into_class: true")
+ lines.append(" docstring_options:")
+ lines.append(" ignore_init_summary: true")
+ lines.append("")
+ lines.append(f"::: {client_info.plugin.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+
+ models = client_info.models
+
+ # Model sections
+ sections: list[tuple[str, str, Sequence[TypeInfo | UnionInfo]]] = [
+ ("Structures", "structures", models.structures),
+ ("Errors", "errors", models.errors),
+ ("Unions", "unions", models.unions),
+ ("Enums", "enums", models.enums),
+ ]
+ for title, folder, items in sections:
+ if items:
+ lines.append("")
+ lines.append(f"## {title}")
+ lines.append("")
+ for item in sorted(items, key=lambda x: x.name):
+ lines.append(f"- [`{item.name}`]({folder}/{item.name}.md)")
+
+ output_path = self.output_dir / "index.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ content = "\n".join(lines)
+ output_path.write_text(content)
+
+ logger.info("Wrote client index file!")
+
+ def _generate_operation_stubs(self, operations: list[OperationInfo]) -> None:
+ """Generate operation documentation files."""
+ for op in operations:
+ lines = []
+ lines.append(f"# {op.name}")
+ lines.append("")
+
+ # Operation section
+ lines.append("## Operation")
+ lines.append("")
+ lines.append(f"::: {op.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append("")
+
+ # Input section
+ lines.append("## Input")
+ lines.append("")
+ lines.append(f"::: {op.input.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append("")
+
+ # Output section - handle all stream types
+ lines.append("## Output")
+ lines.append("")
+
+ if op.stream_type:
+ lines.append(f"This operation returns {op.stream_type.description}.")
+ lines.append("")
+ lines.append("### Event Stream Structure")
+ lines.append("")
+
+ if op.event_input_type:
+ lines.append("#### Input Event Type")
+ lines.append("")
+ lines.append(f"[`{op.event_input_type}`](../unions/{op.event_input_type}.md)")
+ lines.append("")
+ if op.event_output_type:
+ lines.append("#### Output Event Type")
+ lines.append("")
+ lines.append(f"[`{op.event_output_type}`](../unions/{op.event_output_type}.md)")
+ lines.append("")
+
+ lines.append("### Initial Response Structure")
+ lines.append("")
+ lines.append(f"::: {op.output.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 4")
+ else:
+ lines.append(f"::: {op.output.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+
+ output_path = self.output_dir / "operations" / f"{op.name}.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ output_path.write_text(self._breadcrumb("Operations", op.name) + "\n".join(lines))
+
+ logger.info(f"Wrote {len(operations)} operation files")
+
+ def _generate_type_stubs(
+ self,
+ items: list[TypeInfo],
+ category: str,
+ section_title: str,
+ extra_options: list[str] | None = None,
+ ) -> None:
+ """Generate documentation files for a category of types."""
+ for item in items:
+ lines = [
+ f"# {item.name}",
+ "",
+ f"## {section_title}",
+ f"::: {item.module_path}",
+ " options:",
+ " heading_level: 3",
+ ]
+ if extra_options:
+ lines.extend(f" {opt}" for opt in extra_options)
+
+ output_path = self.output_dir / category / f"{item.name}.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ output_path.write_text(self._breadcrumb(category.title(), item.name) + "\n".join(lines))
+
+ logger.info(f"Wrote {len(items)} {category} files")
+
+ def _generate_union_stubs(self, unions: list[UnionInfo]) -> None:
+ """Generate union documentation files."""
+ for union in unions:
+ lines = []
+ lines.append(f"# {union.name}")
+ lines.append("")
+ lines.append("## Union Type")
+ lines.append(f"::: {union.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append("")
+
+ # Add union members
+ if union.members:
+ lines.append("## Union Member Types")
+ for member in union.members:
+ lines.append("")
+ lines.append(f"::: {member.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+
+ output_path = self.output_dir / "unions" / f"{union.name}.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ output_path.write_text(self._breadcrumb("Unions", union.name) + "\n".join(lines))
+
+ logger.info(f"Wrote {len(unions)} union files")
+
+ def _breadcrumb(self, category: str, name: str) -> str:
+ """Generate a breadcrumb navigation element."""
+ separator = " > "
+ home = f"[{self.service_name}](../index.md)"
+ section = f"[{category}](../index.md#{category.lower()})"
+        return f'<span class="breadcrumb">{home}{separator}{section}{separator}{name}</span>\n\n'
+
+
+def main() -> int:
+ """Main entry point for the single-client documentation generator."""
+ parser = argparse.ArgumentParser(
+ description="Generate API documentation stubs for AWS SDK Python client."
+ )
+ parser.add_argument(
+ "-c", "--client-dir", type=Path, required=True, help="Path to the client source package"
+ )
+ parser.add_argument(
+ "-o",
+ "--output-dir",
+ type=Path,
+ required=True,
+ help="Output directory for generated doc stubs",
+ )
+
+ args = parser.parse_args()
+ client_dir = args.client_dir.resolve()
+ output_dir = args.output_dir.resolve()
+
+ if not client_dir.exists():
+ logger.error(f"Client directory not found: {client_dir}")
+ return 1
+
+ try:
+ generator = DocStubGenerator(client_dir, output_dir)
+ success = generator.generate()
+ return 0 if success else 1
+ except Exception as e:
+ logger.error(f"Unexpected error generating doc stubs: {e}", exc_info=True)
+ return 1
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/clients/aws-sdk-transcribe-streaming/Makefile b/clients/aws-sdk-transcribe-streaming/Makefile
new file mode 100644
index 0000000..016d87e
--- /dev/null
+++ b/clients/aws-sdk-transcribe-streaming/Makefile
@@ -0,0 +1,25 @@
+DOCS_PORT ?= 8000
+CLIENT_DIR := src/aws_sdk_transcribe_streaming
+DOCS_OUTPUT_DIR := docs/client
+PYTHON_VERSION := 3.12
+
+.PHONY: docs docs-generate docs-serve docs-clean docs-install venv
+
+venv:
+ uv venv --python $(PYTHON_VERSION)
+
+docs-install: venv
+ uv sync --group docs
+
+docs-clean:
+ rm -rf site $(DOCS_OUTPUT_DIR)
+
+docs-generate:
+ uv run python scripts/docs/generate_doc_stubs.py -c $(CLIENT_DIR) -o $(DOCS_OUTPUT_DIR)
+
+docs: docs-generate
+ uv run mkdocs build
+
+docs-serve:
+ @[ -d site ] || $(MAKE) docs
+ uv run python -m http.server $(DOCS_PORT) --bind 127.0.0.1 --directory site
\ No newline at end of file
diff --git a/clients/aws-sdk-transcribe-streaming/docs/README.md b/clients/aws-sdk-transcribe-streaming/docs/README.md
new file mode 100644
index 0000000..c25ff76
--- /dev/null
+++ b/clients/aws-sdk-transcribe-streaming/docs/README.md
@@ -0,0 +1,18 @@
+## Generating Client Documentation
+
+This client's documentation is built with Material for MkDocs. You can generate the
+HTML documentation for this client locally with the following commands:
+
+```bash
+# Install documentation dependencies
+make docs-install
+
+# Serve documentation locally
+make docs-serve
+
+# OR build static HTML documentation
+make docs
+
+# Clean docs artifacts
+make docs-clean
+```
diff --git a/clients/aws-sdk-transcribe-streaming/docs/hooks/copyright.py b/clients/aws-sdk-transcribe-streaming/docs/hooks/copyright.py
new file mode 100644
index 0000000..1260def
--- /dev/null
+++ b/clients/aws-sdk-transcribe-streaming/docs/hooks/copyright.py
@@ -0,0 +1,6 @@
+from datetime import datetime
+
+
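+# MkDocs "hooks" entry point: on_config runs after the configuration is loaded,
+# so the footer copyright year always matches the build year instead of being
+# hard-coded in mkdocs.yml.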
+def on_config(config, **kwargs):
+ config.copyright = f"Copyright © {datetime.now().year}, Amazon Web Services, Inc"
+ return config
diff --git a/clients/aws-sdk-transcribe-streaming/docs/index.md b/clients/aws-sdk-transcribe-streaming/docs/index.md
new file mode 100644
index 0000000..612c7a5
--- /dev/null
+++ b/clients/aws-sdk-transcribe-streaming/docs/index.md
@@ -0,0 +1 @@
+--8<-- "README.md"
diff --git a/clients/aws-sdk-transcribe-streaming/docs/stylesheets/extra.css b/clients/aws-sdk-transcribe-streaming/docs/stylesheets/extra.css
new file mode 100644
index 0000000..21d1b09
--- /dev/null
+++ b/clients/aws-sdk-transcribe-streaming/docs/stylesheets/extra.css
@@ -0,0 +1,9 @@
+/* Custom breadcrumb styling */
+.breadcrumb {
+ font-size: 0.85em;
+ color: var(--md-default-fg-color--light);
+}
+
+p:has(span.breadcrumb) {
+  margin-top: 0;
+}
diff --git a/clients/aws-sdk-transcribe-streaming/mkdocs.yml b/clients/aws-sdk-transcribe-streaming/mkdocs.yml
new file mode 100644
index 0000000..3787400
--- /dev/null
+++ b/clients/aws-sdk-transcribe-streaming/mkdocs.yml
@@ -0,0 +1,96 @@
+site_name: AWS SDK for Python - Transcribe Streaming
+site_description: Documentation for AWS Transcribe Streaming Client
+
+repo_name: awslabs/aws-sdk-python
+repo_url: https://github.com/awslabs/aws-sdk-python
+
+exclude_docs: |
+ README.md
+
+hooks:
+ - docs/hooks/copyright.py
+
+theme:
+ name: material
+ favicon: ""
+ palette:
+ # Palette toggle for automatic mode
+ - media: "(prefers-color-scheme)"
+ scheme: default
+ toggle:
+ icon: material/brightness-auto
+ name: Switch to light mode
+ primary: white
+ # Palette toggle for light mode
+ - media: "(prefers-color-scheme: light)"
+ scheme: default
+ toggle:
+ icon: material/brightness-7
+ name: Switch to dark mode
+ primary: white
+ # Palette toggle for dark mode
+ - media: "(prefers-color-scheme: dark)"
+ scheme: slate
+ toggle:
+ icon: material/brightness-4
+ name: Switch to system preference
+ primary: black
+ features:
+ - navigation.indexes
+ - navigation.instant
+ - navigation.top
+ - search.suggest
+ - search.highlight
+ - content.code.copy
+
+plugins:
+- search
+- mkdocstrings:
+ handlers:
+ python:
+ options:
+ show_source: false
+ show_signature: true
+ show_signature_annotations: true
+ show_root_heading: true
+ show_root_full_path: false
+ show_object_full_path: false
+ show_symbol_type_heading: true
+ show_symbol_type_toc: true
+ show_if_no_docstring: true
+ show_category_heading: true
+ group_by_category: true
+ separate_signature: true
+ signature_crossrefs: true
+ filters:
+ - "!^_"
+ - "!^deserialize"
+ - "!^serialize"
+
+markdown_extensions:
+ - pymdownx.highlight
+ - pymdownx.inlinehilite
+ - pymdownx.snippets:
+ check_paths: true
+ - pymdownx.superfences
+ - admonition
+ - def_list
+ - toc:
+ permalink: true
+ toc_depth: 3
+
+nav:
+ - Overview: index.md
+ - Client: client/index.md
+
+extra:
+ social:
+ - icon: fontawesome/brands/github
+ link: https://github.com/awslabs/aws-sdk-python
+
+extra_css:
+ - stylesheets/extra.css
+
+validation:
+ nav:
+ omitted_files: ignore
diff --git a/clients/aws-sdk-transcribe-streaming/pyproject.toml b/clients/aws-sdk-transcribe-streaming/pyproject.toml
index ed871d6..71b62f6 100644
--- a/clients/aws-sdk-transcribe-streaming/pyproject.toml
+++ b/clients/aws-sdk-transcribe-streaming/pyproject.toml
@@ -34,6 +34,12 @@ test = [
"pytest-asyncio>=0.20.3,<0.21.0"
]
+docs = [
+ "mkdocs~=1.6.1",
+ "mkdocs-material==9.7.0",
+ "mkdocstrings[python]==1.0.0"
+]
+
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
diff --git a/clients/aws-sdk-transcribe-streaming/scripts/docs/generate_doc_stubs.py b/clients/aws-sdk-transcribe-streaming/scripts/docs/generate_doc_stubs.py
new file mode 100644
index 0000000..056708a
--- /dev/null
+++ b/clients/aws-sdk-transcribe-streaming/scripts/docs/generate_doc_stubs.py
@@ -0,0 +1,607 @@
+"""
+Generate markdown API Reference stubs for AWS SDK for Python clients.
+
+This script generates MkDocs markdown stub files for a single client package.
+It uses griffe to analyze the Python source and outputs mkdocstrings directives
+for the client, operations, models (structures, unions, enums), and errors.
+"""
+
+import argparse
+import logging
+import sys
+from collections.abc import Sequence
+from dataclasses import dataclass
+from enum import Enum
+from pathlib import Path
+from typing import TypeGuard
+
+import griffe
+from griffe import (
+ Alias,
+ Attribute,
+ Class,
+ Expr,
+ ExprBinOp,
+ ExprName,
+ ExprSubscript,
+ ExprTuple,
+ Function,
+ Module,
+ Object,
+)
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger("generate_doc_stubs")
+
+
+class StreamType(Enum):
+ """Type of event stream for operations."""
+
+ INPUT = "InputEventStream"
+ OUTPUT = "OutputEventStream"
+ DUPLEX = "DuplexEventStream"
+
+ @property
+ def description(self) -> str:
+ """Return a string description for documentation."""
+ descriptions = {
+ StreamType.INPUT: "an `InputEventStream` for client-to-server streaming",
+ StreamType.OUTPUT: "an `OutputEventStream` for server-to-client streaming",
+ StreamType.DUPLEX: "a `DuplexEventStream` for bidirectional streaming",
+ }
+ return descriptions[self]
+
+
+@dataclass(frozen=True)
+class TypeInfo:
+ """Information about a type (structure, enum, error, config, plugin)."""
+
+ name: str # e.g., "ConverseOperationOutput"
+ module_path: str # e.g., "aws_sdk_bedrock_runtime.models.ConverseOperationOutput"
+
+
+@dataclass
+class UnionInfo:
+ """Information about a union type."""
+
+ name: str
+ module_path: str
+ members: list[TypeInfo]
+
+
+@dataclass
+class OperationInfo:
+ """Information about a client operation."""
+
+ name: str
+ module_path: str
+ input: TypeInfo
+ output: TypeInfo
+ stream_type: StreamType | None
+ event_input_type: str | None # For input/duplex streams
+ event_output_type: str | None # For output/duplex streams
+
+
+@dataclass
+class ModelsInfo:
+ """Information about all modeled types."""
+
+ structures: list[TypeInfo]
+ unions: list[UnionInfo]
+ enums: list[TypeInfo]
+ errors: list[TypeInfo]
+
+
+@dataclass
+class ClientInfo:
+ """Complete information about a client package."""
+
+ name: str # e.g., "BedrockRuntimeClient"
+ module_path: str # e.g., "aws_sdk_bedrock_runtime.client.BedrockRuntimeClient"
+ package_name: str # e.g., "aws_sdk_bedrock_runtime"
+ config: TypeInfo
+ plugin: TypeInfo
+ operations: list[OperationInfo]
+ models: ModelsInfo
+
+
+class DocStubGenerator:
+ """Generate markdown API Reference stubs for AWS SDK for Python clients."""
+
+ def __init__(self, client_dir: Path, output_dir: Path) -> None:
+ """
+ Initialize the documentation generator.
+
+ Args:
+ client_dir: Path to the client source directory
+ output_dir: Path to the output directory for generated doc stubs
+ """
+ self.client_dir = client_dir
+ self.output_dir = output_dir
+ # Extract service name from package name
+ # (e.g., "aws_sdk_bedrock_runtime" -> "Bedrock Runtime")
+ self.service_name = client_dir.name.replace("aws_sdk_", "").replace("_", " ").title()
+
+ def generate(self) -> bool:
+ """
+ Generate the documentation stubs to the output directory.
+
+ Returns:
+ True if documentation was generated successfully, False otherwise.
+ """
+ logger.info(f"Generating doc stubs for {self.service_name}...")
+
+ package_name = self.client_dir.name
+ client_info = self._analyze_client_package(package_name)
+ if not self._generate_client_docs(client_info):
+ return False
+
+ logger.info(f"Finished generating doc stubs for {self.service_name}")
+ return True
+
+ def _analyze_client_package(self, package_name: str) -> ClientInfo:
+ """Analyze a client package using griffe."""
+ logger.info(f"Analyzing package: {package_name}")
+ package = griffe.load(package_name)
+
+ # Ensure required modules exist
+ required = ["client", "config", "models"]
+ missing = [name for name in required if not package.modules.get(name)]
+ if missing:
+ raise ValueError(f"Missing required modules in {package_name}: {', '.join(missing)}")
+
+ # Parse submodules
+ client_module = package.modules["client"]
+ config_module = package.modules["config"]
+ models_module = package.modules["models"]
+
+ client_class = self._find_class_with_suffix(client_module, "Client")
+ if not client_class:
+ raise ValueError(f"No class ending with 'Client' found in {package_name}.client")
+
+ config_class = config_module.members.get("Config")
+ plugin_alias = config_module.members.get("Plugin")
+ if not config_class or not plugin_alias:
+ raise ValueError(f"Missing Config or Plugin in {package_name}.config")
+
+ config = TypeInfo(name=config_class.name, module_path=config_class.path)
+ plugin = TypeInfo(name=plugin_alias.name, module_path=plugin_alias.path)
+
+ operations = self._extract_operations(client_class)
+ models = self._extract_models(models_module, operations)
+
+ logger.info(
+ f"Analyzed {client_class.name}: {len(operations)} operations, "
+ f"{len(models.structures)} structures, {len(models.errors)} errors, "
+ f"{len(models.unions)} unions, {len(models.enums)} enums"
+ )
+
+ return ClientInfo(
+ name=client_class.name,
+ module_path=client_class.path,
+ package_name=package_name,
+ config=config,
+ plugin=plugin,
+ operations=operations,
+ models=models,
+ )
+
+ def _find_class_with_suffix(self, module: Module, suffix: str) -> Class | None:
+ """Find the class in the module with a matching suffix."""
+ for cls in module.classes.values():
+ if cls.name.endswith(suffix):
+ return cls
+ return None
+
+ def _extract_operations(self, client_class: Class) -> list[OperationInfo]:
+ """Extract operation information from client class."""
+ operations = []
+ for op in client_class.functions.values():
+ if op.is_private or op.is_init_method:
+ continue
+ operations.append(self._analyze_operation(op))
+ return operations
+
+ def _analyze_operation(self, operation: Function) -> OperationInfo:
+ """Analyze an operation method to extract information."""
+ stream_type = None
+ event_input_type = None
+ event_output_type = None
+
+ input_param = operation.parameters["input"]
+ input_annotation = self._get_expr(
+ input_param.annotation, f"'{operation.name}' input annotation"
+ )
+ input_info = TypeInfo(
+ name=input_annotation.canonical_name,
+ module_path=input_annotation.canonical_path,
+ )
+
+ returns = self._get_expr(operation.returns, f"'{operation.name}' return type")
+ output_type = returns.canonical_name
+ stream_type_map = {s.value: s for s in StreamType}
+
+ if output_type in stream_type_map:
+ stream_type = stream_type_map[output_type]
+ stream_args = self._get_subscript_elements(returns, f"'{operation.name}' stream type")
+
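+            # The event stream generics are assumed to be parameterized as
+            #   InputEventStream[InputEvents, InitialResponse]
+            #   OutputEventStream[OutputEvents, InitialResponse]
+            #   DuplexEventStream[InputEvents, OutputEvents, InitialResponse]
+            # which is what the index arithmetic below relies on.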
+ if stream_type in (StreamType.INPUT, StreamType.DUPLEX):
+ event_input_type = stream_args[0].canonical_name
+ if stream_type in (StreamType.OUTPUT, StreamType.DUPLEX):
+ idx = 1 if stream_type == StreamType.DUPLEX else 0
+ event_output_type = stream_args[idx].canonical_name
+
+ output_info = TypeInfo(
+ name=stream_args[-1].canonical_name, module_path=stream_args[-1].canonical_path
+ )
+ else:
+ output_info = TypeInfo(name=output_type, module_path=returns.canonical_path)
+
+ return OperationInfo(
+ name=operation.name,
+ module_path=operation.path,
+ input=input_info,
+ output=output_info,
+ stream_type=stream_type,
+ event_input_type=event_input_type,
+ event_output_type=event_output_type,
+ )
+
+ def _get_expr(self, annotation: str | Expr | None, context: str) -> Expr:
+ """Extract and validate an Expr from an annotation."""
+ if not isinstance(annotation, Expr):
+ raise TypeError(f"{context}: expected Expr, got {type(annotation).__name__}")
+ return annotation
+
+ def _get_subscript_elements(self, expr: Expr, context: str) -> list[Expr]:
+ """Extract type arguments from a subscript expression like Generic[A, B, C]."""
+ if not isinstance(expr, ExprSubscript):
+ raise TypeError(f"{context}: expected subscript, got {type(expr).__name__}")
+ slice_expr = expr.slice
+ if isinstance(slice_expr, str):
+ raise TypeError(f"{context}: unexpected string slice '{slice_expr}'")
+ if isinstance(slice_expr, ExprTuple):
+ return [el for el in slice_expr.elements if isinstance(el, Expr)]
+ return [slice_expr]
+
+ def _extract_models(self, models_module: Module, operations: list[OperationInfo]) -> ModelsInfo:
+ """Extract structures, unions, enums, and errors from models module."""
+ structures, unions, enums, errors = [], [], [], []
+
+ for member in models_module.members.values():
+ # Skip imported and private members
+ if member.is_imported or member.is_private:
+ continue
+
+ if self._is_union(member):
+ unions.append(
+ UnionInfo(
+ name=member.name,
+ module_path=member.path,
+ members=self._extract_union_members(member, models_module),
+ )
+ )
+ elif self._is_enum(member):
+ enums.append(TypeInfo(name=member.name, module_path=member.path))
+ elif self._is_error(member):
+ errors.append(TypeInfo(name=member.name, module_path=member.path))
+ elif member.is_class:
+ structures.append(TypeInfo(name=member.name, module_path=member.path))
+
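+        # Structures that already appear as operation inputs/outputs or as union
+        # members are documented on those pages, so drop them here to avoid
+        # writing duplicate stub files.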
+ duplicates = set()
+ for structure in structures:
+ if self._is_operation_io_type(structure.name, operations) or self._is_union_member(
+ structure.name, unions
+ ):
+ duplicates.add(structure)
+
+ structures = [struct for struct in structures if struct not in duplicates]
+
+ return ModelsInfo(structures=structures, unions=unions, enums=enums, errors=errors)
+
+ def _is_union(self, member: Object | Alias) -> TypeGuard[Attribute]:
+ """Check if a module member is a union type."""
+ if not isinstance(member, Attribute):
+ return False
+
+ value = member.value
+ # Check for Union[...] syntax
+ if isinstance(value, ExprSubscript):
+ left = value.left
+ if isinstance(left, ExprName) and left.name == "Union":
+ return True
+
+ # Check for PEP 604 (X | Y) syntax
+ if isinstance(value, ExprBinOp):
+ return True
+
+ return False
+
+ def _extract_union_members(
+ self, union_attr: Attribute, models_module: Module
+ ) -> list[TypeInfo]:
+ """Extract member types from a union."""
+ members = []
+ value_str = str(union_attr.value)
+
+ # Clean up value_str for Union[X | Y | Z] syntax
+ if value_str.startswith("Union[") and value_str.endswith("]"):
+ value_str = value_str.removeprefix("Union[").removesuffix("]")
+
+ member_names = [member.strip() for member in value_str.split("|")]
+
+ for name in member_names:
+ if not (member_object := models_module.members.get(name)):
+ raise ValueError(f"Union member '{name}' not found in models module")
+ members.append(TypeInfo(name=member_object.name, module_path=member_object.path))
+
+ return members
+
+ def _is_enum(self, member: Object | Alias) -> TypeGuard[Class]:
+ """Check if a module member is an enum."""
+ if not isinstance(member, Class):
+ return False
+ return any(
+ isinstance(base, ExprName) and base.name in ("StrEnum", "IntEnum")
+ for base in member.bases
+ )
+
+ def _is_error(self, member: Object | Alias) -> TypeGuard[Class]:
+ """Check if a module member is an error."""
+ if not isinstance(member, Class):
+ return False
+ return any(
+ isinstance(base, ExprName) and base.name in ("ServiceError", "ModeledError")
+ for base in member.bases
+ )
+
+ def _is_operation_io_type(self, type_name: str, operations: list[OperationInfo]) -> bool:
+ """Check if a type is used as operation input/output."""
+ return any(type_name in (op.input.name, op.output.name) for op in operations)
+
+ def _is_union_member(self, type_name: str, unions: list[UnionInfo]) -> bool:
+ """Check if a type is used as union member."""
+ return any(type_name == m.name for u in unions for m in u.members)
+
+ def _generate_client_docs(self, client_info: ClientInfo) -> bool:
+ """Generate all documentation files for a client."""
+ logger.info(f"Writing doc stubs to {self.output_dir}...")
+
+ try:
+ self._generate_index(client_info)
+ self._generate_operation_stubs(client_info.operations)
+ self._generate_type_stubs(
+ client_info.models.structures, "structures", "Structure Class"
+ )
+ self._generate_type_stubs(client_info.models.errors, "errors", "Error Class")
+ self._generate_type_stubs(
+ client_info.models.enums, "enums", "Enum Class", ["members: true"]
+ )
+ self._generate_union_stubs(client_info.models.unions)
+ except OSError as e:
+ logger.error(f"Failed to write documentation files: {e}")
+ return False
+ return True
+
+ def _generate_index(self, client_info: ClientInfo) -> None:
+ """Generate the main index.md file."""
+ lines = []
+ lines.append(f"# {self.service_name}")
+ lines.append("")
+ lines.append("## Client")
+ lines.append("")
+ lines.append(f"::: {client_info.module_path}")
+ lines.append(" options:")
+ lines.append(" members: false")
+ lines.append(" heading_level: 3")
+ lines.append(" merge_init_into_class: true")
+ lines.append(" docstring_options:")
+ lines.append(" ignore_init_summary: true")
+ lines.append("")
+
+ # Operations section
+ if client_info.operations:
+ lines.append("## Operations")
+ lines.append("")
+ for op in sorted(client_info.operations, key=lambda x: x.name):
+ lines.append(f"- [`{op.name}`](operations/{op.name}.md)")
+ lines.append("")
+
+ # Configuration section
+ lines.append("## Configuration")
+ lines.append("")
+ lines.append(f"::: {client_info.config.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append(" merge_init_into_class: true")
+ lines.append(" docstring_options:")
+ lines.append(" ignore_init_summary: true")
+ lines.append("")
+ lines.append(f"::: {client_info.plugin.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+
+ models = client_info.models
+
+ # Model sections
+ sections: list[tuple[str, str, Sequence[TypeInfo | UnionInfo]]] = [
+ ("Structures", "structures", models.structures),
+ ("Errors", "errors", models.errors),
+ ("Unions", "unions", models.unions),
+ ("Enums", "enums", models.enums),
+ ]
+ for title, folder, items in sections:
+ if items:
+ lines.append("")
+ lines.append(f"## {title}")
+ lines.append("")
+ for item in sorted(items, key=lambda x: x.name):
+ lines.append(f"- [`{item.name}`]({folder}/{item.name}.md)")
+
+ output_path = self.output_dir / "index.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ content = "\n".join(lines)
+ output_path.write_text(content)
+
+ logger.info("Wrote client index file!")
+
+ def _generate_operation_stubs(self, operations: list[OperationInfo]) -> None:
+ """Generate operation documentation files."""
+ for op in operations:
+ lines = []
+ lines.append(f"# {op.name}")
+ lines.append("")
+
+ # Operation section
+ lines.append("## Operation")
+ lines.append("")
+ lines.append(f"::: {op.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append("")
+
+ # Input section
+ lines.append("## Input")
+ lines.append("")
+ lines.append(f"::: {op.input.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append("")
+
+ # Output section - handle all stream types
+ lines.append("## Output")
+ lines.append("")
+
+ if op.stream_type:
+ lines.append(f"This operation returns {op.stream_type.description}.")
+ lines.append("")
+ lines.append("### Event Stream Structure")
+ lines.append("")
+
+ if op.event_input_type:
+ lines.append("#### Input Event Type")
+ lines.append("")
+ lines.append(f"[`{op.event_input_type}`](../unions/{op.event_input_type}.md)")
+ lines.append("")
+ if op.event_output_type:
+ lines.append("#### Output Event Type")
+ lines.append("")
+ lines.append(f"[`{op.event_output_type}`](../unions/{op.event_output_type}.md)")
+ lines.append("")
+
+ lines.append("### Initial Response Structure")
+ lines.append("")
+ lines.append(f"::: {op.output.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 4")
+ else:
+ lines.append(f"::: {op.output.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+
+ output_path = self.output_dir / "operations" / f"{op.name}.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ output_path.write_text(self._breadcrumb("Operations", op.name) + "\n".join(lines))
+
+ logger.info(f"Wrote {len(operations)} operation files")
+
+ def _generate_type_stubs(
+ self,
+ items: list[TypeInfo],
+ category: str,
+ section_title: str,
+ extra_options: list[str] | None = None,
+ ) -> None:
+ """Generate documentation files for a category of types."""
+ for item in items:
+ lines = [
+ f"# {item.name}",
+ "",
+ f"## {section_title}",
+ f"::: {item.module_path}",
+ " options:",
+ " heading_level: 3",
+ ]
+ if extra_options:
+ lines.extend(f" {opt}" for opt in extra_options)
+
+ output_path = self.output_dir / category / f"{item.name}.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ output_path.write_text(self._breadcrumb(category.title(), item.name) + "\n".join(lines))
+
+ logger.info(f"Wrote {len(items)} {category} files")
+
+ def _generate_union_stubs(self, unions: list[UnionInfo]) -> None:
+ """Generate union documentation files."""
+ for union in unions:
+ lines = []
+ lines.append(f"# {union.name}")
+ lines.append("")
+ lines.append("## Union Type")
+ lines.append(f"::: {union.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+ lines.append("")
+
+ # Add union members
+ if union.members:
+ lines.append("## Union Member Types")
+ for member in union.members:
+ lines.append("")
+ lines.append(f"::: {member.module_path}")
+ lines.append(" options:")
+ lines.append(" heading_level: 3")
+
+ output_path = self.output_dir / "unions" / f"{union.name}.md"
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ output_path.write_text(self._breadcrumb("Unions", union.name) + "\n".join(lines))
+
+ logger.info(f"Wrote {len(unions)} union files")
+
+ def _breadcrumb(self, category: str, name: str) -> str:
+ """Generate a breadcrumb navigation element."""
+ separator = " > "
+ home = f"[{self.service_name}](../index.md)"
+ section = f"[{category}](../index.md#{category.lower()})"
+        return f'<span class="breadcrumb">{home}{separator}{section}{separator}{name}</span>\n\n'
+
+
+def main() -> int:
+ """Main entry point for the single-client documentation generator."""
+ parser = argparse.ArgumentParser(
+ description="Generate API documentation stubs for AWS SDK Python client."
+ )
+ parser.add_argument(
+ "-c", "--client-dir", type=Path, required=True, help="Path to the client source package"
+ )
+ parser.add_argument(
+ "-o",
+ "--output-dir",
+ type=Path,
+ required=True,
+ help="Output directory for generated doc stubs",
+ )
+
+ args = parser.parse_args()
+ client_dir = args.client_dir.resolve()
+ output_dir = args.output_dir.resolve()
+
+ if not client_dir.exists():
+ logger.error(f"Client directory not found: {client_dir}")
+ return 1
+
+ try:
+ generator = DocStubGenerator(client_dir, output_dir)
+ success = generator.generate()
+ return 0 if success else 1
+ except Exception as e:
+ logger.error(f"Unexpected error generating doc stubs: {e}", exc_info=True)
+ return 1
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/docs/assets/aws-logo-dark.svg b/docs/assets/aws-logo-dark.svg
new file mode 100644
index 0000000..70619b8
--- /dev/null
+++ b/docs/assets/aws-logo-dark.svg
@@ -0,0 +1,35 @@
+
+
+
diff --git a/docs/assets/aws-logo-white.svg b/docs/assets/aws-logo-white.svg
new file mode 100644
index 0000000..982571b
--- /dev/null
+++ b/docs/assets/aws-logo-white.svg
@@ -0,0 +1,38 @@
+
+
+
diff --git a/docs/contributing.md b/docs/contributing.md
new file mode 100644
index 0000000..e079654
--- /dev/null
+++ b/docs/contributing.md
@@ -0,0 +1 @@
+--8<-- "CONTRIBUTING.md"
\ No newline at end of file
diff --git a/docs/contributing/index.md b/docs/contributing/index.md
deleted file mode 100644
index 1331899..0000000
--- a/docs/contributing/index.md
+++ /dev/null
@@ -1,4 +0,0 @@
---8<-- "CONTRIBUTING.md:docs"
-## Licensing
-
-See the [LICENSE](https://github.com/awslabs/aws-sdk-python/blob/develop/LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
\ No newline at end of file
diff --git a/docs/hooks/copyright.py b/docs/hooks/copyright.py
new file mode 100644
index 0000000..1260def
--- /dev/null
+++ b/docs/hooks/copyright.py
@@ -0,0 +1,6 @@
+from datetime import datetime
+
+
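+# MkDocs "hooks" entry point: on_config runs after the configuration is loaded,
+# so the footer copyright year always matches the build year instead of being
+# hard-coded in mkdocs.yml.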
+def on_config(config, **kwargs):
+ config.copyright = f"Copyright © {datetime.now().year}, Amazon Web Services, Inc"
+ return config
diff --git a/docs/index.md b/docs/index.md
index 9123f93..0f88098 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -1,3 +1,3 @@
# AWS SDK for Python
---8<-- "README.md:docs"
\ No newline at end of file
+--8<-- "README.md:2"
\ No newline at end of file
diff --git a/docs/javascript/nav-expand.js b/docs/javascript/nav-expand.js
new file mode 100644
index 0000000..1984a62
--- /dev/null
+++ b/docs/javascript/nav-expand.js
@@ -0,0 +1,29 @@
+/**
+ * Keep API Reference nav expanded on /clients/ pages and highlight active client.
+ * Uses Material for MkDocs document$ observable for instant navigation compatibility.
+ */
+function expandClientsNav() {
+ if (!location.pathname.includes("/clients/")) return;
+ document.querySelectorAll(".md-nav__item--nested").forEach(function (item) {
+ var link = item.querySelector(":scope > .md-nav__link");
+ if (link && link.textContent.trim().includes("Available Clients")) {
+      // Expand the "Available Clients" dropdown
+ var toggle = item.querySelector(":scope > .md-nav__toggle");
+ if (toggle) toggle.checked = true;
+ item.setAttribute("data-md-state", "expanded");
+
+ // Highlight active client
+ var navItems = item.querySelectorAll(".md-nav__item .md-nav__link");
+ navItems.forEach(function (navLink) {
+ if (navLink.href && location.pathname.includes(navLink.pathname)) {
+ navLink.classList.add("md-nav__link--active");
+ }
+ });
+ }
+ });
+}
+
+// Subscribe to Material's document$ observable for instant navigation support
+document$.subscribe(expandClientsNav);
+// Also run on initial page load
+expandClientsNav();
\ No newline at end of file
diff --git a/docs/stylesheets/extra.css b/docs/stylesheets/extra.css
index 32aa2b6..3df1023 100644
--- a/docs/stylesheets/extra.css
+++ b/docs/stylesheets/extra.css
@@ -1,3 +1,14 @@
-.md-grid {
- max-width: 70rem;
+/* Custom breadcrumb styling */
+.breadcrumb {
+ font-size: 0.85em;
+ color: var(--md-default-fg-color--light);
+}
+
+p:has(span.breadcrumb) {
+ margin-top: 0;
+}
+
+/* Light mode - use dark logo */
+[data-md-color-scheme="default"] .md-header__button.md-logo img {
+ content: url('../assets/aws-logo-dark.svg');
}
\ No newline at end of file
diff --git a/mkdocs.yml b/mkdocs.yml
index 71675cf..05784d2 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -1,28 +1,44 @@
site_name: AWS SDK for Python
site_description: Documentation for AWS SDK for Python Clients
-copyright: Copyright © 2025, Amazon Web Services, Inc
-repo_name: awslabs/aws-sdk-python
+repo_name: awslabs/aws-sdk-python
repo_url: https://github.com/awslabs/aws-sdk-python
+exclude_docs: |
+ README.md
+
+hooks:
+ - docs/hooks/copyright.py
+
theme:
name: material
+ logo: assets/aws-logo-white.svg
+ favicon: ""
palette:
- - scheme: default
+ # Palette toggle for automatic mode
+ - media: "(prefers-color-scheme)"
+ scheme: default
+ toggle:
+ icon: material/brightness-auto
+ name: Switch to light mode
primary: white
- accent: light blue
+ # Palette toggle for light mode
+ - media: "(prefers-color-scheme: light)"
+ scheme: default
toggle:
icon: material/brightness-7
name: Switch to dark mode
- - scheme: slate
- primary: black
- accent: light blue
+ primary: white
+ # Palette toggle for dark mode
+ - media: "(prefers-color-scheme: dark)"
+ scheme: slate
toggle:
icon: material/brightness-4
- name: Switch to light mode
+ name: Switch to system preference
+ primary: black
features:
- - navigation.tabs
- - navigation.tabs.sticky
+ - navigation.indexes
+ - navigation.instant
- navigation.top
- search.suggest
- search.highlight
@@ -30,10 +46,8 @@ theme:
plugins:
- search
- - gen-files:
- scripts:
- - scripts/generate_doc_stubs.py
- - scripts/generate_nav.py
+ - literate-nav:
+ nav_file: SUMMARY.md
- mkdocstrings:
handlers:
python:
@@ -46,20 +60,21 @@ plugins:
show_object_full_path: false
show_symbol_type_heading: true
show_symbol_type_toc: true
+ show_if_no_docstring: true
show_category_heading: true
group_by_category: true
separate_signature: true
signature_crossrefs: true
filters:
- - "!^_"
- - "!^deserialize"
- - "!^serialize"
+ - "!^_"
+ - "!^deserialize"
+ - "!^serialize"
markdown_extensions:
- pymdownx.highlight
- pymdownx.inlinehilite
- pymdownx.snippets:
- base_path: ['.']
+ check_paths: true
- pymdownx.superfences
- pymdownx.tabbed:
alternate_style: true
@@ -70,12 +85,16 @@ markdown_extensions:
toc_depth: 3
extra:
- social:
+ social:
- icon: fontawesome/brands/github
link: https://github.com/awslabs/aws-sdk-python
+extra_javascript:
+ - path: javascript/nav-expand.js
+ defer: true
+
extra_css:
- - stylesheets/extra.css
+ - stylesheets/extra.css
validation:
nav:
diff --git a/pyproject.toml b/pyproject.toml
deleted file mode 100644
index 501b27e..0000000
--- a/pyproject.toml
+++ /dev/null
@@ -1,20 +0,0 @@
-[project]
-name = "aws-sdk-python"
-version = "0.1.0"
-description = "Add your description here"
-readme = "README.md"
-requires-python = ">=3.12"
-dependencies = [
- "mkdocs-awesome-pages-plugin>=2.10.1",
-]
-
-[dependency-groups]
-dev = [
- "mkdocs-awesome-pages-plugin>=2.10.1",
-]
-docs = [
- "mkdocs==1.6.1",
- "mkdocstrings[python]==0.30.1",
- "mkdocs-material==9.7.0",
- "mkdocs-gen-files>=0.5.0",
-]
diff --git a/requirements-docs.in b/requirements-docs.in
new file mode 100644
index 0000000..9d00568
--- /dev/null
+++ b/requirements-docs.in
@@ -0,0 +1,4 @@
+mkdocs==1.6.1
+mkdocstrings[python]==1.0.0
+mkdocs-material==9.7.0
+mkdocs-literate-nav==0.6.1
\ No newline at end of file
diff --git a/scripts/docs/generate_all_doc_stubs.py b/scripts/docs/generate_all_doc_stubs.py
new file mode 100644
index 0000000..d16e628
--- /dev/null
+++ b/scripts/docs/generate_all_doc_stubs.py
@@ -0,0 +1,209 @@
+"""
+Generate documentation stubs for all AWS SDK Python clients.
+
+This script iterates through each client directory and runs the
+generate_doc_stubs.py script with output directed to the top-level docs folder.
+It also generates the clients index page.
+"""
+
+import logging
+import os
+import subprocess
+import sys
+from collections import defaultdict
+from concurrent.futures import ProcessPoolExecutor, as_completed
+from dataclasses import dataclass
+from pathlib import Path
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger("generate_all_doc_stubs")
+
+DEFAULT_CPU_COUNT = 1
+
+@dataclass
+class ClientInfo:
+ """Information about a client for documentation generation."""
+
+ dir: Path
+ service_name: str
+ package_name: str
+ path_name: str
+
+
+def discover_clients(clients_dir: Path) -> list[ClientInfo]:
+ """
+ Discover all clients that have a generate_doc_stubs.py script.
+
+ Args:
+ clients_dir: Path to the clients directory.
+
+ Returns:
+ List of ClientInfo objects.
+ """
+ if not clients_dir.exists():
+ raise FileNotFoundError(f"Clients directory not found: {clients_dir}")
+
+ clients = []
+ for client_dir in sorted(clients_dir.iterdir()):
+ script_path = client_dir / "scripts" / "docs" / "generate_doc_stubs.py"
+ if not script_path.exists():
+ continue
+
+ # Convert "aws-sdk-bedrock-runtime" -> "Bedrock Runtime" / "bedrock-runtime"
+ package_name = client_dir.name
+ path_name = package_name.replace("aws-sdk-", "")
+ service_name = path_name.replace("-", " ").title()
+ clients.append(ClientInfo(client_dir, service_name, package_name, path_name))
+
+ return clients
+
+
+def generate_all_doc_stubs(clients: list[ClientInfo], docs_dir: Path) -> bool:
+ """
+ Generate doc stubs for all clients by running each client's generate_doc_stubs.py.
+
+ Args:
+ clients: List of ClientInfo objects.
+ docs_dir: Path to the docs directory.
+
+ Returns:
+ True if all doc stubs were generated successfully, False otherwise.
+ """
+ top_level_docs = docs_dir / "clients"
+ max_workers = os.cpu_count() or DEFAULT_CPU_COUNT
+
+ logger.info(f"Generating doc stubs for {len(clients)} clients using {max_workers} workers...")
+
+ with ProcessPoolExecutor(max_workers=max_workers) as executor:
+ futures = {
+ executor.submit(
+ _generate_doc_stub,
+ client.dir,
+ client.service_name,
+ top_level_docs / client.path_name,
+ ): client
+ for client in clients
+ }
+
+ failed = []
+ for future in as_completed(futures):
+ service_name, success = future.result()
+ if success:
+                logger.info(f"✅ Generated doc stubs for {service_name}")
+            else:
+                logger.error(f"❌ Failed to generate doc stubs for {service_name}")
+ failed.append(service_name)
+
+ if failed:
+ logger.error(f"Failed to generate doc stubs for: {', '.join(failed)}")
+ return False
+
+ return True
+
+
+def _generate_doc_stub(client_dir: Path, service_name: str, output_dir: Path) -> tuple[str, bool]:
+ """
+ Generate doc stubs for a single client.
+
+ Args:
+ client_dir: Path to the client directory.
+ service_name: Name of the service.
+ output_dir: Path to the output directory.
+
+ Returns:
+ Tuple of (service_name, success).
+ """
+ script_path = client_dir / "scripts" / "docs" / "generate_doc_stubs.py"
+
+ result = subprocess.run(
+ [
+ sys.executable,
+ str(script_path),
+ "--client-dir",
+ str(client_dir / "src" / client_dir.name.replace("-", "_")),
+ "--output-dir",
+ str(output_dir),
+ ],
+ cwd=client_dir,
+ )
+
+ return service_name, result.returncode == 0
+
+
+def generate_clients_index(clients: list[ClientInfo], docs_dir: Path) -> bool:
+ """
+ Generate clients/index.md (with alphabetical tabs).
+
+ Args:
+ clients: List of ClientInfo objects.
+ docs_dir: Path to the docs directory.
+
+ Returns:
+ True if the index was generated successfully, False otherwise.
+ """
+ lines = ["# Available Clients", ""]
+
+ # Group by first letter
+ grouped: defaultdict[str, list[ClientInfo]] = defaultdict(list)
+ for client in clients:
+ letter = client.service_name[0].upper()
+ grouped[letter].append(client)
+
+ # Tab for all services
+ lines.append("=== \"All\"")
+ lines.append("")
+ lines.append(" | Service | Package Name |")
+ lines.append(" |----------|--------------|")
+ for client in clients:
+ lines.append(f" | **[{client.service_name}]({client.path_name}/index.md)** | `{client.package_name}` |")
+ lines.append("")
+
+ # Individual letter tabs
+ for letter in sorted(grouped.keys()):
+ lines.append(f"=== \"{letter}\"")
+ lines.append("")
+ lines.append(" | Service | Package Name |")
+ lines.append(" |----------|--------------|")
+ for client in grouped[letter]:
+ lines.append(f" | **[{client.service_name}]({client.path_name}/index.md)** | `{client.package_name}` |")
+ lines.append("")
+
+ index_path = docs_dir / "clients" / "index.md"
+ try:
+ index_path.write_text("\n".join(lines) + "\n")
+ except OSError as e:
+ logger.error(f"Failed to write clients index: {e}")
+ return False
+
+ logger.info(f"✅ Generated clients index page with {len(grouped)} letter tabs")
+ return True
+
+
+def main() -> int:
+ """Main entry point for generating doc stubs for all clients."""
+ repo_root = Path(__file__).parent.parent.parent
+ clients_dir = repo_root / "clients"
+ docs_dir = repo_root / "docs"
+
+ try:
+ clients = discover_clients(clients_dir)
+
+ if not generate_all_doc_stubs(clients, docs_dir):
+ return 1
+
+ if not generate_clients_index(clients, docs_dir):
+ return 1
+
+ except Exception as e:
+ logger.error(f"Unexpected error: {e}")
+ return 1
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
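
With the three clients this series touches (aws-sdk-bedrock-runtime, aws-sdk-sagemaker-runtime-http2, aws-sdk-transcribe-streaming), `generate_clients_index` would produce a `docs/clients/index.md` along these lines; the tabbed `=== "..."` blocks rely on the pymdownx.tabbed extension enabled in mkdocs.yml, and the table rows are indented so they render inside each tab (a sketch of the first two tabs only):

```markdown
# Available Clients

=== "All"

    | Service | Package Name |
    |----------|--------------|
    | **[Bedrock Runtime](bedrock-runtime/index.md)** | `aws-sdk-bedrock-runtime` |
    | **[Sagemaker Runtime Http2](sagemaker-runtime-http2/index.md)** | `aws-sdk-sagemaker-runtime-http2` |
    | **[Transcribe Streaming](transcribe-streaming/index.md)** | `aws-sdk-transcribe-streaming` |

=== "B"

    | Service | Package Name |
    |----------|--------------|
    | **[Bedrock Runtime](bedrock-runtime/index.md)** | `aws-sdk-bedrock-runtime` |
```
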
diff --git a/scripts/docs/generate_nav.py b/scripts/docs/generate_nav.py
new file mode 100644
index 0000000..3cd600b
--- /dev/null
+++ b/scripts/docs/generate_nav.py
@@ -0,0 +1,91 @@
+# scripts/docs/generate_nav.py
+"""
+Generate client documentation navigation dynamically.
+
+Run this script before mkdocs build to generate:
+docs/SUMMARY.md - Navigation file for literate-nav plugin
+"""
+
+import logging
+import sys
+
+from pathlib import Path
+
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger("generate_nav")
+
+
+def generate_nav(repo_root: Path) -> bool:
+ """
+ Generate navigation structure for clients using literate-nav SUMMARY.md format.
+
+ Args:
+ repo_root: Path to the repository root.
+
+ Returns:
+ True if navigation was generated successfully, False otherwise.
+ """
+ logger.info("Generating navigation structure...")
+
+ clients_dir = repo_root / "clients"
+ if not clients_dir.exists():
+ logger.error(f"Clients directory not found: {clients_dir}")
+ return False
+
+ # Build the SUMMARY.md content for literate-nav
+ lines = [
+ "* [Overview](index.md)",
+ "* [Contributing](contributing.md)",
+ "* [Available Clients](clients/index.md)",
+ ]
+
+ # Discover clients and add each as a nested item under Available Clients
+ client_count = 0
+ for client_path in sorted(clients_dir.iterdir()):
+ if not (client_path / "scripts" / "docs" / "generate_doc_stubs.py").exists():
+ continue
+
+ # Extract service name and path from package name
+ # (e.g., "aws-sdk-bedrock-runtime" -> "Bedrock Runtime" / "bedrock-runtime")
+ path_name = client_path.name.replace("aws-sdk-", "")
+ display_name = path_name.replace("-", " ").title()
+
+ lines.append(f" * [{display_name}](clients/{path_name}/index.md)")
+ logger.info(f"Discovered client: {display_name}")
+ client_count += 1
+
+ logger.info(f"Found {client_count} total clients")
+
+ # Write the SUMMARY.md file to the docs directory
+ summary_path = repo_root / "docs" / "SUMMARY.md"
+ try:
+ summary_path.write_text("\n".join(lines) + "\n")
+ except OSError as e:
+ logger.error(f"Failed to write SUMMARY.md: {e}")
+ return False
+
+ logger.info(f"✅ Generated SUMMARY.md navigation for {client_count} clients")
+ return True
+
+
+def main() -> int:
+ """Main entry point to generate navigation."""
+ repo_root = Path(__file__).parent.parent.parent
+
+ try:
+ if not generate_nav(repo_root):
+ return 1
+ except Exception as e:
+ logger.error(f"Unexpected error: {e}")
+ return 1
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
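
Assuming the same three clients each ship the per-client generate_doc_stubs.py script (as the final patch in this series indicates), the `docs/SUMMARY.md` written here and consumed by mkdocs-literate-nav would read roughly:

```markdown
* [Overview](index.md)
* [Contributing](contributing.md)
* [Available Clients](clients/index.md)
    * [Bedrock Runtime](clients/bedrock-runtime/index.md)
    * [Sagemaker Runtime Http2](clients/sagemaker-runtime-http2/index.md)
    * [Transcribe Streaming](clients/transcribe-streaming/index.md)
```
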
diff --git a/scripts/generate_doc_stubs.py b/scripts/generate_doc_stubs.py
deleted file mode 100644
index 928eca3..0000000
--- a/scripts/generate_doc_stubs.py
+++ /dev/null
@@ -1,590 +0,0 @@
-"""
-Generate API documentation for AWS SDK Python clients.
-
-This script generates MkDocs Material documentation that matches the reference
-documentation format. It analyzes Python client packages using griffe
-and creates structured documentation with proper formatting.
-"""
-
-import logging
-import sys
-from dataclasses import dataclass, field
-from enum import Enum
-from pathlib import Path
-
-import griffe
-import mkdocs_gen_files
-from griffe import Alias, Class, Function, Module, Object, ExprBinOp, ExprSubscript, TypeAlias
-
-
-logging.basicConfig(
- level=logging.INFO,
- format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
- datefmt="%Y-%m-%d %H:%M:%S",
-)
-logger = logging.getLogger("generate_doc_stubs")
-
-EVENT_STREAM_TYPES = ["InputEventStream", "OutputEventStream", "DuplexEventStream"]
-
-class StreamType(Enum):
- """Type of event stream for operations."""
-
- NONE = "none"
- INPUT = "InputEventStream" # Client-to-server streaming
- OUTPUT = "OutputEventStream" # Server-to-client streaming
- DUPLEX = "DuplexEventStream" # Bidirectional streaming
-
-
-@dataclass(frozen=True)
-class StructureInfo:
- """Information about a structure (dataclass)."""
-
- name: str
- module_path: str # e.g., "aws_sdk_bedrock_runtime.models"
-
-
-@dataclass
-class UnionInfo:
- """Information about a union type."""
-
- name: str
- module_path: str
- members: list[StructureInfo]
-
-
-@dataclass
-class EnumInfo:
- """Information about an enum."""
-
- name: str
- module_path: str
-
-
-@dataclass
-class ErrorInfo:
- """Information about an error/exception."""
-
- name: str
- module_path: str
-
-
-@dataclass
-class ConfigInfo:
- """Information about a configuration class."""
-
- name: str
- module_path: str
-
-
-@dataclass
-class PluginInfo:
- """Information about a plugin type alias."""
-
- name: str
- module_path: str
-
-
-@dataclass
-class OperationInfo:
- """Information about a client operation."""
-
- name: str # e.g., "converse"
- module_path: str # e.g., "aws_sdk_bedrock_runtime.client.BedrockRuntimeClient.converse"
- input: StructureInfo # e.g., "ConverseInput"
- output: StructureInfo # e.g., "ConverseOperationOutput"
- stream_type: StreamType.NONE
- event_input_type: str | None # For input/duplex streams
- event_output_type: str | None # For output/duplex streams
-
-
-@dataclass
-class ModelsInfo:
- """Information about all models."""
-
- structures: list[StructureInfo]
- unions: list[UnionInfo]
- enums: list[EnumInfo]
- errors: list[ErrorInfo]
-
-
-@dataclass
-class ClientInfo:
- """Complete information about a client package."""
-
- name: str # e.g., "BedrockRuntimeClient"
- module_path: str # e.g., "aws_sdk_bedrock_runtime.client.BedrockRuntimeClient"
- package_name: str # e.g., "aws_sdk_bedrock_runtime"
- config: ConfigInfo
- plugin: PluginInfo
- operations: list[OperationInfo]
- models: ModelsInfo
-
-
-Member = Object | Alias
-
-class DocStubGenerator:
- """Generates MkDocs stubs for AWS SDK client documentation."""
-
- def __init__(self, client_dir: Path, docs_dir: Path, service_name: str) -> None:
- """Initialize the documentation generator."""
- self.client_dir = client_dir
- self.service_name = service_name
-
- def generate(self) -> None:
- """Generate the documentation stubs to the output directory."""
- client_name = self.client_dir.name
- package_name = client_name.replace("-", "_")
- client_info = self._analyze_client_package(package_name)
- self._generate_client_docs(client_info)
-
- def _analyze_client_package(self, package_name: str) -> ClientInfo:
- """Analyze a client package using griffe."""
- logger.info(f"Analyzing package: {package_name}")
- package = griffe.load(package_name)
-
- # Parse submodules
- client_module = package.modules.get("client")
- config_module = package.modules.get("config")
- models_module = package.modules.get("models")
-
- client_class = self._find_class_with_suffix(client_module, "Client")
- config_class = config_module.members.get("Config")
- plugin_alias = config_module.members.get("Plugin")
- config = ConfigInfo(
- name=config_class.name,
- module_path=config_class.path
- )
- plugin = PluginInfo(
- name=plugin_alias.name,
- module_path=plugin_alias.path
- )
- operations = self._extract_operations(client_class)
- models = self._extract_models(models_module, operations)
-
- return ClientInfo(
- name=client_class.name,
- module_path=client_class.path,
- package_name=package_name,
- config=config,
- plugin=plugin,
- operations=operations,
- models=models
- )
-
- def _find_class_with_suffix(self, module: Module, suffix: str) -> Class:
- """Find the class in the module with a matching suffix."""
- for cls in module.classes.values():
- if cls.name.endswith(suffix):
- return cls
-
- def _extract_operations(self, client_class: Class) -> list[OperationInfo]:
- """Extract operation information from client class."""
- operations = []
- for op in client_class.functions.values():
- if op.is_private or op.is_init_method:
- continue
- operations.append(self._analyze_operation(op))
- return operations
-
- def _analyze_operation(self, operation: Function) -> None:
- """Analyze an operation method to extract information."""
- stream_type = None
- event_input_type = None
- event_output_type = None
-
- input_param = operation.parameters["input"]
- input_info = StructureInfo(
- name=input_param.annotation.canonical_name,
- module_path=input_param.annotation.canonical_path
- )
-
- output_type = operation.returns.canonical_name
- if any(type in output_type for type in EVENT_STREAM_TYPES):
- stream_args = operation.returns.slice.elements
-
- if output_type == "InputEventStream":
- stream_type = StreamType.INPUT
- event_input_type = stream_args[0].canonical_name
- elif output_type == "OutputEventStream":
- stream_type = StreamType.OUTPUT
- event_output_type = stream_args[0].canonical_name
- elif output_type == "DuplexEventStream":
- stream_type = StreamType.DUPLEX
- event_input_type = stream_args[0].canonical_name
- event_output_type = stream_args[1].canonical_name
-
- output_info = StructureInfo(
- name=stream_args[-1].canonical_name,
- module_path=stream_args[-1].canonical_path
- )
- else:
- output_info = StructureInfo(
- name=output_type,
- module_path=operation.returns.canonical_path
- )
-
- return OperationInfo(
- name=operation.name,
- module_path=operation.path,
- input=input_info,
- output=output_info,
- stream_type=stream_type,
- event_input_type=event_input_type,
- event_output_type=event_output_type,
- )
-
- def _extract_models(self, models_module: Module, operations: list[OperationInfo]) -> ModelsInfo:
- """Extract structures, unions, enums, and errors from the models module."""
- structures, unions, enums, errors = [], [], [], []
-
- for member in models_module.members.values():
- # Skip imported and private members
- if member.is_imported or member.is_private:
- continue
-
- if self._is_union(member):
- unions.append(UnionInfo(
- name=member.name,
- module_path=member.path,
- members=self._extract_union_members(member, models_module)
- ))
- elif self._is_enum(member):
- enums.append(EnumInfo(
- name=member.name,
- module_path=member.path
- ))
- elif self._is_error(member):
- errors.append(ErrorInfo(
- name=member.name,
- module_path=member.path
- ))
- else:
- if member.is_class:
- structures.append(StructureInfo(
- name=member.name,
- module_path=member.path
- ))
-
- duplicates = set()
- for structure in structures:
- if (self._is_operation_io_type(structure.name, operations) or
- self._is_union_member(structure.name, unions)):
- duplicates.add(structure)
-
- structures = [struct for struct in structures if struct not in duplicates]
-
- return ModelsInfo(
- structures=structures,
- unions=unions,
- enums=enums,
- errors=errors
- )
-
- def _is_union(self, member: Member) -> bool:
- """Check if a type is a union type."""
- if member.is_attribute:
- # Check for Union[...] syntax
- if isinstance(member.value, ExprSubscript):
- if member.value.left.name == "Union":
- return True
-
- # Check for PEP 604 (X | Y) syntax
- if isinstance(member.value, ExprBinOp):
- return True
-
- return False
-
- def _extract_union_members(self, union_class: TypeAlias, models_module: Module) -> list[StructureInfo]:
- """Extract member types from a union."""
- members = []
- value_str = str(union_class.value)
-
- # Handle Union[X | Y | Z] syntax
- if value_str.startswith("Union[") and value_str.endswith("]"):
- value_str = value_str.removeprefix("Union[").removesuffix("]")
-
- member_names = [member.strip() for member in value_str.split("|")]
-
- for name in member_names:
- member_object = models_module.members.get(name)
- members.append(StructureInfo(
- name=member_object.name,
- module_path=member_object.path
- ))
-
- return members
-
- def _is_enum(self, member: Member) -> bool:
- """Check if a module member is an enum."""
- if not member.is_class:
- return False
-
- for base in member.bases:
- if base.name in ('StrEnum', 'IntEnum'):
- return True
-
- return False
-
- def _is_error(self, member: Member) -> bool:
- """Check if a module member is an error."""
- if not member.is_class:
- return False
-
- for base in member.bases:
- if base.name in ('ServiceError', 'ModeledError'):
- return True
-
- return False
-
- def _is_operation_io_type(self, type_name: str, operations: list[OperationInfo]) -> bool:
- """Check if a type is used as operation input/output."""
- for op in operations:
- if type_name == op.input.name or type_name == op.output.name:
- return True
- return False
-
- def _is_union_member(self, type_name:str, unions: list[UnionInfo]) -> bool:
- """Check if a type is used as union member."""
- for union in unions:
- for member in union.members:
- if type_name == member.name:
- return True
- return False
-
- def _generate_client_docs(self, client_info: ClientInfo) -> None:
- """Generate all documentation files for a client."""
- logger.info(f"Writing files to mkdocs virtual filesystem for {self.service_name}")
-
- self._generate_index(client_info)
- self._generate_operations(client_info.operations)
- self._generate_structures(client_info.models.structures)
- self._generate_errors(client_info.models.errors)
- self._generate_unions(client_info.models.unions)
- self._generate_enums(client_info.models.enums)
-
- def _generate_index(self, client_info: ClientInfo) -> None:
- """Generate the main index.md file."""
- content = f"# {self.service_name}\n\n"
-
- # Client section
- content += "## Client\n\n"
- content += f"::: {client_info.module_path}\n"
- content += " options:\n"
- content += " merge_init_into_class: true\n"
- content += " docstring_options:\n"
- content += " ignore_init_summary: true\n"
- content += " members: false\n"
- content += " heading_level: 3\n\n"
-
- # Operations section
- if client_info.operations:
- content += "## Available Operations\n\n"
- for op in sorted(client_info.operations, key=lambda x: x.name):
- content += f"- [`{op.name}`](operations/{op.name}.md)\n\n"
-
- # Configuration section
- content += "## Configuration\n\n"
- content += f"::: {client_info.config.module_path}\n"
- content += " options:\n"
- content += " merge_init_into_class: true\n"
- content += " docstring_options:\n"
- content += " ignore_init_summary: true\n"
- content += " heading_level: 3\n\n"
- content += f"::: {client_info.plugin.module_path}\n"
- content += " options:\n"
- content += " heading_level: 3\n\n"
-
- models = client_info.models
-
- # Structures section
- if models.structures:
- content += "## Structures\n\n"
- for struct in sorted(models.structures, key=lambda x: x.name):
- content += f"- [`{struct.name}`](structures/{struct.name}.md)\n\n"
-
- # Errors section
- if models.errors:
- content += "## Errors\n\n"
- for error in sorted(models.errors, key=lambda x: x.name):
- content += f"- [`{error.name}`](errors/{error.name}.md)\n\n"
-
- # Unions section
- if models.unions:
- content += "## Unions\n\n"
- for union in sorted(models.unions, key=lambda x: x.name):
- content += f"- [`{union.name}`](unions/{union.name}.md)\n\n"
-
- # Enums section
- if models.enums:
- content += "## Enums\n\n"
- for enum in sorted(models.enums, key=lambda x: x.name):
- content += f"- [`{enum.name}`](enums/{enum.name}.md)\n\n"
-
- docs_path = f"clients/{self.service_name}/index.md"
- with mkdocs_gen_files.open(docs_path, "w") as f:
- f.write(content)
-
- logger.info(f"Generated index.md")
-
- def _generate_operations(self, operations: list[OperationInfo]) -> None:
- """Generate operation documentation files."""
- for op in operations:
- content = f"# {op.name}\n\n"
-
- # Operation section
- content += "## Operation\n\n"
- content += f"::: {op.module_path}\n"
- content += " options:\n"
- content += " heading_level: 3\n\n"
-
- # Input section
- content += "## Input\n\n"
- content += f"::: {op.input.module_path}\n"
- content += " options:\n"
- content += " heading_level: 3\n\n"
-
- # Output section - handle all stream types
- content += "## Output\n\n"
-
- if op.stream_type == StreamType.INPUT:
- content += "This operation returns an `InputEventStream` for client-to-server streaming.\n\n"
- content += "### Event Stream Structure\n\n"
- content += "#### Input Event Type\n\n"
- content += f"[`{op.event_input_type}`](../unions/{op.event_input_type}.md)\n\n"
- content += "### Initial Response Structure\n\n"
- content += f"::: {op.output.module_path}\n"
- content += " options:\n"
- content += " heading_level: 4\n\n"
- elif op.stream_type == StreamType.OUTPUT:
- content += "This operation returns an `OutputEventStream` for server-to-client streaming.\n\n"
- content += "### Event Stream Structure\n\n"
- content += "#### Output Event Type\n\n"
- if op.event_output_type:
- content += f"[`{op.event_output_type}`](../unions/{op.event_output_type}.md)\n\n"
- content += "### Initial Response Structure\n\n"
- content += f"::: {op.output.module_path}\n"
- content += " options:\n"
- content += " heading_level: 4\n\n"
- elif op.stream_type == StreamType.DUPLEX:
- content += "This operation returns a `DuplexEventStream` for bidirectional streaming.\n\n"
- content += "### Event Stream Structure\n\n"
- content += "#### Input Event Type\n\n"
- if op.event_input_type:
- content += f"[`{op.event_input_type}`](../unions/{op.event_input_type}.md)\n\n"
- content += "#### Output Event Type\n\n"
- if op.event_output_type:
- content += f"[`{op.event_output_type}`](../unions/{op.event_output_type}.md)\n\n"
- content += "### Initial Response Structure\n\n"
- content += f"::: {op.output.module_path}\n"
- content += " options:\n"
- content += " heading_level: 4\n\n"
- else:
- # No streaming
- content += f"::: {op.output.module_path}\n"
- content += " options:\n"
- content += " heading_level: 3\n\n"
-
- docs_path = f"clients/{self.service_name}/operations/{op.name}.md"
- with mkdocs_gen_files.open(docs_path, "w") as f:
- f.write(content)
-
- logger.info(f"Generated {len(operations)} operation files")
-
- def _generate_structures(self, structures: list[StructureInfo]) -> None:
- """Generate structure documentation files."""
- for struct in structures:
- content = f"::: {struct.module_path}\n"
- content += " options:\n"
- content += " heading_level: 1\n"
-
- docs_path = f"clients/{self.service_name}/structures/{struct.name}.md"
- with mkdocs_gen_files.open(docs_path, "w") as f:
- f.write(content)
-
- logger.info(f"Generated {len(structures)} structure files")
-
- def _generate_unions(self, unions: list[UnionInfo]) -> None:
- """Generate union documentation files."""
- for union in unions:
- content = f"::: {union.module_path}\n"
- content += " options:\n"
- content += " heading_level: 1\n\n"
-
- # Add union members
- if union.members:
- content += "## Union Members\n\n"
- for member in union.members:
- content += f"::: {member.module_path}\n"
- content += " options:\n"
- content += " heading_level: 3\n\n"
-
- docs_path = f"clients/{self.service_name}/unions/{union.name}.md"
- with mkdocs_gen_files.open(docs_path, "w") as f:
- f.write(content)
-
- logger.info(f"Generated {len(unions)} union files")
-
- def _generate_enums(self, enums: list[EnumInfo]) -> None:
- """Generate enum documentation files."""
- for enum in enums:
- content = f"::: {enum.module_path}\n"
- content += " options:\n"
- content += " heading_level: 1\n"
- content += " members: true\n"
-
- docs_path = f"clients/{self.service_name}/enums/{enum.name}.md"
- with mkdocs_gen_files.open(docs_path, "w") as f:
- f.write(content)
-
- logger.info(f"Generated {len(enums)} enum files")
-
- def _generate_errors(self, errors: list[ErrorInfo]) -> None:
- """Generate error documentation files."""
- for error in errors:
- content = f"::: {error.module_path}\n"
- content += " options:\n"
- content += " heading_level: 1\n"
- content += " members: true\n"
-
- docs_path = f"clients/{self.service_name}/errors/{error.name}.md"
- with mkdocs_gen_files.open(docs_path, "w") as f:
- f.write(content)
-
- logger.info(f"Generated {len(errors)} error files")
-
-
-def extract_service_name(package_name: str) -> str:
- """Extract service name from client package name."""
- return (
- package_name
- .replace("aws-sdk-", "")
- .replace("-", " ")
- .title()
- )
-
-
-def main() -> int:
- """Main entry point for the documentation generator."""
- repo_root = Path(__file__).parent.parent.absolute()
- output_dir = repo_root / "docs" / "clients"
- clients_dir = repo_root / "clients"
-
- try:
- for client_dir in clients_dir.iterdir():
- if client_dir.is_dir() and client_dir.name != "aws-sdk-python":
- service_name = extract_service_name(client_dir.name)
- logger.info(f"Generating docs for {service_name}")
- generator = DocStubGenerator(client_dir, output_dir / service_name, service_name)
- generator.generate()
-
- return 0
- except Exception as e:
- logger.error(f"Error generating doc stubs: {e}", exc_info=True)
- return 1
-
-
-if __name__ == "__main__":
- sys.exit(main())
-else:
- # When imported by mkdocs-gen-files, run the generation
- main()
diff --git a/scripts/generate_nav.py b/scripts/generate_nav.py
deleted file mode 100644
index cc0f194..0000000
--- a/scripts/generate_nav.py
+++ /dev/null
@@ -1,141 +0,0 @@
-# docs/scripts/generate_nav.py
-"""
-Generate client documentation navigation dynamically.
-
-Executed by mkdocs-gen-files during the build process after generate_doc_stubs.py.
-It discovers client documentation already generated in docs/clients/ by generate_doc_stubs.py
-and generates a dynamic index (clients/index.md) that groups clients alphabetically.
-"""
-
-import logging
-import sys
-
-from collections import defaultdict
-from dataclasses import dataclass
-from pathlib import Path
-
-import mkdocs_gen_files
-
-
-logging.basicConfig(
- level=logging.INFO,
- format="[%(asctime)s - %(name)s - %(levelname)s] %(message)s",
- datefmt="%Y-%m-%d %H:%M:%S",
-)
-logger = logging.getLogger("generate_nav")
-
-
-@dataclass
-class ClientDocInfo:
- """Information about a client's documentation directory."""
-
- name: str
- package_name: str
- docs_path: Path
-
-
-def generate_nav(repo_root: Path) -> int:
- """Generate navigation for clients."""
- try:
- clients = discover_clients(repo_root)
- generate_clients_index(clients)
- build_nav_structure(clients)
- except Exception as e:
- logger.error(f"Error generating navigation: {e}")
- return 1
-
- return 0
-
-
-def discover_clients(repo_root: Path) -> list[ClientDocInfo]:
- """Discover clients from clients packages."""
- clients = []
- clients_dir = repo_root / "clients"
-
- if not clients_dir.exists():
- raise FileNotFoundError(f"No clients directory found at {clients_dir}")
-
- for client_path in sorted(clients_dir.iterdir()):
- if not client_path.is_dir() or client_path.name == "aws-sdk-python":
- continue
-
- # Extract service name from package name (e.g., "aws-sdk-bedrock-runtime" -> "Bedrock Runtime")
- service_name = client_path.name.replace("aws-sdk-", "").replace("-", " ").title()
- package_name = client_path.name
-
- clients.append(ClientDocInfo(
- name=service_name,
- package_name=package_name,
- docs_path=client_path,
- ))
-
- logger.info(f"✅ Discovered client: {service_name}")
-
- return clients
-
-
-def generate_clients_index(clients: list[ClientDocInfo]) -> None:
- """Generate clients/index.md (with alphabetical tabs)."""
- content = "# All Available Clients\n\n"
-
- # Group by first letter
- grouped = defaultdict(list)
- for client in clients:
- letter = client.name[0].upper()
- grouped[letter].append(client)
-
- # Tab for all services
- content += "=== \"All\"\n\n"
- content += " | Service | Package Name |\n"
- content += " |----------|--------------|\n"
- for client in sorted(clients, key=lambda x: x.name):
- content += f" | **[{client.name}]({client.name}/index.md)** | `{client.package_name}` |\n"
- content += "\n"
-
- # Individual letter tabs
- for letter in sorted(grouped.keys()):
- content += f"=== \"{letter}\"\n\n"
- content += " | Service | Package Name |\n"
- content += " |----------|--------------|\n"
- for client in sorted(grouped[letter], key=lambda x: x.name):
- content += f" | **[{client.name}]({client.name}/index.md)** | `{client.package_name}` |\n"
- content += "\n"
-
- with mkdocs_gen_files.open("clients/index.md", "w") as f:
- f.write(content)
-
- logger.info(f"✅ Generated clients index page with {len(clients)} letter tabs")
-
-
-def build_nav_structure(clients: list[ClientDocInfo]) -> None:
- """Build navigation structure for clients."""
- nav_structure = [
- {
- "Getting Started": [
- {"Overview": "index.md"},
- {"Contributing": "contributing/index.md"},
- ]
- },
- {
- "Clients API Reference": [
- "clients/index.md",
- *[f"clients/{client.name}/index.md" for client in sorted(clients, key=lambda x: x.name)]
- ]
- }
- ]
- mkdocs_gen_files.config["nav"] = nav_structure
- logger.info(f"✅ Generated navigation structure for {len(clients)} clients")
-
-
-def main() -> int:
- """Main entry point to generate navigation."""
- repo_root = Path(__file__).parent.parent
-
- return generate_nav(repo_root)
-
-
-if __name__ == "__main__":
- sys.exit(main())
-else:
- # When imported by mkdocs-gen-files, run the generation
- main()
\ No newline at end of file
From 83c2d402fce487f2cecc11a3e68cbff10c5aa91c Mon Sep 17 00:00:00 2001
From: Antonio Aranda <102337110+arandito@users.noreply.github.com>
Date: Wed, 31 Dec 2025 04:00:31 -0500
Subject: [PATCH 4/4] Address PR feedback
---
.gitignore | 3 +-
Makefile | 2 +-
clients/aws-sdk-bedrock-runtime/Makefile | 2 +-
clients/aws-sdk-bedrock-runtime/mkdocs.yml | 46 ++--
.../aws-sdk-bedrock-runtime/pyproject.toml | 2 +-
.../scripts/docs/generate_doc_stubs.py | 223 ++++++++++--------
.../aws-sdk-sagemaker-runtime-http2/Makefile | 2 +-
.../mkdocs.yml | 46 ++--
.../pyproject.toml | 6 +-
.../scripts/docs/generate_doc_stubs.py | 223 ++++++++++--------
clients/aws-sdk-transcribe-streaming/Makefile | 2 +-
.../aws-sdk-transcribe-streaming/mkdocs.yml | 46 ++--
.../pyproject.toml | 2 +-
.../scripts/docs/generate_doc_stubs.py | 223 ++++++++++--------
docs/contributing.md | 2 +-
docs/index.md | 2 +-
docs/javascript/nav-expand.js | 2 +-
docs/stylesheets/extra.css | 2 +-
mkdocs.yml | 7 +-
requirements-docs.in | 2 +-
scripts/docs/generate_all_doc_stubs.py | 21 +-
scripts/docs/generate_nav.py | 2 +-
22 files changed, 485 insertions(+), 383 deletions(-)
diff --git a/.gitignore b/.gitignore
index 2975313..83e7a22 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,8 +11,9 @@ docs/clients/
.venv
venv
-# Python bytecode cache
+# Caches
__pycache__/
+.ruff_cache/
# Dependency lock file for uv
uv.lock
diff --git a/Makefile b/Makefile
index b6b9c76..af29938 100644
--- a/Makefile
+++ b/Makefile
@@ -22,4 +22,4 @@ docs: docs-generate
docs-serve:
@[ -d site ] || $(MAKE) docs
- uv run python -m http.server $(DOCS_PORT) --bind 127.0.0.1 --directory site
\ No newline at end of file
+ uv run python -m http.server $(DOCS_PORT) --bind 127.0.0.1 --directory site
diff --git a/clients/aws-sdk-bedrock-runtime/Makefile b/clients/aws-sdk-bedrock-runtime/Makefile
index 43adf73..3bd0a82 100644
--- a/clients/aws-sdk-bedrock-runtime/Makefile
+++ b/clients/aws-sdk-bedrock-runtime/Makefile
@@ -22,4 +22,4 @@ docs: docs-generate
docs-serve:
@[ -d site ] || $(MAKE) docs
- uv run python -m http.server $(DOCS_PORT) --bind 127.0.0.1 --directory site
\ No newline at end of file
+ uv run python -m http.server $(DOCS_PORT) --bind 127.0.0.1 --directory site
diff --git a/clients/aws-sdk-bedrock-runtime/mkdocs.yml b/clients/aws-sdk-bedrock-runtime/mkdocs.yml
index 9e0024e..993ae7a 100644
--- a/clients/aws-sdk-bedrock-runtime/mkdocs.yml
+++ b/clients/aws-sdk-bedrock-runtime/mkdocs.yml
@@ -44,34 +44,34 @@ theme:
- content.code.copy
plugins:
-- search
-- mkdocstrings:
- handlers:
- python:
- options:
- show_source: false
- show_signature: true
- show_signature_annotations: true
- show_root_heading: true
- show_root_full_path: false
- show_object_full_path: false
- show_symbol_type_heading: true
- show_symbol_type_toc: true
- show_if_no_docstring: true
- show_category_heading: true
- group_by_category: true
- separate_signature: true
- signature_crossrefs: true
- filters:
- - "!^_"
- - "!^deserialize"
- - "!^serialize"
+ - search
+ - mkdocstrings:
+ handlers:
+ python:
+ options:
+ show_source: false
+ show_signature: true
+ show_signature_annotations: true
+ show_root_heading: true
+ show_root_full_path: false
+ show_object_full_path: false
+ show_symbol_type_heading: true
+ show_symbol_type_toc: true
+ show_if_no_docstring: true
+ show_category_heading: true
+ group_by_category: true
+ separate_signature: true
+ signature_crossrefs: true
+ filters:
+ - "!^_"
+ - "!^deserialize"
+ - "!^serialize"
markdown_extensions:
- pymdownx.highlight
- pymdownx.inlinehilite
- pymdownx.snippets:
- check_paths: true
+ check_paths: true
- pymdownx.superfences
- admonition
- def_list
diff --git a/clients/aws-sdk-bedrock-runtime/pyproject.toml b/clients/aws-sdk-bedrock-runtime/pyproject.toml
index 3023696..f4e0c5e 100644
--- a/clients/aws-sdk-bedrock-runtime/pyproject.toml
+++ b/clients/aws-sdk-bedrock-runtime/pyproject.toml
@@ -35,7 +35,7 @@ test = [
]
docs = [
- "mkdocs~=1.6.1",
+ "mkdocs==1.6.1",
"mkdocs-material==9.7.0",
"mkdocstrings[python]==1.0.0"
]
diff --git a/clients/aws-sdk-bedrock-runtime/scripts/docs/generate_doc_stubs.py b/clients/aws-sdk-bedrock-runtime/scripts/docs/generate_doc_stubs.py
index 056708a..e73fd4c 100644
--- a/clients/aws-sdk-bedrock-runtime/scripts/docs/generate_doc_stubs.py
+++ b/clients/aws-sdk-bedrock-runtime/scripts/docs/generate_doc_stubs.py
@@ -37,6 +37,9 @@
)
logger = logging.getLogger("generate_doc_stubs")
+ENUM_BASE_CLASSES = ("StrEnum", "IntEnum")
+ERROR_BASE_CLASSES = ("ServiceError", "ModeledError")
+
class StreamType(Enum):
"""Type of event stream for operations."""
@@ -56,7 +59,7 @@ def description(self) -> str:
return descriptions[self]
-@dataclass(frozen=True)
+@dataclass
class TypeInfo:
"""Information about a type (structure, enum, error, config, plugin)."""
@@ -149,7 +152,7 @@ def _analyze_client_package(self, package_name: str) -> ClientInfo:
package = griffe.load(package_name)
# Ensure required modules exist
- required = ["client", "config", "models"]
+ required = ("client", "config", "models")
missing = [name for name in required if not package.modules.get(name)]
if missing:
raise ValueError(f"Missing required modules in {package_name}: {', '.join(missing)}")
@@ -292,12 +295,12 @@ def _extract_models(self, models_module: Module, operations: list[OperationInfo]
elif member.is_class:
structures.append(TypeInfo(name=member.name, module_path=member.path))
- duplicates = set()
+ duplicates = []
for structure in structures:
if self._is_operation_io_type(structure.name, operations) or self._is_union_member(
structure.name, unions
):
- duplicates.add(structure)
+ duplicates.append(structure)
structures = [struct for struct in structures if struct not in duplicates]
@@ -346,8 +349,7 @@ def _is_enum(self, member: Object | Alias) -> TypeGuard[Class]:
if not isinstance(member, Class):
return False
return any(
- isinstance(base, ExprName) and base.name in ("StrEnum", "IntEnum")
- for base in member.bases
+ isinstance(base, ExprName) and base.name in ENUM_BASE_CLASSES for base in member.bases
)
def _is_error(self, member: Object | Alias) -> TypeGuard[Class]:
@@ -355,8 +357,7 @@ def _is_error(self, member: Object | Alias) -> TypeGuard[Class]:
if not isinstance(member, Class):
return False
return any(
- isinstance(base, ExprName) and base.name in ("ServiceError", "ModeledError")
- for base in member.bases
+ isinstance(base, ExprName) and base.name in ERROR_BASE_CLASSES for base in member.bases
)
def _is_operation_io_type(self, type_name: str, operations: list[OperationInfo]) -> bool:
@@ -378,9 +379,7 @@ def _generate_client_docs(self, client_info: ClientInfo) -> bool:
client_info.models.structures, "structures", "Structure Class"
)
self._generate_type_stubs(client_info.models.errors, "errors", "Error Class")
- self._generate_type_stubs(
- client_info.models.enums, "enums", "Enum Class", ["members: true"]
- )
+ self._generate_type_stubs(client_info.models.enums, "enums", "Enum Class", members=True)
self._generate_union_stubs(client_info.models.unions)
except OSError as e:
logger.error(f"Failed to write documentation files: {e}")
@@ -389,19 +388,19 @@ def _generate_client_docs(self, client_info: ClientInfo) -> bool:
def _generate_index(self, client_info: ClientInfo) -> None:
"""Generate the main index.md file."""
- lines = []
- lines.append(f"# {self.service_name}")
- lines.append("")
- lines.append("## Client")
- lines.append("")
- lines.append(f"::: {client_info.module_path}")
- lines.append(" options:")
- lines.append(" members: false")
- lines.append(" heading_level: 3")
- lines.append(" merge_init_into_class: true")
- lines.append(" docstring_options:")
- lines.append(" ignore_init_summary: true")
- lines.append("")
+ lines = [
+ f"# {self.service_name}",
+ "",
+ "## Client",
+ "",
+ *self._mkdocs_directive(
+ client_info.module_path,
+ members=False,
+ merge_init_into_class=True,
+ ignore_init_summary=True,
+ ),
+ "",
+ ]
# Operations section
if client_info.operations:
@@ -412,18 +411,19 @@ def _generate_index(self, client_info: ClientInfo) -> None:
lines.append("")
# Configuration section
- lines.append("## Configuration")
- lines.append("")
- lines.append(f"::: {client_info.config.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
- lines.append(" merge_init_into_class: true")
- lines.append(" docstring_options:")
- lines.append(" ignore_init_summary: true")
- lines.append("")
- lines.append(f"::: {client_info.plugin.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
+ lines.extend(
+ [
+ "## Configuration",
+ "",
+ *self._mkdocs_directive(
+ client_info.config.module_path,
+ merge_init_into_class=True,
+ ignore_init_summary=True,
+ ),
+ "",
+ *self._mkdocs_directive(client_info.plugin.module_path),
+ ]
+ )
models = client_info.models
@@ -452,56 +452,59 @@ def _generate_index(self, client_info: ClientInfo) -> None:
def _generate_operation_stubs(self, operations: list[OperationInfo]) -> None:
"""Generate operation documentation files."""
for op in operations:
- lines = []
- lines.append(f"# {op.name}")
- lines.append("")
-
- # Operation section
- lines.append("## Operation")
- lines.append("")
- lines.append(f"::: {op.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
- lines.append("")
-
- # Input section
- lines.append("## Input")
- lines.append("")
- lines.append(f"::: {op.input.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
- lines.append("")
-
- # Output section - handle all stream types
- lines.append("## Output")
- lines.append("")
+ lines = [
+ f"# {op.name}",
+ "",
+ "## Operation",
+ "",
+ *self._mkdocs_directive(op.module_path),
+ "",
+ "## Input",
+ "",
+ *self._mkdocs_directive(op.input.module_path),
+ "",
+ "## Output",
+ "",
+ ]
if op.stream_type:
- lines.append(f"This operation returns {op.stream_type.description}.")
- lines.append("")
- lines.append("### Event Stream Structure")
- lines.append("")
+ lines.extend(
+ [
+ f"This operation returns {op.stream_type.description}.",
+ "",
+ "### Event Stream Structure",
+ "",
+ ]
+ )
if op.event_input_type:
- lines.append("#### Input Event Type")
- lines.append("")
- lines.append(f"[`{op.event_input_type}`](../unions/{op.event_input_type}.md)")
- lines.append("")
+ lines.extend(
+ [
+ "#### Input Event Type",
+ "",
+ f"[`{op.event_input_type}`](../unions/{op.event_input_type}.md)",
+ "",
+ ]
+ )
if op.event_output_type:
- lines.append("#### Output Event Type")
- lines.append("")
- lines.append(f"[`{op.event_output_type}`](../unions/{op.event_output_type}.md)")
- lines.append("")
+ lines.extend(
+ [
+ "#### Output Event Type",
+ "",
+ f"[`{op.event_output_type}`](../unions/{op.event_output_type}.md)",
+ "",
+ ]
+ )
- lines.append("### Initial Response Structure")
- lines.append("")
- lines.append(f"::: {op.output.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 4")
+ lines.extend(
+ [
+ "### Initial Response Structure",
+ "",
+ *self._mkdocs_directive(op.output.module_path, heading_level=4),
+ ]
+ )
else:
- lines.append(f"::: {op.output.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
+ lines.extend(self._mkdocs_directive(op.output.module_path))
output_path = self.output_dir / "operations" / f"{op.name}.md"
output_path.parent.mkdir(parents=True, exist_ok=True)
@@ -514,7 +517,7 @@ def _generate_type_stubs(
items: list[TypeInfo],
category: str,
section_title: str,
- extra_options: list[str] | None = None,
+ members: bool | None = None,
) -> None:
"""Generate documentation files for a category of types."""
for item in items:
@@ -522,12 +525,8 @@ def _generate_type_stubs(
f"# {item.name}",
"",
f"## {section_title}",
- f"::: {item.module_path}",
- " options:",
- " heading_level: 3",
+ *self._mkdocs_directive(item.module_path, members=members),
]
- if extra_options:
- lines.extend(f" {opt}" for opt in extra_options)
output_path = self.output_dir / category / f"{item.name}.md"
output_path.parent.mkdir(parents=True, exist_ok=True)
@@ -538,23 +537,20 @@ def _generate_type_stubs(
def _generate_union_stubs(self, unions: list[UnionInfo]) -> None:
"""Generate union documentation files."""
for union in unions:
- lines = []
- lines.append(f"# {union.name}")
- lines.append("")
- lines.append("## Union Type")
- lines.append(f"::: {union.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
- lines.append("")
+ lines = [
+ f"# {union.name}",
+ "",
+ "## Union Type",
+ *self._mkdocs_directive(union.module_path),
+ "",
+ ]
# Add union members
if union.members:
lines.append("## Union Member Types")
for member in union.members:
lines.append("")
- lines.append(f"::: {member.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
+ lines.extend(self._mkdocs_directive(member.module_path))
output_path = self.output_dir / "unions" / f"{union.name}.md"
output_path.parent.mkdir(parents=True, exist_ok=True)
@@ -562,6 +558,41 @@ def _generate_union_stubs(self, unions: list[UnionInfo]) -> None:
logger.info(f"Wrote {len(unions)} union files")
+ def _mkdocs_directive(
+ self,
+ module_path: str,
+ heading_level: int = 3,
+ members: bool | None = None,
+ merge_init_into_class: bool = False,
+ ignore_init_summary: bool = False,
+ ) -> list[str]:
+ """Generate mkdocstrings directive lines for a module path.
+
+ Args:
+ module_path: The Python module path for the directive.
+ heading_level: The heading level for rendered documentation.
+ members: Whether to show members (None omits the option).
+ merge_init_into_class: Whether to merge __init__ docstring into class docs.
+ ignore_init_summary: Whether to ignore init summary in docstrings.
+
+ Returns:
+ List of strings representing the mkdocstrings directive.
+ """
+ lines = [
+ f"::: {module_path}",
+ " options:",
+ f" heading_level: {heading_level}",
+ ]
+ if members is not None:
+ lines.append(f" members: {'true' if members else 'false'}")
+ if merge_init_into_class:
+ lines.append(" merge_init_into_class: true")
+ if ignore_init_summary:
+ lines.append(" docstring_options:")
+ lines.append(" ignore_init_summary: true")
+
+ return lines
+
def _breadcrumb(self, category: str, name: str) -> str:
"""Generate a breadcrumb navigation element."""
separator = " > "
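
To make the refactor above concrete: the new `_mkdocs_directive` helper replaces the repeated hand-built append blocks with a single call. A small sketch of how it behaves, given the class above (`self` is unused by the helper, so no instance is needed; the module path is illustrative, and the indentation of the emitted option lines is assumed to be the standard mkdocstrings layout):

```python
for line in DocStubGenerator._mkdocs_directive(
    None,
    "aws_sdk_bedrock_runtime.client.BedrockRuntimeClient",
    members=False,
    merge_init_into_class=True,
    ignore_init_summary=True,
):
    print(line)
# Expected output, in order:
#   ::: aws_sdk_bedrock_runtime.client.BedrockRuntimeClient
#       options:
#           heading_level: 3
#           members: false
#           merge_init_into_class: true
#           docstring_options:
#               ignore_init_summary: true
```
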
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/Makefile b/clients/aws-sdk-sagemaker-runtime-http2/Makefile
index 44f9df9..70404bb 100644
--- a/clients/aws-sdk-sagemaker-runtime-http2/Makefile
+++ b/clients/aws-sdk-sagemaker-runtime-http2/Makefile
@@ -22,4 +22,4 @@ docs: docs-generate
docs-serve:
@[ -d site ] || $(MAKE) docs
- uv run python -m http.server $(DOCS_PORT) --bind 127.0.0.1 --directory site
\ No newline at end of file
+ uv run python -m http.server $(DOCS_PORT) --bind 127.0.0.1 --directory site
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/mkdocs.yml b/clients/aws-sdk-sagemaker-runtime-http2/mkdocs.yml
index aeb4186..02d1f13 100644
--- a/clients/aws-sdk-sagemaker-runtime-http2/mkdocs.yml
+++ b/clients/aws-sdk-sagemaker-runtime-http2/mkdocs.yml
@@ -44,34 +44,34 @@ theme:
- content.code.copy
plugins:
-- search
-- mkdocstrings:
- handlers:
- python:
- options:
- show_source: false
- show_signature: true
- show_signature_annotations: true
- show_root_heading: true
- show_root_full_path: false
- show_object_full_path: false
- show_symbol_type_heading: true
- show_symbol_type_toc: true
- show_if_no_docstring: true
- show_category_heading: true
- group_by_category: true
- separate_signature: true
- signature_crossrefs: true
- filters:
- - "!^_"
- - "!^deserialize"
- - "!^serialize"
+ - search
+ - mkdocstrings:
+ handlers:
+ python:
+ options:
+ show_source: false
+ show_signature: true
+ show_signature_annotations: true
+ show_root_heading: true
+ show_root_full_path: false
+ show_object_full_path: false
+ show_symbol_type_heading: true
+ show_symbol_type_toc: true
+ show_if_no_docstring: true
+ show_category_heading: true
+ group_by_category: true
+ separate_signature: true
+ signature_crossrefs: true
+ filters:
+ - "!^_"
+ - "!^deserialize"
+ - "!^serialize"
markdown_extensions:
- pymdownx.highlight
- pymdownx.inlinehilite
- pymdownx.snippets:
- check_paths: true
+ check_paths: true
- pymdownx.superfences
- admonition
- def_list
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/pyproject.toml b/clients/aws-sdk-sagemaker-runtime-http2/pyproject.toml
index f0ba4c0..29e24f8 100644
--- a/clients/aws-sdk-sagemaker-runtime-http2/pyproject.toml
+++ b/clients/aws-sdk-sagemaker-runtime-http2/pyproject.toml
@@ -33,8 +33,9 @@ test = [
"pytest>=7.2.0,<8.0.0",
"pytest-asyncio>=0.20.3,<0.21.0"
]
+
docs = [
- "mkdocs~=1.6.1",
+ "mkdocs==1.6.1",
"mkdocs-material==9.7.0",
"mkdocstrings[python]==1.0.0"
]
@@ -62,4 +63,5 @@ ignore = ["F841"]
skip-magic-trailing-comma = true
[tool.pytest.ini_options]
-# python_classes = ["!Test"]
+python_classes = ["!Test"]
+asyncio_mode = "auto"
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/scripts/docs/generate_doc_stubs.py b/clients/aws-sdk-sagemaker-runtime-http2/scripts/docs/generate_doc_stubs.py
index 056708a..e73fd4c 100644
--- a/clients/aws-sdk-sagemaker-runtime-http2/scripts/docs/generate_doc_stubs.py
+++ b/clients/aws-sdk-sagemaker-runtime-http2/scripts/docs/generate_doc_stubs.py
@@ -37,6 +37,9 @@
)
logger = logging.getLogger("generate_doc_stubs")
+ENUM_BASE_CLASSES = ("StrEnum", "IntEnum")
+ERROR_BASE_CLASSES = ("ServiceError", "ModeledError")
+
class StreamType(Enum):
"""Type of event stream for operations."""
@@ -56,7 +59,7 @@ def description(self) -> str:
return descriptions[self]
-@dataclass(frozen=True)
+@dataclass
class TypeInfo:
"""Information about a type (structure, enum, error, config, plugin)."""
@@ -149,7 +152,7 @@ def _analyze_client_package(self, package_name: str) -> ClientInfo:
package = griffe.load(package_name)
# Ensure required modules exist
- required = ["client", "config", "models"]
+ required = ("client", "config", "models")
missing = [name for name in required if not package.modules.get(name)]
if missing:
raise ValueError(f"Missing required modules in {package_name}: {', '.join(missing)}")
@@ -292,12 +295,12 @@ def _extract_models(self, models_module: Module, operations: list[OperationInfo]
elif member.is_class:
structures.append(TypeInfo(name=member.name, module_path=member.path))
- duplicates = set()
+ duplicates = []
for structure in structures:
if self._is_operation_io_type(structure.name, operations) or self._is_union_member(
structure.name, unions
):
- duplicates.add(structure)
+ duplicates.append(structure)
structures = [struct for struct in structures if struct not in duplicates]
@@ -346,8 +349,7 @@ def _is_enum(self, member: Object | Alias) -> TypeGuard[Class]:
if not isinstance(member, Class):
return False
return any(
- isinstance(base, ExprName) and base.name in ("StrEnum", "IntEnum")
- for base in member.bases
+ isinstance(base, ExprName) and base.name in ENUM_BASE_CLASSES for base in member.bases
)
def _is_error(self, member: Object | Alias) -> TypeGuard[Class]:
@@ -355,8 +357,7 @@ def _is_error(self, member: Object | Alias) -> TypeGuard[Class]:
if not isinstance(member, Class):
return False
return any(
- isinstance(base, ExprName) and base.name in ("ServiceError", "ModeledError")
- for base in member.bases
+ isinstance(base, ExprName) and base.name in ERROR_BASE_CLASSES for base in member.bases
)
def _is_operation_io_type(self, type_name: str, operations: list[OperationInfo]) -> bool:
@@ -378,9 +379,7 @@ def _generate_client_docs(self, client_info: ClientInfo) -> bool:
client_info.models.structures, "structures", "Structure Class"
)
self._generate_type_stubs(client_info.models.errors, "errors", "Error Class")
- self._generate_type_stubs(
- client_info.models.enums, "enums", "Enum Class", ["members: true"]
- )
+ self._generate_type_stubs(client_info.models.enums, "enums", "Enum Class", members=True)
self._generate_union_stubs(client_info.models.unions)
except OSError as e:
logger.error(f"Failed to write documentation files: {e}")
@@ -389,19 +388,19 @@ def _generate_client_docs(self, client_info: ClientInfo) -> bool:
def _generate_index(self, client_info: ClientInfo) -> None:
"""Generate the main index.md file."""
- lines = []
- lines.append(f"# {self.service_name}")
- lines.append("")
- lines.append("## Client")
- lines.append("")
- lines.append(f"::: {client_info.module_path}")
- lines.append(" options:")
- lines.append(" members: false")
- lines.append(" heading_level: 3")
- lines.append(" merge_init_into_class: true")
- lines.append(" docstring_options:")
- lines.append(" ignore_init_summary: true")
- lines.append("")
+ lines = [
+ f"# {self.service_name}",
+ "",
+ "## Client",
+ "",
+ *self._mkdocs_directive(
+ client_info.module_path,
+ members=False,
+ merge_init_into_class=True,
+ ignore_init_summary=True,
+ ),
+ "",
+ ]
# Operations section
if client_info.operations:
@@ -412,18 +411,19 @@ def _generate_index(self, client_info: ClientInfo) -> None:
lines.append("")
# Configuration section
- lines.append("## Configuration")
- lines.append("")
- lines.append(f"::: {client_info.config.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
- lines.append(" merge_init_into_class: true")
- lines.append(" docstring_options:")
- lines.append(" ignore_init_summary: true")
- lines.append("")
- lines.append(f"::: {client_info.plugin.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
+ lines.extend(
+ [
+ "## Configuration",
+ "",
+ *self._mkdocs_directive(
+ client_info.config.module_path,
+ merge_init_into_class=True,
+ ignore_init_summary=True,
+ ),
+ "",
+ *self._mkdocs_directive(client_info.plugin.module_path),
+ ]
+ )
models = client_info.models
@@ -452,56 +452,59 @@ def _generate_index(self, client_info: ClientInfo) -> None:
def _generate_operation_stubs(self, operations: list[OperationInfo]) -> None:
"""Generate operation documentation files."""
for op in operations:
- lines = []
- lines.append(f"# {op.name}")
- lines.append("")
-
- # Operation section
- lines.append("## Operation")
- lines.append("")
- lines.append(f"::: {op.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
- lines.append("")
-
- # Input section
- lines.append("## Input")
- lines.append("")
- lines.append(f"::: {op.input.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
- lines.append("")
-
- # Output section - handle all stream types
- lines.append("## Output")
- lines.append("")
+ lines = [
+ f"# {op.name}",
+ "",
+ "## Operation",
+ "",
+ *self._mkdocs_directive(op.module_path),
+ "",
+ "## Input",
+ "",
+ *self._mkdocs_directive(op.input.module_path),
+ "",
+ "## Output",
+ "",
+ ]
if op.stream_type:
- lines.append(f"This operation returns {op.stream_type.description}.")
- lines.append("")
- lines.append("### Event Stream Structure")
- lines.append("")
+ lines.extend(
+ [
+ f"This operation returns {op.stream_type.description}.",
+ "",
+ "### Event Stream Structure",
+ "",
+ ]
+ )
if op.event_input_type:
- lines.append("#### Input Event Type")
- lines.append("")
- lines.append(f"[`{op.event_input_type}`](../unions/{op.event_input_type}.md)")
- lines.append("")
+ lines.extend(
+ [
+ "#### Input Event Type",
+ "",
+ f"[`{op.event_input_type}`](../unions/{op.event_input_type}.md)",
+ "",
+ ]
+ )
if op.event_output_type:
- lines.append("#### Output Event Type")
- lines.append("")
- lines.append(f"[`{op.event_output_type}`](../unions/{op.event_output_type}.md)")
- lines.append("")
+ lines.extend(
+ [
+ "#### Output Event Type",
+ "",
+ f"[`{op.event_output_type}`](../unions/{op.event_output_type}.md)",
+ "",
+ ]
+ )
- lines.append("### Initial Response Structure")
- lines.append("")
- lines.append(f"::: {op.output.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 4")
+ lines.extend(
+ [
+ "### Initial Response Structure",
+ "",
+ *self._mkdocs_directive(op.output.module_path, heading_level=4),
+ ]
+ )
else:
- lines.append(f"::: {op.output.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
+ lines.extend(self._mkdocs_directive(op.output.module_path))
output_path = self.output_dir / "operations" / f"{op.name}.md"
output_path.parent.mkdir(parents=True, exist_ok=True)
@@ -514,7 +517,7 @@ def _generate_type_stubs(
items: list[TypeInfo],
category: str,
section_title: str,
- extra_options: list[str] | None = None,
+ members: bool | None = None,
) -> None:
"""Generate documentation files for a category of types."""
for item in items:
@@ -522,12 +525,8 @@ def _generate_type_stubs(
f"# {item.name}",
"",
f"## {section_title}",
- f"::: {item.module_path}",
- " options:",
- " heading_level: 3",
+ *self._mkdocs_directive(item.module_path, members=members),
]
- if extra_options:
- lines.extend(f" {opt}" for opt in extra_options)
output_path = self.output_dir / category / f"{item.name}.md"
output_path.parent.mkdir(parents=True, exist_ok=True)
@@ -538,23 +537,20 @@ def _generate_type_stubs(
def _generate_union_stubs(self, unions: list[UnionInfo]) -> None:
"""Generate union documentation files."""
for union in unions:
- lines = []
- lines.append(f"# {union.name}")
- lines.append("")
- lines.append("## Union Type")
- lines.append(f"::: {union.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
- lines.append("")
+ lines = [
+ f"# {union.name}",
+ "",
+ "## Union Type",
+ *self._mkdocs_directive(union.module_path),
+ "",
+ ]
# Add union members
if union.members:
lines.append("## Union Member Types")
for member in union.members:
lines.append("")
- lines.append(f"::: {member.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
+ lines.extend(self._mkdocs_directive(member.module_path))
output_path = self.output_dir / "unions" / f"{union.name}.md"
output_path.parent.mkdir(parents=True, exist_ok=True)
@@ -562,6 +558,41 @@ def _generate_union_stubs(self, unions: list[UnionInfo]) -> None:
logger.info(f"Wrote {len(unions)} union files")
+ def _mkdocs_directive(
+ self,
+ module_path: str,
+ heading_level: int = 3,
+ members: bool | None = None,
+ merge_init_into_class: bool = False,
+ ignore_init_summary: bool = False,
+ ) -> list[str]:
+ """Generate mkdocstrings directive lines for a module path.
+
+ Args:
+ module_path: The Python module path for the directive.
+ heading_level: The heading level for rendered documentation.
+ members: Whether to show members (None omits the option).
+ merge_init_into_class: Whether to merge the __init__ docstring into the class docs.
+ ignore_init_summary: Whether to ignore the __init__ summary line in docstrings.
+
+ Returns:
+ List of strings representing the mkdocstrings directive.
+ """
+ lines = [
+ f"::: {module_path}",
+ " options:",
+ f" heading_level: {heading_level}",
+ ]
+ if members is not None:
+ lines.append(f" members: {'true' if members else 'false'}")
+ if merge_init_into_class:
+ lines.append(" merge_init_into_class: true")
+ if ignore_init_summary:
+ lines.append(" docstring_options:")
+ lines.append(" ignore_init_summary: true")
+
+ return lines
+
def _breadcrumb(self, category: str, name: str) -> str:
"""Generate a breadcrumb navigation element."""
separator = " > "
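For reference, here is a minimal standalone sketch of the lines the new `_mkdocs_directive` helper emits for the client call site in `_generate_index`. The module path `aws_sdk_example.client` is a hypothetical placeholder, and the option indentation follows the usual mkdocstrings YAML layout; this is an illustration, not the patched class itself.

# Standalone sketch of the helper added above; "aws_sdk_example.client" is a
# hypothetical module path used only for demonstration.
def mkdocs_directive(
    module_path: str,
    heading_level: int = 3,
    members: bool | None = None,
    merge_init_into_class: bool = False,
    ignore_init_summary: bool = False,
) -> list[str]:
    lines = [
        f"::: {module_path}",
        "    options:",
        f"      heading_level: {heading_level}",
    ]
    if members is not None:
        lines.append(f"      members: {'true' if members else 'false'}")
    if merge_init_into_class:
        lines.append("      merge_init_into_class: true")
    if ignore_init_summary:
        lines.append("      docstring_options:")
        lines.append("        ignore_init_summary: true")
    return lines


if __name__ == "__main__":
    # Mirrors the call used for the Client section of index.md.
    print("\n".join(mkdocs_directive(
        "aws_sdk_example.client",
        members=False,
        merge_init_into_class=True,
        ignore_init_summary=True,
    )))

Running this prints the same directive block that previously took seven separate `lines.append(...)` calls per call site.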
diff --git a/clients/aws-sdk-transcribe-streaming/Makefile b/clients/aws-sdk-transcribe-streaming/Makefile
index 016d87e..90d0af6 100644
--- a/clients/aws-sdk-transcribe-streaming/Makefile
+++ b/clients/aws-sdk-transcribe-streaming/Makefile
@@ -22,4 +22,4 @@ docs: docs-generate
docs-serve:
@[ -d site ] || $(MAKE) docs
- uv run python -m http.server $(DOCS_PORT) --bind 127.0.0.1 --directory site
\ No newline at end of file
+ uv run python -m http.server $(DOCS_PORT) --bind 127.0.0.1 --directory site
diff --git a/clients/aws-sdk-transcribe-streaming/mkdocs.yml b/clients/aws-sdk-transcribe-streaming/mkdocs.yml
index 3787400..2a11b96 100644
--- a/clients/aws-sdk-transcribe-streaming/mkdocs.yml
+++ b/clients/aws-sdk-transcribe-streaming/mkdocs.yml
@@ -44,34 +44,34 @@ theme:
- content.code.copy
plugins:
-- search
-- mkdocstrings:
- handlers:
- python:
- options:
- show_source: false
- show_signature: true
- show_signature_annotations: true
- show_root_heading: true
- show_root_full_path: false
- show_object_full_path: false
- show_symbol_type_heading: true
- show_symbol_type_toc: true
- show_if_no_docstring: true
- show_category_heading: true
- group_by_category: true
- separate_signature: true
- signature_crossrefs: true
- filters:
- - "!^_"
- - "!^deserialize"
- - "!^serialize"
+ - search
+ - mkdocstrings:
+ handlers:
+ python:
+ options:
+ show_source: false
+ show_signature: true
+ show_signature_annotations: true
+ show_root_heading: true
+ show_root_full_path: false
+ show_object_full_path: false
+ show_symbol_type_heading: true
+ show_symbol_type_toc: true
+ show_if_no_docstring: true
+ show_category_heading: true
+ group_by_category: true
+ separate_signature: true
+ signature_crossrefs: true
+ filters:
+ - "!^_"
+ - "!^deserialize"
+ - "!^serialize"
markdown_extensions:
- pymdownx.highlight
- pymdownx.inlinehilite
- pymdownx.snippets:
- check_paths: true
+ check_paths: true
- pymdownx.superfences
- admonition
- def_list
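The plugins block above is only re-indented; both layouts are equivalent YAML, since block-sequence items under a mapping key may sit flush with the key or be indented. A quick check on a trimmed excerpt (assuming PyYAML is available, as mkdocs already depends on it) confirms the two styles load to the same structure.

# Sanity check that the re-indented plugins block parses identically to the
# old flush layout; uses a trimmed excerpt of the config for brevity.
import yaml

flush = """
plugins:
- search
- mkdocstrings:
    handlers:
      python:
        options:
          show_source: false
"""

indented = """
plugins:
  - search
  - mkdocstrings:
      handlers:
        python:
          options:
            show_source: false
"""

assert yaml.safe_load(flush) == yaml.safe_load(indented)
print("Both indentation styles load to the same structure.")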
diff --git a/clients/aws-sdk-transcribe-streaming/pyproject.toml b/clients/aws-sdk-transcribe-streaming/pyproject.toml
index 71b62f6..084de1a 100644
--- a/clients/aws-sdk-transcribe-streaming/pyproject.toml
+++ b/clients/aws-sdk-transcribe-streaming/pyproject.toml
@@ -35,7 +35,7 @@ test = [
]
docs = [
- "mkdocs~=1.6.1",
+ "mkdocs==1.6.1",
"mkdocs-material==9.7.0",
"mkdocstrings[python]==1.0.0"
]
diff --git a/clients/aws-sdk-transcribe-streaming/scripts/docs/generate_doc_stubs.py b/clients/aws-sdk-transcribe-streaming/scripts/docs/generate_doc_stubs.py
index 056708a..e73fd4c 100644
--- a/clients/aws-sdk-transcribe-streaming/scripts/docs/generate_doc_stubs.py
+++ b/clients/aws-sdk-transcribe-streaming/scripts/docs/generate_doc_stubs.py
@@ -37,6 +37,9 @@
)
logger = logging.getLogger("generate_doc_stubs")
+ENUM_BASE_CLASSES = ("StrEnum", "IntEnum")
+ERROR_BASE_CLASSES = ("ServiceError", "ModeledError")
+
class StreamType(Enum):
"""Type of event stream for operations."""
@@ -56,7 +59,7 @@ def description(self) -> str:
return descriptions[self]
-@dataclass(frozen=True)
+@dataclass
class TypeInfo:
"""Information about a type (structure, enum, error, config, plugin)."""
@@ -149,7 +152,7 @@ def _analyze_client_package(self, package_name: str) -> ClientInfo:
package = griffe.load(package_name)
# Ensure required modules exist
- required = ["client", "config", "models"]
+ required = ("client", "config", "models")
missing = [name for name in required if not package.modules.get(name)]
if missing:
raise ValueError(f"Missing required modules in {package_name}: {', '.join(missing)}")
@@ -292,12 +295,12 @@ def _extract_models(self, models_module: Module, operations: list[OperationInfo]
elif member.is_class:
structures.append(TypeInfo(name=member.name, module_path=member.path))
- duplicates = set()
+ duplicates = []
for structure in structures:
if self._is_operation_io_type(structure.name, operations) or self._is_union_member(
structure.name, unions
):
- duplicates.add(structure)
+ duplicates.append(structure)
structures = [struct for struct in structures if struct not in duplicates]
@@ -346,8 +349,7 @@ def _is_enum(self, member: Object | Alias) -> TypeGuard[Class]:
if not isinstance(member, Class):
return False
return any(
- isinstance(base, ExprName) and base.name in ("StrEnum", "IntEnum")
- for base in member.bases
+ isinstance(base, ExprName) and base.name in ENUM_BASE_CLASSES for base in member.bases
)
def _is_error(self, member: Object | Alias) -> TypeGuard[Class]:
@@ -355,8 +357,7 @@ def _is_error(self, member: Object | Alias) -> TypeGuard[Class]:
if not isinstance(member, Class):
return False
return any(
- isinstance(base, ExprName) and base.name in ("ServiceError", "ModeledError")
- for base in member.bases
+ isinstance(base, ExprName) and base.name in ERROR_BASE_CLASSES for base in member.bases
)
def _is_operation_io_type(self, type_name: str, operations: list[OperationInfo]) -> bool:
@@ -378,9 +379,7 @@ def _generate_client_docs(self, client_info: ClientInfo) -> bool:
client_info.models.structures, "structures", "Structure Class"
)
self._generate_type_stubs(client_info.models.errors, "errors", "Error Class")
- self._generate_type_stubs(
- client_info.models.enums, "enums", "Enum Class", ["members: true"]
- )
+ self._generate_type_stubs(client_info.models.enums, "enums", "Enum Class", members=True)
self._generate_union_stubs(client_info.models.unions)
except OSError as e:
logger.error(f"Failed to write documentation files: {e}")
@@ -389,19 +388,19 @@ def _generate_client_docs(self, client_info: ClientInfo) -> bool:
def _generate_index(self, client_info: ClientInfo) -> None:
"""Generate the main index.md file."""
- lines = []
- lines.append(f"# {self.service_name}")
- lines.append("")
- lines.append("## Client")
- lines.append("")
- lines.append(f"::: {client_info.module_path}")
- lines.append(" options:")
- lines.append(" members: false")
- lines.append(" heading_level: 3")
- lines.append(" merge_init_into_class: true")
- lines.append(" docstring_options:")
- lines.append(" ignore_init_summary: true")
- lines.append("")
+ lines = [
+ f"# {self.service_name}",
+ "",
+ "## Client",
+ "",
+ *self._mkdocs_directive(
+ client_info.module_path,
+ members=False,
+ merge_init_into_class=True,
+ ignore_init_summary=True,
+ ),
+ "",
+ ]
# Operations section
if client_info.operations:
@@ -412,18 +411,19 @@ def _generate_index(self, client_info: ClientInfo) -> None:
lines.append("")
# Configuration section
- lines.append("## Configuration")
- lines.append("")
- lines.append(f"::: {client_info.config.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
- lines.append(" merge_init_into_class: true")
- lines.append(" docstring_options:")
- lines.append(" ignore_init_summary: true")
- lines.append("")
- lines.append(f"::: {client_info.plugin.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
+ lines.extend(
+ [
+ "## Configuration",
+ "",
+ *self._mkdocs_directive(
+ client_info.config.module_path,
+ merge_init_into_class=True,
+ ignore_init_summary=True,
+ ),
+ "",
+ *self._mkdocs_directive(client_info.plugin.module_path),
+ ]
+ )
models = client_info.models
@@ -452,56 +452,59 @@ def _generate_index(self, client_info: ClientInfo) -> None:
def _generate_operation_stubs(self, operations: list[OperationInfo]) -> None:
"""Generate operation documentation files."""
for op in operations:
- lines = []
- lines.append(f"# {op.name}")
- lines.append("")
-
- # Operation section
- lines.append("## Operation")
- lines.append("")
- lines.append(f"::: {op.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
- lines.append("")
-
- # Input section
- lines.append("## Input")
- lines.append("")
- lines.append(f"::: {op.input.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
- lines.append("")
-
- # Output section - handle all stream types
- lines.append("## Output")
- lines.append("")
+ lines = [
+ f"# {op.name}",
+ "",
+ "## Operation",
+ "",
+ *self._mkdocs_directive(op.module_path),
+ "",
+ "## Input",
+ "",
+ *self._mkdocs_directive(op.input.module_path),
+ "",
+ "## Output",
+ "",
+ ]
if op.stream_type:
- lines.append(f"This operation returns {op.stream_type.description}.")
- lines.append("")
- lines.append("### Event Stream Structure")
- lines.append("")
+ lines.extend(
+ [
+ f"This operation returns {op.stream_type.description}.",
+ "",
+ "### Event Stream Structure",
+ "",
+ ]
+ )
if op.event_input_type:
- lines.append("#### Input Event Type")
- lines.append("")
- lines.append(f"[`{op.event_input_type}`](../unions/{op.event_input_type}.md)")
- lines.append("")
+ lines.extend(
+ [
+ "#### Input Event Type",
+ "",
+ f"[`{op.event_input_type}`](../unions/{op.event_input_type}.md)",
+ "",
+ ]
+ )
if op.event_output_type:
- lines.append("#### Output Event Type")
- lines.append("")
- lines.append(f"[`{op.event_output_type}`](../unions/{op.event_output_type}.md)")
- lines.append("")
+ lines.extend(
+ [
+ "#### Output Event Type",
+ "",
+ f"[`{op.event_output_type}`](../unions/{op.event_output_type}.md)",
+ "",
+ ]
+ )
- lines.append("### Initial Response Structure")
- lines.append("")
- lines.append(f"::: {op.output.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 4")
+ lines.extend(
+ [
+ "### Initial Response Structure",
+ "",
+ *self._mkdocs_directive(op.output.module_path, heading_level=4),
+ ]
+ )
else:
- lines.append(f"::: {op.output.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
+ lines.extend(self._mkdocs_directive(op.output.module_path))
output_path = self.output_dir / "operations" / f"{op.name}.md"
output_path.parent.mkdir(parents=True, exist_ok=True)
@@ -514,7 +517,7 @@ def _generate_type_stubs(
items: list[TypeInfo],
category: str,
section_title: str,
- extra_options: list[str] | None = None,
+ members: bool | None = None,
) -> None:
"""Generate documentation files for a category of types."""
for item in items:
@@ -522,12 +525,8 @@ def _generate_type_stubs(
f"# {item.name}",
"",
f"## {section_title}",
- f"::: {item.module_path}",
- " options:",
- " heading_level: 3",
+ *self._mkdocs_directive(item.module_path, members=members),
]
- if extra_options:
- lines.extend(f" {opt}" for opt in extra_options)
output_path = self.output_dir / category / f"{item.name}.md"
output_path.parent.mkdir(parents=True, exist_ok=True)
@@ -538,23 +537,20 @@ def _generate_type_stubs(
def _generate_union_stubs(self, unions: list[UnionInfo]) -> None:
"""Generate union documentation files."""
for union in unions:
- lines = []
- lines.append(f"# {union.name}")
- lines.append("")
- lines.append("## Union Type")
- lines.append(f"::: {union.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
- lines.append("")
+ lines = [
+ f"# {union.name}",
+ "",
+ "## Union Type",
+ *self._mkdocs_directive(union.module_path),
+ "",
+ ]
# Add union members
if union.members:
lines.append("## Union Member Types")
for member in union.members:
lines.append("")
- lines.append(f"::: {member.module_path}")
- lines.append(" options:")
- lines.append(" heading_level: 3")
+ lines.extend(self._mkdocs_directive(member.module_path))
output_path = self.output_dir / "unions" / f"{union.name}.md"
output_path.parent.mkdir(parents=True, exist_ok=True)
@@ -562,6 +558,41 @@ def _generate_union_stubs(self, unions: list[UnionInfo]) -> None:
logger.info(f"Wrote {len(unions)} union files")
+ def _mkdocs_directive(
+ self,
+ module_path: str,
+ heading_level: int = 3,
+ members: bool | None = None,
+ merge_init_into_class: bool = False,
+ ignore_init_summary: bool = False,
+ ) -> list[str]:
+ """Generate mkdocstrings directive lines for a module path.
+
+ Args:
+ module_path: The Python module path for the directive.
+ heading_level: The heading level for rendered documentation.
+ members: Whether to show members (None omits the option).
+ merge_init_into_class: Whether to merge the __init__ docstring into the class docs.
+ ignore_init_summary: Whether to ignore the __init__ summary line in docstrings.
+
+ Returns:
+ List of strings representing the mkdocstrings directive.
+ """
+ lines = [
+ f"::: {module_path}",
+ " options:",
+ f" heading_level: {heading_level}",
+ ]
+ if members is not None:
+ lines.append(f" members: {'true' if members else 'false'}")
+ if merge_init_into_class:
+ lines.append(" merge_init_into_class: true")
+ if ignore_init_summary:
+ lines.append(" docstring_options:")
+ lines.append(" ignore_init_summary: true")
+
+ return lines
+
def _breadcrumb(self, category: str, name: str) -> str:
"""Generate a breadcrumb navigation element."""
separator = " > "
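To make the `members=True` path concrete, here is a sketch of the markdown a single enum stub contains after this refactor. The type name `ExampleStatus` and its module path are hypothetical placeholders rather than real transcribe-streaming types, and the option indentation assumes the usual mkdocstrings layout.

# Sketch of one generated enum stub; names are hypothetical placeholders.
def enum_stub(name: str, module_path: str) -> str:
    lines = [
        f"# {name}",
        "",
        "## Enum Class",
        f"::: {module_path}",
        "    options:",
        "      heading_level: 3",
        "      members: true",
    ]
    return "\n".join(lines) + "\n"


if __name__ == "__main__":
    print(enum_stub("ExampleStatus", "aws_sdk_example.models.ExampleStatus"))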
diff --git a/docs/contributing.md b/docs/contributing.md
index e079654..ea38c9b 100644
--- a/docs/contributing.md
+++ b/docs/contributing.md
@@ -1 +1 @@
---8<-- "CONTRIBUTING.md"
\ No newline at end of file
+--8<-- "CONTRIBUTING.md"
diff --git a/docs/index.md b/docs/index.md
index 0f88098..31cb0f2 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -1,3 +1,3 @@
# AWS SDK for Python
---8<-- "README.md:2"
\ No newline at end of file
+--8<-- "README.md:2"
diff --git a/docs/javascript/nav-expand.js b/docs/javascript/nav-expand.js
index 1984a62..54832c2 100644
--- a/docs/javascript/nav-expand.js
+++ b/docs/javascript/nav-expand.js
@@ -26,4 +26,4 @@ function expandClientsNav() {
// Subscribe to Material's document$ observable for instant navigation support
document$.subscribe(expandClientsNav);
// Also run on initial page load
-expandClientsNav();
\ No newline at end of file
+expandClientsNav();
diff --git a/docs/stylesheets/extra.css b/docs/stylesheets/extra.css
index 3df1023..4e555dd 100644
--- a/docs/stylesheets/extra.css
+++ b/docs/stylesheets/extra.css
@@ -11,4 +11,4 @@ p:has(span.breadcrumb) {
/* Light mode - use dark logo */
[data-md-color-scheme="default"] .md-header__button.md-logo img {
content: url('../assets/aws-logo-dark.svg');
-}
\ No newline at end of file
+}
diff --git a/mkdocs.yml b/mkdocs.yml
index 05784d2..5a99545 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -1,12 +1,9 @@
site_name: AWS SDK for Python
site_description: Documentation for AWS SDK for Python Clients
-repo_name: awslabs/aws-sdk-pythons
+repo_name: awslabs/aws-sdk-python
repo_url: https://github.com/awslabs/aws-sdk-python
-exclude_docs: |
- README.md
-
hooks:
- docs/hooks/copyright.py
@@ -98,4 +95,4 @@ extra_css:
validation:
nav:
- omitted_files: ignore
\ No newline at end of file
+ omitted_files: ignore
diff --git a/requirements-docs.in b/requirements-docs.in
index 9d00568..655a4bf 100644
--- a/requirements-docs.in
+++ b/requirements-docs.in
@@ -1,4 +1,4 @@
mkdocs==1.6.1
mkdocstrings[python]==1.0.0
mkdocs-material==9.7.0
-mkdocs-literate-nav==0.6.1
\ No newline at end of file
+mkdocs-literate-nav==0.6.1
diff --git a/scripts/docs/generate_all_doc_stubs.py b/scripts/docs/generate_all_doc_stubs.py
index d16e628..3d45074 100644
--- a/scripts/docs/generate_all_doc_stubs.py
+++ b/scripts/docs/generate_all_doc_stubs.py
@@ -24,6 +24,7 @@
DEFAULT_CPU_COUNT = 1
+
@dataclass
class ClientInfo:
"""Information about a client for documentation generation."""
@@ -76,7 +77,9 @@ def generate_all_doc_stubs(clients: list[ClientInfo], docs_dir: Path) -> bool:
top_level_docs = docs_dir / "clients"
max_workers = os.cpu_count() or DEFAULT_CPU_COUNT
- logger.info(f"Generating doc stubs for {len(clients)} clients using {max_workers} workers...")
+ logger.info(
+ f"Generating doc stubs for {len(clients)} clients using {max_workers} workers..."
+ )
with ProcessPoolExecutor(max_workers=max_workers) as executor:
futures = {
@@ -105,7 +108,9 @@ def generate_all_doc_stubs(clients: list[ClientInfo], docs_dir: Path) -> bool:
return True
-def _generate_doc_stub(client_dir: Path, service_name: str, output_dir: Path) -> tuple[str, bool]:
+def _generate_doc_stub(
+ client_dir: Path, service_name: str, output_dir: Path
+) -> tuple[str, bool]:
"""
Generate doc stubs for a single client.
@@ -154,22 +159,26 @@ def generate_clients_index(clients: list[ClientInfo], docs_dir: Path) -> bool:
grouped[letter].append(client)
# Tab for all services
- lines.append("=== \"All\"")
+ lines.append('=== "All"')
lines.append("")
lines.append(" | Service | Package Name |")
lines.append(" |----------|--------------|")
for client in clients:
- lines.append(f" | **[{client.service_name}]({client.path_name}/index.md)** | `{client.package_name}` |")
+ lines.append(
+ f" | **[{client.service_name}]({client.path_name}/index.md)** | `{client.package_name}` |"
+ )
lines.append("")
# Individual letter tabs
for letter in sorted(grouped.keys()):
- lines.append(f"=== \"{letter}\"")
+ lines.append(f'=== "{letter}"')
lines.append("")
lines.append(" | Service | Package Name |")
lines.append(" |----------|--------------|")
for client in grouped[letter]:
- lines.append(f" | **[{client.service_name}]({client.path_name}/index.md)** | `{client.package_name}` |")
+ lines.append(
+ f" | **[{client.service_name}]({client.path_name}/index.md)** | `{client.package_name}` |"
+ )
lines.append("")
index_path = docs_dir / "clients" / "index.md"
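The reformatted `generate_all_doc_stubs.py` keeps its fan-out over a `ProcessPoolExecutor` sized by `os.cpu_count()`. The sketch below shows that submit-and-collect pattern in isolation, with a placeholder worker standing in for the real per-client generator; it is an illustration of the pattern, not the script itself.

# Minimal sketch of the fan-out/collect pattern; the worker is a placeholder.
import logging
import os
from concurrent.futures import ProcessPoolExecutor, as_completed

logger = logging.getLogger("generate_all_doc_stubs_sketch")
DEFAULT_CPU_COUNT = 1


def generate_one(service_name: str) -> tuple[str, bool]:
    # Placeholder for per-client stub generation; returns (name, success).
    return service_name, True


def generate_all(service_names: list[str]) -> bool:
    max_workers = os.cpu_count() or DEFAULT_CPU_COUNT
    ok = True
    with ProcessPoolExecutor(max_workers=max_workers) as executor:
        futures = {executor.submit(generate_one, name): name for name in service_names}
        for future in as_completed(futures):
            name, success = future.result()
            if not success:
                logger.error(f"Doc stub generation failed for {name}")
                ok = False
    return ok


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    print(generate_all(["Transcribe Streaming", "Example Service"]))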
diff --git a/scripts/docs/generate_nav.py b/scripts/docs/generate_nav.py
index 3cd600b..2de9b4d 100644
--- a/scripts/docs/generate_nav.py
+++ b/scripts/docs/generate_nav.py
@@ -88,4 +88,4 @@ def main() -> int:
if __name__ == "__main__":
- sys.exit(main())
\ No newline at end of file
+ sys.exit(main())