From f6fe0bd22758d940d93508cc4966cf1e5d724529 Mon Sep 17 00:00:00 2001 From: Scott Anderson Date: Thu, 8 Jan 2026 11:57:58 +0000 Subject: [PATCH 1/2] feat: Add async support (PoC) with shared operation builders - Implement AsyncSmartsheet client with async/await support - Add async versions of Sheets.add_rows() and Workspaces.list_workspaces() - Create shared operation builders to eliminate code duplication - Refactor sync classes to use shared operations - Add comprehensive async tests including async framework integration tests - Add documentation: async-quickstart.md, ASYNC_MIGRATION.md, examples - Resolves event loop blocking issues in async frameworks PoC scope: Foundation + add_rows + list_workspaces Dependencies: httpx>=0.27.0, pytest-asyncio>=0.21.0 --- .markdownlint.json | 2 +- ASYNC_MIGRATION.md | 795 +++++++++++++++ README.md | 75 +- docs-source/async-design.md | 964 ++++++++++++++++++ docs-source/async-quickstart.md | 634 ++++++++++++ examples/async_examples.py | 570 +++++++++++ pyproject.toml | 5 +- smartsheet/__init__.py | 1 + smartsheet/async_session.py | 140 +++ smartsheet/async_sheets.py | 135 +++ smartsheet/async_smartsheet.py | 693 +++++++++++++ smartsheet/async_workspaces.py | 152 +++ smartsheet/operations/__init__.py | 60 ++ smartsheet/operations/sheets_operations.py | 112 ++ .../operations/workspaces_operations.py | 161 +++ smartsheet/sheets.py | 14 +- smartsheet/workspaces.py | 49 +- tests/README_ASYNC_TESTS.md | 345 +++++++ tests/test_async_framework_integration.py | 540 ++++++++++ tests/test_async_poc.py | 600 +++++++++++ 20 files changed, 5989 insertions(+), 58 deletions(-) create mode 100644 ASYNC_MIGRATION.md create mode 100644 docs-source/async-design.md create mode 100644 docs-source/async-quickstart.md create mode 100644 examples/async_examples.py create mode 100644 smartsheet/async_session.py create mode 100644 smartsheet/async_sheets.py create mode 100644 smartsheet/async_smartsheet.py create mode 100644 smartsheet/async_workspaces.py create mode 100644 smartsheet/operations/__init__.py create mode 100644 smartsheet/operations/sheets_operations.py create mode 100644 smartsheet/operations/workspaces_operations.py create mode 100644 tests/README_ASYNC_TESTS.md create mode 100644 tests/test_async_framework_integration.py create mode 100644 tests/test_async_poc.py diff --git a/.markdownlint.json b/.markdownlint.json index f0925a33..6c227252 100644 --- a/.markdownlint.json +++ b/.markdownlint.json @@ -3,4 +3,4 @@ "no-duplicate-heading": { "siblings_only": true } -} \ No newline at end of file +} diff --git a/ASYNC_MIGRATION.md b/ASYNC_MIGRATION.md new file mode 100644 index 00000000..98ecd51f --- /dev/null +++ b/ASYNC_MIGRATION.md @@ -0,0 +1,795 @@ +# Async Migration Guide + +This guide helps you migrate from the synchronous Smartsheet Python SDK to the new async implementation. Whether you're building a new async application or converting existing code, this guide provides practical examples and best practices. + +## Table of Contents + +- [Why Migrate to Async?](#why-migrate-to-async) +- [Prerequisites](#prerequisites) +- [Quick Migration Checklist](#quick-migration-checklist) +- [Side-by-Side Comparisons](#side-by-side-comparisons) +- [Common Patterns](#common-patterns) +- [Performance Considerations](#performance-considerations) +- [Troubleshooting](#troubleshooting) +- [Best Practices](#best-practices) + +## Why Migrate to Async? 
+ +Consider migrating to async if your application: + +- **Makes multiple concurrent API calls** - Async can significantly reduce total execution time +- **Integrates with async frameworks** - FastAPI, aiohttp, and other async services +- **Handles high throughput** - Process many sheets or workspaces efficiently +- **Needs non-blocking I/O** - Keep your application responsive during API calls +- **Runs in an async environment** - Already using asyncio event loop + +**Don't migrate if:** + +- Your application makes sequential API calls only +- You're satisfied with current performance +- Your codebase doesn't support async/await patterns +- You're not familiar with async Python programming + +## Prerequisites + +### 1. Install Required Dependencies + +```bash +pip install smartsheet-python-sdk httpx +``` + +### 2. Python Version + +Async support requires Python 3.7 or higher (same as the sync SDK). + +### 3. Understanding Async/Await + +Familiarize yourself with Python's async/await syntax: + +- [Python asyncio documentation](https://docs.python.org/3/library/asyncio.html) +- [Real Python async tutorial](https://realpython.com/async-io-python/) + +## Quick Migration Checklist + +Use this checklist when converting code: + +- [ ] Install `httpx` dependency +- [ ] Change `import smartsheet` to `from smartsheet import AsyncSmartsheet` +- [ ] Change `Smartsheet()` to `AsyncSmartsheet()` +- [ ] Add `async` keyword to function definitions +- [ ] Add `await` before all API method calls +- [ ] Change `with` to `async with` for context managers +- [ ] Replace `client.close()` with `await client.aclose()` +- [ ] Wrap execution in `asyncio.run()` if running from main script +- [ ] Update error handling if needed +- [ ] Test thoroughly with your use cases + +## Side-by-Side Comparisons + +### Basic Client Usage + +#### Synchronous (Before) + +```python +import smartsheet + +# Create client +client = smartsheet.Smartsheet(access_token="your_token") + +# Use client +workspaces = client.Workspaces.list_workspaces() +print(f"Found {len(workspaces.data)} workspaces") + +# No explicit cleanup needed +``` + +#### Asynchronous (After) + +```python +import asyncio +from smartsheet import AsyncSmartsheet + +async def main(): + # Create client with context manager + async with AsyncSmartsheet(access_token="your_token") as client: + # Use client with await + workspaces = await client.Workspaces.list_workspaces() + print(f"Found {len(workspaces.data)} workspaces") + # Automatic cleanup + +# Run async function +asyncio.run(main()) +``` + +### Listing Workspaces + +#### Synchronous (Before) + +```python +import smartsheet + +client = smartsheet.Smartsheet(access_token="your_token") + +# Legacy pagination +result = client.Workspaces.list_workspaces(page_size=100, page=1) + +for workspace in result.data: + print(f"{workspace.name}: {workspace.id}") +``` + +#### Asynchronous (After) + +```python +import asyncio +from smartsheet import AsyncSmartsheet + +async def main(): + async with AsyncSmartsheet(access_token="your_token") as client: + # Token pagination (recommended) + result = await client.Workspaces.list_workspaces( + pagination_type='token', + max_items=100 + ) + + for workspace in result.data: + print(f"{workspace.name}: {workspace.id}") + +asyncio.run(main()) +``` + +### Adding Rows to a Sheet + +#### Synchronous (Before) + +```python +import smartsheet +from smartsheet.models import Row, Cell + +client = smartsheet.Smartsheet(access_token="your_token") + +# Create row +row = Row() +row.to_bottom = True 
+row.cells = [ + Cell({'column_id': 123456, 'value': 'Task Name'}), + Cell({'column_id': 789012, 'value': 'In Progress'}) +] + +# Add row +result = client.Sheets.add_rows(sheet_id=111222, list_of_rows=[row]) + +if result.message == 'SUCCESS': + print(f"Added row {result.data[0].id}") +``` + +#### Asynchronous (After) + +```python +import asyncio +from smartsheet import AsyncSmartsheet +from smartsheet.models import Row, Cell + +async def main(): + async with AsyncSmartsheet(access_token="your_token") as client: + # Create row (same as sync) + row = Row() + row.to_bottom = True + row.cells = [ + Cell({'column_id': 123456, 'value': 'Task Name'}), + Cell({'column_id': 789012, 'value': 'In Progress'}) + ] + + # Add row with await + result = await client.Sheets.add_rows(sheet_id=111222, list_of_rows=[row]) + + if result.message == 'SUCCESS': + print(f"Added row {result.data[0].id}") + +asyncio.run(main()) +``` + +### Error Handling + +#### Synchronous (Before) + +```python +import smartsheet +from smartsheet.models import Error + +client = smartsheet.Smartsheet(access_token="your_token") + +result = client.Workspaces.list_workspaces() + +if isinstance(result, Error): + print(f"Error: {result.result.message}") +else: + print(f"Success: {len(result.data)} workspaces") +``` + +#### Asynchronous (After) + +```python +import asyncio +from smartsheet import AsyncSmartsheet +from smartsheet.models import Error + +async def main(): + async with AsyncSmartsheet(access_token="your_token") as client: + result = await client.Workspaces.list_workspaces() + + if isinstance(result, Error): + print(f"Error: {result.result.message}") + else: + print(f"Success: {len(result.data)} workspaces") + +asyncio.run(main()) +``` + +### Exception-Based Error Handling + +#### Synchronous (Before) + +```python +import smartsheet +from smartsheet.exceptions import ApiError + +client = smartsheet.Smartsheet(access_token="your_token") +client.errors_as_exceptions(True) + +try: + result = client.Workspaces.list_workspaces() + print(f"Found {len(result.data)} workspaces") +except ApiError as e: + print(f"API Error: {e}") +``` + +#### Asynchronous (After) + +```python +import asyncio +from smartsheet import AsyncSmartsheet +from smartsheet.exceptions import ApiError + +async def main(): + async with AsyncSmartsheet(access_token="your_token") as client: + client.errors_as_exceptions(True) + + try: + result = await client.Workspaces.list_workspaces() + print(f"Found {len(result.data)} workspaces") + except ApiError as e: + print(f"API Error: {e}") + +asyncio.run(main()) +``` + +## Common Patterns + +### Pattern 1: Sequential Operations + +When operations must happen in sequence: + +#### Synchronous (Before) + +```python +import smartsheet + +client = smartsheet.Smartsheet(access_token="your_token") + +# Sequential operations +workspaces = client.Workspaces.list_workspaces() +print(f"Step 1: Found {len(workspaces.data)} workspaces") + +result = client.Sheets.add_rows(sheet_id, rows) +print(f"Step 2: Added {len(result.data)} rows") +``` + +#### Asynchronous (After) + +```python +import asyncio +from smartsheet import AsyncSmartsheet + +async def main(): + async with AsyncSmartsheet(access_token="your_token") as client: + # Sequential operations (still use await) + workspaces = await client.Workspaces.list_workspaces() + print(f"Step 1: Found {len(workspaces.data)} workspaces") + + result = await client.Sheets.add_rows(sheet_id, rows) + print(f"Step 2: Added {len(result.data)} rows") + +asyncio.run(main()) +``` + +### Pattern 2: 
Concurrent Operations (New Capability!) + +One of the main benefits of async - operations that can run concurrently: + +#### Synchronous (Before) - Sequential Only + +```python +import smartsheet +import time + +client = smartsheet.Smartsheet(access_token="your_token") + +start = time.time() + +# Must run sequentially +result1 = client.Sheets.add_rows(sheet_id_1, rows) +result2 = client.Sheets.add_rows(sheet_id_2, rows) +result3 = client.Sheets.add_rows(sheet_id_3, rows) + +elapsed = time.time() - start +print(f"Completed in {elapsed:.2f} seconds") +``` + +#### Asynchronous (After) - Concurrent + +```python +import asyncio +import time +from smartsheet import AsyncSmartsheet + +async def main(): + async with AsyncSmartsheet(access_token="your_token") as client: + start = time.time() + + # Run concurrently - much faster! + results = await asyncio.gather( + client.Sheets.add_rows(sheet_id_1, rows), + client.Sheets.add_rows(sheet_id_2, rows), + client.Sheets.add_rows(sheet_id_3, rows) + ) + + elapsed = time.time() - start + print(f"Completed in {elapsed:.2f} seconds") # Significantly faster! + +asyncio.run(main()) +``` + +### Pattern 3: Processing Multiple Items + +#### Synchronous (Before) + +```python +import smartsheet + +client = smartsheet.Smartsheet(access_token="your_token") + +sheet_ids = [111, 222, 333, 444, 555] + +for sheet_id in sheet_ids: + result = client.Sheets.add_rows(sheet_id, rows) + print(f"Processed sheet {sheet_id}") +``` + +#### Asynchronous (After) - With Concurrency + +```python +import asyncio +from smartsheet import AsyncSmartsheet + +async def main(): + async with AsyncSmartsheet(access_token="your_token") as client: + sheet_ids = [111, 222, 333, 444, 555] + + # Process all sheets concurrently + tasks = [ + client.Sheets.add_rows(sheet_id, rows) + for sheet_id in sheet_ids + ] + + results = await asyncio.gather(*tasks) + + for sheet_id, result in zip(sheet_ids, results): + print(f"Processed sheet {sheet_id}") + +asyncio.run(main()) +``` + +### Pattern 4: Reusable Client in a Class + +#### Synchronous (Before) + +```python +import smartsheet + +class SmartsheetService: + def __init__(self, access_token): + self.client = smartsheet.Smartsheet(access_token=access_token) + + def get_workspaces(self): + return self.client.Workspaces.list_workspaces() + + def add_row(self, sheet_id, row): + return self.client.Sheets.add_rows(sheet_id, [row]) + +# Usage +service = SmartsheetService("your_token") +workspaces = service.get_workspaces() +``` + +#### Asynchronous (After) + +```python +import asyncio +from smartsheet import AsyncSmartsheet + +class AsyncSmartsheetService: + def __init__(self, access_token): + self.access_token = access_token + self.client = None + + async def __aenter__(self): + self.client = AsyncSmartsheet(access_token=self.access_token) + await self.client._ensure_session() + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + if self.client: + await self.client.aclose() + return False + + async def get_workspaces(self): + return await self.client.Workspaces.list_workspaces() + + async def add_row(self, sheet_id, row): + return await self.client.Sheets.add_rows(sheet_id, [row]) + +# Usage +async def main(): + async with AsyncSmartsheetService("your_token") as service: + workspaces = await service.get_workspaces() + print(f"Found {len(workspaces.data)} workspaces") + +asyncio.run(main()) +``` + +### Pattern 5: Async Service Integration + +#### New Pattern - Async Only + +```python +from smartsheet import AsyncSmartsheet +from 
smartsheet.models import Row, Cell + +# Example async service pattern +# (Works with any async framework) + +# Global client instance +smartsheet_client = None + +async def get_client(): + """Get or create the Smartsheet client.""" + global smartsheet_client + if smartsheet_client is None: + smartsheet_client = AsyncSmartsheet(access_token="your_token") + return smartsheet_client + +@mcp.tool() +async def list_workspaces() -> list[dict]: + """List all Smartsheet workspaces.""" + client = await get_client() + result = await client.Workspaces.list_workspaces( + pagination_type='token', + max_items=100 + ) + + return [ + {"id": ws.id, "name": ws.name, "access_level": ws.access_level} + for ws in result.data + ] + +@mcp.tool() +async def add_sheet_row(sheet_id: int, column_values: dict[int, str]) -> str: + """Add a row to a Smartsheet.""" + client = await get_client() + + row = Row() + row.to_bottom = True + row.cells = [ + Cell({'column_id': col_id, 'value': value}) + for col_id, value in column_values.items() + ] + + result = await client.Sheets.add_rows(sheet_id, [row]) + + if result.message == 'SUCCESS': + return f"Added row {result.data[0].id}" + else: + return f"Error: {result.message}" + +@mcp.on_shutdown +async def cleanup(): + """Clean up resources on shutdown.""" + global smartsheet_client + if smartsheet_client: + await smartsheet_client.aclose() +``` + +## Performance Considerations + +### When Async Provides Benefits + +**Significant Performance Gains:** + +```python +# Adding rows to 10 sheets +# Sync: ~10 seconds (sequential) +# Async: ~1-2 seconds (concurrent) + +async def concurrent_example(): + async with AsyncSmartsheet(access_token="token") as client: + tasks = [client.Sheets.add_rows(sid, rows) for sid in sheet_ids] + results = await asyncio.gather(*tasks) # Much faster! +``` + +**Minimal Performance Difference:** + +```python +# Single operation +# Sync: ~1 second +# Async: ~1 second (no concurrency benefit) + +async def single_operation(): + async with AsyncSmartsheet(access_token="token") as client: + result = await client.Workspaces.list_workspaces() # Same speed as sync +``` + +### Overhead Considerations + +- **Client initialization**: Slightly more overhead for async client setup +- **Single operations**: No performance benefit over sync +- **Memory usage**: Async may use slightly more memory for event loop +- **Complexity**: Async code is more complex to write and debug + +### Optimization Tips + +1. **Batch concurrent operations** - Group related API calls together +2. **Limit concurrency** - Don't overwhelm the API with too many concurrent requests +3. **Reuse client instances** - Create one client and reuse it +4. **Use token pagination** - More efficient than legacy pagination +5. **Monitor rate limits** - The client handles retries, but be aware of limits + +## Troubleshooting + +### Issue: "RuntimeError: Event loop is closed" + +**Cause**: Trying to run async code in an environment with a closed event loop. + +**Solution**: + +```python +# In scripts, use asyncio.run() +asyncio.run(main()) + +# In Jupyter/IPython, await directly +await main() + +# In existing async context, just await +result = await client.Workspaces.list_workspaces() +``` + +### Issue: "ModuleNotFoundError: No module named 'httpx'" + +**Cause**: Missing httpx dependency. + +**Solution**: + +```bash +pip install httpx +``` + +### Issue: "AttributeError: 'AsyncSmartsheet' object has no attribute 'X'" + +**Cause**: Trying to use an API method that hasn't been implemented in async yet. 
+ +**Solution**: Check the [PoC scope](docs-source/async-quickstart.md#current-status-proof-of-concept-poc). Currently only `add_rows` and `list_workspaces` are supported. Use the sync client for other operations or request the feature. + +### Issue: Slower than expected performance + +**Cause**: Not using concurrent operations, or hitting rate limits. + +**Solution**: + +```python +# Bad - sequential (no benefit) +for sheet_id in sheet_ids: + result = await client.Sheets.add_rows(sheet_id, rows) + +# Good - concurrent (much faster) +tasks = [client.Sheets.add_rows(sid, rows) for sid in sheet_ids] +results = await asyncio.gather(*tasks) +``` + +### Issue: "Session is None" errors + +**Cause**: Not properly initializing the client. + +**Solution**: Always use `async with` or manually call `await client._ensure_session()`: + +```python +# Recommended +async with AsyncSmartsheet(access_token="token") as client: + result = await client.Workspaces.list_workspaces() + +# Or manual initialization +client = AsyncSmartsheet(access_token="token") +await client._ensure_session() +try: + result = await client.Workspaces.list_workspaces() +finally: + await client.aclose() +``` + +### Issue: Rate limiting errors + +**Cause**: Too many concurrent requests. + +**Solution**: Limit concurrency or increase retry time: + +```python +# Limit concurrent operations +from asyncio import Semaphore + +async def limited_concurrency(): + async with AsyncSmartsheet(access_token="token") as client: + semaphore = Semaphore(5) # Max 5 concurrent operations + + async def add_with_limit(sheet_id): + async with semaphore: + return await client.Sheets.add_rows(sheet_id, rows) + + tasks = [add_with_limit(sid) for sid in sheet_ids] + results = await asyncio.gather(*tasks) + +# Or increase retry time +client = AsyncSmartsheet( + access_token="token", + max_retry_time=120 # Retry for up to 2 minutes +) +``` + +## Best Practices + +### 1. Always Use Context Managers + +```python +# Good +async with AsyncSmartsheet(access_token="token") as client: + result = await client.Workspaces.list_workspaces() + +# Avoid +client = AsyncSmartsheet(access_token="token") +result = await client.Workspaces.list_workspaces() +# Easy to forget: await client.aclose() +``` + +### 2. Reuse Client Instances + +```python +# Good - one client for multiple operations +async with AsyncSmartsheet(access_token="token") as client: + ws = await client.Workspaces.list_workspaces() + r1 = await client.Sheets.add_rows(sheet1, rows) + r2 = await client.Sheets.add_rows(sheet2, rows) + +# Avoid - creating multiple clients +async with AsyncSmartsheet(access_token="token") as c1: + ws = await c1.Workspaces.list_workspaces() +async with AsyncSmartsheet(access_token="token") as c2: + r1 = await c2.Sheets.add_rows(sheet1, rows) +``` + +### 3. Use Concurrent Operations Wisely + +```python +# Good - reasonable concurrency +tasks = [client.Sheets.add_rows(sid, rows) for sid in sheet_ids[:10]] +results = await asyncio.gather(*tasks) + +# Be careful - may hit rate limits +tasks = [client.Sheets.add_rows(sid, rows) for sid in sheet_ids[:1000]] +results = await asyncio.gather(*tasks) # Too many concurrent requests +``` + +### 4. 
Handle Errors Appropriately + +```python +# For fine-grained error handling +result = await client.Workspaces.list_workspaces() +if isinstance(result, Error): + # Handle error + pass + +# For exception-based flow +client.errors_as_exceptions(True) +try: + result = await client.Workspaces.list_workspaces() +except ApiError as e: + # Handle exception + pass +``` + +### 5. Test Thoroughly + +- Test with real API calls in a development environment +- Test error scenarios (invalid IDs, rate limits, etc.) +- Test concurrent operations with your actual workload +- Monitor performance and adjust concurrency as needed + +### 6. Document Async Functions + +```python +async def process_sheets(sheet_ids: list[int]) -> list[Result]: + """Process multiple sheets concurrently. + + Args: + sheet_ids: List of sheet IDs to process + + Returns: + List of results from each sheet operation + + Note: + This is an async function and must be awaited. + """ + async with AsyncSmartsheet(access_token="token") as client: + tasks = [client.Sheets.add_rows(sid, rows) for sid in sheet_ids] + return await asyncio.gather(*tasks) +``` + +## Migration Strategy + +### Incremental Migration + +You can use both sync and async clients in the same application: + +```python +import smartsheet +from smartsheet import AsyncSmartsheet +import asyncio + +# Sync client for existing code +sync_client = smartsheet.Smartsheet(access_token="token") +workspaces = sync_client.Workspaces.list_workspaces() + +# Async client for new code +async def new_feature(): + async with AsyncSmartsheet(access_token="token") as async_client: + result = await async_client.Sheets.add_rows(sheet_id, rows) + return result + +# Call async from sync context +result = asyncio.run(new_feature()) +``` + +### Full Migration Steps + +1. **Start with non-critical code** - Migrate less critical features first +2. **Test thoroughly** - Ensure async version works correctly +3. **Measure performance** - Verify you're getting expected benefits +4. **Migrate incrementally** - Don't try to migrate everything at once +5. **Update documentation** - Document which parts are async +6. **Train team** - Ensure team understands async patterns + +## Additional Resources + +- [Async Quick Start Guide](docs-source/async-quickstart.md) +- [Example Code](examples/async_examples.py) +- [Python asyncio Documentation](https://docs.python.org/3/library/asyncio.html) +- [Real Python Async Tutorial](https://realpython.com/async-io-python/) +- [GitHub Issues](https://github.com/smartsheet/smartsheet-python-sdk/issues) + +## Getting Help + +If you encounter issues during migration: + +1. Check this guide and the [troubleshooting section](#troubleshooting) +2. Review the [async quickstart guide](docs-source/async-quickstart.md) +3. Look at the [example code](examples/async_examples.py) +4. Search [GitHub Issues](https://github.com/smartsheet/smartsheet-python-sdk/issues) +5. Create a new issue with details about your use case + +We welcome feedback on the async implementation and this migration guide! 
diff --git a/README.md b/README.md index f5970565..51fcf7eb 100644 --- a/README.md +++ b/README.md @@ -30,7 +30,7 @@ To get started with the Smartsheet Python SDK: import smartsheet -smart = smartsheet.Smartsheet() # Create a Smartsheet client +smart = smartsheet.Smartsheet() # Create a Smartsheet client response = smart.Sheets.list_sheets() # Call the list_sheets() function and store the response object sheetId = response.data[0].id # Get the ID of the first sheet in the response @@ -39,6 +39,69 @@ sheet = smart.Sheets.get_sheet(sheetId) # Load the sheet by using its ID print(f"The sheet {sheet.name} has {sheet.total_row_count} rows") # Print information about the sheet ``` +## Async Support (Beta) + +The SDK now includes **beta async support** for non-blocking I/O operations, perfect for async frameworks and high-throughput applications. The async client uses `httpx` for async HTTP operations and provides the same familiar API with async/await patterns. + +### Quick Example + +```python +import asyncio +from smartsheet import AsyncSmartsheet + +async def main(): + async with AsyncSmartsheet(access_token="your_token_here") as client: + # List workspaces asynchronously + workspaces = await client.Workspaces.list_workspaces() + print(f"Found {len(workspaces.data)} workspaces") + + # Add rows to a sheet + from smartsheet.models import Row, Cell + row = Row() + row.to_bottom = True + row.cells = [Cell({'column_id': 123456, 'value': 'New Value'})] + result = await client.Sheets.add_rows(sheet_id, [row]) + print(f"Added {len(result.data)} rows") + +asyncio.run(main()) +``` + +### Installation + +Async support requires the `httpx` library: + +```bash +pip install smartsheet-python-sdk httpx +``` + +### Current PoC Scope + +This is a **proof-of-concept** implementation with limited scope: + +- ✅ **Sheets.add_rows()** - Add rows to a sheet +- ✅ **Workspaces.list_workspaces()** - List workspaces with token pagination +- ✅ Concurrent operations support +- ✅ Automatic retry with exponential backoff +- ✅ Context manager for resource management + +Additional async operations will be added based on user feedback. + +### Key Features + +- **Non-blocking I/O**: Make multiple API calls concurrently +- **Async Framework Integration**: Perfect for async services and event loop-based applications +- **Familiar API**: Same models and patterns as the sync client +- **Automatic Cleanup**: Use `async with` for automatic resource management + +### Learn More + +- **[Async Quick Start Guide](docs-source/async-quickstart.md)** - Comprehensive guide with examples +- **[Async Migration Guide](ASYNC_MIGRATION.md)** - How to migrate from sync to async +- **[Example Code](examples/async_examples.py)** - Runnable examples +- **[Design Document](docs-source/async-design.md)** - Technical design details + +We welcome feedback on the async implementation! Please share your use cases and requirements via [GitHub Issues](https://github.com/smartsheet/smartsheet-python-sdk/issues). 
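+
+### Concurrent Requests
+
+As a quick illustration of the non-blocking I/O noted above, several `add_rows` calls can be awaited together with `asyncio.gather`. This is a minimal sketch: the access token and sheet IDs are placeholders, and `rows` is assumed to be a list of `Row` objects built as in the quick example.
+
+```python
+import asyncio
+from smartsheet import AsyncSmartsheet
+
+async def add_to_many_sheets(rows):
+    async with AsyncSmartsheet(access_token="your_token_here") as client:
+        sheet_ids = [111, 222, 333]  # placeholder sheet IDs
+        # Start all add_rows calls at once and wait for every result
+        results = await asyncio.gather(
+            *[client.Sheets.add_rows(sid, rows) for sid in sheet_ids]
+        )
+        return results
+```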
+ ## Documentation Use the following resources to learn more about the SDK capabilities: @@ -64,8 +127,8 @@ Review the [Developer Program Agreement](https://www.smartsheet.com/legal/develo We would like to thank the following people for their contributions to this project: -* Tim Wells - [timwellswa](https://github.com/timwellswa) -* Scott Wimer - [happybob007](https://github.com/happybob007) -* Steve Weil - [seweil](https://github.com/seweil) -* Kevin Fansler - [kfansler](https://github.com/kfansler) -* Nathan Armstrong - [armstnp](https://github.com/armstnp) +- Tim Wells - [timwellswa](https://github.com/timwellswa) +- Scott Wimer - [happybob007](https://github.com/happybob007) +- Steve Weil - [seweil](https://github.com/seweil) +- Kevin Fansler - [kfansler](https://github.com/kfansler) +- Nathan Armstrong - [armstnp](https://github.com/armstnp) diff --git a/docs-source/async-design.md b/docs-source/async-design.md new file mode 100644 index 00000000..1b661574 --- /dev/null +++ b/docs-source/async-design.md @@ -0,0 +1,964 @@ +# Async Support Design for Smartsheet Python SDK - Alternative Approaches + +## Executive Summary + +This document explores alternative architectures for adding asynchronous (async) support to the Smartsheet Python SDK with **minimal code duplication** and **minimal overall change**. After analyzing Python's async/await constraints and evaluating multiple approaches, this document provides a detailed comparison to help choose the best path forward. + +## Current Architecture Analysis + +### Core Components + +#### 1. Main Client ([`smartsheet.py`](smartsheet/smartsheet.py:118)) + +- **[`Smartsheet`](smartsheet/smartsheet.py:118)** class: Primary entry point for SDK +- Uses synchronous `requests` library for HTTP communication +- Manages session via [`pinned_session()`](smartsheet/session.py:51) +- Implements retry logic with exponential backoff in [`request_with_retry()`](smartsheet/smartsheet.py:383) +- Lazy-loads API module classes via `__getattr__` + +#### 2. Session Management ([`session.py`](smartsheet/session.py:51)) + +- **[`pinned_session()`](smartsheet/session.py:51)**: Creates configured `requests.Session` +- Custom SSL adapter ([`_SSLAdapter`](smartsheet/session.py:32)) for security +- Built-in retry mechanism using `urllib3.Retry` +- Token redaction hook for security + +#### 3. HTTP Request Flow + +```mermaid +graph TD + A[API Method Call] --> B[prepare_request] + B --> C[request] + C --> D[request_with_retry] + D --> E[_request] + E --> F[session.send] + F --> G[OperationResult] + G --> H[native conversion] + H --> I[Return typed object] +``` + +**Key Methods:** + +- [`prepare_request()`](smartsheet/smartsheet.py:425): Builds HTTP request with headers, auth, params +- [`request()`](smartsheet/smartsheet.py:278): Validates and converts response to native objects +- [`request_with_retry()`](smartsheet/smartsheet.py:383): Implements retry logic with backoff +- [`_request()`](smartsheet/smartsheet.py:355): Low-level HTTP execution + +#### 4. API Modules Pattern + +All API modules ([`sheets.py`](smartsheet/sheets.py), [`users.py`](smartsheet/users.py), etc.) follow identical pattern: + +```python +class Sheets: + def __init__(self, smartsheet_obj): + self._base = smartsheet_obj + + def get_sheet(self, sheet_id, **kwargs): + _op = fresh_operation("get_sheet") + _op["method"] = "GET" + _op["path"] = f"/sheets/{sheet_id}" + # ... 
configure operation + + prepped_request = self._base.prepare_request(_op) + response = self._base.request(prepped_request, expected, _op) + return response +``` + +### Synchronous Patterns Identified + +1. **Blocking HTTP calls**: `self._session.send()` blocks event loop +2. **Synchronous retry logic**: `time.sleep()` in retry backoff +3. **File I/O operations**: Synchronous file reading for attachments/uploads +4. **No async context managers**: Session lifecycle not async-aware + +### Dependencies + +From [`pyproject.toml`](pyproject.toml:13-19): + +- `requests`: Synchronous HTTP library +- `requests-toolbelt`: Request utilities +- `six`: Python 2/3 compatibility +- `certifi`: SSL certificates +- `python-dateutil`: Date parsing + +## Python Async/Await Constraints + +### Fundamental Limitations + +1. **Methods are either sync OR async, not both** + - A method defined with `def` is synchronous + - A method defined with `async def` is asynchronous + - You cannot conditionally make a method async at runtime + +2. **Async methods must be awaited** + - `async def` methods return coroutines that must be awaited + - Sync code cannot await async methods + - Async code cannot directly call sync methods without blocking + +3. **Context managers are different** + - Sync: `with obj:` uses `__enter__` and `__exit__` + - Async: `async with obj:` uses `__aenter__` and `__aexit__` + - These are separate protocols + +4. **No runtime async/sync switching** + - Cannot use `if async_mode: await method()` else `method()` + - The `await` keyword is syntax, not a runtime operation + +## Alternative Approaches Evaluated + +### Option A: Single Class with `async_mode` Parameter ❌ + +**Concept**: Pass `async_mode=True` to constructor and switch behavior internally. + +```python +# Hypothetical usage +client = Smartsheet(async_mode=True) +sheet = client.Sheets.get_sheet(123) # How to await this? +``` + +**Implementation Attempt**: + +```python +class Smartsheet: + def __init__(self, async_mode=False): + self.async_mode = async_mode + if async_mode: + self._session = httpx.AsyncClient() + else: + self._session = requests.Session() + + def request(self, prepped_request, expected, operation): + if self.async_mode: + # ERROR: Cannot await in a non-async method + return await self._async_request(prepped_request, expected, operation) + else: + return self._sync_request(prepped_request, expected, operation) +``` + +**Why This Fails**: + +1. **Cannot conditionally await**: The `request()` method would need to be `async def` to use `await`, but then sync users couldn't call it without `await` +2. **Method signature incompatibility**: Sync methods return values directly; async methods return coroutines +3. **API module methods**: Every method in `Sheets`, `Users`, etc. would face the same problem +4. **Type hints break**: Return type would be `Union[T, Coroutine[Any, Any, T]]` which is unusable + +**Verdict**: ❌ **Not feasible** due to Python's async/await syntax constraints. + +--- + +### Option B: Shared Base Classes with Thin Sync/Async Wrappers ⚠️ + +**Concept**: Extract all logic into base classes, create thin sync/async wrappers. + +```python +# Shared base logic +class BaseSheetsLogic: + def _prepare_get_sheet_operation(self, sheet_id, **kwargs): + _op = fresh_operation("get_sheet") + _op["method"] = "GET" + _op["path"] = f"/sheets/{sheet_id}" + # ... 
configure operation + return _op + +# Sync wrapper +class Sheets: + def __init__(self, smartsheet_obj): + self._base = smartsheet_obj + self._logic = BaseSheetsLogic() + + def get_sheet(self, sheet_id, **kwargs): + _op = self._logic._prepare_get_sheet_operation(sheet_id, **kwargs) + prepped_request = self._base.prepare_request(_op) + return self._base.request(prepped_request, "Sheet", _op) + +# Async wrapper +class AsyncSheets: + def __init__(self, smartsheet_obj): + self._base = smartsheet_obj + self._logic = BaseSheetsLogic() + + async def get_sheet(self, sheet_id, **kwargs): + _op = self._logic._prepare_get_sheet_operation(sheet_id, **kwargs) + prepped_request = self._base.prepare_request(_op) + return await self._base.request(prepped_request, "Sheet", _op) +``` + +**Pros**: + +- ✅ Shared operation preparation logic +- ✅ Reduced duplication of parameter handling +- ✅ Clear separation of concerns +- ✅ Type safety maintained + +**Cons**: + +- ⚠️ Still requires duplicate wrapper classes for all 15+ API modules +- ⚠️ Boilerplate for every method (prepare operation, call base) +- ⚠️ Maintenance burden: changes require updating base + 2 wrappers +- ⚠️ More files: base classes + sync classes + async classes + +**Code Duplication Estimate**: ~40% (operation logic shared, wrappers duplicated) + +**Verdict**: ⚠️ **Possible but not optimal** - reduces duplication but still requires significant wrapper code. + +--- + +### Option C: Dynamic Method Generation/Decoration 🤔 + +**Concept**: Generate async methods dynamically from sync methods using decorators or metaclasses. + +```python +def async_wrapper(sync_method): + """Convert a sync method to async by wrapping its calls.""" + async def wrapper(self, *args, **kwargs): + # Prepare operation (sync, no I/O) + _op = sync_method._prepare_operation(self, *args, **kwargs) + # Async HTTP call + prepped_request = self._base.prepare_request(_op) + return await self._base.request(prepped_request, sync_method._expected, _op) + return wrapper + +class AsyncSheets: + def __init__(self, smartsheet_obj): + self._base = smartsheet_obj + # Dynamically create async versions of all Sheets methods + for name, method in inspect.getmembers(Sheets, predicate=inspect.isfunction): + if not name.startswith('_'): + setattr(self, name, async_wrapper(method)) +``` + +**Alternative: Metaclass Approach**: + +```python +class AsyncMethodMeta(type): + def __new__(mcs, name, bases, namespace): + # Find sync base class + sync_class = namespace.get('__sync_class__') + if sync_class: + # Generate async methods from sync methods + for method_name in dir(sync_class): + if not method_name.startswith('_'): + method = getattr(sync_class, method_name) + if callable(method): + namespace[method_name] = mcs._make_async(method) + return super().__new__(mcs, name, bases, namespace) + + @staticmethod + def _make_async(sync_method): + async def async_method(self, *args, **kwargs): + # Complex logic to extract operation and make async call + ... 
+ return async_method + +class AsyncSheets(metaclass=AsyncMethodMeta): + __sync_class__ = Sheets +``` + +**Pros**: + +- ✅ Minimal code duplication (methods generated automatically) +- ✅ Single source of truth for method signatures +- ✅ Changes to sync methods automatically reflected in async + +**Cons**: + +- ❌ **Complex and fragile**: Relies on introspection and dynamic code generation +- ❌ **Poor IDE support**: Type hints don't work well with dynamic methods +- ❌ **Debugging nightmare**: Stack traces go through metaclass machinery +- ❌ **Maintenance burden**: Complex metaclass logic hard to understand +- ❌ **Method structure assumptions**: Assumes all methods follow exact same pattern +- ❌ **Edge cases**: Methods with special behavior (file uploads, downloads) need special handling + +**Verdict**: ❌ **Not recommended** - too complex and fragile for production use. + +--- + +### Option D: Protocol/ABC-Based Shared Interfaces 🤔 + +**Concept**: Define shared interfaces using Protocols or ABCs, implement separately for sync/async. + +```python +from typing import Protocol, Union +from abc import ABC, abstractmethod + +# Define interface +class SheetsInterface(Protocol): + def get_sheet(self, sheet_id: int, **kwargs) -> Union[Sheet, Error]: ... + def add_rows(self, sheet_id: int, rows: List[Row]) -> Union[Result[Row], Error]: ... + # ... all other methods + +# Sync implementation +class Sheets: + def __init__(self, smartsheet_obj): + self._base = smartsheet_obj + + def get_sheet(self, sheet_id: int, **kwargs) -> Union[Sheet, Error]: + _op = fresh_operation("get_sheet") + _op["method"] = "GET" + _op["path"] = f"/sheets/{sheet_id}" + # ... configure operation + prepped_request = self._base.prepare_request(_op) + return self._base.request(prepped_request, "Sheet", _op) + +# Async implementation +class AsyncSheets: + def __init__(self, smartsheet_obj): + self._base = smartsheet_obj + + async def get_sheet(self, sheet_id: int, **kwargs) -> Union[Sheet, Error]: + _op = fresh_operation("get_sheet") + _op["method"] = "GET" + _op["path"] = f"/sheets/{sheet_id}" + # ... configure operation + prepped_request = self._base.prepare_request(_op) + return await self._base.request(prepped_request, "Sheet", _op) +``` + +**Pros**: + +- ✅ Clear interface contracts +- ✅ Type checking via Protocol +- ✅ Documentation of expected methods + +**Cons**: + +- ❌ **Full duplication**: Every method body duplicated between sync/async +- ❌ **No code sharing**: Protocol only defines interface, not implementation +- ❌ **Maintenance burden**: Changes must be made in two places +- ❌ **Doesn't solve the problem**: This is essentially the dual-class approach with extra ceremony + +**Code Duplication Estimate**: ~95% (only imports and class structure differ) + +**Verdict**: ❌ **Not recommended** - doesn't reduce duplication, just adds interface layer. + +--- + +### Option E: Dual-Class Approach (Current Design) ✅ + +**Concept**: Separate `Smartsheet` and `AsyncSmartsheet` classes with separate API modules. + +```python +# Sync client +class Smartsheet: + def __init__(self, access_token=None, ...): + self._session = pinned_session() + # ... 
sync initialization + + def request(self, prepped_request, expected, operation): + res = self.request_with_retry(prepped_request, operation) + return res.native(expected) + + def request_with_retry(self, prepped_request, operation): + while True: + result = self._request(prepped_request, operation) + if should_retry: + time.sleep(backoff) # Blocking sleep + else: + break + return result + +# Async client +class AsyncSmartsheet: + def __init__(self, access_token=None, ...): + self._session = None # Lazy init + # ... async initialization + + async def __aenter__(self): + await self._ensure_session() + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + await self.close() + + async def request(self, prepped_request, expected, operation): + res = await self.request_with_retry(prepped_request, operation) + return res.native(expected) + + async def request_with_retry(self, prepped_request, operation): + while True: + result = await self._request(prepped_request, operation) + if should_retry: + await asyncio.sleep(backoff) # Non-blocking sleep + else: + break + return result +``` + +**File Structure**: + +```text +smartsheet/ +├── smartsheet.py # Sync client +├── async_smartsheet.py # Async client +├── sheets.py # Sync Sheets +├── async_sheets.py # Async Sheets +├── users.py # Sync Users +├── async_users.py # Async Users +├── models/ # Shared (no changes) +├── util.py # Shared (no changes) +└── exceptions.py # Shared (no changes) +``` + +**Shared Components**: + +- ✅ All model classes (`Sheet`, `Row`, `Column`, etc.) +- ✅ Serialization logic ([`util.py`](smartsheet/util.py)) +- ✅ Exception classes +- ✅ Type definitions +- ✅ Operation preparation (`fresh_operation()`) +- ✅ Request preparation logic (can be extracted to shared helper) + +**Duplicated Components**: + +- ⚠️ Client classes (`Smartsheet` vs `AsyncSmartsheet`) +- ⚠️ API module classes (`Sheets` vs `AsyncSheets`, etc.) +- ⚠️ HTTP request methods + +**Minimizing Duplication**: + +1. **Extract shared request preparation**: + +```python +# shared_http.py +class RequestPreparation: + @staticmethod + def prepare_request_dict(operation, access_token, user_agent, api_base, assume_user=None): + """Prepare request parameters (sync/async agnostic).""" + # Build headers, params, etc. + return { + "method": operation["method"], + "url": api_base + operation["path"], + "headers": headers, + "params": query_params, + "json": json_data, + } +``` + +1. **Use composition for API modules**: + +```python +# sheets_operations.py (shared) +class SheetsOperations: + @staticmethod + def prepare_get_sheet(sheet_id, **kwargs): + _op = fresh_operation("get_sheet") + _op["method"] = "GET" + _op["path"] = f"/sheets/{sheet_id}" + _op["query_params"]["include"] = kwargs.get("include") + # ... 
configure operation
+        return _op, "Sheet"
+
+# sheets.py (sync)
+class Sheets:
+    def get_sheet(self, sheet_id, **kwargs):
+        _op, expected = SheetsOperations.prepare_get_sheet(sheet_id, **kwargs)
+        prepped_request = self._base.prepare_request(_op)
+        return self._base.request(prepped_request, expected, _op)
+
+# async_sheets.py (async)
+class AsyncSheets:
+    async def get_sheet(self, sheet_id, **kwargs):
+        _op, expected = SheetsOperations.prepare_get_sheet(sheet_id, **kwargs)
+        prepped_request = self._base.prepare_request(_op)
+        return await self._base.request(prepped_request, expected, _op)
+```
+
+**Pros**:
+
+- ✅ **Clear separation**: Sync and async code completely separate
+- ✅ **Type safety**: Full type hint support in IDEs
+- ✅ **Debuggable**: Straightforward stack traces
+- ✅ **Maintainable**: Easy to understand, no magic
+- ✅ **Testable**: Can test sync and async independently
+- ✅ **Backward compatible**: Existing sync code unchanged
+- ✅ **Proven pattern**: Used by major libraries (httpx, aiohttp clients)
+
+**Cons**:
+
+- ⚠️ More files (but organized and clear)
+- ⚠️ Some duplication (but can be minimized with shared helpers)
+- ⚠️ Changes need to be made in two places (but this is explicit and clear)
+
+**Code Duplication Estimate**: ~30-40% with shared operation preparation helpers
+
+**Verdict**: ✅ **Recommended** - best balance of clarity, maintainability, and minimal duplication.
+
+---
+
+## Detailed Comparison Matrix
+
+| Criterion | Option A Single Class | Option B Shared Base | Option C Dynamic Gen | Option D Protocol | Option E Dual Class |
+| --------- | --------------------- | -------------------- | -------------------- | ----------------- | ------------------- |
+| **Feasibility** | ❌ Not possible | ✅ Possible | ⚠️ Possible but fragile | ✅ Possible | ✅ Fully feasible |
+| **Code Duplication** | N/A | ~40% | ~10% | ~95% | ~30-40% |
+| **Type Safety** | ❌ Broken | ✅ Full support | ❌ Poor | ✅ Full support | ✅ Full support |
+| **IDE Support** | ❌ Broken | ✅ Excellent | ❌ Poor | ✅ Excellent | ✅ Excellent |
+| **Debuggability** | N/A | ✅ Clear traces | ❌ Complex traces | ✅ Clear traces | ✅ Clear traces |
+| **Maintainability** | N/A | ⚠️ Moderate | ❌ Difficult | ⚠️ Moderate | ✅ Easy |
+| **Learning Curve** | N/A | ⚠️ Moderate | ❌ Steep | ⚠️ Moderate | ✅ Gentle |
+| **Backward Compat** | N/A | ✅ Full | ✅ Full | ✅ Full | ✅ Full |
+| **Performance** | N/A | ✅ No overhead | ⚠️ Reflection overhead | ✅ No overhead | ✅ No overhead |
+| **Testing** | N/A | ✅ Straightforward | ⚠️ Complex | ✅ Straightforward | ✅ Straightforward |
+
+## Recommended Approach: Enhanced Dual-Class with Shared Helpers
+
+### Architecture
+
+```mermaid
+graph TB
+    subgraph Sync Path
+        A[Smartsheet] --> B[requests.Session]
+        A --> C[Sheets]
+        C --> D[sync methods]
+    end
+
+    subgraph Async Path
+        E[AsyncSmartsheet] --> F[httpx.AsyncClient]
+        E --> G[AsyncSheets]
+        G --> H[async methods]
+    end
+
+    subgraph Shared Components
+        I[Models]
+        J[Serialization]
+        K[Validation]
+        L[Error Handling]
+        M[Operation Builders]
+        N[Request Preparation]
+    end
+
+    D --> I
+    H --> I
+    D --> J
+    H --> J
+    D --> M
+    H --> M
+    D --> N
+    H --> N
+```
+
+### Implementation Strategy
+
+#### 1. 
Shared Operation Builders + +Create shared operation preparation logic: + +```python +# smartsheet/operations/sheets_operations.py +class SheetsOperations: + """Shared operation builders for Sheets API.""" + + @staticmethod + def build_get_sheet(sheet_id, include=None, exclude=None, **kwargs): + """Build operation for get_sheet.""" + _op = fresh_operation("get_sheet") + _op["method"] = "GET" + _op["path"] = f"/sheets/{sheet_id}" + _op["query_params"]["include"] = include + _op["query_params"]["exclude"] = exclude + _op["query_params"]["rowIds"] = kwargs.get("row_ids") + _op["query_params"]["rowNumbers"] = kwargs.get("row_numbers") + _op["query_params"]["columnIds"] = kwargs.get("column_ids") + _op["query_params"]["pageSize"] = kwargs.get("page_size") + _op["query_params"]["page"] = kwargs.get("page") + _op["query_params"]["ifVersionAfter"] = kwargs.get("if_version_after") + _op["query_params"]["level"] = kwargs.get("level") + _op["query_params"]["rowsModifiedSince"] = kwargs.get("rows_modified_since") + _op["query_params"]["filterId"] = kwargs.get("filter_id") + return _op, "Sheet" + + @staticmethod + def build_add_rows(sheet_id, list_of_rows): + """Build operation for add_rows.""" + if isinstance(list_of_rows, (dict, Row)): + arg_value = list_of_rows + list_of_rows = TypedList(Row) + list_of_rows.append(arg_value) + + _op = fresh_operation("add_rows") + _op["method"] = "POST" + _op["path"] = f"/sheets/{sheet_id}/rows" + _op["json"] = list_of_rows + return _op, ["Result", "Row"] +``` + +#### 2. Sync Implementation + +```python +# smartsheet/sheets.py +from .operations.sheets_operations import SheetsOperations + +class Sheets: + def __init__(self, smartsheet_obj): + self._base = smartsheet_obj + self._log = logging.getLogger(__name__) + + def get_sheet(self, sheet_id, include=None, exclude=None, **kwargs): + """Get the specified Sheet.""" + _op, expected = SheetsOperations.build_get_sheet( + sheet_id, include, exclude, **kwargs + ) + prepped_request = self._base.prepare_request(_op) + response = self._base.request(prepped_request, expected, _op) + return response + + def add_rows(self, sheet_id, list_of_rows): + """Insert one or more Rows into the specified Sheet.""" + _op, expected = SheetsOperations.build_add_rows(sheet_id, list_of_rows) + prepped_request = self._base.prepare_request(_op) + response = self._base.request(prepped_request, expected, _op) + return response +``` + +#### 3. 
Async Implementation
+
+```python
+# smartsheet/async_sheets.py
+from .operations.sheets_operations import SheetsOperations
+
+class AsyncSheets:
+    def __init__(self, smartsheet_obj):
+        self._base = smartsheet_obj
+        self._log = logging.getLogger(__name__)
+
+    async def get_sheet(self, sheet_id, include=None, exclude=None, **kwargs):
+        """Get the specified Sheet (async)."""
+        _op, expected = SheetsOperations.build_get_sheet(
+            sheet_id, include, exclude, **kwargs
+        )
+        prepped_request = self._base.prepare_request(_op)
+        response = await self._base.request(prepped_request, expected, _op)
+        return response
+
+    async def add_rows(self, sheet_id, list_of_rows):
+        """Insert one or more Rows into the specified Sheet (async)."""
+        _op, expected = SheetsOperations.build_add_rows(sheet_id, list_of_rows)
+        prepped_request = self._base.prepare_request(_op)
+        response = await self._base.request(prepped_request, expected, _op)
+        return response
+```
+
+### Code Duplication Analysis
+
+With this approach:
+
+**Shared (0% duplication)**:
+
+- Operation builders (~60% of method logic)
+- All models and serialization
+- Validation logic
+- Error handling
+- Utility functions
+
+**Duplicated (100% duplication)**:
+
+- Method signatures and docstrings (~20% of code)
+- Method wrapper calls (~20% of code)
+
+**Overall Duplication**: ~30-35% of total codebase
+
+### Benefits of This Approach
+
+1. **Minimal Duplication**: Operation preparation logic (the complex part) is shared
+2. **Clear and Explicit**: Easy to understand what's sync vs async
+3. **Type Safe**: Full IDE support and type checking
+4. **Maintainable**: Changes to operation logic happen in one place
+5. **Testable**: Can test operation builders independently
+6. **Debuggable**: Clear stack traces, no magic
+7. **Backward Compatible**: Existing sync code unchanged
+
+### HTTP Library: httpx
+
+**Recommendation**: Use `httpx` for async implementation
+
+**Rationale**:
+
+- **Unified API**: Same interface for sync and async
+- **HTTP/2 Support**: Better performance
+- **Requests-compatible**: Similar API to current `requests` library
+- **Type hints**: Full typing support
+- **Connection pooling**: Built-in for both sync/async
+- **Mature**: Production-ready and well-maintained
+
+**Comparison**:
+
+| Feature | httpx | aiohttp | requests |
+| ------- | ----- | ------- | -------- |
+| Sync Support | ✅ | ❌ | ✅ |
+| Async Support | ✅ | ✅ | ❌ |
+| HTTP/2 | ✅ | ❌ | ❌ |
+| API Similarity | High | Low | N/A |
+| Type Hints | ✅ | Partial | ❌ |
+| Maturity | High | Very High | Very High |
+
+### File Structure
+
+```text
+smartsheet/
+├── __init__.py              # Export both Smartsheet and AsyncSmartsheet
+├── smartsheet.py            # Existing sync client (unchanged)
+├── async_smartsheet.py      # New async client
+├── session.py               # Existing sync session
+├── async_session.py         # New async session management
+├── sheets.py                # Existing sync Sheets
+├── async_sheets.py          # New async Sheets
+├── users.py                 # Existing sync Users
+├── async_users.py           # New async Users
+├── [other modules...]       # Continue pattern for all modules
+├── operations/              # NEW: Shared operation builders
+│   ├── __init__.py
+│   ├── sheets_operations.py
+│   ├── users_operations.py
+│   └── [other operations...] 
+├── models/ # Shared models (unchanged) +├── util.py # Shared utilities (unchanged) +└── exceptions.py # Shared exceptions (unchanged) +``` + +### Usage Examples + +#### Synchronous (Existing - Unchanged) + +```python +import smartsheet + +# Existing code continues to work +smart = smartsheet.Smartsheet() +response = smart.Sheets.list_sheets() +sheet = smart.Sheets.get_sheet(response.data[0].id) +print(f"Sheet: {sheet.name}") +``` + +#### Asynchronous (New) + +```python +import asyncio +import smartsheet + +async def main(): + # Context manager (recommended) + async with smartsheet.AsyncSmartsheet() as smart: + response = await smart.Sheets.list_sheets() + sheet = await smart.Sheets.get_sheet(response.data[0].id) + print(f"Sheet: {sheet.name}") + + # Or manual session management + smart = smartsheet.AsyncSmartsheet() + try: + response = await smart.Sheets.list_sheets() + # ... operations + finally: + await smart.close() + +asyncio.run(main()) +``` + +#### Async Framework Integration + +```python +from mcp.server import Server +import smartsheet + +app = Server("smartsheet-mcp") + +@app.call_tool() +async def get_sheet(sheet_id: int): + """MCP tool to get sheet data.""" + async with smartsheet.AsyncSmartsheet() as smart: + sheet = await smart.Sheets.get_sheet(sheet_id) + return { + "name": sheet.name, + "rows": sheet.total_row_count + } +``` + +#### Concurrent Operations + +```python +async def fetch_multiple_sheets(): + async with smartsheet.AsyncSmartsheet() as client: + # Fetch multiple sheets concurrently + tasks = [ + client.Sheets.get_sheet(sheet_id) + for sheet_id in [123, 456, 789] + ] + sheets = await asyncio.gather(*tasks) + return sheets +``` + +## Implementation Phases + +### Phase 1: Foundation + +1. Add httpx dependency to `pyproject.toml` +2. Create `operations/` directory structure +3. Extract operation builders for Sheets module +4. Create `async_smartsheet.py` with core async client +5. Create `async_sheets.py` using shared operation builders +6. Add comprehensive tests + +### Phase 2: Core Modules + +1. Extract operation builders for Users, Reports, Workspaces +2. Create async versions of these modules +3. Add integration tests + +### Phase 3: Remaining Modules + +1. Extract operation builders for remaining modules +2. Create async versions +3. Complete test coverage + +### Phase 4: Documentation & Release + +1. Update documentation with async examples +2. Create migration guide +3. Add async framework integration example +4. 
Version bump and release + +## Dependency Management + +Update [`pyproject.toml`](pyproject.toml): + +```toml +[project] +dependencies = [ + "requests", # Keep for sync support + "httpx>=0.24.0", # Add for async support + "requests-toolbelt", + "six>=1.9", + "certifi", + "python-dateutil" +] + +[project.optional-dependencies] +async = [ + "httpx[http2]>=0.24.0", # Optional HTTP/2 support +] +test = [ + "coverage", + "coveralls", + "pytest", + "pytest-asyncio", # NEW: For async tests + "pytest-rerunfailures", + "requests-toolbelt" +] +``` + +## Risk Mitigation + +### Backward Compatibility + +- **Risk**: Breaking existing code +- **Mitigation**: Separate async classes, no changes to sync code +- **Validation**: Comprehensive test suite for sync code + +### Maintenance Burden + +- **Risk**: Duplicate code across sync/async +- **Mitigation**: Shared operation builders, code generation tools +- **Validation**: DRY principles, regular refactoring + +### Performance Regression + +- **Risk**: Async overhead for simple operations +- **Mitigation**: Benchmarking, lazy session initialization +- **Validation**: Performance tests in CI/CD + +### Dependency Conflicts + +- **Risk**: httpx conflicts with existing dependencies +- **Mitigation**: Careful version pinning, optional dependency +- **Validation**: Test across Python versions + +## Success Metrics + +1. **Compatibility**: 100% of existing sync tests pass +2. **Coverage**: All API modules have async equivalents +3. **Performance**: Async operations show measurable improvement in concurrent scenarios +4. **Adoption**: Async frameworks and event loop-based applications can integrate successfully +5. **Maintenance**: Code duplication kept under 35% through shared operation builders + +## Conclusion + +After evaluating five different approaches to adding async support to the Smartsheet Python SDK, the **Enhanced Dual-Class approach with Shared Operation Builders** emerges as the clear winner. + +### Why This Approach Wins + +1. **Technically Feasible**: Unlike Option A (single class with async_mode), this approach works within Python's async/await constraints +2. **Minimal Duplication**: At ~30-35% duplication, it's comparable to Option C (dynamic generation) but without the complexity +3. **Maintainable**: Clear, explicit code that's easy to understand and debug +4. **Type Safe**: Full IDE support and type checking, unlike Options A and C +5. **Proven Pattern**: Used successfully by major Python libraries (httpx, aiohttp) + +### Key Insight + +The fundamental constraint is that **Python methods are either sync OR async, not both**. Any approach that tries to work around this constraint (Options A, C) introduces significant complexity and fragility. The dual-class approach embraces this constraint and works with it, not against it. + +### Minimizing Duplication + +The key to minimizing duplication is recognizing that the **operation preparation logic** (building the operation dict with all parameters) is the complex part that should be shared. The actual method wrappers are simple and can be duplicated with minimal maintenance burden. + +By extracting operation builders into shared modules, we achieve: + +- **60% of logic shared** (operation preparation, models, serialization) +- **40% duplicated** (method signatures, docstrings, wrapper calls) +- **Overall: ~30-35% duplication** across the entire codebase + +This is an excellent trade-off that maintains clarity, type safety, and maintainability while minimizing duplication. + +### Next Steps + +1. 
Review this design document with the team +2. Get consensus on the recommended approach +3. Begin implementation with Phase 1 (Foundation) +4. Iterate based on feedback and real-world usage + +--- + +## Appendix: Why Other Approaches Don't Work + +### Option A: Single Class with async_mode + +This is the most intuitive approach but **fundamentally impossible** in Python: + +```python +# This CANNOT work: +def request(self, ...): + if self.async_mode: + return await self._async_request(...) # ERROR: await in non-async function + else: + return self._sync_request(...) +``` + +The `await` keyword is **syntax**, not a runtime operation. You cannot conditionally await. + +### Option C: Dynamic Method Generation + +While technically possible, this approach is **too fragile** for production use: + +- **IDE Support**: Type hints don't work with dynamically generated methods +- **Debugging**: Stack traces go through metaclass machinery +- **Edge Cases**: Special methods (file uploads, downloads) need custom handling +- **Maintenance**: Complex metaclass logic is hard to understand and modify + +### Option D: Protocol/ABC + +This approach **doesn't solve the problem** - it just adds an interface layer on top of the dual-class approach without reducing duplication. + +### Option B: Shared Base Classes + +This is a **viable alternative** to Option E, but with more complexity: + +- Requires three sets of classes (base + sync wrapper + async wrapper) +- More files and more indirection +- Slightly less duplication (~40% vs ~35%) but not enough to justify the complexity + +--- + +## References + +- [PEP 492 - Coroutines with async and await syntax](https://www.python.org/dev/peps/pep-0492/) +- [httpx Documentation](https://www.python-httpx.org/) +- [Real Python: Async IO in Python](https://realpython.com/async-io-python/) +- [Python asyncio Documentation](https://docs.python.org/3/library/asyncio.html) diff --git a/docs-source/async-quickstart.md b/docs-source/async-quickstart.md new file mode 100644 index 00000000..fb0bf4e3 --- /dev/null +++ b/docs-source/async-quickstart.md @@ -0,0 +1,634 @@ +# Async Support Quick Start Guide + +## Overview + +The Smartsheet Python SDK now includes **beta async support** for non-blocking I/O operations. This allows you to write more efficient applications that can handle multiple Smartsheet API requests concurrently, particularly useful for: + +- **Async frameworks** and event loop-based applications +- High-throughput applications processing multiple sheets +- Applications that need to make concurrent API calls +- Integration with modern async Python frameworks (FastAPI, aiohttp, etc.) + +### Why Async? + +Traditional synchronous API calls block execution while waiting for network responses. With async support, your application can: + +- Make multiple API requests concurrently +- Improve throughput and reduce total execution time +- Integrate seamlessly with async frameworks +- Better utilize system resources + +### Current Status: Proof of Concept (PoC) + +This is a **beta/PoC implementation** with limited scope. Currently supported operations: + +- ✅ **Sheets.add_rows()** - Add rows to a sheet +- ✅ **Workspaces.list_workspaces()** - List workspaces + +Additional async operations will be added based on user feedback and demand. 
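+
+As a rough illustration of the concurrency benefit described above, the sketch below times the two styles side by side. It is illustrative only: it assumes a valid access token, uses the context-manager pattern introduced under Basic Usage, and simply lists workspaces twice, so the exact numbers depend on network latency and rate limits.
+
+```python
+import asyncio
+import time
+
+from smartsheet import AsyncSmartsheet
+
+async def compare_sequential_vs_concurrent(access_token: str) -> None:
+    async with AsyncSmartsheet(access_token=access_token) as client:
+        # Sequential: the second call waits for the first to finish
+        start = time.perf_counter()
+        await client.Workspaces.list_workspaces()
+        await client.Workspaces.list_workspaces()
+        sequential = time.perf_counter() - start
+
+        # Concurrent: both calls overlap their network wait time
+        start = time.perf_counter()
+        await asyncio.gather(
+            client.Workspaces.list_workspaces(),
+            client.Workspaces.list_workspaces(),
+        )
+        concurrent = time.perf_counter() - start
+
+    print(f"Sequential: {sequential:.2f}s, concurrent: {concurrent:.2f}s")
+
+asyncio.run(compare_sequential_vs_concurrent("your_token_here"))
+```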
+ +## Installation + +The async client requires the `httpx` library for async HTTP operations: + +```bash +pip install smartsheet-python-sdk httpx +``` + +Or if you're installing from source: + +```bash +pip install smartsheet-python-sdk[async] +``` + +## Basic Usage + +### Using Async Context Manager (Recommended) + +The recommended way to use the async client is with an async context manager, which automatically handles resource cleanup: + +```python +import asyncio +from smartsheet import AsyncSmartsheet + +async def main(): + async with AsyncSmartsheet(access_token="your_token_here") as client: + # List workspaces + workspaces = await client.Workspaces.list_workspaces() + print(f"Found {len(workspaces.data)} workspaces") + + for workspace in workspaces.data: + print(f" - {workspace.name}") + +# Run the async function +asyncio.run(main()) +``` + +### Manual Resource Management + +If you need more control over the client lifecycle, you can manually manage resources: + +```python +import asyncio +from smartsheet import AsyncSmartsheet + +async def main(): + client = AsyncSmartsheet(access_token="your_token_here") + try: + workspaces = await client.Workspaces.list_workspaces() + print(f"Found {len(workspaces.data)} workspaces") + finally: + # Always close the client to release resources + await client.aclose() + +asyncio.run(main()) +``` + +**Important:** Always call `await client.aclose()` when done to properly release HTTP connections and resources. + +## PoC Scope Examples + +### Adding Rows to a Sheet + +```python +import asyncio +from smartsheet import AsyncSmartsheet +from smartsheet.models import Row, Cell + +async def add_rows_example(): + async with AsyncSmartsheet(access_token="your_token_here") as client: + # Create rows to add + row1 = Row() + row1.to_bottom = True + row1.cells = [ + Cell({'column_id': 123456789, 'value': 'New Task'}), + Cell({'column_id': 987654321, 'value': 'In Progress'}), + Cell({'column_id': 555555555, 'value': 'High'}) + ] + + row2 = Row() + row2.to_bottom = True + row2.cells = [ + Cell({'column_id': 123456789, 'value': 'Another Task'}), + Cell({'column_id': 987654321, 'value': 'Not Started'}), + Cell({'column_id': 555555555, 'value': 'Medium'}) + ] + + # Add rows asynchronously + sheet_id = 1234567890 + result = await client.Sheets.add_rows(sheet_id, [row1, row2]) + + if result.message == 'SUCCESS': + print(f"Successfully added {len(result.data)} rows") + for row in result.data: + print(f" Row ID: {row.id}, Row Number: {row.row_number}") + else: + print(f"Error: {result.message}") + +asyncio.run(add_rows_example()) +``` + +### Listing Workspaces with Token Pagination + +Token-based pagination is more efficient than legacy offset-based pagination: + +```python +import asyncio +from smartsheet import AsyncSmartsheet + +async def list_workspaces_example(): + async with AsyncSmartsheet(access_token="your_token_here") as client: + # First page + result = await client.Workspaces.list_workspaces( + pagination_type='token', + max_items=100 + ) + + print(f"Found {len(result.data)} workspaces on first page") + for workspace in result.data: + print(f" - {workspace.name} (ID: {workspace.id})") + + # Check if there are more pages + if hasattr(result, 'next_token') and result.next_token: + print(f"\nFetching next page...") + next_result = await client.Workspaces.list_workspaces( + pagination_type='token', + last_key=result.next_token, + max_items=100 + ) + print(f"Found {len(next_result.data)} workspaces on second page") + +asyncio.run(list_workspaces_example()) +``` 
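+
+If you need the complete workspace list rather than just the first page or two, the same token fields can drive a simple loop. This is a minimal sketch that assumes the `next_token`/`last_key` behavior shown in the example above and stops once the API no longer returns a continuation token:
+
+```python
+import asyncio
+from smartsheet import AsyncSmartsheet
+
+async def list_all_workspaces(access_token: str) -> list:
+    """Collect every workspace by following next_token until it is exhausted."""
+    workspaces = []
+    async with AsyncSmartsheet(access_token=access_token) as client:
+        last_key = None
+        while True:
+            kwargs = {'pagination_type': 'token', 'max_items': 100}
+            if last_key:
+                kwargs['last_key'] = last_key
+            result = await client.Workspaces.list_workspaces(**kwargs)
+            workspaces.extend(result.data)
+            last_key = getattr(result, 'next_token', None)
+            if not last_key:
+                break
+    return workspaces
+
+all_workspaces = asyncio.run(list_all_workspaces("your_token_here"))
+print(f"Total workspaces: {len(all_workspaces)}")
+```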
+ +## Async Framework Integration + +The async client integrates seamlessly with async frameworks and event loop-based applications: + +```python +from smartsheet import AsyncSmartsheet +from smartsheet.models import Row, Cell + +# Example: Async service pattern +# (This pattern works with any async framework) + +# Initialize async client (reuse across requests) +smartsheet_client = None + +async def get_client(): + """Get or create the Smartsheet client.""" + global smartsheet_client + if smartsheet_client is None: + smartsheet_client = AsyncSmartsheet(access_token="your_token_here") + return smartsheet_client + +async def add_sheet_row( + sheet_id: int, + column_values: dict[int, str] +) -> str: + """Add a row to a Smartsheet. + + Args: + sheet_id: The ID of the sheet + column_values: Dictionary mapping column IDs to values + + Returns: + Success message with row details + """ + client = await get_client() + + # Create row + row = Row() + row.to_bottom = True + row.cells = [ + Cell({'column_id': col_id, 'value': value}) + for col_id, value in column_values.items() + ] + + # Add row + result = await client.Sheets.add_rows(sheet_id, [row]) + + if result.message == 'SUCCESS': + return f"Added row {result.data[0].id} to sheet {sheet_id}" + else: + return f"Error: {result.message}" + +async def list_workspaces() -> list[dict]: + """List all Smartsheet workspaces. + + Returns: + List of workspace information + """ + client = await get_client() + + result = await client.Workspaces.list_workspaces( + pagination_type='token', + max_items=100 + ) + + return [ + { + "id": ws.id, + "name": ws.name, + "access_level": ws.access_level + } + for ws in result.data + ] + +# Cleanup on shutdown +async def cleanup(): + """Clean up resources on shutdown.""" + global smartsheet_client + if smartsheet_client: + await smartsheet_client.aclose() +``` + +## Concurrent Operations + +One of the main benefits of async is the ability to run multiple operations concurrently: + +```python +import asyncio +from smartsheet import AsyncSmartsheet +from smartsheet.models import Row, Cell + +async def concurrent_operations_example(): + async with AsyncSmartsheet(access_token="your_token_here") as client: + # Create rows for different sheets + row = Row() + row.to_bottom = True + row.cells = [ + Cell({'column_id': 123456789, 'value': 'Concurrent Task'}) + ] + + # Execute multiple add_rows operations concurrently + sheet_ids = [111111, 222222, 333333, 444444, 555555] + + tasks = [ + client.Sheets.add_rows(sheet_id, [row]) + for sheet_id in sheet_ids + ] + + # Wait for all operations to complete + results = await asyncio.gather(*tasks) + + # Process results + for sheet_id, result in zip(sheet_ids, results): + if result.message == 'SUCCESS': + print(f"✓ Sheet {sheet_id}: Added {len(result.data)} rows") + else: + print(f"✗ Sheet {sheet_id}: Error - {result.message}") + +asyncio.run(concurrent_operations_example()) +``` + +### Mixed Concurrent Operations + +You can also mix different types of operations: + +```python +import asyncio +from smartsheet import AsyncSmartsheet +from smartsheet.models import Row, Cell + +async def mixed_operations_example(): + async with AsyncSmartsheet(access_token="your_token_here") as client: + row = Row() + row.to_bottom = True + row.cells = [Cell({'column_id': 123456789, 'value': 'Data'})] + + # Run different operations concurrently + results = await asyncio.gather( + client.Workspaces.list_workspaces(), + client.Sheets.add_rows(111111, [row]), + client.Sheets.add_rows(222222, [row]), + 
client.Workspaces.list_workspaces(pagination_type='token', max_items=50) + ) + + workspaces1, add_result1, add_result2, workspaces2 = results + + print(f"Found {len(workspaces1.data)} workspaces") + print(f"Added rows to 2 sheets") + print(f"Found {len(workspaces2.data)} workspaces (token pagination)") + +asyncio.run(mixed_operations_example()) +``` + +## Error Handling + +### Default Error Handling (Return Error Objects) + +By default, the async client returns `Error` objects instead of raising exceptions: + +```python +import asyncio +from smartsheet import AsyncSmartsheet +from smartsheet.models import Error + +async def error_handling_example(): + async with AsyncSmartsheet(access_token="your_token_here") as client: + result = await client.Sheets.add_rows(999999, []) # Invalid sheet ID + + if isinstance(result, Error): + print(f"Error occurred:") + print(f" Status Code: {result.result.status_code}") + print(f" Error Code: {result.result.code}") + print(f" Message: {result.result.message}") + print(f" Should Retry: {result.result.should_retry}") + else: + print(f"Success: {result.message}") + +asyncio.run(error_handling_example()) +``` + +### Exception-Based Error Handling + +You can configure the client to raise exceptions instead: + +```python +import asyncio +from smartsheet import AsyncSmartsheet +from smartsheet.exceptions import ApiError + +async def exception_handling_example(): + async with AsyncSmartsheet(access_token="your_token_here") as client: + # Enable exception raising + client.errors_as_exceptions(True) + + try: + result = await client.Sheets.add_rows(999999, []) + except ApiError as e: + print(f"API Error: {e}") + print(f"Error details: {e.error.result.message}") + +asyncio.run(exception_handling_example()) +``` + +### Handling Rate Limits + +The async client automatically retries on rate limit errors (429): + +```python +import asyncio +from smartsheet import AsyncSmartsheet + +async def rate_limit_example(): + async with AsyncSmartsheet( + access_token="your_token_here", + max_retry_time=60 # Retry for up to 60 seconds + ) as client: + # The client will automatically retry if rate limited + result = await client.Workspaces.list_workspaces() + print(f"Found {len(result.data)} workspaces") + +asyncio.run(rate_limit_example()) +``` + +## Migration Guide from Sync to Async + +### Basic Conversion + +**Synchronous Code:** + +```python +import smartsheet + +client = smartsheet.Smartsheet(access_token="token") +workspaces = client.Workspaces.list_workspaces() +print(f"Found {len(workspaces.data)} workspaces") +``` + +**Async Code:** + +```python +import asyncio +from smartsheet import AsyncSmartsheet + +async def main(): + async with AsyncSmartsheet(access_token="token") as client: + workspaces = await client.Workspaces.list_workspaces() + print(f"Found {len(workspaces.data)} workspaces") + +asyncio.run(main()) +``` + +### Key Differences + +1. **Import**: Use `AsyncSmartsheet` instead of `Smartsheet` +2. **Async Functions**: All functions that call the API must be `async` +3. **Await Calls**: Use `await` before API method calls +4. **Context Manager**: Use `async with` instead of regular `with` +5. **Resource Cleanup**: Call `await client.aclose()` or use context manager +6. 
**Running**: Use `asyncio.run()` to execute async functions + +### Migration Checklist + +- [ ] Change `import smartsheet` to `from smartsheet import AsyncSmartsheet` +- [ ] Change `Smartsheet()` to `AsyncSmartsheet()` +- [ ] Add `async` keyword to function definitions +- [ ] Add `await` before all API method calls +- [ ] Use `async with` for context managers +- [ ] Call `await client.aclose()` if not using context manager +- [ ] Wrap execution in `asyncio.run()` if running from main script +- [ ] Install `httpx` dependency + +## Configuration Options + +The `AsyncSmartsheet` client accepts the same configuration options as the sync client: + +```python +from smartsheet import AsyncSmartsheet + +client = AsyncSmartsheet( + access_token="your_token_here", + max_connections=8, # Max concurrent connections (default: 8) + max_retry_time=30, # Max retry time in seconds (default: 30) + user_agent="MyApp/1.0", # Custom user agent + proxies={"https": "..."}, # Proxy configuration + api_base="https://api.smartsheet.com/2.0" # API base URL +) +``` + +## Limitations and Future Roadmap + +### Current Limitations (PoC) + +- **Limited API Coverage**: Only `add_rows` and `list_workspaces` are currently supported +- **Beta Status**: API may change based on feedback +- **Testing**: While comprehensive tests exist, real-world usage may reveal edge cases + +### Planned Future Enhancements + +Based on user feedback, we plan to add: + +1. **More Sheet Operations** + - `get_sheet()` - Retrieve sheet data + - `update_rows()` - Update existing rows + - `delete_rows()` - Delete rows + - `get_columns()` - Get column information + +2. **Additional Resources** + - Full Workspaces API support + - Folders operations + - Reports operations + - Attachments operations + +3. **Advanced Features** + - Batch operations optimization + - Connection pooling improvements + - Streaming support for large datasets + +### Providing Feedback + +We welcome feedback on the async implementation! Please: + +- Report issues on [GitHub Issues](https://github.com/smartsheet/smartsheet-python-sdk/issues) +- Share your use cases and requirements +- Suggest which operations should be prioritized for async support + +## Best Practices + +### 1. Use Context Managers + +Always use `async with` to ensure proper resource cleanup: + +```python +# Good +async with AsyncSmartsheet(access_token="token") as client: + result = await client.Workspaces.list_workspaces() + +# Avoid (unless you have a specific reason) +client = AsyncSmartsheet(access_token="token") +result = await client.Workspaces.list_workspaces() +# Easy to forget: await client.aclose() +``` + +### 2. Reuse Client Instances + +Create one client instance and reuse it across multiple operations: + +```python +# Good - reuse client +async with AsyncSmartsheet(access_token="token") as client: + result1 = await client.Workspaces.list_workspaces() + result2 = await client.Sheets.add_rows(sheet_id, rows) + +# Avoid - creating multiple clients +async with AsyncSmartsheet(access_token="token") as client1: + result1 = await client1.Workspaces.list_workspaces() + +async with AsyncSmartsheet(access_token="token") as client2: + result2 = await client2.Sheets.add_rows(sheet_id, rows) +``` + +### 3. 
Use Concurrent Operations Wisely
+
+Take advantage of async for concurrent operations, but be mindful of rate limits:
+
+```python
+# Good - reasonable concurrency
+tasks = [client.Sheets.add_rows(sid, rows) for sid in sheet_ids[:10]]
+results = await asyncio.gather(*tasks)
+
+# Be careful - too many concurrent requests may hit rate limits
+tasks = [client.Sheets.add_rows(sid, rows) for sid in sheet_ids[:1000]]
+results = await asyncio.gather(*tasks)  # May trigger rate limiting
+```
+
+### 4. Handle Errors Appropriately
+
+Choose the error handling style that fits your application:
+
+```python
+# For applications that need fine-grained error handling
+result = await client.Workspaces.list_workspaces()
+if isinstance(result, Error):
+    # Handle error
+    pass
+
+# For applications that prefer exceptions
+client.errors_as_exceptions(True)
+try:
+    result = await client.Workspaces.list_workspaces()
+except ApiError as e:
+    # Handle exception
+    pass
+```
+
+### 5. Configure Retry Behavior
+
+Adjust retry settings based on your application's needs:
+
+```python
+# For time-sensitive operations
+client = AsyncSmartsheet(
+    access_token="token",
+    max_retry_time=10  # Give up after 10 seconds
+)
+
+# For batch operations that can tolerate delays
+client = AsyncSmartsheet(
+    access_token="token",
+    max_retry_time=120  # Retry for up to 2 minutes
+)
+```
+
+## Troubleshooting
+
+### "RuntimeError: Event loop is closed" / "asyncio.run() cannot be called from a running event loop"
+
+These errors usually come from event loop management rather than from the SDK. Calling `asyncio.run()` in an environment that already has a running event loop (such as a Jupyter notebook or an async framework) raises the "cannot be called from a running event loop" error, while reusing a loop after it has been closed raises "Event loop is closed". Use `asyncio.run()` only in standalone scripts, and let the hosting framework drive the loop otherwise.
+
+**Solution for Jupyter/IPython:**
+
+```python
+# Instead of asyncio.run(main())
+await main()  # In Jupyter, you can await directly
+```
+
+### "httpx not installed"
+
+The async client requires httpx. Install it:
+
+```bash
+pip install httpx
+```
+
+### "Session is None" errors
+
+Make sure you're using the client correctly:
+
+```python
+# Wrong - session not initialized
+client = AsyncSmartsheet(access_token="token")
+result = await client.Workspaces.list_workspaces()  # May fail
+
+# Correct - use context manager
+async with AsyncSmartsheet(access_token="token") as client:
+    result = await client.Workspaces.list_workspaces()  # Works
+```
+
+### Rate Limiting Issues
+
+If you're hitting rate limits frequently:
+
+1. Reduce concurrency
+2. Increase `max_retry_time`
+3. Add delays between batches of requests
+4. 
Consider using the sync client for sequential operations + +## Additional Resources + +- [Full SDK Documentation](https://smartsheet.github.io/smartsheet-python-sdk/) +- [Smartsheet API Documentation](https://developers.smartsheet.com/api/smartsheet/) +- [Async Design Document](async-design.md) +- [GitHub Repository](https://github.com/smartsheet/smartsheet-python-sdk) +- [Example Code](../examples/async_examples.py) + +## Support + +For questions and support: + +- GitHub Issues: [smartsheet-python-sdk/issues](https://github.com/smartsheet/smartsheet-python-sdk/issues) +- Developer Community: [Smartsheet Community](https://community.smartsheet.com/categories/api-developers) +- API Documentation: [developers.smartsheet.com](https://developers.smartsheet.com/) diff --git a/examples/async_examples.py b/examples/async_examples.py new file mode 100644 index 00000000..9f30b368 --- /dev/null +++ b/examples/async_examples.py @@ -0,0 +1,570 @@ +#!/usr/bin/env python3 +""" +Smartsheet Python SDK - Async Examples + +This module demonstrates how to use the async features of the Smartsheet Python SDK. +These examples show the PoC implementation including add_rows and list_workspaces. + +Requirements: + - smartsheet-python-sdk + - httpx + +Installation: + pip install smartsheet-python-sdk httpx + +Setup: + Set your Smartsheet access token as an environment variable: + export SMARTSHEET_ACCESS_TOKEN="your_token_here" + + Or pass it directly to the AsyncSmartsheet constructor. + +Usage: + python async_examples.py +""" + +import asyncio +import os +from typing import List + +from smartsheet import AsyncSmartsheet +from smartsheet.models import Row, Cell, Error, Result, IndexResult + + +# ============================================================================= +# Example 1: Basic Async Client Usage +# ============================================================================= + +async def example_basic_usage(): + """ + Demonstrates basic async client usage with context manager. + + This is the recommended way to use the async client as it automatically + handles resource cleanup. + """ + print("\n" + "="*70) + print("Example 1: Basic Async Client Usage") + print("="*70) + + # Get access token from environment + access_token = os.environ.get('SMARTSHEET_ACCESS_TOKEN') + if not access_token: + print("⚠️ SMARTSHEET_ACCESS_TOKEN not set. Skipping example.") + return + + # Use async context manager (recommended) + async with AsyncSmartsheet(access_token=access_token) as client: + print("✓ Client initialized with context manager") + + # List workspaces + result = await client.Workspaces.list_workspaces() + + if isinstance(result, Error): + print(f"✗ Error: {result.result.message}") + else: + print(f"✓ Found {len(result.data)} workspaces") + for workspace in result.data[:5]: # Show first 5 + print(f" - {workspace.name} (ID: {workspace.id})") + + print("✓ Client automatically closed") + + +# ============================================================================= +# Example 2: Manual Resource Management +# ============================================================================= + +async def example_manual_cleanup(): + """ + Demonstrates manual resource management without context manager. + + Use this approach when you need more control over the client lifecycle. + Always remember to call aclose() when done! 
+ """ + print("\n" + "="*70) + print("Example 2: Manual Resource Management") + print("="*70) + + access_token = os.environ.get('SMARTSHEET_ACCESS_TOKEN') + if not access_token: + print("⚠️ SMARTSHEET_ACCESS_TOKEN not set. Skipping example.") + return + + # Create client without context manager + client = AsyncSmartsheet(access_token=access_token) + print("✓ Client initialized") + + try: + result = await client.Workspaces.list_workspaces() + + if isinstance(result, Error): + print(f"✗ Error: {result.result.message}") + else: + print(f"✓ Found {len(result.data)} workspaces") + finally: + # IMPORTANT: Always close the client to release resources + await client.aclose() + print("✓ Client manually closed") + + +# ============================================================================= +# Example 3: Adding Rows to a Sheet +# ============================================================================= + +async def example_add_rows(sheet_id: int, column_ids: List[int]): + """ + Demonstrates adding rows to a sheet asynchronously. + + Args: + sheet_id: The ID of the sheet to add rows to + column_ids: List of column IDs for the cells (must have at least 3) + """ + print("\n" + "="*70) + print("Example 3: Adding Rows to a Sheet") + print("="*70) + + access_token = os.environ.get('SMARTSHEET_ACCESS_TOKEN') + if not access_token: + print("⚠️ SMARTSHEET_ACCESS_TOKEN not set. Skipping example.") + return + + async with AsyncSmartsheet(access_token=access_token) as client: + # Create first row + row1 = Row() + row1.to_bottom = True # Add to bottom of sheet + row1.cells = [ + Cell({'column_id': column_ids[0], 'value': 'Task 1'}), + Cell({'column_id': column_ids[1], 'value': 'In Progress'}), + Cell({'column_id': column_ids[2], 'value': 'High'}) + ] + + # Create second row + row2 = Row() + row2.to_bottom = True + row2.cells = [ + Cell({'column_id': column_ids[0], 'value': 'Task 2'}), + Cell({'column_id': column_ids[1], 'value': 'Not Started'}), + Cell({'column_id': column_ids[2], 'value': 'Medium'}) + ] + + print(f"Adding 2 rows to sheet {sheet_id}...") + + # Add rows asynchronously + result = await client.Sheets.add_rows(sheet_id, [row1, row2]) + + if isinstance(result, Error): + print(f"✗ Error: {result.result.message}") + print(f" Status Code: {result.result.status_code}") + print(f" Error Code: {result.result.code}") + else: + print(f"✓ Successfully added {len(result.data)} rows") + for row in result.data: + print(f" - Row ID: {row.id}, Row Number: {row.row_number}") + + +# ============================================================================= +# Example 4: Listing Workspaces with Token Pagination +# ============================================================================= + +async def example_list_workspaces_pagination(): + """ + Demonstrates listing workspaces with token-based pagination. + + Token pagination is more efficient than legacy offset-based pagination + and is the recommended approach for new code. + """ + print("\n" + "="*70) + print("Example 4: Listing Workspaces with Token Pagination") + print("="*70) + + access_token = os.environ.get('SMARTSHEET_ACCESS_TOKEN') + if not access_token: + print("⚠️ SMARTSHEET_ACCESS_TOKEN not set. 
Skipping example.") + return + + async with AsyncSmartsheet(access_token=access_token) as client: + # First page with token pagination + print("Fetching first page (max 100 items)...") + result = await client.Workspaces.list_workspaces( + pagination_type='token', + max_items=100 + ) + + if isinstance(result, Error): + print(f"✗ Error: {result.result.message}") + return + + print(f"✓ Found {len(result.data)} workspaces on first page") + for workspace in result.data: + print(f" - {workspace.name} (ID: {workspace.id}, Access: {workspace.access_level})") + + # Check if there are more pages + if hasattr(result, 'next_token') and result.next_token: + print(f"\n✓ More results available (next_token: {result.next_token[:20]}...)") + print("Fetching next page...") + + next_result = await client.Workspaces.list_workspaces( + pagination_type='token', + last_key=result.next_token, + max_items=100 + ) + + if not isinstance(next_result, Error): + print(f"✓ Found {len(next_result.data)} workspaces on second page") + else: + print("\n✓ No more pages available") + + +# ============================================================================= +# Example 5: Concurrent Operations +# ============================================================================= + +async def example_concurrent_operations(sheet_ids: List[int], column_ids: List[int]): + """ + Demonstrates running multiple operations concurrently. + + This is one of the main benefits of async - you can make multiple + API calls at the same time, significantly improving throughput. + + Args: + sheet_ids: List of sheet IDs to add rows to + column_ids: List of column IDs for the cells + """ + print("\n" + "="*70) + print("Example 5: Concurrent Operations") + print("="*70) + + access_token = os.environ.get('SMARTSHEET_ACCESS_TOKEN') + if not access_token: + print("⚠️ SMARTSHEET_ACCESS_TOKEN not set. Skipping example.") + return + + async with AsyncSmartsheet(access_token=access_token) as client: + # Create a row to add to multiple sheets + row = Row() + row.to_bottom = True + row.cells = [ + Cell({'column_id': column_ids[0], 'value': 'Concurrent Task'}), + Cell({'column_id': column_ids[1], 'value': 'Automated'}), + Cell({'column_id': column_ids[2], 'value': 'Low'}) + ] + + print(f"Adding rows to {len(sheet_ids)} sheets concurrently...") + + # Create tasks for concurrent execution + tasks = [ + client.Sheets.add_rows(sheet_id, [row]) + for sheet_id in sheet_ids + ] + + # Execute all tasks concurrently + import time + start_time = time.time() + results = await asyncio.gather(*tasks) + elapsed_time = time.time() - start_time + + # Process results + success_count = 0 + error_count = 0 + + for sheet_id, result in zip(sheet_ids, results): + if isinstance(result, Error): + print(f"✗ Sheet {sheet_id}: Error - {result.result.message}") + error_count += 1 + else: + print(f"✓ Sheet {sheet_id}: Added {len(result.data)} rows") + success_count += 1 + + print(f"\n✓ Completed {len(sheet_ids)} operations in {elapsed_time:.2f} seconds") + print(f" Success: {success_count}, Errors: {error_count}") + + +# ============================================================================= +# Example 6: Mixed Concurrent Operations +# ============================================================================= + +async def example_mixed_concurrent_operations(sheet_id: int, column_ids: List[int]): + """ + Demonstrates running different types of operations concurrently. + + You can mix different API calls (add_rows, list_workspaces, etc.) + and execute them all at the same time. 
+ + Args: + sheet_id: Sheet ID for add_rows operation + column_ids: List of column IDs for the cells + """ + print("\n" + "="*70) + print("Example 6: Mixed Concurrent Operations") + print("="*70) + + access_token = os.environ.get('SMARTSHEET_ACCESS_TOKEN') + if not access_token: + print("⚠️ SMARTSHEET_ACCESS_TOKEN not set. Skipping example.") + return + + async with AsyncSmartsheet(access_token=access_token) as client: + # Create row for add_rows operation + row = Row() + row.to_bottom = True + row.cells = [ + Cell({'column_id': column_ids[0], 'value': 'Mixed Operation Task'}) + ] + + print("Running mixed operations concurrently...") + + # Execute different operations concurrently + results = await asyncio.gather( + client.Workspaces.list_workspaces(), + client.Sheets.add_rows(sheet_id, [row]), + client.Workspaces.list_workspaces(pagination_type='token', max_items=50), + ) + + workspaces1, add_result, workspaces2 = results + + # Process results + if not isinstance(workspaces1, Error): + print(f"✓ Operation 1: Found {len(workspaces1.data)} workspaces (legacy pagination)") + + if not isinstance(add_result, Error): + print(f"✓ Operation 2: Added {len(add_result.data)} rows to sheet {sheet_id}") + + if not isinstance(workspaces2, Error): + print(f"✓ Operation 3: Found {len(workspaces2.data)} workspaces (token pagination)") + + +# ============================================================================= +# Example 7: Error Handling with Error Objects +# ============================================================================= + +async def example_error_handling_objects(): + """ + Demonstrates error handling using Error objects (default behavior). + + By default, the async client returns Error objects instead of raising + exceptions. This gives you fine-grained control over error handling. + """ + print("\n" + "="*70) + print("Example 7: Error Handling with Error Objects") + print("="*70) + + access_token = os.environ.get('SMARTSHEET_ACCESS_TOKEN') + if not access_token: + print("⚠️ SMARTSHEET_ACCESS_TOKEN not set. Skipping example.") + return + + async with AsyncSmartsheet(access_token=access_token) as client: + # Try to add rows to a non-existent sheet + invalid_sheet_id = 999999999999 + + row = Row() + row.to_bottom = True + row.cells = [Cell({'column_id': 123, 'value': 'Test'})] + + print(f"Attempting to add rows to invalid sheet {invalid_sheet_id}...") + result = await client.Sheets.add_rows(invalid_sheet_id, [row]) + + # Check if result is an Error object + if isinstance(result, Error): + print(f"✓ Error detected (as expected):") + print(f" Status Code: {result.result.status_code}") + print(f" Error Code: {result.result.code}") + print(f" Message: {result.result.message}") + print(f" Should Retry: {result.result.should_retry}") + print(f" Recommendation: {result.result.recommendation}") + + if hasattr(result.result, 'ref_id') and result.result.ref_id: + print(f" Reference ID: {result.result.ref_id}") + else: + print(f"✗ Unexpected success: {result.message}") + + +# ============================================================================= +# Example 8: Error Handling with Exceptions +# ============================================================================= + +async def example_error_handling_exceptions(): + """ + Demonstrates error handling using exceptions. + + You can configure the client to raise exceptions instead of returning + Error objects by calling errors_as_exceptions(True). 
+ """ + print("\n" + "="*70) + print("Example 8: Error Handling with Exceptions") + print("="*70) + + access_token = os.environ.get('SMARTSHEET_ACCESS_TOKEN') + if not access_token: + print("⚠️ SMARTSHEET_ACCESS_TOKEN not set. Skipping example.") + return + + async with AsyncSmartsheet(access_token=access_token) as client: + # Enable exception raising + client.errors_as_exceptions(True) + print("✓ Configured client to raise exceptions") + + # Try to add rows to a non-existent sheet + invalid_sheet_id = 999999999999 + + row = Row() + row.to_bottom = True + row.cells = [Cell({'column_id': 123, 'value': 'Test'})] + + print(f"Attempting to add rows to invalid sheet {invalid_sheet_id}...") + + try: + result = await client.Sheets.add_rows(invalid_sheet_id, [row]) + print(f"✗ Unexpected success: {result.message}") + except Exception as e: + print(f"✓ Exception caught (as expected):") + print(f" Exception Type: {type(e).__name__}") + print(f" Message: {str(e)}") + + # Access error details if available + if hasattr(e, 'error'): + print(f" Error Code: {e.error.result.code}") + print(f" Status Code: {e.error.result.status_code}") + + +# ============================================================================= +# Example 9: Async Service Integration Pattern +# ============================================================================= + +async def example_async_service_pattern(): + """ + Demonstrates the pattern for integrating with async services. + + This shows how to create a reusable client instance for async service handlers. + Note: This is a demonstration of the pattern, works with any async framework. + """ + print("\n" + "="*70) + print("Example 9: Async Service Integration Pattern") + print("="*70) + + access_token = os.environ.get('SMARTSHEET_ACCESS_TOKEN') + if not access_token: + print("⚠️ SMARTSHEET_ACCESS_TOKEN not set. Skipping example.") + return + + # In a real async service, you would create this at module level + # and reuse it across multiple handler invocations + global_client = None + + async def get_client(): + """Get or create the Smartsheet client.""" + nonlocal global_client + if global_client is None: + global_client = AsyncSmartsheet(access_token=access_token) + print("✓ Created global client instance") + return global_client + + # Simulate multiple service handler calls + print("\nSimulating async service handler call 1...") + client = await get_client() + result1 = await client.Workspaces.list_workspaces() + if not isinstance(result1, Error): + print(f"✓ Handler 1: Found {len(result1.data)} workspaces") + + print("\nSimulating async service handler call 2...") + client = await get_client() # Reuses same client + result2 = await client.Workspaces.list_workspaces() + if not isinstance(result2, Error): + print(f"✓ Handler 2: Found {len(result2.data)} workspaces") + + # Cleanup (would be done in service shutdown handler) + if global_client: + await global_client.aclose() + print("\n✓ Cleaned up global client") + + +# ============================================================================= +# Example 10: Configuration Options +# ============================================================================= + +async def example_configuration_options(): + """ + Demonstrates various configuration options for the async client. + """ + print("\n" + "="*70) + print("Example 10: Configuration Options") + print("="*70) + + access_token = os.environ.get('SMARTSHEET_ACCESS_TOKEN') + if not access_token: + print("⚠️ SMARTSHEET_ACCESS_TOKEN not set. 
Skipping example.") + return + + # Create client with custom configuration + async with AsyncSmartsheet( + access_token=access_token, + max_connections=10, # Increase max concurrent connections + max_retry_time=60, # Retry for up to 60 seconds + user_agent="MyApp/1.0", # Custom user agent + # proxies={"https": "..."}, # Uncomment to use proxy + ) as client: + print("✓ Client initialized with custom configuration:") + print(" - max_connections: 10") + print(" - max_retry_time: 60 seconds") + print(" - user_agent: MyApp/1.0") + + # Test the configured client + result = await client.Workspaces.list_workspaces() + + if not isinstance(result, Error): + print(f"\n✓ Successfully listed {len(result.data)} workspaces") + + +# ============================================================================= +# Main Function - Run All Examples +# ============================================================================= + +async def main(): + """ + Run all examples. + + Note: Some examples require specific sheet IDs and column IDs. + Modify the values below to match your Smartsheet environment. + """ + print("\n" + "="*70) + print("Smartsheet Python SDK - Async Examples") + print("="*70) + + # Check for access token + access_token = os.environ.get('SMARTSHEET_ACCESS_TOKEN') + if not access_token: + print("\n⚠️ WARNING: SMARTSHEET_ACCESS_TOKEN environment variable not set!") + print("Set it with: export SMARTSHEET_ACCESS_TOKEN='your_token_here'") + print("\nRunning examples in demo mode (will show errors)...\n") + + # Run basic examples (don't require specific IDs) + await example_basic_usage() + await example_manual_cleanup() + await example_list_workspaces_pagination() + await example_error_handling_objects() + await example_error_handling_exceptions() + await example_async_service_pattern() + await example_configuration_options() + + # Examples that require specific sheet/column IDs + # Uncomment and modify these to run with your actual data + + # SHEET_ID = 1234567890 # Replace with your sheet ID + # COLUMN_IDS = [111, 222, 333] # Replace with your column IDs + # + # await example_add_rows(SHEET_ID, COLUMN_IDS) + # await example_concurrent_operations([SHEET_ID], COLUMN_IDS) + # await example_mixed_concurrent_operations(SHEET_ID, COLUMN_IDS) + + print("\n" + "="*70) + print("Examples completed!") + print("="*70) + print("\nTo run examples that modify sheets, uncomment and configure") + print("the sheet ID and column ID variables in the main() function.") + print("\nFor more information, see:") + print(" - docs-source/async-quickstart.md") + print(" - https://smartsheet.github.io/smartsheet-python-sdk/") + print("="*70 + "\n") + + +if __name__ == "__main__": + # Run the async main function + asyncio.run(main()) diff --git a/pyproject.toml b/pyproject.toml index 251dd959..63d88e8c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,8 @@ dependencies = [ "requests-toolbelt", "six>=1.9", "certifi", - "python-dateutil" + "python-dateutil", + "httpx>=0.27.0" ] classifiers = [ "Development Status :: 5 - Production/Stable", @@ -41,6 +42,7 @@ test = [ "coverage", "coveralls", "pytest", + "pytest-asyncio>=0.21.0", "pytest-rerunfailures", "requests-toolbelt" ] @@ -48,6 +50,7 @@ develop = [ "coverage", "coveralls[yaml]", "pytest", + "pytest-asyncio>=0.21.0", "pytest-instafail", "pylint" ] diff --git a/smartsheet/__init__.py b/smartsheet/__init__.py index 6aae2d68..cadc6e30 100644 --- a/smartsheet/__init__.py +++ b/smartsheet/__init__.py @@ -32,6 +32,7 @@ __version__ = get_version() from .smartsheet 
import AbstractUserCalcBackoff, Smartsheet # NOQA +from .async_smartsheet import AsyncSmartsheet, AsyncAbstractUserCalcBackoff # NOQA from .util import fresh_operation # NOQA from . import models diff --git a/smartsheet/async_session.py b/smartsheet/async_session.py new file mode 100644 index 00000000..a8c76c34 --- /dev/null +++ b/smartsheet/async_session.py @@ -0,0 +1,140 @@ +# pylint: disable=E0401,W0221,W0613 +# Smartsheet Python SDK. +# +# Copyright 2016 Smartsheet.com, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Async session management for Smartsheet API. + +This module provides async HTTP client configuration with the same SSL/TLS +security settings as the synchronous session module. It uses httpx.AsyncClient +to provide non-blocking HTTP operations suitable for async/await patterns. +""" + +# pylint: disable=no-member +# known issue regarding ssl module and pylint. + +import ssl +from typing import Optional + +import certifi +import httpx + +_TRUSTED_CERT_FILE = certifi.where() + + +def _create_ssl_context() -> ssl.SSLContext: + """Create SSL context with secure TLS configuration. + + Configures SSL context to: + - Disable SSLv2, SSLv3, and TLSv1 (insecure protocols) + - Use system default security settings + - Verify certificates against trusted CA bundle + + Returns: + ssl.SSLContext: Configured SSL context for secure connections + """ + ctx = ssl.create_default_context() + ctx.options |= ssl.OP_NO_SSLv2 + ctx.options |= ssl.OP_NO_SSLv3 + ctx.options |= ssl.OP_NO_TLSv1 + return ctx + + +def async_pinned_session( + pool_maxsize: int = 8, + max_retries: int = 1, + timeout: Optional[float] = None +) -> httpx.AsyncClient: + """Create an async HTTP client with pinned SSL/TLS configuration. + + This function creates an httpx.AsyncClient configured with: + - Secure SSL/TLS settings (no SSLv2, SSLv3, or TLSv1) + - Connection pooling for efficient resource usage + - Automatic retry logic for transient failures + - Certificate verification against trusted CA bundle + + The client should be used as an async context manager to ensure + proper resource cleanup: + + async with async_pinned_session() as client: + response = await client.get("https://api.smartsheet.com/...") + + Or with explicit lifecycle management: + + client = async_pinned_session() + try: + response = await client.get("https://api.smartsheet.com/...") + finally: + await client.aclose() + + Args: + pool_maxsize: Maximum number of connections to pool (default: 8) + max_retries: Number of retry attempts for failed requests (default: 1) + timeout: Request timeout in seconds (default: None for no timeout) + + Returns: + httpx.AsyncClient: Configured async HTTP client with secure settings + + Example: + >>> async with async_pinned_session() as client: + ... response = await client.get("https://api.smartsheet.com/2.0/users/me") + ... 
print(response.status_code) + 200 + """ + # Create SSL context with secure configuration + ssl_context = _create_ssl_context() + + # Configure connection limits for pooling + limits = httpx.Limits( + max_connections=pool_maxsize, + max_keepalive_connections=pool_maxsize // 2 + ) + + # Configure retry transport + transport = httpx.AsyncHTTPTransport( + limits=limits, + verify=ssl_context, + retries=max_retries + ) + + # Create async client with configuration + client = httpx.AsyncClient( + transport=transport, + timeout=timeout, + verify=_TRUSTED_CERT_FILE, + event_hooks={ + 'response': [_redact_token_async] + } + ) + + return client + + +async def _redact_token_async(response: httpx.Response) -> None: + """Redact authorization token from request headers for security. + + This hook is called after each response to remove sensitive authorization + tokens from the request object, preventing them from appearing in logs + or debug output. + + Args: + response: The HTTP response object containing the request + """ + if "Authorization" in response.request.headers: + # Create new headers dict with redacted token + response.request.headers = httpx.Headers({ + **response.request.headers, + "Authorization": "[redacted]" + }) diff --git a/smartsheet/async_sheets.py b/smartsheet/async_sheets.py new file mode 100644 index 00000000..b8b1aa76 --- /dev/null +++ b/smartsheet/async_sheets.py @@ -0,0 +1,135 @@ +# pylint: disable=C0111,R0902,R0913,C0301,R0914 +# Smartsheet Python SDK. +# +# Copyright 2018 Smartsheet.com, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Async Sheets API operations. + +This module provides async versions of Sheets API operations using async/await +patterns. Currently implements a proof-of-concept with the add_rows method. + +Example: + Adding rows to a sheet asynchronously:: + + import asyncio + from smartsheet import AsyncSmartsheet + from smartsheet.models import Row, Cell + + async def main(): + async with AsyncSmartsheet(access_token="token") as client: + # Create rows to add + row1 = Row() + row1.to_bottom = True + row1.cells = [ + Cell({'column_id': 123, 'value': 'New Value 1'}), + Cell({'column_id': 456, 'value': 'New Value 2'}) + ] + + # Add rows asynchronously + result = await client.Sheets.add_rows(sheet_id, [row1]) + print(f"Added {len(result.data)} rows") + + asyncio.run(main()) +""" + +from __future__ import absolute_import + +import logging +from typing import Union, List + +from .models import Error, Result, Row +from .types import TypedList +from .util import fresh_operation +from .operations.sheets_operations import SheetsOperations + + +class AsyncSheets: + """Async class for handling Sheets operations. + + This class provides async methods for interacting with the Smartsheet Sheets API. + Currently implements a proof-of-concept with the add_rows method. Additional + methods will be added in future iterations. 
+ + Attributes: + _base: Reference to the parent AsyncSmartsheet client + _log: Logger instance for this class + """ + + def __init__(self, smartsheet_obj): + """Init AsyncSheets with base AsyncSmartsheet object. + + Args: + smartsheet_obj: The parent AsyncSmartsheet client instance + """ + self._base = smartsheet_obj + self._log = logging.getLogger(__name__) + + async def add_rows(self, sheet_id: int, list_of_rows) -> Union[Result[Union[Row, List[Row]]], Error]: + """Insert one or more Rows into the specified Sheet asynchronously. + + If multiple rows are specified in the request, all rows + must be inserted at the same location (i.e. the **toTop**, + **toBottom**, **parentId**, **siblingId**, and **above** attributes + must be the same for all rows in the request.) + + In a parent row, values of the following fields will be + auto-calculated based upon values in the child rows (and therefore + cannot be updated using the API): Start Date, End Date, Duration, % + Complete. + + Args: + sheet_id: Sheet ID + list_of_rows: An array of Row objects with the following attributes: + + One or more location-specifier attributes (optional) + + format (optional) + + expanded (optional) + + locked (optional) + + A cells attribute set to an array of Cell objects. + To insert an empty row, set the cells attribute to empty or null. + Each Cell object may contain the following attributes: + + columnId (required) + + value (required) + + strict (optional) + + format (optional) + + hyperlink (optional) + + Returns: + Union[Result[Union[Row, List[Row]]], Error]: The result of the operation - + either a list or a single object, or an Error object if the request fails. + + Example: + >>> async with AsyncSmartsheet(access_token="token") as client: + ... row = Row() + ... row.to_bottom = True + ... row.cells = [Cell({'column_id': 123, 'value': 'Test'})] + ... result = await client.Sheets.add_rows(sheet_id, [row]) + ... print(f"Added {len(result.data)} rows") + """ + _op, expected = SheetsOperations.build_add_rows(sheet_id, list_of_rows) + + prepped_request = self._base.prepare_request(_op) + response = await self._base.request(prepped_request, expected, _op) + + return response diff --git a/smartsheet/async_smartsheet.py b/smartsheet/async_smartsheet.py new file mode 100644 index 00000000..c79d854c --- /dev/null +++ b/smartsheet/async_smartsheet.py @@ -0,0 +1,693 @@ +# pylint: disable=C0103,C0111,R0902,R0913,W0614,C0302,W0401,R0912,W0611,C0301,W0621,W0404,R1720,W0702,W0613 +# Smartsheet Python SDK. +# +# Copyright 2016 Smartsheet.com, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Async client for Smartsheet API. + +This module provides an async version of the Smartsheet client that uses +httpx.AsyncClient for non-blocking HTTP operations. It mirrors the synchronous +client API but uses async/await patterns. 
+ +Example: + Basic usage with context manager:: + + import asyncio + from smartsheet import AsyncSmartsheet + + async def main(): + async with AsyncSmartsheet(access_token="your_token") as client: + workspaces = await client.Workspaces.list_workspaces() + print(f"Found {len(workspaces.data)} workspaces") + + asyncio.run(main()) + + Manual lifecycle management:: + + async def main(): + client = AsyncSmartsheet(access_token="your_token") + try: + workspaces = await client.Workspaces.list_workspaces() + print(f"Found {len(workspaces.data)} workspaces") + finally: + await client.aclose() + + asyncio.run(main()) +""" + +from __future__ import absolute_import, annotations + +import asyncio +import importlib +import inspect +import json +import logging +import logging.config +import os +import random +import re +import sys +import time +from typing import Optional + +import httpx +import six + +from . import __api_base__, __version__, models +from .async_session import async_pinned_session +from .exceptions import ApiError, HttpError, UnexpectedRequestError +from .models import Error, ErrorResult +from .util import is_multipart, serialize + +__all__ = ("AsyncSmartsheet", "AsyncAbstractUserCalcBackoff") + + +class AsyncAbstractUserCalcBackoff: + """Abstract base class for async backoff calculation.""" + + async def calc_backoff(self, previous_attempts, total_elapsed_time, error_result): + """Calculate backoff time for retry attempts. + + Args: + previous_attempts (int): Number of previous retry attempts + total_elapsed_time (float): Total elapsed time in seconds + error_result (ErrorResult): Error result from previous attempt + + Returns: + float: Backoff time in seconds (negative to stop retrying) + """ + raise NotImplementedError( + f"Class {self.__class__.__name__} doesn't implement calc_backoff()" + ) + + +class AsyncDefaultCalcBackoff(AsyncAbstractUserCalcBackoff): + """Default async backoff calculator.""" + + def __init__(self, max_retry_time): + self._max_retry_time = max_retry_time + + async def calc_backoff(self, previous_attempts, total_elapsed_time, error_result): + """Default back off calculator on retry. + + Args: + previous_attempts (int): Number of previous retry attempts + total_elapsed_time (float): Elapsed time in seconds + error_result (ErrorResult): ErrorResult object for previous API attempt + + Returns: + float: Back off time in seconds (any negative number will drop out of retry loop) + """ + # Use exponential backoff + backoff = (2**previous_attempts) + random.random() + + if (total_elapsed_time + backoff) > self._max_retry_time: + return -1 + + return backoff + + +class AsyncSmartsheet: + """Async client for making requests to the Smartsheet API. + + This class provides an async interface to the Smartsheet API using httpx.AsyncClient + for non-blocking HTTP operations. It mirrors the synchronous Smartsheet client API + but uses async/await patterns throughout. + + The client should be used as an async context manager to ensure proper resource + cleanup, or you must manually call aclose() when done. + + Attributes: + Sheets: Async Sheets API operations + Workspaces: Async Workspaces API operations + models: Reference to smartsheet.models module + raise_exceptions: Whether to raise exceptions on API errors (default: False) + + Example: + >>> async with AsyncSmartsheet(access_token="token") as client: + ... result = await client.Sheets.add_rows(sheet_id, rows) + ... 
print(f"Added {len(result.data)} rows") + """ + + models = models + + def __init__( + self, + access_token: Optional[str] = None, + max_connections: int = 8, + user_agent: Optional[str] = None, + max_retry_time: int = 30, + proxies: Optional[dict] = None, + api_base: str = __api_base__, + ): + """Initialize async Smartsheet client. + + Args: + access_token: Access Token for making client requests. May also be set + as an env variable in SMARTSHEET_ACCESS_TOKEN. (required) + max_connections: Maximum connection pool size. + max_retry_time: User provided maximum elapsed time for retry attempts. + user_agent: The user agent to use when making requests. This helps us + identify requests coming from your application. We recommend you use + the format "AppName/Version". If set, we append + "/SmartsheetPythonSDK/__version__" to the user_agent. + proxies: Proxy configuration dict. See httpx documentation for details. + api_base: Base URL for API requests (default: https://api.smartsheet.com/2.0) + + Raises: + ValueError: If access_token is not provided and not set in environment + """ + self.raise_exceptions = False + + if access_token: + self._access_token = access_token + else: + self._access_token = os.environ.get("SMARTSHEET_ACCESS_TOKEN", None) + + if self._access_token is None: + raise ValueError( + "Access Token must be set in the environment " + "or passed to smartsheet.AsyncSmartsheet() " + "as a parameter." + ) + + if isinstance(max_retry_time, AsyncAbstractUserCalcBackoff): + self._user_calc_backoff = max_retry_time + else: + self._user_calc_backoff = AsyncDefaultCalcBackoff(max_retry_time) + + self._session: Optional[httpx.AsyncClient] = None + self._max_connections = max_connections + self._proxies = proxies + + base_user_agent = "SmartsheetPythonSDK/" + __version__ + if user_agent: + self._user_agent = f"{base_user_agent}/{user_agent}" + else: + caller = "__unknown__" + stack = inspect.stack() + module = inspect.getmodule(stack[-1][0]) + if module is not None: + caller = inspect.getmodule(stack[-1][0]).__name__ + self._user_agent = f"{base_user_agent}/{caller}" + + self._log = logging.getLogger(__name__) + self._url = "" + self._api_base = api_base + self._assume_user = None + self._test_scenario_name = None + self._wiremock_test_name = None + self._wiremock_request_id = None + self._change_agent = None + self._api_modules_cache = {} + + async def __aenter__(self): + """Async context manager entry.""" + await self._ensure_session() + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + """Async context manager exit.""" + await self.aclose() + return False + + async def _ensure_session(self): + """Ensure HTTP session is initialized.""" + if self._session is None: + self._session = async_pinned_session(pool_maxsize=self._max_connections) + if self._proxies: + # httpx uses 'proxies' parameter differently than requests + # We'll need to recreate the client with proxies + await self._session.aclose() + self._session = httpx.AsyncClient( + proxies=self._proxies, + limits=httpx.Limits( + max_connections=self._max_connections, + max_keepalive_connections=self._max_connections // 2 + ) + ) + + async def aclose(self): + """Close the async HTTP session and release resources. + + This method should be called when you're done using the client if you're + not using it as an async context manager. + + Example: + >>> client = AsyncSmartsheet(access_token="token") + >>> try: + ... result = await client.Sheets.add_rows(sheet_id, rows) + ... finally: + ... 
await client.aclose() + """ + if self._session is not None: + await self._session.aclose() + self._session = None + + def assume_user(self, email: Optional[str] = None): + """Assume identity of specified user. + + As an administrator, you can assume the identity of any user + in your organization. + + Args: + email: Valid email address of user whose identity should be assumed. + Pass None to clear the assumed user. + """ + if email is None: + self._assume_user = None + else: + self._assume_user = six.moves.urllib.parse.quote(email) + + def errors_as_exceptions(self, preference: bool = True): + """Set preference on whether or not to raise exceptions on API errors. + + When preference is True, exceptions will be raised. When False, + instances of the Error data type will be returned. + + The property `raise_exceptions` defaults to False. Therefore, this + method should only be called if exceptions *should* be raised. + + Args: + preference: Flag indicating whether errors should be raised as exceptions. + """ + self.raise_exceptions = preference + + def as_test_scenario(self, name: str): + """Identify requests made with this client as a test scenario. + + Args: + name: The name of the test scenario. + """ + self._test_scenario_name = name + + def with_wiremock_test_case(self, test_name: str, request_id: str): + """Configure client with x-test-name and x-request-id headers. + + Used for wiremock test cases. + + Args: + test_name: The name of the wiremock test case. + request_id: The unique request ID for this test scenario. + """ + self._wiremock_test_name = test_name + self._wiremock_request_id = request_id + + def with_change_agent(self, change_agent: str): + """Request headers will contain the 'Smartsheet-Change-Agent' header value. + + Args: + change_agent: The name of this change agent + """ + self._change_agent = change_agent + + async def request(self, prepped_request, expected, operation): + """Make a request from the Smartsheet API. + + Make a request from the Smartsheet API and validate that inputs + and outputs are as expected. The API response is converted from + raw wire messages to a native objects based on the value of `expected`. + + Args: + prepped_request: Prepared httpx.Request for the operation. + expected: The expected response data type. + operation: Dictionary containing operation details + + Returns: + The API operation result object. + """ + res = await self.request_with_retry(prepped_request, operation) + native = res.native(expected) + + if not self.raise_exceptions: + return native + + if isinstance(native, self.models.Error): + the_ex = getattr(sys.modules[__name__], native.result.name) + raise the_ex(native, str(native.result.code) + ": " + native.result.message) + else: + return native + + def _log_request(self, operation, response): + """Wrapper for request/response logger. 
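+
+        Logs the request method and URL at INFO level; request and response
+        bodies are logged at DEBUG level with non-JSON payloads suppressed,
+        and non-2xx responses are logged at ERROR level.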
+ + Args: + operation: Operation dictionary + response: httpx.Response object + """ + # request + self._log.info( + '{"request": {"command": "%s %s"}}', + response.request.method, + response.request.url, + ) + if response.request.content is not None: + body_dumps = f'"<< {response.request.headers.get("Content-Type")} content type suppressed >>"' + if response.request.headers.get("Content-Type") is not None and "application/json" in response.request.headers.get("Content-Type"): + body = response.request.content.decode("utf8") + body_dumps = json.dumps(json.loads(body), sort_keys=True) + self._log.debug('{"requestBody": %s}', body_dumps) + + # response + content_dumps = f'"<< {response.headers.get("Content-Type")} content type suppressed >>"' + if response.headers.get("Content-Type") is not None and "application/json" in response.headers.get("Content-Type"): + content = response.content.decode("utf8") + content_dumps = json.dumps(json.loads(content), sort_keys=True) + + if 200 <= response.status_code <= 299: + if operation["dl_path"] is None: + self._log.debug( + '{"response": {"statusCode": %d, "reason": "%s", "content": %s}}', + response.status_code, + response.reason_phrase, + content_dumps, + ) + else: + self._log.debug( + '{"response": {"statusCode": %d, "reason": "%s"}}', + response.status_code, + response.reason_phrase, + ) + else: + self._log.error( + '{"response": {"statusCode": %d, "reason": "%s", "content": %s}}', + response.status_code, + response.reason_phrase, + content_dumps, + ) + + async def _request(self, prepped_request, operation): + """Wrapper for the low-level Request action. + + Only low-level error handling. + + Args: + prepped_request: Prepared httpx.Request for the operation. + operation: Operation dictionary + + Returns: + Operation Result object. + """ + await self._ensure_session() + + try: + res = await self._session.send(prepped_request) + self._log_request(operation, res) + except httpx.HTTPError as rex: + raise UnexpectedRequestError(prepped_request, None) from rex + + if 200 <= res.status_code <= 299: + return AsyncOperationResult(res.text, res, self, operation) + else: + return AsyncOperationErrorResult(res.text, res) + + async def request_with_retry(self, prepped_request, operation): + """Perform the request with retry. + + Args: + prepped_request: A prepared httpx.Request object for the operation. + operation: Dictionary containing operation details + + Returns: + Operation Result object. 
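+
+        Example:
+            Retry timing comes from the configured backoff calculator. As a
+            sketch (the NoRetryBackoff name is illustrative), a custom policy
+            can be supplied by passing an AsyncAbstractUserCalcBackoff subclass
+            as max_retry_time when constructing the client::
+
+                class NoRetryBackoff(AsyncAbstractUserCalcBackoff):
+                    async def calc_backoff(self, previous_attempts, total_elapsed_time, error_result):
+                        # A negative value drops straight out of the retry loop.
+                        return -1
+
+                client = AsyncSmartsheet(access_token="token", max_retry_time=NoRetryBackoff())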
+ """ + attempt = 0 + start_time = time.time() + # Make a copy of the request as the access token will be redacted on response prior to logging + pre_redact_request = prepped_request + + while True: + result = await self._request(prepped_request, operation) + if isinstance(result, AsyncOperationErrorResult): + native = result.native("Error") + if native.result.should_retry: + attempt += 1 + elapsed_time = time.time() - start_time + backoff = await self._user_calc_backoff.calc_backoff( + attempt, elapsed_time, native.result + ) + if backoff < 0: + break + self._log.info( + "HttpError status_code=%s: Retrying in %.1f seconds", + native.result.status_code, + backoff, + ) + await asyncio.sleep(backoff) + # restore un-redacted request prior to retry + prepped_request = pre_redact_request + else: + break + else: + break + return result + + def prepare_request(self, _op): + """Generate a prepared httpx.Request object.""" + if _op["header_params"]: + _op["headers"].update(_op["header_params"]) + + if _op["path_params"]: + for key, val in six.iteritems(_op["path_params"]): + _op["path"] = _op["path"].replace("{" + key + "}", str(val)) + + if _op["json"]: + _op["json"] = serialize(_op["json"]) + + if _op["query_params"]: + for key, val in six.iteritems(_op["query_params"]): + if isinstance(val, list): + val = ",".join([str(num) for num in val]) + _op["query_params"][key] = val + + # Build request + req = httpx.Request( + _op["method"], + self._api_base + _op["path"], + headers=_op["headers"], + params=_op["query_params"], + json=_op["json"] if _op["json"] else None, + data=_op["form_data"] if _op["form_data"] else None, + ) + + req.headers.update({"User-Agent": self._user_agent}) + if _op["auth_settings"] is not None: + auth_header_val = "Bearer " + self._access_token + req.headers.update({"Authorization": auth_header_val}) + + if self._assume_user is not None: + req.headers.update({"Assume-User": self._assume_user}) + else: + req.headers.pop("Assume-User", None) + + if self._test_scenario_name is not None: + req.headers.update({"Api-Scenario": self._test_scenario_name}) + else: + req.headers.pop("Api-Scenario", None) + + if self._wiremock_test_name is not None and self._wiremock_request_id is not None: + req.headers["X-Test-Name"] = self._wiremock_test_name + req.headers["X-Request-ID"] = self._wiremock_request_id + + if self._change_agent is not None: + req.headers.update({"Smartsheet-Change-Agent": self._change_agent}) + else: + req.headers.pop("Smartsheet-Change-Agent", None) + + return req + + def __getattr__(self, name): + """Handle sub-class instantiation. + + Args: + name: Name of smartsheet resource class to instantiate. + + Returns: + Instance of named class. + """ + # Check if module is already cached + if name in self._api_modules_cache: + return self._api_modules_cache[name] + + try: + # Try async API class first + class_ = getattr( + importlib.import_module(__package__ + ".async_" + name.lower()), "Async" + name + ) + instance = class_(self) + # Cache the instance + self._api_modules_cache[name] = instance + return instance + except (ImportError, AttributeError): + self._log.error( + "ImportError! Could not load async api class %s", name + ) + raise AttributeError(f"AsyncSmartsheet has no attribute '{name}'") + + +class AsyncOperationResult: + """The successful result of a call to an async operation.""" + + def __init__(self, op_result, resp=None, base_obj=None, operation=None): + """Initialize AsyncOperationResult. 
+ + Args: + op_result: The result of an operation not including the binary + payload portion, if one exists. Must be a JSON string. + resp: A raw httpx.Response object. + base_obj: Configured core object for subsequent convenience + method requests. + operation: Operation dictionary + """ + assert isinstance( + op_result, str + ), f"op_result: expected string, got {type(op_result)!r}" + if resp is not None: + assert isinstance( + resp, httpx.Response + ), f"resp: expected httpx.Response, got {type(resp)!r}" + self._base = base_obj + self.op_result = op_result + self.resp = resp + self.dynamic_data_types = [] + self.operation = operation + + def native(self, expected): + """Initialize expected result object and return it. + + Args: + expected: Expected objects to return. + + Returns: + Operation Result object or Operation Error Result object. + """ + try: + data = self.resp.json() + except ValueError: + return AsyncOperationErrorResult(self.op_result, self.resp) + + if isinstance(expected, list): + klass = expected[0] + dynamic_type = expected[1] + class_ = getattr(importlib.import_module("smartsheet.models"), klass) + obj = class_(data, dynamic_type, self._base) + if hasattr(obj, "request_response"): + obj.request_response = self.resp + + return obj + + class_ = getattr(importlib.import_module("smartsheet.models"), expected) + + obj = class_(data, self._base) + if hasattr(obj, "request_response"): + obj.request_response = self.resp + + return obj + + +class AsyncOperationErrorResult: + """The error result of a call to an async operation.""" + + error_lookup = { + 0: { + "name": "ApiError", + "recommendation": "Do not retry without fixing the problem. ", + "should_retry": False, + }, + 4001: { + "name": "SystemMaintenanceError", + "recommendation": ( + "Retry using exponential backoff. Hint: " + "Wait time between retries should measure " + "in minutes (not seconds)." + ), + "should_retry": True, + }, + 4002: { + "name": "ServerTimeoutExceededError", + "recommendation": "Retry using exponential backoff.", + "should_retry": True, + }, + 4003: { + "name": "RateLimitExceededError", + "recommendation": ( + "Retry using exponential backoff. Hint: " + "Reduce the rate at which you are sending " + "requests." + ), + "should_retry": True, + }, + 4004: { + "name": "UnexpectedErrorShouldRetryError", + "recommendation": "Retry using exponential backoff.", + "should_retry": True, + }, + } + + def __init__(self, op_result, resp): + """Initialize AsyncOperationErrorResult. + + Args: + op_result: The result of an operation not including the + binary payload portion, if one exists. + resp: A raw httpx.Response object. + """ + self.op_result = op_result + self.resp = resp + self._log = logging.getLogger(__name__) + + def native(self, expected): + """Sadly, we won't be returning what was expected. 
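+
+        The errorCode from the response body is looked up in error_lookup and
+        converted to an Error object whose ErrorResult carries the error name,
+        recommendation, and should_retry flag; unknown or missing codes fall
+        back to the generic ApiError entry.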
+ + Args: + expected: Dashed expectations + """ + # look up name of the error + error_payload = {} + try: + error_payload = self.resp.json() + except json.JSONDecodeError: + # Do not fail if the response is not JSON + pass + error_code = error_payload.get("errorCode", 0) + try: + error_name = AsyncOperationErrorResult.error_lookup[error_code]["name"] + recommendation = AsyncOperationErrorResult.error_lookup[error_code][ + "recommendation" + ] + should_retry = AsyncOperationErrorResult.error_lookup[error_code]["should_retry"] + except: + # If error_code is present in the response but not in the lookup, default to ApiError + error_name = AsyncOperationErrorResult.error_lookup[0]["name"] + recommendation = AsyncOperationErrorResult.error_lookup[0]["recommendation"] + should_retry = AsyncOperationErrorResult.error_lookup[0]["should_retry"] + + obj = Error( + { + "result": ErrorResult( + { + "name": error_name, + "status_code": self.resp.status_code, + "code": error_code, + "message": error_payload.get("message"), + "ref_id": error_payload.get("refId"), + "recommendation": recommendation, + "should_retry": should_retry, + } + ), + "request_response": self.resp, + } + ) + return obj diff --git a/smartsheet/async_workspaces.py b/smartsheet/async_workspaces.py new file mode 100644 index 00000000..a631ef36 --- /dev/null +++ b/smartsheet/async_workspaces.py @@ -0,0 +1,152 @@ +# pylint: disable=C0111,R0902,R0913 +# Smartsheet Python SDK. +# +# Copyright 2018 Smartsheet.com, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Async Workspaces API operations. + +This module provides async versions of Workspaces API operations using async/await +patterns. Currently implements a proof-of-concept with the list_workspaces method. + +Example: + Listing workspaces asynchronously:: + + import asyncio + from smartsheet import AsyncSmartsheet + + async def main(): + async with AsyncSmartsheet(access_token="token") as client: + # List workspaces with token pagination + result = await client.Workspaces.list_workspaces( + pagination_type='token', + max_items=100 + ) + + print(f"Found {len(result.data)} workspaces") + for workspace in result.data: + print(f" - {workspace.name}") + + # Check if there are more results + if hasattr(result, 'next_token') and result.next_token: + print(f"More results available, next_token: {result.next_token}") + + asyncio.run(main()) +""" + +from __future__ import absolute_import + +import logging +import warnings +from typing import Union, Optional + +from .models import Error, IndexResult, Workspace +from .util import fresh_operation +from .operations.workspaces_operations import WorkspacesOperations + + +class AsyncWorkspaces: + """Async class for handling Workspaces operations. + + This class provides async methods for interacting with the Smartsheet Workspaces API. + Currently implements a proof-of-concept with the list_workspaces method. Additional + methods will be added in future iterations. 
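+
+    Example:
+        Draining every page with token pagination (a sketch; it assumes the
+        result exposes next_token, as referenced in list_workspaces below)::
+
+            async def all_workspaces(client):
+                items = []
+                page = await client.Workspaces.list_workspaces(
+                    pagination_type='token', max_items=100
+                )
+                items.extend(page.data)
+                while getattr(page, 'next_token', None):
+                    page = await client.Workspaces.list_workspaces(
+                        pagination_type='token',
+                        last_key=page.next_token,
+                        max_items=100,
+                    )
+                    items.extend(page.data)
+                return items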
+ + Attributes: + _base: Reference to the parent AsyncSmartsheet client + _log: Logger instance for this class + """ + + def __init__(self, smartsheet_obj): + """Init AsyncWorkspaces with base AsyncSmartsheet object. + + Args: + smartsheet_obj: The parent AsyncSmartsheet client instance + """ + self._base = smartsheet_obj + self._log = logging.getLogger(__name__) + + async def list_workspaces( + self, + page_size: Optional[int] = None, + page: Optional[int] = None, + include_all: Optional[bool] = None, + last_key: Optional[str] = None, + max_items: Optional[int] = None, + pagination_type: Optional[str] = None + ) -> Union[IndexResult[Workspace], Error]: + """Get the list of Workspaces the authenticated User may access asynchronously. + + Args: + page_size: [DEPRECATED] The maximum number of items to + return per page. Use pagination_type='token' with max_items instead. + page: [DEPRECATED] Which page to return. + Use pagination_type='token' with last_key instead. + include_all: [DEPRECATED] If true, include all results + (i.e. do not paginate). Use pagination_type='token' instead. + last_key: Pagination cursor for next page (token pagination only). + max_items: Maximum items per page (token pagination only). + Must be a positive integer. + pagination_type: Use 'token' for efficient cursor-based pagination. + Defaults to legacy offset-based pagination if not specified. + + Returns: + Union[IndexResult[Workspace], Error]: The result of the operation, or an + Error object if the request fails. When using legacy pagination, contains + paginated results with total_count, total_pages, etc. + + Raises: + ValueError: If pagination_type is not 'token' or None, or if max_items <= 0 + when using token pagination. + + Example: + Using token-based pagination (recommended):: + + >>> async with AsyncSmartsheet(access_token="token") as client: + ... result = await client.Workspaces.list_workspaces( + ... pagination_type='token', + ... max_items=100 + ... ) + ... print(f"Found {len(result.data)} workspaces") + ... + ... # Get next page if available + ... if hasattr(result, 'next_token') and result.next_token: + ... next_result = await client.Workspaces.list_workspaces( + ... pagination_type='token', + ... last_key=result.next_token, + ... max_items=100 + ... ) + + Using legacy pagination (deprecated):: + + >>> async with AsyncSmartsheet(access_token="token") as client: + ... result = await client.Workspaces.list_workspaces( + ... page_size=50, + ... page=1 + ... ) + ... print(f"Found {len(result.data)} workspaces") + """ + _op, expected = WorkspacesOperations.build_list_workspaces( + page_size=page_size, + page=page, + include_all=include_all, + last_key=last_key, + max_items=max_items, + pagination_type=pagination_type + ) + + prepped_request = self._base.prepare_request(_op) + response = await self._base.request(prepped_request, expected, _op) + + return response diff --git a/smartsheet/operations/__init__.py b/smartsheet/operations/__init__.py new file mode 100644 index 00000000..096c7cc1 --- /dev/null +++ b/smartsheet/operations/__init__.py @@ -0,0 +1,60 @@ +# pylint: disable=C0114 +# Smartsheet Python SDK. +# +# Copyright 2016 Smartsheet.com, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Shared operation builders for Smartsheet API. + +This package contains shared operation builder classes that prepare API +operation dictionaries for both synchronous and asynchronous API modules. +By extracting the operation preparation logic into shared builders, we +minimize code duplication between sync and async implementations. + +The operation builders handle: +- Building operation dictionaries with method, path, and parameters +- Parameter validation and transformation +- Query parameter configuration +- Request body preparation + +These builders are used by both sync API modules (sheets.py, users.py, etc.) +and async API modules (async_sheets.py, async_users.py, etc.) to ensure +consistent behavior and reduce maintenance burden. + +Example: + >>> from smartsheet.operations import SheetsOperations, WorkspacesOperations + >>> + >>> # Build an add_rows operation + >>> operation, expected = SheetsOperations.build_add_rows(123, rows) + >>> print(operation["method"]) + 'POST' + >>> print(operation["path"]) + '/sheets/123/rows' + >>> + >>> # Build a list_workspaces operation + >>> operation, expected = WorkspacesOperations.build_list_workspaces( + ... pagination_type='token', + ... max_items=100 + ... ) + >>> print(operation["method"]) + 'GET' +""" + +from .sheets_operations import SheetsOperations +from .workspaces_operations import WorkspacesOperations + +__all__ = [ + 'SheetsOperations', + 'WorkspacesOperations', +] diff --git a/smartsheet/operations/sheets_operations.py b/smartsheet/operations/sheets_operations.py new file mode 100644 index 00000000..7a0e20c5 --- /dev/null +++ b/smartsheet/operations/sheets_operations.py @@ -0,0 +1,112 @@ +# pylint: disable=C0111,R0902,R0913 +# Smartsheet Python SDK. +# +# Copyright 2018 Smartsheet.com, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Shared operation builders for Sheets API. + +This module provides shared operation builder functions for Sheets API operations. +These builders create operation dictionaries that can be used by both synchronous +and asynchronous API implementations, eliminating code duplication. 
+ +The operation builders are pure functions that: +- Take operation parameters as input +- Return a tuple of (operation_dict, expected_types) +- Do not perform HTTP requests or have side effects +- Handle parameter validation and transformation + +Example: + >>> from smartsheet.operations.sheets_operations import SheetsOperations + >>> from smartsheet.types import TypedList + >>> from smartsheet.models import Row + >>> + >>> # Prepare rows + >>> rows = TypedList(Row) + >>> rows.append(Row({'cells': [...]})) + >>> + >>> # Build operation + >>> operation, expected = SheetsOperations.build_add_rows(123, rows) + >>> print(operation["method"]) + 'POST' + >>> print(operation["path"]) + '/sheets/123/rows' +""" + +from __future__ import absolute_import + +from typing import Tuple, Dict, Any, List, Union + +from ..models import Row +from ..types import TypedList +from ..util import fresh_operation + + +class SheetsOperations: + """Shared operation builders for Sheets API. + + This class provides static methods that build operation dictionaries for + Sheets API operations. These methods are used by both sync and async + implementations to ensure consistent behavior. + + All methods are static and pure functions - they don't modify state or + perform HTTP requests. They simply prepare operation dictionaries that + can be passed to HTTP clients. + """ + + @staticmethod + def build_add_rows( + sheet_id: int, + list_of_rows: Union[List[Row], TypedList] + ) -> Tuple[Dict[str, Any], List[str]]: + """Build operation for adding rows to a sheet. + + This method prepares the operation dictionary for inserting one or more + rows into a sheet. It handles normalization of single row inputs into + a list format. + + Args: + sheet_id: The ID of the sheet to add rows to + list_of_rows: A list of Row objects or a single Row object/dict. + If a single row is provided, it will be wrapped in a TypedList. + + Returns: + A tuple containing: + - operation: Dict with method, path, json, and query_params + - expected: List of expected response types ["Result", "Row"] + + Example: + >>> from smartsheet.models import Row + >>> from smartsheet.types import TypedList + >>> + >>> rows = TypedList(Row) + >>> rows.append(Row({'to_bottom': True, 'cells': [...]})) + >>> + >>> operation, expected = SheetsOperations.build_add_rows(123, rows) + >>> # operation can now be passed to prepare_request and request + """ + # Normalize single row to list + if isinstance(list_of_rows, (dict, Row)): + arg_value = list_of_rows + list_of_rows = TypedList(Row) + list_of_rows.append(arg_value) + + _op = fresh_operation("add_rows") + _op["method"] = "POST" + _op["path"] = "/sheets/" + str(sheet_id) + "/rows" + _op["json"] = list_of_rows + + expected = ["Result", "Row"] + + return _op, expected diff --git a/smartsheet/operations/workspaces_operations.py b/smartsheet/operations/workspaces_operations.py new file mode 100644 index 00000000..df29b8c4 --- /dev/null +++ b/smartsheet/operations/workspaces_operations.py @@ -0,0 +1,161 @@ +# pylint: disable=C0111,R0902,R0913 +# Smartsheet Python SDK. +# +# Copyright 2018 Smartsheet.com, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Shared operation builders for Workspaces API. + +This module provides shared operation builder functions for Workspaces API operations. +These builders create operation dictionaries that can be used by both synchronous +and asynchronous API implementations, eliminating code duplication. + +The operation builders are pure functions that: +- Take operation parameters as input +- Return a tuple of (operation_dict, expected_types) +- Do not perform HTTP requests or have side effects +- Handle parameter validation and transformation + +Example: + >>> from smartsheet.operations.workspaces_operations import WorkspacesOperations + >>> + >>> # Build operation for listing workspaces with token pagination + >>> operation, expected = WorkspacesOperations.build_list_workspaces( + ... pagination_type='token', + ... max_items=100 + ... ) + >>> print(operation["method"]) + 'GET' + >>> print(operation["path"]) + '/workspaces' +""" + +from __future__ import absolute_import + +import warnings +from typing import Tuple, Dict, Any, List, Optional + +from ..util import fresh_operation + + +class WorkspacesOperations: + """Shared operation builders for Workspaces API. + + This class provides static methods that build operation dictionaries for + Workspaces API operations. These methods are used by both sync and async + implementations to ensure consistent behavior. + + All methods are static and pure functions - they don't modify state or + perform HTTP requests. They simply prepare operation dictionaries that + can be passed to HTTP clients. + """ + + @staticmethod + def build_list_workspaces( + page_size: Optional[int] = None, + page: Optional[int] = None, + include_all: Optional[bool] = None, + last_key: Optional[str] = None, + max_items: Optional[int] = None, + pagination_type: Optional[str] = None + ) -> Tuple[Dict[str, Any], List[str]]: + """Build operation for listing workspaces. + + This method prepares the operation dictionary for retrieving the list + of workspaces the authenticated user may access. It supports both + legacy offset-based pagination and modern token-based pagination. + + Args: + page_size: [DEPRECATED] The maximum number of items to return per page. + Use pagination_type='token' with max_items instead. + page: [DEPRECATED] Which page to return. + Use pagination_type='token' with last_key instead. + include_all: [DEPRECATED] If true, include all results (i.e. do not paginate). + Use pagination_type='token' instead. + last_key: Pagination cursor for next page (token pagination only). + max_items: Maximum items per page (token pagination only). + Must be a positive integer. + pagination_type: Use 'token' for efficient cursor-based pagination. + Defaults to legacy offset-based pagination if not specified. + + Returns: + A tuple containing: + - operation: Dict with method, path, and query_params + - expected: List of expected response types ["IndexResult", "Workspace"] + + Raises: + ValueError: If pagination_type is not 'token' or None, or if max_items <= 0 + when using token pagination. 
+ + Example: + Token-based pagination (recommended):: + + >>> operation, expected = WorkspacesOperations.build_list_workspaces( + ... pagination_type='token', + ... max_items=100 + ... ) + >>> print(operation["query_params"]["paginationType"]) + 'token' + + Legacy pagination (deprecated):: + + >>> operation, expected = WorkspacesOperations.build_list_workspaces( + ... page_size=50, + ... page=1 + ... ) + >>> print(operation["query_params"]["pageSize"]) + 50 + """ + # Parameter validation + if pagination_type is not None and pagination_type not in ['token']: + raise ValueError("pagination_type must be 'token' or None") + if pagination_type == 'token' and max_items is not None and max_items <= 0: + raise ValueError("max_items must be a positive integer") + + _op = fresh_operation("list_workspaces") + _op["method"] = "GET" + _op["path"] = "/workspaces" + + # Issue deprecation warnings for old parameters when used + if page_size is not None: + warnings.warn( + "page_size parameter is deprecated. Use pagination_type='token' with max_items instead.", + DeprecationWarning, + stacklevel=3 # Adjusted for call through sync/async wrapper + ) + if page is not None: + warnings.warn( + "page parameter is deprecated. Use pagination_type='token' with last_key instead.", + DeprecationWarning, + stacklevel=3 + ) + if include_all is not None: + warnings.warn( + "include_all parameter is deprecated. Use pagination_type='token' instead.", + DeprecationWarning, + stacklevel=3 + ) + + if pagination_type == "token": + _op["query_params"]["lastKey"] = last_key + _op["query_params"]["maxItems"] = max_items + _op["query_params"]["paginationType"] = pagination_type + else: + _op["query_params"]["pageSize"] = page_size + _op["query_params"]["page"] = page + _op["query_params"]["includeAll"] = include_all + + expected = ["IndexResult", "Workspace"] + + return _op, expected diff --git a/smartsheet/sheets.py b/smartsheet/sheets.py index aa473dba..c8030667 100644 --- a/smartsheet/sheets.py +++ b/smartsheet/sheets.py @@ -30,6 +30,7 @@ SheetPublish, SheetSummary, SummaryField, UpdateRequest, Version, Error from .types import TypedList from .util import deprecated +from .operations.sheets_operations import SheetsOperations class Sheets: @@ -111,17 +112,8 @@ def add_rows(self, sheet_id, list_of_rows) -> Union[Result[Union[Row, List[Row]] Returns: Union[Result[Union[Row, List[Row]]], Error]: The result of the operation - either a list or a single object, or an Error object if the request fails. 
""" - if isinstance(list_of_rows, (dict, Row)): - arg_value = list_of_rows - list_of_rows = TypedList(Row) - list_of_rows.append(arg_value) - - _op = fresh_operation("add_rows") - _op["method"] = "POST" - _op["path"] = "/sheets/" + str(sheet_id) + "/rows" - _op["json"] = list_of_rows - - expected = ["Result", "Row"] + # Build operation using shared builder + _op, expected = SheetsOperations.build_add_rows(sheet_id, list_of_rows) prepped_request = self._base.prepare_request(_op) response = self._base.request(prepped_request, expected, _op) diff --git a/smartsheet/workspaces.py b/smartsheet/workspaces.py index 326f3e97..d545661c 100644 --- a/smartsheet/workspaces.py +++ b/smartsheet/workspaces.py @@ -25,6 +25,7 @@ from .models import Error, Folder, IndexResult, PaginatedChildrenResult, Result, Share, Sheet, Workspace from .util import deprecated from .util import fresh_operation +from .operations.workspaces_operations import WorkspacesOperations class Workspaces: @@ -367,45 +368,15 @@ def list_workspaces( ValueError: If pagination_type is not 'token' or None, or if max_items <= 0 when using token pagination. """ - # Parameter validation - if pagination_type is not None and pagination_type not in ['token']: - raise ValueError("pagination_type must be 'token' or None") - if pagination_type == 'token' and max_items is not None and max_items <= 0: - raise ValueError("max_items must be a positive integer") - _op = fresh_operation("list_workspaces") - _op["method"] = "GET" - _op["path"] = "/workspaces" - - # Issue deprecation warnings for old parameters when used - if page_size is not None: - warnings.warn( - "page_size parameter is deprecated. Use pagination_type='token' with max_items instead.", - DeprecationWarning, - stacklevel=2 - ) - if page is not None: - warnings.warn( - "page parameter is deprecated. Use pagination_type='token' with last_key instead.", - DeprecationWarning, - stacklevel=2 - ) - if include_all is not None: - warnings.warn( - "include_all parameter is deprecated. Use pagination_type='token' instead.", - DeprecationWarning, - stacklevel=2 - ) - - if pagination_type == "token": - _op["query_params"]["lastKey"] = last_key - _op["query_params"]["maxItems"] = max_items - _op["query_params"]["paginationType"] = pagination_type - else: - _op["query_params"]["pageSize"] = page_size - _op["query_params"]["page"] = page - _op["query_params"]["includeAll"] = include_all - - expected = ["IndexResult", "Workspace"] + # Build operation using shared builder + _op, expected = WorkspacesOperations.build_list_workspaces( + page_size=page_size, + page=page, + include_all=include_all, + last_key=last_key, + max_items=max_items, + pagination_type=pagination_type + ) prepped_request = self._base.prepare_request(_op) response = self._base.request(prepped_request, expected, _op) diff --git a/tests/README_ASYNC_TESTS.md b/tests/README_ASYNC_TESTS.md new file mode 100644 index 00000000..ac611f74 --- /dev/null +++ b/tests/README_ASYNC_TESTS.md @@ -0,0 +1,345 @@ +# Async PoC Tests + +This directory contains comprehensive tests for the async Smartsheet Python SDK proof-of-concept implementation. + +## Test Files + +### 1. 
`test_async_poc.py` + +Core tests for the async PoC implementation covering: + +- **AsyncSmartsheet Client Tests** + - Client initialization and configuration + - Context manager lifecycle (`async with`) + - Manual resource cleanup (`aclose()`) + - Environment variable configuration + - Error handling preferences + +- **AsyncSheets.add_rows() Tests** + - Successful row addition + - Single row vs. list of rows + - Error responses + - Exception handling when enabled + +- **AsyncWorkspaces.list_workspaces() Tests** + - Successful workspace listing + - Token-based pagination (recommended) + - Legacy pagination (deprecated) + - Parameter validation + - Error handling + +- **Concurrent Operations Tests** + - Multiple concurrent add_rows operations + - Multiple concurrent list_workspaces operations + - Mixed concurrent operations (reads and writes) + +- **Retry Logic Tests** + - Rate limit retry (error code 4003) + - No retry on client errors (4xx) + - Max retry time exceeded + +- **Resource Cleanup Tests** + - Context manager cleanup on exceptions + - Multiple close calls safety + +### 2. `test_async_framework_integration.py` + +Async framework integration tests demonstrating event loop compatibility: + +- **Event Loop Compatibility Tests** + - Non-blocking concurrent operations + - Event loop responsiveness during I/O + - Performance characteristics of async operations + +- **Async Service Pattern Tests** + - Service handler pattern simulation + - Request queue processing with workers + - Controlled concurrency + +- **Error Handling Tests** + - Partial failures in concurrent operations + - Timeout handling + - Graceful error recovery + +- **Resource Management Tests** + - Connection pooling under load + - Graceful shutdown with pending operations + +## Setup + +### Prerequisites + +- Python 3.7 or higher +- Virtual environment (recommended) + +### Installation + +1. **Create and activate a virtual environment** (recommended): + + ```bash + python3 -m venv venv + source venv/bin/activate # On Windows: venv\Scripts\activate + ``` + +2. **Install the package with test dependencies**: + + ```bash + pip install -e ".[test]" + ``` + + This installs: + - `pytest` - Test framework + - `pytest-asyncio>=0.21.0` - Async test support + - `coverage` - Code coverage reporting + - Other test utilities + +3. 
**Verify installation**: + + ```bash + python -m pytest --version + ``` + +## Running Tests + +### Run All Async Tests + +```bash +# Run both test files +pytest tests/test_async_poc.py tests/test_async_framework_integration.py -v + +# Or run all tests in the tests directory +pytest tests/ -v +``` + +### Run Specific Test File + +```bash +# Run only core async PoC tests +pytest tests/test_async_poc.py -v + +# Run only async framework integration tests +pytest tests/test_async_framework_integration.py -v +``` + +### Run Specific Test Class or Function + +```bash +# Run a specific test class +pytest tests/test_async_poc.py::TestAsyncSmartsheetClient -v + +# Run a specific test function +pytest tests/test_async_poc.py::TestAsyncSmartsheetClient::test_client_initialization -v + +# Run tests matching a pattern +pytest tests/test_async_poc.py -k "concurrent" -v +``` + +### Run with Coverage + +```bash +# Generate coverage report +pytest tests/test_async_poc.py tests/test_async_framework_integration.py --cov=smartsheet --cov-report=html + +# View coverage report +open htmlcov/index.html # On macOS +# or +xdg-open htmlcov/index.html # On Linux +# or +start htmlcov/index.html # On Windows +``` + +### Run with Different Output Formats + +```bash +# Verbose output +pytest tests/test_async_poc.py -v + +# Very verbose output (shows test docstrings) +pytest tests/test_async_poc.py -vv + +# Show print statements +pytest tests/test_async_poc.py -v -s + +# Show only failures +pytest tests/test_async_poc.py -v --tb=short + +# Show summary of all test outcomes +pytest tests/test_async_poc.py -v -ra +``` + +## Test Architecture + +### Mocking Strategy + +All tests use mocked HTTP responses to avoid making real API calls: + +- **`mock_httpx_response` fixture**: Creates mock `httpx.Response` objects with configurable: + - Status codes + - JSON response data + - Response delays (for timing tests) + - Headers + +- **`AsyncMock`**: Used to mock async methods like `session.send()` + +- **`patch`**: Used to replace client session with mocked version + +### Fixtures + +- **`mock_access_token`**: Provides a test access token +- **`async_client`**: Creates and cleans up an `AsyncSmartsheet` client +- **`mock_httpx_response`**: Factory for creating mock HTTP responses + +### Async Test Patterns + +Tests use `pytest-asyncio` markers: + +```python +@pytest.mark.asyncio +async def test_example(async_client): + result = await async_client.Workspaces.list_workspaces() + assert isinstance(result, IndexResult) +``` + +## Test Coverage + +The test suite covers: + +- ✅ Client initialization and configuration +- ✅ Context manager lifecycle +- ✅ AsyncSheets.add_rows() method +- ✅ AsyncWorkspaces.list_workspaces() method +- ✅ Concurrent operations (asyncio.gather) +- ✅ Retry logic and backoff +- ✅ Error handling (with and without exceptions) +- ✅ Resource cleanup +- ✅ Event loop non-blocking behavior +- ✅ Async service patterns +- ✅ Connection pooling +- ✅ Graceful shutdown + +## Performance Tests + +The async framework integration tests include timing assertions to verify non-blocking behavior: + +```python +# Example: Verify concurrent execution is faster than sequential +start_time = time.time() +results = await asyncio.gather(*tasks) +elapsed_time = time.time() - start_time + +# With 5 concurrent requests at 0.1s each: +# Sequential: 5 * 0.1 = 0.5s +# Concurrent: ~0.1s +assert elapsed_time < 0.3 # Allows overhead +``` + +## Troubleshooting + +### Import Errors + +If you see import errors like `ModuleNotFoundError: No module 
named 'smartsheet'`: + +```bash +# Make sure you installed the package in editable mode +pip install -e . + +# Or with test dependencies +pip install -e ".[test]" +``` + +### pytest-asyncio Warnings + +If you see warnings about asyncio mode: + +```bash +# Add to pytest.ini or pyproject.toml: +[tool.pytest.ini_options] +asyncio_mode = "auto" +``` + +### Mock Not Working + +If mocks aren't being applied: + +```python +# Ensure you're patching the right object +with patch.object(async_client, '_session') as mock_session: + # Not: patch('smartsheet.async_smartsheet.httpx.AsyncClient') +``` + +### Tests Hanging + +If tests hang indefinitely: + +- Check for missing `await` keywords +- Verify all async fixtures are properly cleaned up +- Use `pytest --timeout=30` to set a timeout + +## CI/CD Integration + +### GitHub Actions Example + +```yaml +name: Test Async PoC + +on: [push, pull_request] + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + + steps: + - uses: actions/checkout@v3 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -e ".[test]" + + - name: Run async tests + run: | + pytest tests/test_async_poc.py tests/test_async_framework_integration.py -v --cov=smartsheet + + - name: Upload coverage + uses: codecov/codecov-action@v3 +``` + +## Future Test Additions + +As the async implementation expands beyond the PoC, add tests for: + +- Additional async API methods (get_sheet, update_row, etc.) +- Streaming operations +- Webhook handling +- Batch operations +- More complex error scenarios +- Performance benchmarks +- Memory leak detection + +## Contributing + +When adding new async functionality: + +1. Add corresponding tests to `test_async_poc.py` +2. Add integration tests to `test_async_framework_integration.py` if relevant +3. Use mocks - never make real API calls in tests +4. Include docstrings explaining what each test validates +5. Follow existing test patterns and naming conventions +6. Ensure tests are deterministic (no random failures) +7. Add timing assertions for performance-critical features + +## Resources + +- [pytest documentation](https://docs.pytest.org/) +- [pytest-asyncio documentation](https://pytest-asyncio.readthedocs.io/) +- [Python asyncio documentation](https://docs.python.org/3/library/asyncio.html) +- [httpx documentation](https://www.python-httpx.org/) +- [Smartsheet API documentation](https://smartsheet.redoc.ly/) diff --git a/tests/test_async_framework_integration.py b/tests/test_async_framework_integration.py new file mode 100644 index 00000000..fca94326 --- /dev/null +++ b/tests/test_async_framework_integration.py @@ -0,0 +1,540 @@ +# pylint: disable=C0103,W0212,R0913 +"""Async framework integration tests for async PoC implementation. + +This module demonstrates that the async Smartsheet client works correctly +in event loop scenarios similar to async services. Tests verify: +- Non-blocking concurrent operations +- Event loop compatibility +- Performance characteristics of async operations +- Proper behavior under concurrent load + +These tests simulate async service patterns where multiple +requests may be handled concurrently without blocking the event loop. 
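+
+All HTTP traffic in these tests is mocked (MagicMock and AsyncMock standing in
+for httpx responses and the client session), so no real Smartsheet API calls
+are made.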
+""" + +import asyncio +import time +import pytest +from unittest.mock import AsyncMock, MagicMock, patch +import httpx + +from smartsheet import AsyncSmartsheet +from smartsheet.models import Row, Cell, Workspace, IndexResult, Result + + +@pytest.fixture +def mock_access_token(): + """Provide a mock access token for testing.""" + return "async_test_token_12345" + + +@pytest.fixture +async def async_client(mock_access_token): + """Create an AsyncSmartsheet client for async framework testing.""" + client = AsyncSmartsheet(access_token=mock_access_token) + yield client + await client.aclose() + + +@pytest.fixture +def mock_httpx_response(): + """Create a mock httpx.Response with configurable delay.""" + def _create_response(status_code=200, json_data=None, delay=0): + async def delayed_send(*args, **kwargs): + if delay > 0: + await asyncio.sleep(delay) + + response = MagicMock(spec=httpx.Response) + response.status_code = status_code + response.reason_phrase = "OK" if status_code == 200 else "Error" + response.headers = {"Content-Type": "application/json"} + + import json + response.text = json.dumps(json_data) + response.content = response.text.encode('utf-8') + response.json.return_value = json_data + + response.request = MagicMock() + response.request.method = "GET" + response.request.url = "https://api.smartsheet.com/2.0/test" + response.request.headers = {"Authorization": "Bearer ***"} + response.request.content = None + + return response + + return delayed_send + return _create_response + + +class TestAsyncFrameworkEventLoopCompatibility: + """Test suite for async framework event loop compatibility.""" + + @pytest.mark.asyncio + async def test_non_blocking_operations(self, async_client, mock_httpx_response): + """Test that async operations don't block the event loop. + + This simulates an async service handling multiple concurrent requests. + Each request should execute without blocking others. + """ + response_data = { + "pageNumber": 1, + "pageSize": 100, + "totalPages": 1, + "totalCount": 1, + "data": [{"id": 111, "name": "Workspace 1", "accessLevel": "OWNER"}] + } + + # Create mock with 0.1 second delay per request + mock_send = mock_httpx_response(status_code=200, json_data=response_data, delay=0.1) + + with patch.object(async_client, '_session') as mock_session: + mock_session.send = mock_send + await async_client._ensure_session() + + start_time = time.time() + + # Execute 5 concurrent requests + tasks = [ + async_client.Workspaces.list_workspaces() + for _ in range(5) + ] + + results = await asyncio.gather(*tasks) + + elapsed_time = time.time() - start_time + + # All 5 requests completed successfully + assert len(results) == 5 + for result in results: + assert isinstance(result, IndexResult) + + # With async, all 5 requests should complete in ~0.1s (concurrent) + # not 0.5s (sequential). Allow some overhead. + assert elapsed_time < 0.3, f"Operations took {elapsed_time}s, expected < 0.3s (concurrent execution)" + + @pytest.mark.asyncio + async def test_event_loop_not_blocked_during_io(self, async_client, mock_httpx_response): + """Test that the event loop remains responsive during I/O operations. + + This simulates an async service where other tasks should be able to + run while waiting for API responses. 
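+
+        A background task ticks five times at 0.05s intervals while the mocked
+        API call sleeps for 0.2s; all five ticks completing shows the event
+        loop stayed free during the I/O wait.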
+ """ + response_data = { + "message": "SUCCESS", + "resultCode": 0, + "result": [{"id": 12345, "rowNumber": 1, "cells": []}] + } + + # Create mock with 0.2 second delay + mock_send = mock_httpx_response(status_code=200, json_data=response_data, delay=0.2) + + with patch.object(async_client, '_session') as mock_session: + mock_session.send = mock_send + await async_client._ensure_session() + + # Track when background task runs + background_task_ran = [] + + async def background_task(): + """Simulates other work happening in the event loop.""" + for i in range(5): + await asyncio.sleep(0.05) + background_task_ran.append(i) + + row = Row() + row.to_bottom = True + + # Start API call and background task concurrently + api_task = asyncio.create_task( + async_client.Sheets.add_rows(sheet_id=999, list_of_rows=[row]) + ) + bg_task = asyncio.create_task(background_task()) + + # Wait for both to complete + result, _ = await asyncio.gather(api_task, bg_task) + + # API call succeeded + assert isinstance(result, Result) + + # Background task was able to run during API I/O wait + assert len(background_task_ran) == 5, "Background task should have completed during I/O wait" + + @pytest.mark.asyncio + async def test_concurrent_mixed_operations_performance(self, async_client, mock_httpx_response): + """Test performance of mixed concurrent operations. + + Simulates an async service handling different types of requests + concurrently (reads and writes). + """ + list_response = { + "pageNumber": 1, + "pageSize": 100, + "totalPages": 1, + "totalCount": 1, + "data": [{"id": 111, "name": "Workspace 1", "accessLevel": "OWNER"}] + } + + add_rows_response = { + "message": "SUCCESS", + "resultCode": 0, + "result": [{"id": 12345, "rowNumber": 1, "cells": []}] + } + + async def mock_send_with_delay(*args, **kwargs): + """Mock send that returns different responses based on URL.""" + await asyncio.sleep(0.1) # Simulate network delay + + request = args[0] + response = MagicMock(spec=httpx.Response) + response.status_code = 200 + response.reason_phrase = "OK" + response.headers = {"Content-Type": "application/json"} + + import json + if '/rows' in str(request.url): + data = add_rows_response + else: + data = list_response + + response.text = json.dumps(data) + response.content = response.text.encode('utf-8') + response.json.return_value = data + + response.request = MagicMock() + response.request.method = request.method + response.request.url = request.url + response.request.headers = {"Authorization": "Bearer ***"} + response.request.content = None + + return response + + with patch.object(async_client, '_session') as mock_session: + mock_session.send = AsyncMock(side_effect=mock_send_with_delay) + await async_client._ensure_session() + + start_time = time.time() + + row = Row() + row.to_bottom = True + + # Mix of 10 operations: 5 reads, 5 writes + tasks = [] + for i in range(10): + if i % 2 == 0: + tasks.append(async_client.Workspaces.list_workspaces()) + else: + tasks.append(async_client.Sheets.add_rows(sheet_id=i, list_of_rows=[row])) + + results = await asyncio.gather(*tasks) + + elapsed_time = time.time() - start_time + + # All operations completed + assert len(results) == 10 + + # With concurrent execution, should take ~0.1s not 1.0s + assert elapsed_time < 0.3, f"Operations took {elapsed_time}s, expected < 0.3s" + + +class TestAsyncServicePattern: + """Test suite simulating async service patterns.""" + + @pytest.mark.asyncio + async def test_service_handler_pattern(self, mock_access_token, mock_httpx_response): + 
"""Test async service handler pattern. + + Simulates an async service with multiple handler functions that + use the async client concurrently. + """ + + async def get_workspace_info(client, workspace_id): + """Simulated async handler: Get workspace info.""" + response_data = { + "pageNumber": 1, + "pageSize": 100, + "totalPages": 1, + "totalCount": 1, + "data": [{"id": workspace_id, "name": f"Workspace {workspace_id}", "accessLevel": "OWNER"}] + } + + mock_send = mock_httpx_response(status_code=200, json_data=response_data, delay=0.05) + + with patch.object(client, '_session') as mock_session: + mock_session.send = mock_send + await client._ensure_session() + + result = await client.Workspaces.list_workspaces() + return {"workspace_id": workspace_id, "count": len(result.data)} + + async def add_sheet_rows(client, sheet_id, num_rows): + """Simulated async handler: Add rows to sheet.""" + response_data = { + "message": "SUCCESS", + "resultCode": 0, + "result": [{"id": i, "rowNumber": i, "cells": []} for i in range(num_rows)] + } + + mock_send = mock_httpx_response(status_code=200, json_data=response_data, delay=0.05) + + with patch.object(client, '_session') as mock_session: + mock_session.send = mock_send + await client._ensure_session() + + row = Row() + row.to_bottom = True + + result = await client.Sheets.add_rows(sheet_id=sheet_id, list_of_rows=[row]) + return {"sheet_id": sheet_id, "rows_added": len(result.data)} + + # Simulate async service with shared client + async with AsyncSmartsheet(access_token=mock_access_token) as client: + # Simulate multiple concurrent requests to the service + tasks = [ + get_workspace_info(client, 1), + add_sheet_rows(client, 100, 3), + get_workspace_info(client, 2), + add_sheet_rows(client, 200, 5), + get_workspace_info(client, 3), + ] + + start_time = time.time() + results = await asyncio.gather(*tasks) + elapsed_time = time.time() - start_time + + # All handlers completed successfully + assert len(results) == 5 + assert results[0]["workspace_id"] == 1 + assert results[1]["rows_added"] == 3 + assert results[2]["workspace_id"] == 2 + assert results[3]["rows_added"] == 5 + assert results[4]["workspace_id"] == 3 + + # Should complete concurrently in ~0.05s, not 0.25s + assert elapsed_time < 0.2, f"Service handlers took {elapsed_time}s, expected concurrent execution" + + @pytest.mark.asyncio + async def test_request_queue_processing(self, async_client, mock_httpx_response): + """Test processing a queue of requests concurrently. + + Simulates an async service processing a queue of incoming requests + with controlled concurrency. 
+ """ + response_data = { + "pageNumber": 1, + "pageSize": 100, + "totalPages": 1, + "totalCount": 1, + "data": [{"id": 111, "name": "Workspace 1", "accessLevel": "OWNER"}] + } + + mock_send = mock_httpx_response(status_code=200, json_data=response_data, delay=0.05) + + with patch.object(async_client, '_session') as mock_session: + mock_session.send = mock_send + await async_client._ensure_session() + + # Simulate a queue of 20 requests + request_queue = asyncio.Queue() + for i in range(20): + await request_queue.put(i) + + results = [] + + async def worker(): + """Worker that processes requests from the queue.""" + while not request_queue.empty(): + try: + request_id = await asyncio.wait_for(request_queue.get(), timeout=0.1) + result = await async_client.Workspaces.list_workspaces() + results.append({"request_id": request_id, "success": True}) + request_queue.task_done() + except asyncio.TimeoutError: + break + + # Process queue with 5 concurrent workers + start_time = time.time() + workers = [asyncio.create_task(worker()) for _ in range(5)] + await asyncio.gather(*workers) + elapsed_time = time.time() - start_time + + # All 20 requests processed + assert len(results) == 20 + + # With 5 workers processing 20 requests at 0.05s each: + # Sequential: 20 * 0.05 = 1.0s + # Concurrent (5 workers): 4 batches * 0.05 = 0.2s + assert elapsed_time < 0.4, f"Queue processing took {elapsed_time}s, expected < 0.4s" + + +class TestAsyncErrorHandling: + """Test suite for error handling in async service scenarios.""" + + @pytest.mark.asyncio + async def test_partial_failure_in_concurrent_operations(self, async_client, mock_httpx_response): + """Test that one failure doesn't affect other concurrent operations. + + In an async service, one failed request shouldn't impact others. 
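+
+        The mocked transport returns HTTP 404 only on the third call, so
+        exactly one of the five concurrent requests should come back as an
+        Error result while the rest succeed.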
+ """ + success_response = { + "pageNumber": 1, + "pageSize": 100, + "totalPages": 1, + "totalCount": 1, + "data": [{"id": 111, "name": "Workspace 1", "accessLevel": "OWNER"}] + } + + error_response = { + "errorCode": 1006, + "message": "Not Found" + } + + call_count = [0] + + async def mock_send_mixed(*args, **kwargs): + """Mock that fails on 3rd call, succeeds otherwise.""" + call_count[0] += 1 + await asyncio.sleep(0.05) + + response = MagicMock(spec=httpx.Response) + response.headers = {"Content-Type": "application/json"} + + import json + if call_count[0] == 3: + # Third call fails + response.status_code = 404 + response.reason_phrase = "Not Found" + data = error_response + else: + # Other calls succeed + response.status_code = 200 + response.reason_phrase = "OK" + data = success_response + + response.text = json.dumps(data) + response.content = response.text.encode('utf-8') + response.json.return_value = data + + response.request = MagicMock() + response.request.method = "GET" + response.request.url = "https://api.smartsheet.com/2.0/workspaces" + response.request.headers = {"Authorization": "Bearer ***"} + response.request.content = None + + return response + + with patch.object(async_client, '_session') as mock_session: + mock_session.send = AsyncMock(side_effect=mock_send_mixed) + await async_client._ensure_session() + + # Execute 5 concurrent requests + tasks = [ + async_client.Workspaces.list_workspaces() + for _ in range(5) + ] + + results = await asyncio.gather(*tasks, return_exceptions=False) + + # 4 should succeed, 1 should be an error + successes = [r for r in results if isinstance(r, IndexResult)] + errors = [r for r in results if not isinstance(r, IndexResult)] + + assert len(successes) == 4, "4 requests should succeed" + assert len(errors) == 1, "1 request should fail" + + @pytest.mark.asyncio + async def test_timeout_handling(self, async_client): + """Test handling of operation timeouts in async service. + + Services need to handle timeouts gracefully without blocking. + """ + + async def slow_operation(): + """Simulates a slow API operation.""" + await asyncio.sleep(2.0) + return "completed" + + # Test that we can timeout operations + with pytest.raises(asyncio.TimeoutError): + await asyncio.wait_for(slow_operation(), timeout=0.1) + + # Event loop should still be responsive after timeout + quick_result = await asyncio.sleep(0.01, result="still_working") + assert quick_result == "still_working" + + +class TestAsyncResourceManagement: + """Test suite for resource management in async scenarios.""" + + @pytest.mark.asyncio + async def test_connection_pooling(self, mock_access_token, mock_httpx_response): + """Test that connection pooling works correctly under load. + + Async services should efficiently reuse connections. 
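+
+        The client is created with max_connections=3 and then issues ten
+        concurrent requests to confirm that a pool smaller than the request
+        count is handled cleanly.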
+ """ + response_data = { + "pageNumber": 1, + "pageSize": 100, + "totalPages": 1, + "totalCount": 1, + "data": [{"id": 111, "name": "Workspace 1", "accessLevel": "OWNER"}] + } + + mock_send = mock_httpx_response(status_code=200, json_data=response_data, delay=0.01) + + # Create client with small connection pool + async with AsyncSmartsheet(access_token=mock_access_token, max_connections=3) as client: + with patch.object(client, '_session') as mock_session: + mock_session.send = mock_send + await client._ensure_session() + + # Execute more requests than pool size + tasks = [ + client.Workspaces.list_workspaces() + for _ in range(10) + ] + + results = await asyncio.gather(*tasks) + + # All requests should complete successfully despite limited pool + assert len(results) == 10 + for result in results: + assert isinstance(result, IndexResult) + + @pytest.mark.asyncio + async def test_graceful_shutdown(self, mock_access_token, mock_httpx_response): + """Test graceful shutdown of client with pending operations. + + Async services need to shut down cleanly. + """ + response_data = { + "pageNumber": 1, + "pageSize": 100, + "totalPages": 1, + "totalCount": 1, + "data": [{"id": 111, "name": "Workspace 1", "accessLevel": "OWNER"}] + } + + mock_send = mock_httpx_response(status_code=200, json_data=response_data, delay=0.1) + + client = AsyncSmartsheet(access_token=mock_access_token) + + with patch.object(client, '_session') as mock_session: + mock_session.send = mock_send + await client._ensure_session() + + # Start some operations + task1 = asyncio.create_task(client.Workspaces.list_workspaces()) + task2 = asyncio.create_task(client.Workspaces.list_workspaces()) + + # Let them start + await asyncio.sleep(0.05) + + # Wait for operations to complete before closing + await asyncio.gather(task1, task2) + + # Clean shutdown + await client.aclose() + + # Verify both operations completed + assert task1.done() + assert task2.done() diff --git a/tests/test_async_poc.py b/tests/test_async_poc.py new file mode 100644 index 00000000..43d91141 --- /dev/null +++ b/tests/test_async_poc.py @@ -0,0 +1,600 @@ +# pylint: disable=C0103,W0212,R0913 +"""Tests for async PoC implementation. + +This module contains comprehensive tests for the async Smartsheet client, +including AsyncSmartsheet, AsyncSheets, and AsyncWorkspaces. Tests use +mocked HTTP responses to avoid making real API calls. +""" + +import asyncio +import json +import pytest +from unittest.mock import AsyncMock, MagicMock, patch +import httpx + +from smartsheet import AsyncSmartsheet +from smartsheet.models import Row, Cell, Workspace, Error, Result, IndexResult +from smartsheet.exceptions import ApiError + + +@pytest.fixture +def mock_access_token(): + """Provide a mock access token for testing.""" + return "test_access_token_12345" + + +@pytest.fixture +async def async_client(mock_access_token): + """Create an AsyncSmartsheet client for testing. + + This fixture creates a client and ensures proper cleanup after tests. 
+ """ + client = AsyncSmartsheet(access_token=mock_access_token) + yield client + await client.aclose() + + +@pytest.fixture +def mock_httpx_response(): + """Create a mock httpx.Response object.""" + def _create_response(status_code=200, json_data=None, headers=None): + response = MagicMock(spec=httpx.Response) + response.status_code = status_code + response.reason_phrase = "OK" if status_code == 200 else "Error" + response.headers = headers or {"Content-Type": "application/json"} + + if json_data: + response.text = json.dumps(json_data) + response.content = response.text.encode('utf-8') + response.json.return_value = json_data + else: + response.text = "" + response.content = b"" + response.json.side_effect = ValueError("No JSON") + + # Mock request object + response.request = MagicMock() + response.request.method = "GET" + response.request.url = "https://api.smartsheet.com/2.0/test" + response.request.headers = {"Authorization": "Bearer ***"} + response.request.content = None + + return response + return _create_response + + +class TestAsyncSmartsheetClient: + """Test suite for AsyncSmartsheet client initialization and lifecycle.""" + + @pytest.mark.asyncio + async def test_client_initialization(self, mock_access_token): + """Test that AsyncSmartsheet client initializes correctly.""" + client = AsyncSmartsheet(access_token=mock_access_token) + + assert client._access_token == mock_access_token + assert client._session is None # Session not created until first use + assert client.raise_exceptions is False + + await client.aclose() + + @pytest.mark.asyncio + async def test_client_initialization_from_env(self, monkeypatch): + """Test client initialization using environment variable.""" + test_token = "env_token_12345" + monkeypatch.setenv("SMARTSHEET_ACCESS_TOKEN", test_token) + + client = AsyncSmartsheet() + assert client._access_token == test_token + + await client.aclose() + + @pytest.mark.asyncio + async def test_client_initialization_no_token(self): + """Test that client raises ValueError when no token is provided.""" + with pytest.raises(ValueError, match="Access Token must be set"): + AsyncSmartsheet() + + @pytest.mark.asyncio + async def test_context_manager(self, mock_access_token): + """Test AsyncSmartsheet as an async context manager.""" + async with AsyncSmartsheet(access_token=mock_access_token) as client: + assert client._access_token == mock_access_token + # Session should be created on context entry + await client._ensure_session() + assert client._session is not None + + # Session should be closed after context exit + # Note: We can't directly check if session is closed, but we verify no errors + + @pytest.mark.asyncio + async def test_manual_close(self, mock_access_token): + """Test manual resource cleanup with aclose().""" + client = AsyncSmartsheet(access_token=mock_access_token) + await client._ensure_session() + + assert client._session is not None + + await client.aclose() + assert client._session is None + + @pytest.mark.asyncio + async def test_errors_as_exceptions(self, async_client): + """Test errors_as_exceptions configuration.""" + assert async_client.raise_exceptions is False + + async_client.errors_as_exceptions(True) + assert async_client.raise_exceptions is True + + async_client.errors_as_exceptions(False) + assert async_client.raise_exceptions is False + + @pytest.mark.asyncio + async def test_assume_user(self, async_client): + """Test assume_user functionality.""" + assert async_client._assume_user is None + + async_client.assume_user("test@example.com") + 
assert async_client._assume_user == "test%40example.com" + + async_client.assume_user(None) + assert async_client._assume_user is None + + +class TestAsyncSheetsAddRows: + """Test suite for AsyncSheets.add_rows() method.""" + + @pytest.mark.asyncio + async def test_add_rows_success(self, async_client, mock_httpx_response): + """Test successful add_rows operation.""" + # Mock response data + response_data = { + "message": "SUCCESS", + "resultCode": 0, + "result": [ + { + "id": 12345, + "rowNumber": 1, + "cells": [ + {"columnId": 111, "value": "Test Value 1"}, + {"columnId": 222, "value": "Test Value 2"} + ] + } + ] + } + + mock_response = mock_httpx_response(status_code=200, json_data=response_data) + + # Mock the HTTP client + with patch.object(async_client, '_session') as mock_session: + mock_session.send = AsyncMock(return_value=mock_response) + await async_client._ensure_session() + + # Create test row + row = Row() + row.to_bottom = True + row.cells = [ + Cell({'column_id': 111, 'value': 'Test Value 1'}), + Cell({'column_id': 222, 'value': 'Test Value 2'}) + ] + + # Execute add_rows + result = await async_client.Sheets.add_rows(sheet_id=999, list_of_rows=[row]) + + # Verify result + assert isinstance(result, Result) + assert result.message == "SUCCESS" + assert len(result.data) == 1 + assert result.data[0].id == 12345 + + @pytest.mark.asyncio + async def test_add_rows_single_row(self, async_client, mock_httpx_response): + """Test add_rows with a single Row object (not in a list).""" + response_data = { + "message": "SUCCESS", + "resultCode": 0, + "result": [{"id": 12345, "rowNumber": 1, "cells": []}] + } + + mock_response = mock_httpx_response(status_code=200, json_data=response_data) + + with patch.object(async_client, '_session') as mock_session: + mock_session.send = AsyncMock(return_value=mock_response) + await async_client._ensure_session() + + # Pass single Row object instead of list + row = Row() + row.to_bottom = True + + result = await async_client.Sheets.add_rows(sheet_id=999, list_of_rows=row) + + assert isinstance(result, Result) + assert len(result.data) == 1 + + @pytest.mark.asyncio + async def test_add_rows_error_response(self, async_client, mock_httpx_response): + """Test add_rows with error response.""" + error_data = { + "errorCode": 1006, + "message": "Not Found", + "refId": "test-ref-id" + } + + mock_response = mock_httpx_response(status_code=404, json_data=error_data) + + with patch.object(async_client, '_session') as mock_session: + mock_session.send = AsyncMock(return_value=mock_response) + await async_client._ensure_session() + + row = Row() + row.to_bottom = True + + result = await async_client.Sheets.add_rows(sheet_id=999, list_of_rows=[row]) + + # With raise_exceptions=False (default), should return Error object + assert isinstance(result, Error) + assert result.result.status_code == 404 + assert result.result.code == 1006 + + @pytest.mark.asyncio + async def test_add_rows_with_exceptions_enabled(self, async_client, mock_httpx_response): + """Test add_rows raises exception when errors_as_exceptions is enabled.""" + async_client.errors_as_exceptions(True) + + error_data = { + "errorCode": 1006, + "message": "Not Found", + "refId": "test-ref-id" + } + + mock_response = mock_httpx_response(status_code=404, json_data=error_data) + + with patch.object(async_client, '_session') as mock_session: + mock_session.send = AsyncMock(return_value=mock_response) + await async_client._ensure_session() + + row = Row() + row.to_bottom = True + + with pytest.raises(ApiError): + 
await async_client.Sheets.add_rows(sheet_id=999, list_of_rows=[row]) + + +class TestAsyncWorkspacesListWorkspaces: + """Test suite for AsyncWorkspaces.list_workspaces() method.""" + + @pytest.mark.asyncio + async def test_list_workspaces_success(self, async_client, mock_httpx_response): + """Test successful list_workspaces operation.""" + response_data = { + "pageNumber": 1, + "pageSize": 100, + "totalPages": 1, + "totalCount": 2, + "data": [ + {"id": 111, "name": "Workspace 1", "accessLevel": "OWNER"}, + {"id": 222, "name": "Workspace 2", "accessLevel": "ADMIN"} + ] + } + + mock_response = mock_httpx_response(status_code=200, json_data=response_data) + + with patch.object(async_client, '_session') as mock_session: + mock_session.send = AsyncMock(return_value=mock_response) + await async_client._ensure_session() + + result = await async_client.Workspaces.list_workspaces() + + assert isinstance(result, IndexResult) + assert len(result.data) == 2 + assert result.data[0].name == "Workspace 1" + assert result.data[1].name == "Workspace 2" + assert result.total_count == 2 + + @pytest.mark.asyncio + async def test_list_workspaces_token_pagination(self, async_client, mock_httpx_response): + """Test list_workspaces with token-based pagination.""" + response_data = { + "data": [ + {"id": 111, "name": "Workspace 1", "accessLevel": "OWNER"} + ], + "nextToken": "next_page_token_abc123" + } + + mock_response = mock_httpx_response(status_code=200, json_data=response_data) + + with patch.object(async_client, '_session') as mock_session: + mock_session.send = AsyncMock(return_value=mock_response) + await async_client._ensure_session() + + result = await async_client.Workspaces.list_workspaces( + pagination_type='token', + max_items=100 + ) + + assert isinstance(result, IndexResult) + assert len(result.data) == 1 + assert hasattr(result, 'next_token') + assert result.next_token == "next_page_token_abc123" + + @pytest.mark.asyncio + async def test_list_workspaces_legacy_pagination(self, async_client, mock_httpx_response): + """Test list_workspaces with legacy pagination parameters.""" + response_data = { + "pageNumber": 2, + "pageSize": 50, + "totalPages": 3, + "totalCount": 125, + "data": [{"id": 111, "name": "Workspace 1", "accessLevel": "OWNER"}] + } + + mock_response = mock_httpx_response(status_code=200, json_data=response_data) + + with patch.object(async_client, '_session') as mock_session: + mock_session.send = AsyncMock(return_value=mock_response) + await async_client._ensure_session() + + # Should issue deprecation warning + with pytest.warns(DeprecationWarning): + result = await async_client.Workspaces.list_workspaces( + page_size=50, + page=2 + ) + + assert isinstance(result, IndexResult) + assert result.page_number == 2 + assert result.total_pages == 3 + + @pytest.mark.asyncio + async def test_list_workspaces_invalid_pagination_type(self, async_client): + """Test list_workspaces with invalid pagination_type.""" + with pytest.raises(ValueError, match="pagination_type must be 'token' or None"): + await async_client.Workspaces.list_workspaces(pagination_type='invalid') + + @pytest.mark.asyncio + async def test_list_workspaces_invalid_max_items(self, async_client): + """Test list_workspaces with invalid max_items.""" + with pytest.raises(ValueError, match="max_items must be a positive integer"): + await async_client.Workspaces.list_workspaces( + pagination_type='token', + max_items=0 + ) + + +class TestAsyncConcurrentOperations: + """Test suite for concurrent async operations.""" + + 
@pytest.mark.asyncio + async def test_concurrent_add_rows(self, async_client, mock_httpx_response): + """Test multiple concurrent add_rows operations.""" + response_data = { + "message": "SUCCESS", + "resultCode": 0, + "result": [{"id": 12345, "rowNumber": 1, "cells": []}] + } + + mock_response = mock_httpx_response(status_code=200, json_data=response_data) + + with patch.object(async_client, '_session') as mock_session: + mock_session.send = AsyncMock(return_value=mock_response) + await async_client._ensure_session() + + # Create multiple rows for different sheets + row = Row() + row.to_bottom = True + + # Execute multiple add_rows concurrently + tasks = [ + async_client.Sheets.add_rows(sheet_id=i, list_of_rows=[row]) + for i in range(1, 6) + ] + + results = await asyncio.gather(*tasks) + + # Verify all operations completed successfully + assert len(results) == 5 + for result in results: + assert isinstance(result, Result) + assert result.message == "SUCCESS" + + @pytest.mark.asyncio + async def test_concurrent_list_workspaces(self, async_client, mock_httpx_response): + """Test multiple concurrent list_workspaces operations.""" + response_data = { + "pageNumber": 1, + "pageSize": 100, + "totalPages": 1, + "totalCount": 1, + "data": [{"id": 111, "name": "Workspace 1", "accessLevel": "OWNER"}] + } + + mock_response = mock_httpx_response(status_code=200, json_data=response_data) + + with patch.object(async_client, '_session') as mock_session: + mock_session.send = AsyncMock(return_value=mock_response) + await async_client._ensure_session() + + # Execute multiple list_workspaces concurrently + tasks = [ + async_client.Workspaces.list_workspaces() + for _ in range(5) + ] + + results = await asyncio.gather(*tasks) + + # Verify all operations completed successfully + assert len(results) == 5 + for result in results: + assert isinstance(result, IndexResult) + assert len(result.data) == 1 + + @pytest.mark.asyncio + async def test_mixed_concurrent_operations(self, async_client, mock_httpx_response): + """Test mixed concurrent operations (add_rows and list_workspaces).""" + add_rows_response = { + "message": "SUCCESS", + "resultCode": 0, + "result": [{"id": 12345, "rowNumber": 1, "cells": []}] + } + + list_workspaces_response = { + "pageNumber": 1, + "pageSize": 100, + "totalPages": 1, + "totalCount": 1, + "data": [{"id": 111, "name": "Workspace 1", "accessLevel": "OWNER"}] + } + + # Create different responses based on URL + def create_response(*args, **kwargs): + request = args[0] + if '/rows' in str(request.url): + return mock_httpx_response(status_code=200, json_data=add_rows_response) + else: + return mock_httpx_response(status_code=200, json_data=list_workspaces_response) + + with patch.object(async_client, '_session') as mock_session: + mock_session.send = AsyncMock(side_effect=create_response) + await async_client._ensure_session() + + row = Row() + row.to_bottom = True + + # Mix of different operations + tasks = [ + async_client.Sheets.add_rows(sheet_id=1, list_of_rows=[row]), + async_client.Workspaces.list_workspaces(), + async_client.Sheets.add_rows(sheet_id=2, list_of_rows=[row]), + async_client.Workspaces.list_workspaces(), + ] + + results = await asyncio.gather(*tasks) + + assert len(results) == 4 + assert isinstance(results[0], Result) + assert isinstance(results[1], IndexResult) + assert isinstance(results[2], Result) + assert isinstance(results[3], IndexResult) + + +class TestAsyncRetryLogic: + """Test suite for async retry logic and error handling.""" + + @pytest.mark.asyncio + 
async def test_retry_on_rate_limit(self, async_client, mock_httpx_response): + """Test retry logic on rate limit error (4003).""" + rate_limit_response = mock_httpx_response( + status_code=429, + json_data={"errorCode": 4003, "message": "Rate limit exceeded"} + ) + + success_response = mock_httpx_response( + status_code=200, + json_data={ + "pageNumber": 1, + "pageSize": 100, + "totalPages": 1, + "totalCount": 1, + "data": [{"id": 111, "name": "Workspace 1", "accessLevel": "OWNER"}] + } + ) + + with patch.object(async_client, '_session') as mock_session: + # First call returns rate limit, second call succeeds + mock_session.send = AsyncMock(side_effect=[rate_limit_response, success_response]) + await async_client._ensure_session() + + # Mock asyncio.sleep to avoid actual delays in tests + with patch('asyncio.sleep', new_callable=AsyncMock): + result = await async_client.Workspaces.list_workspaces() + + # Should succeed after retry + assert isinstance(result, IndexResult) + assert len(result.data) == 1 + + # Verify send was called twice (initial + 1 retry) + assert mock_session.send.call_count == 2 + + @pytest.mark.asyncio + async def test_no_retry_on_client_error(self, async_client, mock_httpx_response): + """Test that client errors (4xx) don't trigger retry.""" + error_response = mock_httpx_response( + status_code=400, + json_data={"errorCode": 1002, "message": "Invalid request"} + ) + + with patch.object(async_client, '_session') as mock_session: + mock_session.send = AsyncMock(return_value=error_response) + await async_client._ensure_session() + + row = Row() + row.to_bottom = True + + result = await async_client.Sheets.add_rows(sheet_id=999, list_of_rows=[row]) + + # Should return error without retry + assert isinstance(result, Error) + + # Verify send was called only once (no retry) + assert mock_session.send.call_count == 1 + + @pytest.mark.asyncio + async def test_max_retry_time_exceeded(self, async_client, mock_httpx_response): + """Test that retries stop after max_retry_time is exceeded.""" + rate_limit_response = mock_httpx_response( + status_code=429, + json_data={"errorCode": 4003, "message": "Rate limit exceeded"} + ) + + with patch.object(async_client, '_session') as mock_session: + # Always return rate limit error + mock_session.send = AsyncMock(return_value=rate_limit_response) + await async_client._ensure_session() + + # Mock asyncio.sleep to avoid actual delays + with patch('asyncio.sleep', new_callable=AsyncMock): + result = await async_client.Workspaces.list_workspaces() + + # Should eventually give up and return error + assert isinstance(result, Error) + assert result.result.code == 4003 + + +class TestAsyncResourceCleanup: + """Test suite for proper resource cleanup.""" + + @pytest.mark.asyncio + async def test_context_manager_cleanup_on_exception(self, mock_access_token, mock_httpx_response): + """Test that resources are cleaned up even when exception occurs.""" + error_response = mock_httpx_response( + status_code=500, + json_data={"errorCode": 4004, "message": "Server error"} + ) + + try: + async with AsyncSmartsheet(access_token=mock_access_token) as client: + client.errors_as_exceptions(True) + + with patch.object(client, '_session') as mock_session: + mock_session.send = AsyncMock(return_value=error_response) + await client._ensure_session() + + # This should raise an exception + await client.Workspaces.list_workspaces() + except ApiError: + pass # Expected + + # Client should be closed even after exception + # (We can't directly verify, but no exception should 
occur) + + @pytest.mark.asyncio + async def test_multiple_close_calls(self, mock_access_token): + """Test that calling aclose() multiple times is safe.""" + client = AsyncSmartsheet(access_token=mock_access_token) + await client._ensure_session() + + # Close multiple times - should not raise exception + await client.aclose() + await client.aclose() + await client.aclose() + + assert client._session is None From 05175743c961b372b1618e4865dc4127d14a8980 Mon Sep 17 00:00:00 2001 From: Scott Anderson Date: Thu, 8 Jan 2026 17:19:47 +0000 Subject: [PATCH 2/2] remove unnecessary whitespace --- smartsheet/async_session.py | 32 +++++++++++++------------- smartsheet/async_smartsheet.py | 42 +++++++++++++++++----------------- 2 files changed, 37 insertions(+), 37 deletions(-) diff --git a/smartsheet/async_session.py b/smartsheet/async_session.py index a8c76c34..44ddbe1b 100644 --- a/smartsheet/async_session.py +++ b/smartsheet/async_session.py @@ -36,12 +36,12 @@ def _create_ssl_context() -> ssl.SSLContext: """Create SSL context with secure TLS configuration. - + Configures SSL context to: - Disable SSLv2, SSLv3, and TLSv1 (insecure protocols) - Use system default security settings - Verify certificates against trusted CA bundle - + Returns: ssl.SSLContext: Configured SSL context for secure connections """ @@ -58,35 +58,35 @@ def async_pinned_session( timeout: Optional[float] = None ) -> httpx.AsyncClient: """Create an async HTTP client with pinned SSL/TLS configuration. - + This function creates an httpx.AsyncClient configured with: - Secure SSL/TLS settings (no SSLv2, SSLv3, or TLSv1) - Connection pooling for efficient resource usage - Automatic retry logic for transient failures - Certificate verification against trusted CA bundle - + The client should be used as an async context manager to ensure proper resource cleanup: - + async with async_pinned_session() as client: response = await client.get("https://api.smartsheet.com/...") - + Or with explicit lifecycle management: - + client = async_pinned_session() try: response = await client.get("https://api.smartsheet.com/...") finally: await client.aclose() - + Args: pool_maxsize: Maximum number of connections to pool (default: 8) max_retries: Number of retry attempts for failed requests (default: 1) timeout: Request timeout in seconds (default: None for no timeout) - + Returns: httpx.AsyncClient: Configured async HTTP client with secure settings - + Example: >>> async with async_pinned_session() as client: ... response = await client.get("https://api.smartsheet.com/2.0/users/me") @@ -95,20 +95,20 @@ def async_pinned_session( """ # Create SSL context with secure configuration ssl_context = _create_ssl_context() - + # Configure connection limits for pooling limits = httpx.Limits( max_connections=pool_maxsize, max_keepalive_connections=pool_maxsize // 2 ) - + # Configure retry transport transport = httpx.AsyncHTTPTransport( limits=limits, verify=ssl_context, retries=max_retries ) - + # Create async client with configuration client = httpx.AsyncClient( transport=transport, @@ -118,17 +118,17 @@ def async_pinned_session( 'response': [_redact_token_async] } ) - + return client async def _redact_token_async(response: httpx.Response) -> None: """Redact authorization token from request headers for security. - + This hook is called after each response to remove sensitive authorization tokens from the request object, preventing them from appearing in logs or debug output. 
- + Args: response: The HTTP response object containing the request """ diff --git a/smartsheet/async_smartsheet.py b/smartsheet/async_smartsheet.py index c79d854c..b328c6fd 100644 --- a/smartsheet/async_smartsheet.py +++ b/smartsheet/async_smartsheet.py @@ -76,15 +76,15 @@ async def main(): class AsyncAbstractUserCalcBackoff: """Abstract base class for async backoff calculation.""" - + async def calc_backoff(self, previous_attempts, total_elapsed_time, error_result): """Calculate backoff time for retry attempts. - + Args: previous_attempts (int): Number of previous retry attempts total_elapsed_time (float): Total elapsed time in seconds error_result (ErrorResult): Error result from previous attempt - + Returns: float: Backoff time in seconds (negative to stop retrying) """ @@ -95,7 +95,7 @@ async def calc_backoff(self, previous_attempts, total_elapsed_time, error_result class AsyncDefaultCalcBackoff(AsyncAbstractUserCalcBackoff): """Default async backoff calculator.""" - + def __init__(self, max_retry_time): self._max_retry_time = max_retry_time @@ -121,20 +121,20 @@ async def calc_backoff(self, previous_attempts, total_elapsed_time, error_result class AsyncSmartsheet: """Async client for making requests to the Smartsheet API. - + This class provides an async interface to the Smartsheet API using httpx.AsyncClient for non-blocking HTTP operations. It mirrors the synchronous Smartsheet client API but uses async/await patterns throughout. - + The client should be used as an async context manager to ensure proper resource cleanup, or you must manually call aclose() when done. - + Attributes: Sheets: Async Sheets API operations Workspaces: Async Workspaces API operations models: Reference to smartsheet.models module raise_exceptions: Whether to raise exceptions on API errors (default: False) - + Example: >>> async with AsyncSmartsheet(access_token="token") as client: ... result = await client.Sheets.add_rows(sheet_id, rows) @@ -165,12 +165,12 @@ def __init__( "/SmartsheetPythonSDK/__version__" to the user_agent. proxies: Proxy configuration dict. See httpx documentation for details. api_base: Base URL for API requests (default: https://api.smartsheet.com/2.0) - + Raises: ValueError: If access_token is not provided and not set in environment """ self.raise_exceptions = False - + if access_token: self._access_token = access_token else: @@ -241,10 +241,10 @@ async def _ensure_session(self): async def aclose(self): """Close the async HTTP session and release resources. - + This method should be called when you're done using the client if you're not using it as an async context manager. - + Example: >>> client = AsyncSmartsheet(access_token="token") >>> try: @@ -273,7 +273,7 @@ def assume_user(self, email: Optional[str] = None): def errors_as_exceptions(self, preference: bool = True): """Set preference on whether or not to raise exceptions on API errors. - + When preference is True, exceptions will be raised. When False, instances of the Error data type will be returned. @@ -295,7 +295,7 @@ def as_test_scenario(self, name: str): def with_wiremock_test_case(self, test_name: str, request_id: str): """Configure client with x-test-name and x-request-id headers. - + Used for wiremock test cases. 
Args: @@ -359,13 +359,13 @@ def _log_request(self, operation, response): body = response.request.content.decode("utf8") body_dumps = json.dumps(json.loads(body), sort_keys=True) self._log.debug('{"requestBody": %s}', body_dumps) - + # response content_dumps = f'"<< {response.headers.get("Content-Type")} content type suppressed >>"' if response.headers.get("Content-Type") is not None and "application/json" in response.headers.get("Content-Type"): content = response.content.decode("utf8") content_dumps = json.dumps(json.loads(content), sort_keys=True) - + if 200 <= response.status_code <= 299: if operation["dl_path"] is None: self._log.debug( @@ -401,7 +401,7 @@ async def _request(self, prepped_request, operation): Operation Result object. """ await self._ensure_session() - + try: res = await self._session.send(prepped_request) self._log_request(operation, res) @@ -427,7 +427,7 @@ async def request_with_retry(self, prepped_request, operation): start_time = time.time() # Make a copy of the request as the access token will be redacted on response prior to logging pre_redact_request = prepped_request - + while True: result = await self._request(prepped_request, operation) if isinstance(result, AsyncOperationErrorResult): @@ -496,7 +496,7 @@ def prepare_request(self, _op): req.headers.update({"Api-Scenario": self._test_scenario_name}) else: req.headers.pop("Api-Scenario", None) - + if self._wiremock_test_name is not None and self._wiremock_request_id is not None: req.headers["X-Test-Name"] = self._wiremock_test_name req.headers["X-Request-ID"] = self._wiremock_request_id @@ -530,11 +530,11 @@ def __getattr__(self, name): # Cache the instance self._api_modules_cache[name] = instance return instance - except (ImportError, AttributeError): + except (ImportError, AttributeError) as e: self._log.error( "ImportError! Could not load async api class %s", name ) - raise AttributeError(f"AsyncSmartsheet has no attribute '{name}'") + raise AttributeError(f"AsyncSmartsheet has no attribute '{name}'") from e class AsyncOperationResult: