From f5b9b48eee4d9ae536d1a06bfd8fa703e0310eca Mon Sep 17 00:00:00 2001 From: xnuinside Date: Sun, 18 Jan 2026 12:46:16 +0300 Subject: [PATCH 1/8] Add SQLAlchemy 2.0 support (issue #49) - New sqlalchemy_v2 generator with modern SQLAlchemy 2.0 syntax: - Uses DeclarativeBase instead of deprecated declarative_base() - Uses Mapped[T] type annotations for columns - Uses mapped_column() instead of Column() - Uses X | None union syntax for nullable columns - Support for all column types, foreign keys, indexes, and constraints - Added functional tests for SQLAlchemy v2 generation - Added integration tests for SQLAlchemy v2 (validates generated code runs) - Added integration tests for OpenAPI 3 generator - Updated CHANGELOG.md and README.md with SQLAlchemy v2 support - Removed unused files (SUGGESTIONS.md, one.ddl) --- CHANGELOG.md | 8 + README.md | 3 +- SUGGESTIONS.md | 928 ------------------ omymodels/generators.py | 2 + omymodels/models/sqlalchemy_v2/__init__.py | 5 + omymodels/models/sqlalchemy_v2/core.py | 323 ++++++ .../models/sqlalchemy_v2/sqlalchemy_v2.jinja2 | 10 + omymodels/models/sqlalchemy_v2/templates.py | 62 ++ omymodels/models/sqlalchemy_v2/types.py | 54 + one.ddl | 3 - .../generator/test_sqlalchemy_v2.py | 188 ++++ tests/integration/openapi3/__init__.py | 0 tests/integration/openapi3/test_openapi3.py | 271 +++++ tests/integration/sqlalchemy_v2/__init__.py | 0 tests/integration/sqlalchemy_v2/conftest.py | 30 + .../sqlalchemy_v2/test_sqlalchemy_v2.py | 212 ++++ 16 files changed, 1167 insertions(+), 932 deletions(-) delete mode 100644 SUGGESTIONS.md create mode 100644 omymodels/models/sqlalchemy_v2/__init__.py create mode 100644 omymodels/models/sqlalchemy_v2/core.py create mode 100644 omymodels/models/sqlalchemy_v2/sqlalchemy_v2.jinja2 create mode 100644 omymodels/models/sqlalchemy_v2/templates.py create mode 100644 omymodels/models/sqlalchemy_v2/types.py delete mode 100644 one.ddl create mode 100644 tests/functional/generator/test_sqlalchemy_v2.py create mode 100644 tests/integration/openapi3/__init__.py create mode 100644 tests/integration/openapi3/test_openapi3.py create mode 100644 tests/integration/sqlalchemy_v2/__init__.py create mode 100644 tests/integration/sqlalchemy_v2/conftest.py create mode 100644 tests/integration/sqlalchemy_v2/test_sqlalchemy_v2.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 0620bf3..7e074e4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -43,6 +43,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Boolean defaults 0/1 converted to False/True - Expanded `datetime_now_check` with more SQL datetime keywords +**SQLAlchemy 2.0 Support (issue #49)** +- New `sqlalchemy_v2` models type with modern SQLAlchemy 2.0 syntax +- Uses `DeclarativeBase` instead of deprecated `declarative_base()` +- Uses `Mapped[T]` type annotations for columns +- Uses `mapped_column()` instead of `Column()` +- Uses `X | None` union syntax for nullable columns +- Supports all column types, foreign keys, indexes, and constraints + **SQLModel Improvements** - Fixed array type generation (issue #66) - Arrays now properly generate `List[T]` with correct SQLAlchemy ARRAY type diff --git a/README.md b/README.md index e5900bc..0a9ab9a 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,8 @@ O! My Models (omymodels) is a library that allow you to **generate** different O Supported Models: -- SQLAlchemy ORM (https://docs.sqlalchemy.org/en/20/orm/) +- SQLAlchemy 2.0 ORM (https://docs.sqlalchemy.org/en/20/orm/) - modern syntax with `Mapped` and `mapped_column` +- SQLAlchemy ORM (legacy style) - SQLAlchemy Core (Tables) (https://docs.sqlalchemy.org/en/20/core/metadata.html) - SQLModel (https://sqlmodel.tiangolo.com/) - combines SQLAlchemy and Pydantic - GinoORM (https://python-gino.org/) diff --git a/SUGGESTIONS.md b/SUGGESTIONS.md deleted file mode 100644 index 8db4172..0000000 --- a/SUGGESTIONS.md +++ /dev/null @@ -1,928 +0,0 @@ -# O! My Models - Architecture Improvement Suggestions - -## Core Idea - -Clear separation into two phases: -1. **Parser** - parses input data (DDL, Python models) into unified `TableMeta` structure -2. **Generator** - generates output code based on `TableMeta` - -``` -Input (DDL/Python) → Parser → TableMeta → Generator → Output (Python code) -``` - ---- - -## 1. Module Restructuring - -### Current Structure (Problems) - -``` -omymodels/ -├── from_ddl.py # Mixed: parsing, normalization, generation, saving -├── converter.py # Mixed: Python parsing, normalization, generation -├── logic.py # ORM-specific logic mixed with general logic -├── types.py # Too much different logic -└── generators.py # Only routing -``` - -### Proposed Structure - -``` -omymodels/ -├── __init__.py # Public API -├── api.py # create_models(), convert_models() -│ -├── parsing/ # PARSING (phase 1) -│ ├── __init__.py -│ ├── ddl_parser.py # DDL → raw dict (simple-ddl-parser) -│ ├── python_parser.py # Python → raw dict (py-models-parser) -│ └── normalizer.py # raw dict → List[TableMeta] -│ -├── types/ # TYPES (separate module) -│ ├── __init__.py -│ ├── sql_types.py # SQL type groups (string_types, integer_types, etc.) -│ ├── converter.py # TypeConverter class -│ └── registry.py # Mappings for each framework -│ -├── generation/ # GENERATION (phase 2) -│ ├── __init__.py -│ ├── base.py # BaseGenerator (abstract class) -│ ├── orm_base.py # ORMGenerator (for Gino, SQLAlchemy, SQLModel) -│ ├── datamodel_base.py # DataModelGenerator (for Pydantic, Dataclass) -│ ├── renderer.py # Jinja2 rendering -│ └── registry.py # Generator registry -│ -├── generators/ # CONCRETE GENERATORS (instead of models/) -│ ├── gino/ -│ ├── sqlalchemy/ -│ ├── sqlalchemy_core/ -│ ├── sqlmodel/ -│ ├── pydantic/ -│ ├── pydantic_v2/ -│ ├── dataclass/ -│ └── enum/ -│ -├── helpers.py -├── errors.py -└── cli.py -``` - ---- - -## 2. Base Generator Classes - -### 2.1 BaseGenerator (abstract) - -```python -# omymodels/generation/base.py -from abc import ABC, abstractmethod -from typing import List, Optional, Dict, Any -from table_meta.model import TableMeta - -class BaseGenerator(ABC): - """Base class for all generators.""" - - def __init__(self): - self.imports: set = set() - self.custom_types: Dict[str, Any] = {} - - @abstractmethod - def generate_model( - self, - table: TableMeta, - singular: bool = True, - exceptions: Optional[List] = None, - **kwargs - ) -> str: - """Generate code for a single model.""" - pass - - @abstractmethod - def create_header(self, tables: List[TableMeta], **kwargs) -> str: - """Generate file header with imports.""" - pass - - def get_type(self, column_type: str) -> str: - """Get target type from SQL type.""" - return self.type_converter.convert(column_type) - - def add_custom_type(self, type_name: str, type_def: Any) -> None: - """Add custom type (enum, etc.).""" - self.custom_types[type_name] = type_def -``` - -### 2.2 ORMGenerator (for ORM frameworks) - -```python -# omymodels/generation/orm_base.py -from omymodels.generation.base import BaseGenerator - -class ORMGenerator(BaseGenerator): - """Base class for ORM generators (Gino, SQLAlchemy, SQLModel).""" - - def __init__(self): - super().__init__() - self.use_func = False # func.now() - self.use_dialect = False # PostgreSQL dialect - self.constraints = [] - self.indexes = [] - - def generate_column(self, column, table, **kwargs) -> str: - """Generate ORM column definition.""" - # Common logic from current logic.py - pass - - def setup_column_attributes(self, column, ...) -> str: - """Setup column attributes (nullable, default, PK, FK).""" - pass - - def add_table_args(self, table) -> str: - """Generate __table_args__ with indexes and constraints.""" - pass - - def prepare_column_default(self, column) -> str: - """Process default values for ORM.""" - # Common code from all ORM generators - pass -``` - -### 2.3 DataModelGenerator (for Pydantic, Dataclass) - -```python -# omymodels/generation/datamodel_base.py -from omymodels.generation.base import BaseGenerator - -class DataModelGenerator(BaseGenerator): - """Base class for data model generators (Pydantic, Dataclass).""" - - def __init__(self): - super().__init__() - self.datetime_import = False - self.uuid_import = False - self.typing_imports: set = set() - - def generate_attr(self, column, defaults_off: bool = False) -> str: - """Generate model attribute.""" - pass - - def get_python_type(self, sql_type: str) -> str: - """Map SQL type to Python type.""" - pass - - def format_nullable(self, type_str: str) -> str: - """Format nullable type (Optional[X] or X | None).""" - pass - - def format_default(self, default_value: Any) -> str: - """Format default value for Python.""" - pass -``` - ---- - -## 3. Parsing Unification - -### 3.1 Unified Parser Interface - -```python -# omymodels/parsing/__init__.py -from typing import List, Union -from table_meta.model import TableMeta - -def parse( - input_data: str, - input_type: str = "ddl" # "ddl" | "python" -) -> List[TableMeta]: - """ - Universal input data parser. - - Args: - input_data: DDL string or Python code - input_type: Input data type - - Returns: - List of TableMeta objects - """ - if input_type == "ddl": - from omymodels.parsing.ddl_parser import parse_ddl - raw_data = parse_ddl(input_data) - elif input_type == "python": - from omymodels.parsing.python_parser import parse_python - raw_data = parse_python(input_data) - else: - raise ValueError(f"Unknown input type: {input_type}") - - from omymodels.parsing.normalizer import normalize - return normalize(raw_data) -``` - -### 3.2 Normalizer - Unified Normalization - -```python -# omymodels/parsing/normalizer.py -from typing import List, Dict, Any -from table_meta.model import TableMeta, Column - -def normalize(raw_data: Dict[str, Any]) -> List[TableMeta]: - """ - Normalize raw parser data into TableMeta objects. - - Single normalization point for DDL and Python parsers. - """ - tables = [] - - for table_data in raw_data.get("tables", []): - table = TableMeta( - name=_clean_name(table_data["name"]), - columns=[_normalize_column(c) for c in table_data["columns"]], - primary_key=table_data.get("primary_key", []), - indexes=table_data.get("indexes", []), - constraints=table_data.get("constraints", {}), - table_schema=table_data.get("schema"), - ) - tables.append(table) - - return tables - -def _normalize_column(column_data: Dict) -> Column: - """Normalize column data.""" - return Column( - name=_clean_name(column_data["name"]), - type=_normalize_type(column_data["type"]), - nullable=column_data.get("nullable", True), - default=_clean_default(column_data.get("default")), - size=column_data.get("size"), - references=column_data.get("references"), - ) -``` - ---- - -## 4. Type Handling Unification - -### 4.1 TypeConverter Class - -```python -# omymodels/types/converter.py -from typing import Optional, Dict, Tuple -from omymodels.types.sql_types import ( - string_types, integer_types, float_types, - datetime_types, bool_types, json_types -) - -class TypeConverter: - """Converter for SQL types to target framework types.""" - - def __init__(self, mapping: Dict[str, str], prefix: str = ""): - self.mapping = mapping - self.prefix = prefix - self._build_lookup() - - def _build_lookup(self): - """Build lookup table for fast search.""" - self._lookup = {} - for sql_type, target_type in self.mapping.items(): - self._lookup[sql_type.lower()] = target_type - - def convert(self, sql_type: str) -> str: - """Convert SQL type to target type.""" - normalized = sql_type.lower().split("(")[0].strip() - return self._lookup.get(normalized, sql_type) - - def with_size(self, type_str: str, size: Optional[Tuple]) -> str: - """Add size to type if needed.""" - if size is None: - return type_str - if isinstance(size, tuple): - return f"{type_str}({size[0]}, {size[1]})" - return f"{type_str}({size})" - - def is_datetime(self, sql_type: str) -> bool: - """Check if type is datetime.""" - return sql_type.upper() in datetime_types - - def is_json(self, sql_type: str) -> bool: - """Check if type is JSON.""" - return sql_type.upper() in json_types -``` - -### 4.2 Type Registry for Each Framework - -```python -# omymodels/types/registry.py - -# Gino -GINO_TYPES = { - "varchar": "db.String", - "text": "db.Text", - "integer": "db.Integer", - "bigint": "db.BigInteger", - "timestamp": "db.TIMESTAMP", - # ... -} - -# SQLAlchemy -SQLALCHEMY_TYPES = { - "varchar": "sa.String", - "text": "sa.Text", - "integer": "sa.Integer", - # ... -} - -# Pydantic -PYDANTIC_TYPES = { - "varchar": "str", - "text": "str", - "integer": "int", - "bigint": "int", - "timestamp": "datetime.datetime", - "json": "dict | list", # for v2 - # ... -} - -def get_type_converter(framework: str) -> TypeConverter: - """Return TypeConverter for the specified framework.""" - converters = { - "gino": TypeConverter(GINO_TYPES, prefix="db."), - "sqlalchemy": TypeConverter(SQLALCHEMY_TYPES, prefix="sa."), - "pydantic": TypeConverter(PYDANTIC_TYPES), - "pydantic_v2": TypeConverter(PYDANTIC_V2_TYPES), - # ... - } - return converters[framework] -``` - ---- - -## 5. Code Duplication Elimination - -### 5.1 Current Duplication (~30-40%) - -| Method | Duplicated in | Solution | -|--------|---------------|----------| -| `prepare_column_default()` | Gino, SQLAlchemy, SQLModel, SQLAlchemy Core | Move to `ORMGenerator` | -| `create_header()` (ORM) | Gino, SQLAlchemy, SQLModel | Move to `ORMGenerator` | -| `get_not_custom_type()` | Pydantic, Pydantic v2, Dataclass | Move to `DataModelGenerator` | -| `generate_attr()` | Pydantic, Pydantic v2, Dataclass | Move to `DataModelGenerator` | -| `types_mapping` building | All generators | Use `TypeConverter` | - -### 5.2 Refactoring Example - -**Before (duplication in 4 files):** -```python -# models/gino/core.py, models/sqlalchemy/core.py, models/sqlmodel/core.py -def prepare_column_default(self, column_data, column): - if isinstance(column_data.default, str): - if column_data.type.upper() in datetime_types: - if datetime_now_check(column_data.default.lower()): - column_data.default = "func.now()" - self.state.add("func") - # ... 20+ lines of identical code -``` - -**After (once in base class):** -```python -# omymodels/generation/orm_base.py -class ORMGenerator(BaseGenerator): - def prepare_column_default(self, column) -> str: - """Process default values for ORM.""" - if column.type.upper() in datetime_types: - if is_now_function(column.default): - self.use_func = True - return "func.now()" - return self._format_default(column.default) -``` - ---- - -## 6. API Improvement - -### 6.1 Unified Public API - -```python -# omymodels/api.py -from typing import Optional, List, Dict, Any, Union -from pathlib import Path - -def generate( - input_data: Union[str, Path], - input_type: str = "ddl", # "ddl" | "python" - output_type: str = "gino", # "gino" | "pydantic" | etc. - output_path: Optional[Path] = None, - *, - singular: bool = True, - schema_global: bool = True, - defaults_off: bool = False, - exceptions: Optional[List[str]] = None, -) -> Dict[str, Any]: - """ - Universal model generation function. - - Args: - input_data: DDL string, Python code, or file path - input_type: Input data type - output_type: Output model type - output_path: Path for saving (optional) - singular: Singularize table names - schema_global: Global schema vs per-table - defaults_off: Disable default values - exceptions: Exceptions for singularization - - Returns: - {"code": str, "metadata": dict} - """ - # 1. Load data - if isinstance(input_data, Path): - input_data = input_data.read_text() - - # 2. Parse - from omymodels.parsing import parse - tables = parse(input_data, input_type) - - # 3. Generate - from omymodels.generation.registry import get_generator - generator = get_generator(output_type) - code = generator.generate_all( - tables, - singular=singular, - schema_global=schema_global, - defaults_off=defaults_off, - exceptions=exceptions, - ) - - # 4. Save - if output_path: - output_path.write_text(code) - - return {"code": code, "metadata": {"tables": len(tables)}} - - -# Backward compatibility -def create_models(ddl: str, **kwargs) -> Dict[str, Any]: - """Backward compatible wrapper.""" - return generate(ddl, input_type="ddl", **kwargs) - -def convert_models(python_code: str, **kwargs) -> str: - """Backward compatible wrapper.""" - result = generate(python_code, input_type="python", **kwargs) - return result["code"] -``` - ---- - -## 7. Bug Fixes - -### 7.1 iterate_over_the_list() - List Modification During Iteration - -**Current code (bug):** -```python -def iterate_over_the_list(items: List) -> str: - for item in items: - if isinstance(item, str): - items.append(clean_value(item)) - items.remove(item) # Modification during iteration! - return items -``` - -**Fix:** -```python -def iterate_over_the_list(items: List) -> List: - return [ - clean_value(item) if isinstance(item, str) - else prepare_data(item) if isinstance(item, dict) - else item - for item in items - ] -``` - -### 7.2 dataclass/core.py - Meaningless Condition - -**Current code:** -```python -if _type == _type: # Always True! - _type = types_mapping.get(_type, _type) -``` - -**Fix:** -```python -_type = types_mapping.get(_type, _type) -``` - ---- - -## 8. Feature Extensions - -### 8.1 New SQL Type Support - -- `INTERVAL` - for time intervals -- `POINT`, `POLYGON` - geometric types (PostGIS) -- `ARRAY` - improved multi-dimensional array support -- `HSTORE` - PostgreSQL key-value -- `INET`, `CIDR` - network types - -### 8.2 New Target Frameworks - -- **Tortoise ORM** - async ORM -- **Django ORM** - popular web framework -- **Peewee** - lightweight ORM -- **attrs** - dataclasses alternative -- **msgspec** - fast serialization - -### 8.3 Additional Features - -- **Pydantic validators** - auto-generation from constraints -- **Alembic migrations** - migration generation -- **TypedDict** - for JSON schemas -- **Protocol classes** - for interfaces - ---- - -## 9. Plugin System for Custom Generators - -Allow users to register their own generators without forking the repository. - -### 9.1 Generator Registration API - -```python -# omymodels/plugins.py -from typing import Type, Dict -from omymodels.generation.base import BaseGenerator - -_custom_generators: Dict[str, Type[BaseGenerator]] = {} - -def register_generator(name: str, generator_class: Type[BaseGenerator]) -> None: - """ - Register a custom generator. - - Args: - name: Unique generator name (e.g., "my_orm", "custom_pydantic") - generator_class: Generator class inheriting from BaseGenerator - - Example: - from omymodels import register_generator - from omymodels.generation.base import BaseGenerator - - class MyORMGenerator(BaseGenerator): - ... - - register_generator("my_orm", MyORMGenerator) - """ - if not issubclass(generator_class, BaseGenerator): - raise TypeError(f"{generator_class} must inherit from BaseGenerator") - _custom_generators[name] = generator_class - -def unregister_generator(name: str) -> None: - """Remove a custom generator.""" - _custom_generators.pop(name, None) - -def get_generator(name: str) -> BaseGenerator: - """Get generator by name (built-in or custom).""" - if name in _custom_generators: - return _custom_generators[name]() - from omymodels.generation.registry import builtin_generators - if name in builtin_generators: - return builtin_generators[name]() - raise ValueError(f"Unknown generator: {name}") -``` - -### 9.2 User-Defined Generator Example - -```python -# my_project/generators/peewee_generator.py -from omymodels.generation.base import BaseGenerator -from omymodels.types.converter import TypeConverter -from table_meta.model import TableMeta - -# Type mapping for Peewee -PEEWEE_TYPES = { - "varchar": "CharField", - "text": "TextField", - "integer": "IntegerField", - "bigint": "BigIntegerField", - "boolean": "BooleanField", - "timestamp": "DateTimeField", - "date": "DateField", - "float": "FloatField", - "decimal": "DecimalField", - "uuid": "UUIDField", - "json": "JSONField", -} - -class PeeweeGenerator(BaseGenerator): - """Custom generator for Peewee ORM.""" - - def __init__(self): - super().__init__() - self.type_converter = TypeConverter(PEEWEE_TYPES) - self.imports = {"peewee"} - - def generate_model(self, table: TableMeta, singular: bool = True, **kwargs) -> str: - """Generate Peewee model class.""" - class_name = self._to_class_name(table.name, singular) - - lines = [f"\nclass {class_name}(Model):"] - - for column in table.columns: - field_type = self.type_converter.convert(column.type) - attrs = self._get_field_attrs(column) - lines.append(f" {column.name} = {field_type}({attrs})") - - # Meta class - lines.append("") - lines.append(" class Meta:") - lines.append(f" table_name = '{table.name}'") - - return "\n".join(lines) - - def create_header(self, tables, **kwargs) -> str: - """Generate imports.""" - fields = set() - for table in tables: - for col in table.columns: - fields.add(self.type_converter.convert(col.type)) - - return f"from peewee import Model, {', '.join(sorted(fields))}\n" - - def _to_class_name(self, name: str, singular: bool) -> str: - """Convert table name to class name.""" - # Simple implementation - return "".join(word.capitalize() for word in name.split("_")) - - def _get_field_attrs(self, column) -> str: - """Generate field attributes.""" - attrs = [] - if column.nullable: - attrs.append("null=True") - if column.default is not None: - attrs.append(f"default={column.default!r}") - return ", ".join(attrs) -``` - -### 9.3 Using Custom Generator - -```python -# my_project/main.py -from omymodels import create_models, register_generator -from my_project.generators.peewee_generator import PeeweeGenerator - -# Register custom generator -register_generator("peewee", PeeweeGenerator) - -# Use it -ddl = """ -CREATE TABLE users ( - id SERIAL PRIMARY KEY, - name VARCHAR(100) NOT NULL, - email VARCHAR(255), - created_at TIMESTAMP DEFAULT NOW() -); -""" - -result = create_models(ddl, models_type="peewee") -print(result["code"]) - -# Output: -# from peewee import Model, CharField, DateTimeField, IntegerField -# -# class Users(Model): -# id = IntegerField() -# name = CharField() -# email = CharField(null=True) -# created_at = DateTimeField(default=datetime.datetime.now) -# -# class Meta: -# table_name = 'users' -``` - -### 9.4 Entry Points for Auto-Discovery - -Support automatic discovery via `pyproject.toml` entry points: - -```toml -# In user's pyproject.toml -[project.entry-points."omymodels.generators"] -peewee = "my_package.generators:PeeweeGenerator" -django = "my_package.generators:DjangoGenerator" -``` - -```python -# omymodels/plugins.py -import sys -if sys.version_info >= (3, 10): - from importlib.metadata import entry_points -else: - from importlib_metadata import entry_points - -def discover_plugins() -> None: - """Auto-discover generators from entry points.""" - eps = entry_points(group="omymodels.generators") - for ep in eps: - generator_class = ep.load() - register_generator(ep.name, generator_class) - -# Call on import -discover_plugins() -``` - -### 9.5 Generator Inheritance for Quick Customization - -Users can extend built-in generators: - -```python -from omymodels.generation.pydantic_v2 import PydanticV2Generator - -class MyPydanticGenerator(PydanticV2Generator): - """Pydantic v2 with custom JSON handling.""" - - def __init__(self): - super().__init__() - # Override JSON type to use specific TypedDict - self.type_converter.mapping["json"] = "JsonData" - self.type_converter.mapping["jsonb"] = "JsonData" - - def create_header(self, tables, **kwargs) -> str: - header = super().create_header(tables, **kwargs) - # Add custom import - return "from my_types import JsonData\n" + header - -register_generator("my_pydantic", MyPydanticGenerator) -``` - -### 9.6 CLI Support for Custom Generators - -```bash -# Register via environment variable -export OMYMODELS_PLUGINS="my_package.generators" - -# Or via config file (~/.omymodels.toml) -[plugins] -generators = ["my_package.generators.PeeweeGenerator"] - -# Use in CLI -omm schema.sql -m peewee -t models.py -``` - -### 9.7 Validation and Error Handling - -```python -# omymodels/plugins.py -from omymodels.generation.base import BaseGenerator - -def register_generator(name: str, generator_class: Type[BaseGenerator]) -> None: - """Register with validation.""" - # Validate inheritance - if not issubclass(generator_class, BaseGenerator): - raise TypeError( - f"Generator must inherit from BaseGenerator, " - f"got {generator_class.__bases__}" - ) - - # Validate required methods - required_methods = ["generate_model", "create_header"] - for method in required_methods: - if not hasattr(generator_class, method): - raise TypeError(f"Generator missing required method: {method}") - - # Validate name - if not name.isidentifier(): - raise ValueError(f"Invalid generator name: {name!r}") - - # Check for conflicts with built-in - from omymodels.generation.registry import builtin_generators - if name in builtin_generators: - raise ValueError( - f"Cannot override built-in generator: {name}. " - f"Use a different name." - ) - - _custom_generators[name] = generator_class -``` - -### 9.8 Public API Additions - -```python -# omymodels/__init__.py -from omymodels.plugins import ( - register_generator, - unregister_generator, - list_generators, -) -from omymodels.generation.base import BaseGenerator -from omymodels.generation.orm_base import ORMGenerator -from omymodels.generation.datamodel_base import DataModelGenerator -from omymodels.types.converter import TypeConverter - -__all__ = [ - # Existing - "create_models", - "convert_models", - # New plugin API - "register_generator", - "unregister_generator", - "list_generators", - # Base classes for extension - "BaseGenerator", - "ORMGenerator", - "DataModelGenerator", - "TypeConverter", -] -``` - ---- - -## 10. Testing Improvements - -### 10.1 Test Structure - -``` -tests/ -├── unit/ -│ ├── parsing/ -│ │ ├── test_ddl_parser.py -│ │ ├── test_python_parser.py -│ │ └── test_normalizer.py -│ ├── types/ -│ │ ├── test_converter.py -│ │ └── test_registry.py -│ └── generation/ -│ ├── test_base_generator.py -│ ├── test_orm_generator.py -│ └── test_datamodel_generator.py -│ -├── functional/ -│ └── generators/ -│ ├── test_gino.py -│ ├── test_sqlalchemy.py -│ ├── test_pydantic.py -│ └── ... -│ -├── integration/ -│ ├── test_full_pipeline.py # DDL → TableMeta → Code -│ └── test_runtime_validation.py # Generated code works -│ -└── fixtures/ - ├── ddl/ - │ ├── simple.sql - │ ├── with_fk.sql - │ └── complex.sql - └── expected/ - ├── gino/ - ├── pydantic/ - └── ... -``` - -### 10.2 Property-based Tests - -```python -from hypothesis import given, strategies as st - -@given(st.text(min_size=1, alphabet=st.characters(whitelist_categories=('L',)))) -def test_table_name_normalization(name): - """Any table name should normalize to a valid Python identifier.""" - result = normalize_table_name(name) - assert result.isidentifier() -``` - ---- - -## 11. Implementation Plan - -### Phase 1: Base Infrastructure (1-2 weeks) -1. Create `omymodels/generation/base.py` with `BaseGenerator` -2. Create `omymodels/types/converter.py` with `TypeConverter` -3. Fix discovered bugs -4. Add tests for new classes - -### Phase 2: ORM Generators (1-2 weeks) -1. Create `ORMGenerator` with common logic -2. Refactor Gino, SQLAlchemy, SQLModel to use inheritance -3. Remove duplicate code -4. Update tests - -### Phase 3: Data Model Generators (1 week) -1. Create `DataModelGenerator` -2. Refactor Pydantic, Pydantic v2, Dataclass -3. Remove duplicate code - -### Phase 4: Parsing Unification (1 week) -1. Create `omymodels/parsing/` module -2. Extract normalization to `normalizer.py` -3. Update `from_ddl.py` and `converter.py` - -### Phase 5: Finalization (1 week) -1. Update public API -2. Update documentation -3. Ensure backward compatibility -4. Full testing - ---- - -## 12. Expected Results - -| Metric | Before | After | -|--------|--------|-------| -| Code duplication | ~35% | ~10% | -| Lines of code | 2348 | ~1800 | -| Time to add generator | 2-3 hours | 30-60 min | -| Test coverage | ~60% | ~85% | -| Number of base classes | 0 | 3 | diff --git a/omymodels/generators.py b/omymodels/generators.py index 2df9b1a..937634b 100644 --- a/omymodels/generators.py +++ b/omymodels/generators.py @@ -13,6 +13,7 @@ from omymodels.models.pydantic_v2 import core as p2 from omymodels.models.sqlalchemy import core as s from omymodels.models.sqlalchemy_core import core as sc +from omymodels.models.sqlalchemy_v2 import core as s2 from omymodels.models.sqlmodel import core as sm # Built-in generator modules @@ -22,6 +23,7 @@ "pydantic_v2": p2, "dataclass": d, "sqlalchemy": s, + "sqlalchemy_v2": s2, "sqlalchemy_core": sc, "sqlmodel": sm, "openapi3": oas3, diff --git a/omymodels/models/sqlalchemy_v2/__init__.py b/omymodels/models/sqlalchemy_v2/__init__.py new file mode 100644 index 0000000..2b00c8e --- /dev/null +++ b/omymodels/models/sqlalchemy_v2/__init__.py @@ -0,0 +1,5 @@ +"""SQLAlchemy 2.0 ORM model generator.""" + +from omymodels.models.sqlalchemy_v2.core import ModelGenerator + +__all__ = ["ModelGenerator"] diff --git a/omymodels/models/sqlalchemy_v2/core.py b/omymodels/models/sqlalchemy_v2/core.py new file mode 100644 index 0000000..265d3bf --- /dev/null +++ b/omymodels/models/sqlalchemy_v2/core.py @@ -0,0 +1,323 @@ +from typing import Dict, List, Optional + +import omymodels.models.sqlalchemy_v2.templates as st +from omymodels.helpers import create_class_name, datetime_now_check +from omymodels.models.sqlalchemy_v2.types import types_mapping, python_to_sa_type +from omymodels.types import datetime_types, json_types, postgresql_dialect +import omymodels.types as t + + +class GeneratorBase: + def __init__(self): + self.custom_types = {} + + +class ModelGenerator(GeneratorBase): + def __init__(self): + self.state = set() + self.postgresql_dialect_cols = set() + self.typing_imports = set() + self.constraint = False + self.im_index = False + self.datetime_import = False + self.date_import = False + self.time_import = False + self.uuid_import = False + self.fk_import = False + self.types_mapping = types_mapping + self.templates = st + self.prefix = "" + super().__init__() + + def prepare_column_default(self, column_data: Dict, column: str) -> str: + if isinstance(column_data.default, str): + if column_data.type.upper() in datetime_types: + if datetime_now_check(column_data.default.lower()): + column_data.default = "func.now()" + self.state.add("func") + elif "'" not in column_data.default: + column_data.default = f"'{column_data.default}'" + else: + if "'" not in column_data.default: + column_data.default = f"'{column_data.default}'" + else: + column_data.default = f"'{str(column_data.default)}'" + column += st.default.format(default=column_data.default) + return column + + def _get_python_type(self, column_type_info) -> str: + """Get Python type for Mapped annotation.""" + if isinstance(column_type_info, dict): + return column_type_info.get("python", "str") + return column_type_info + + def _get_sa_type(self, column_type_info) -> str: + """Get SQLAlchemy column type.""" + if isinstance(column_type_info, dict): + return column_type_info.get("sa") + return None + + def _track_imports(self, python_type: str): + """Track necessary imports based on Python type.""" + if python_type == "datetime": + self.datetime_import = True + elif python_type == "date": + self.date_import = True + elif python_type == "time": + self.time_import = True + elif python_type == "UUID": + self.uuid_import = True + + def _resolve_type_info(self, column_data) -> tuple: + """Resolve Python and SQLAlchemy types for a column.""" + column_type_info = self.types_mapping.get( + column_data.type.lower().split("[")[0], + {"python": "str", "sa": "String"} + ) + + python_type = self._get_python_type(column_type_info) + sa_type = self._get_sa_type(column_type_info) + + # Handle custom types (enums) + if self.custom_types: + custom = self.custom_types.get(column_data.type) + if custom: + if isinstance(custom, tuple): + python_type = custom[1] + sa_type = f"Enum({custom[1]})" + else: + python_type = column_data.type + sa_type = f"Enum({column_data.type})" + + return python_type, sa_type + + def _handle_array_type(self, column_data, python_type, sa_type) -> tuple: + """Handle array type columns.""" + if "[" in column_data.type and column_data.type.lower() not in json_types: + self.postgresql_dialect_cols.add("ARRAY") + self.typing_imports.add("List") + array_sa_type = python_to_sa_type.get(python_type, "String") + sa_type = f"ARRAY({array_sa_type})" + python_type = f"List[{python_type}]" + return python_type, sa_type + + def _add_type_size(self, sa_type, column_data) -> str: + """Add size specification to SQLAlchemy type.""" + if sa_type and column_data.size: + if isinstance(column_data.size, int): + return f"{sa_type}({column_data.size})" + elif isinstance(column_data.size, tuple): + return f"{sa_type}({','.join(str(x) for x in column_data.size)})" + return sa_type + + def generate_column( + self, + column_data, + table_pk: List[str], + table_data: Dict, + schema_global: bool, + ) -> str: + """Generate a column definition in SQLAlchemy 2.0 style.""" + column_data = t.prepare_column_data(column_data) + + python_type, sa_type = self._resolve_type_info(column_data) + self._track_imports(python_type) + + python_type, sa_type = self._handle_array_type(column_data, python_type, sa_type) + + if column_data.nullable and column_data.name not in table_pk: + python_type = f"{python_type} | None" + + if sa_type and sa_type in postgresql_dialect: + self.postgresql_dialect_cols.add(sa_type) + + sa_type_with_size = self._add_type_size(sa_type, column_data) + + if sa_type_with_size: + column = st.column_template.format( + column_name=column_data.name, + python_type=python_type, + column_type=sa_type_with_size, + ) + else: + column = st.column_template_no_type.format( + column_name=column_data.name, + python_type=python_type, + ) + + column = self._add_column_attributes( + column, column_data, table_pk, table_data, schema_global + ) + + column += ")\n" + return column + + def _add_column_attributes( + self, + column: str, + column_data, + table_pk: List[str], + table_data: Dict, + schema_global: bool, + ) -> str: + """Add attributes to column definition.""" + # Handle foreign keys from ALTER statements + if "columns" in table_data.alter: + for alter_column in table_data.alter["columns"]: + if ( + alter_column["name"] == column_data.name + and not alter_column["constraint_name"] + and alter_column["references"] + ): + column = self._add_foreign_key( + column, alter_column["references"], schema_global + ) + + # Handle autoincrement + if column_data.type.lower() in ("serial", "bigserial"): + column += st.autoincrement + + # Handle inline foreign keys + if column_data.references: + column = self._add_foreign_key( + column, column_data.references, schema_global + ) + + # Handle default values + if column_data.default is not None: + column = self.prepare_column_default(column_data, column) + + # Handle primary key + if column_data.name in table_pk: + column += st.pk_template + + # Handle unique constraint + if column_data.unique: + column += st.unique + + return column + + def _add_foreign_key( + self, column: str, reference: Dict[str, str], schema_global: bool + ) -> str: + """Add foreign key to column definition.""" + self.fk_import = True + if reference["schema"] and not schema_global: + column += st.fk_in_column.format( + ref_schema=reference["schema"], + ref_table=reference["table"], + ref_column=reference["column"] or column, + ) + else: + column += st.fk_in_column_without_schema.format( + ref_table=reference["table"], + ref_column=reference["column"] or column, + ) + if reference["on_delete"]: + column += st.on_delete.format(mode=reference["on_delete"].upper()) + if reference["on_update"]: + column += st.on_update.format(mode=reference["on_update"].upper()) + return column + + def generate_model( + self, + table: Dict, + singular: bool = True, + exceptions: Optional[List] = None, + schema_global: Optional[bool] = True, + *args, + **kwargs, + ) -> str: + """Generate a model definition in SQLAlchemy 2.0 style.""" + model = st.model_template.format( + model_name=create_class_name(table.name, singular, exceptions), + table_name=table.name, + ) + + for column in table.columns: + model += self.generate_column( + column, table.primary_key, table, schema_global + ) + + if table.indexes or table.alter or table.checks or not schema_global: + model = self._add_table_args(model, table, schema_global) + + return model + + def _add_table_args( + self, model: str, table: Dict, schema_global: bool = True + ) -> str: + """Add __table_args__ to model.""" + statements = [] + + if table.indexes: + for index in table.indexes: + if not index["unique"]: + self.im_index = True + statements.append( + st.index_template.format( + columns=", ".join(f"'{c}'" for c in index["columns"]), + name=f"'{index['index_name']}'", + ) + ) + else: + self.constraint = True + statements.append( + st.unique_index_template.format( + columns=", ".join(f"'{c}'" for c in index["columns"]), + name=f"'{index['index_name']}'", + ) + ) + + if not schema_global and table.table_schema: + statements.append(st.schema.format(schema_name=table.table_schema)) + + if statements: + model += st.table_args.format(statements=",".join(statements)) + + return model + + def _build_datetime_import(self) -> str: + """Build datetime import statement.""" + if not (self.datetime_import or self.date_import or self.time_import): + return "" + imports = [] + if self.datetime_import: + imports.append("datetime") + if self.date_import: + imports.append("date") + if self.time_import: + imports.append("time") + return f"from datetime import {', '.join(imports)}\n" + + def create_header( + self, tables: List[Dict], schema: bool = False, *args, **kwargs + ) -> str: + """Generate file header with imports.""" + parts = [] + + parts.append(self._build_datetime_import()) + + if self.uuid_import: + parts.append("from uuid import UUID\n") + + if self.typing_imports: + parts.append(st.typing_import.format(types=", ".join(sorted(self.typing_imports))) + "\n") + + if "func" in self.state: + parts.append(st.sql_alchemy_func_import + "\n") + + if self.postgresql_dialect_cols: + parts.append(st.postgresql_dialect_import.format( + types=", ".join(sorted(self.postgresql_dialect_cols))) + "\n") + + if self.fk_import: + parts.append("from sqlalchemy import ForeignKey\n") + + if self.constraint: + parts.append(st.unique_cons_import + "\n") + + if self.im_index: + parts.append(st.index_import + "\n") + + return "".join(parts) diff --git a/omymodels/models/sqlalchemy_v2/sqlalchemy_v2.jinja2 b/omymodels/models/sqlalchemy_v2/sqlalchemy_v2.jinja2 new file mode 100644 index 0000000..000d7b5 --- /dev/null +++ b/omymodels/models/sqlalchemy_v2/sqlalchemy_v2.jinja2 @@ -0,0 +1,10 @@ +from sqlalchemy import ( + String, Text, Integer, BigInteger, SmallInteger, + Float, Numeric, Boolean, Date, DateTime, Time, LargeBinary, Enum +) +from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column +{{ headers }} + +class Base(DeclarativeBase): + pass +{{ models }} \ No newline at end of file diff --git a/omymodels/models/sqlalchemy_v2/templates.py b/omymodels/models/sqlalchemy_v2/templates.py new file mode 100644 index 0000000..0a5bfd7 --- /dev/null +++ b/omymodels/models/sqlalchemy_v2/templates.py @@ -0,0 +1,62 @@ +# imports +postgresql_dialect_import = "from sqlalchemy.dialects.postgresql import {types}" +sql_alchemy_func_import = "from sqlalchemy.sql import func" +index_import = "from sqlalchemy import Index" +typing_import = "from typing import {types}" + +sqlalchemy_import = """from sqlalchemy import ( + String, Text, Integer, BigInteger, SmallInteger, + Float, Numeric, Boolean, Date, DateTime, Time, LargeBinary, Enum +) +from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column +""" +sqlalchemy_init = """ +class Base(DeclarativeBase): + pass +""" +unique_cons_import = "from sqlalchemy.schema import UniqueConstraint" +enum_import = "from enum import {enums}" + +# model definition +model_template = """\n +class {model_name}(Base): + __tablename__ = '{table_name}' +""" + +# columns definition - SQLAlchemy 2.0 style with Mapped and mapped_column +column_template = """ {column_name}: Mapped[{python_type}] = mapped_column({column_type}""" +column_template_no_type = """ {column_name}: Mapped[{python_type}] = mapped_column(""" + +required = "" # Not needed with Mapped, handled via Optional +default = ", server_default={default}" +pk_template = ", primary_key=True" +unique = ", unique=True" +autoincrement = ", autoincrement=True" +index = ", index=True" +nullable = "" # Handled by Optional in type hint + +# tables properties +table_args = """ + __table_args__ = ( + {statements} + ) + +""" +fk_constraint_template = """ + {fk_name} = ForeignKeyConstraint( + [{fk_columns}], [{fk_references_columns}]) +""" +fk_in_column = ", ForeignKey('{ref_schema}.{ref_table}.{ref_column}')" +fk_in_column_without_schema = ", ForeignKey('{ref_table}.{ref_column}')" + +unique_index_template = """ + UniqueConstraint({columns}, name={name})""" + +index_template = """ + Index({name}, {columns})""" + +schema = """ + dict(schema="{schema_name}")""" + +on_delete = ', ondelete="{mode}"' +on_update = ', onupdate="{mode}"' diff --git a/omymodels/models/sqlalchemy_v2/types.py b/omymodels/models/sqlalchemy_v2/types.py new file mode 100644 index 0000000..ec4b7bb --- /dev/null +++ b/omymodels/models/sqlalchemy_v2/types.py @@ -0,0 +1,54 @@ +from omymodels.types import ( + big_integer_types, + binary_types, + boolean_types, + datetime_types, + float_types, + integer_types, + json_types, + numeric_types, + populate_types_mapping, + string_types, + text_types, +) + +# SQLAlchemy 2.0 type mapping: {sql_type: {"python": python_type, "sa": sa_column_type}} +mapper = { + string_types: {"python": "str", "sa": "String"}, + integer_types: {"python": "int", "sa": "Integer"}, + big_integer_types: {"python": "int", "sa": "BigInteger"}, + float_types: {"python": "float", "sa": "Float"}, + numeric_types: {"python": "float", "sa": "Numeric"}, + boolean_types: {"python": "bool", "sa": "Boolean"}, + datetime_types: {"python": "datetime", "sa": "DateTime"}, + json_types: {"python": "dict", "sa": "JSON"}, + text_types: {"python": "str", "sa": "Text"}, + binary_types: {"python": "bytes", "sa": "LargeBinary"}, +} + +types_mapping = populate_types_mapping(mapper) + +direct_types = { + "date": {"python": "date", "sa": "Date"}, + "time": {"python": "time", "sa": "Time"}, + "timestamp": {"python": "datetime", "sa": "DateTime"}, + "smallint": {"python": "int", "sa": "SmallInteger"}, + "uuid": {"python": "UUID", "sa": "UUID"}, + "json": {"python": "dict", "sa": "JSON"}, + "jsonb": {"python": "dict", "sa": "JSON"}, + "year": {"python": "int", "sa": "Integer"}, +} + +types_mapping.update(direct_types) + +# Python type to SQLAlchemy type fallback for arrays +python_to_sa_type = { + "int": "Integer", + "str": "String", + "float": "Float", + "bool": "Boolean", + "datetime": "DateTime", + "date": "Date", + "time": "Time", + "bytes": "LargeBinary", +} diff --git a/one.ddl b/one.ddl deleted file mode 100644 index ded5bca..0000000 --- a/one.ddl +++ /dev/null @@ -1,3 +0,0 @@ -CREATE TABLE `option` ( - FIELD1 VARCHAR(256), -) ; \ No newline at end of file diff --git a/tests/functional/generator/test_sqlalchemy_v2.py b/tests/functional/generator/test_sqlalchemy_v2.py new file mode 100644 index 0000000..a5ff5e0 --- /dev/null +++ b/tests/functional/generator/test_sqlalchemy_v2.py @@ -0,0 +1,188 @@ +"""Tests for SQLAlchemy 2.0 ORM model generation.""" + +from omymodels import create_models + + +def test_basic_table(): + """Test basic table generation with SQLAlchemy 2.0 syntax.""" + ddl = """ +CREATE TABLE users ( + id SERIAL PRIMARY KEY, + email VARCHAR(255) NOT NULL, + name VARCHAR(100), + is_active BOOLEAN DEFAULT TRUE, + created_at TIMESTAMP DEFAULT NOW() +); +""" + result = create_models(ddl, models_type="sqlalchemy_v2") + code = result["code"] + + # Check imports + assert "from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column" in code + assert "from datetime import datetime" in code + assert "from sqlalchemy.sql import func" in code + + # Check Base class + assert "class Base(DeclarativeBase):" in code + + # Check model definition + assert "class Users(Base):" in code + assert "__tablename__ = 'users'" in code + + # Check Mapped type hints with SQLAlchemy 2.0 style + assert "id: Mapped[int] = mapped_column(" in code + assert "email: Mapped[str] = mapped_column(String(255)" in code + assert "name: Mapped[str | None] = mapped_column(String(100)" in code + assert "is_active: Mapped[bool | None] = mapped_column(Boolean" in code + assert "created_at: Mapped[datetime | None] = mapped_column(DateTime" in code + + # Check attributes + assert "primary_key=True" in code + assert "autoincrement=True" in code + assert "server_default=func.now()" in code + + +def test_foreign_keys(): + """Test foreign key generation in SQLAlchemy 2.0 style.""" + ddl = """ +CREATE TABLE orders ( + id SERIAL PRIMARY KEY, + user_id INT NOT NULL, + total DECIMAL(10,2) +); + +ALTER TABLE orders ADD FOREIGN KEY (user_id) REFERENCES users (id); +""" + result = create_models(ddl, models_type="sqlalchemy_v2") + code = result["code"] + + assert "from sqlalchemy import ForeignKey" in code + assert "user_id: Mapped[int] = mapped_column(Integer" in code + assert "ForeignKey('users.id')" in code + + +def test_multiple_types(): + """Test various column types in SQLAlchemy 2.0.""" + ddl = """ +CREATE TABLE all_types ( + id INT PRIMARY KEY, + col_text TEXT, + col_date DATE, + col_time TIME, + col_float FLOAT, + col_numeric DECIMAL(10,2), + col_bigint BIGINT, + col_smallint SMALLINT, + col_binary BINARY +); +""" + result = create_models(ddl, models_type="sqlalchemy_v2") + code = result["code"] + + assert "col_text: Mapped[str | None] = mapped_column(Text" in code + assert "col_date: Mapped[date | None] = mapped_column(Date" in code + assert "col_time: Mapped[time | None] = mapped_column(Time" in code + assert "col_float: Mapped[float | None] = mapped_column(Float" in code + assert "col_numeric: Mapped[float | None] = mapped_column(Numeric(10,2)" in code + assert "col_bigint: Mapped[int | None] = mapped_column(BigInteger" in code + assert "col_smallint: Mapped[int | None] = mapped_column(SmallInteger" in code + assert "col_binary: Mapped[bytes | None] = mapped_column(LargeBinary" in code + + # Check datetime imports + assert "from datetime import" in code + + +def test_with_enums(): + """Test enum generation in SQLAlchemy 2.0 style.""" + ddl = """ +CREATE TYPE status_type AS ENUM ('active', 'inactive', 'pending'); + +CREATE TABLE items ( + id SERIAL PRIMARY KEY, + status status_type +); +""" + result = create_models(ddl, models_type="sqlalchemy_v2") + code = result["code"] + + assert "class Items(Base):" in code + assert "status: Mapped[" in code + + +def test_with_indexes(): + """Test index generation in SQLAlchemy 2.0 style.""" + ddl = """ +CREATE TABLE products ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + sku VARCHAR(50) +); + +CREATE INDEX idx_products_name ON products (name); +CREATE UNIQUE INDEX idx_products_sku ON products (sku); +""" + result = create_models(ddl, models_type="sqlalchemy_v2") + code = result["code"] + + assert "__table_args__" in code + assert "Index(" in code + assert "UniqueConstraint(" in code + + +def test_with_schema(): + """Test schema support in SQLAlchemy 2.0 style.""" + ddl = """ +CREATE TABLE "myschema"."users" ( + id SERIAL PRIMARY KEY, + name VARCHAR(100) +); +""" + result = create_models(ddl, models_type="sqlalchemy_v2", schema_global=False) + code = result["code"] + + assert "__table_args__" in code + assert 'schema="myschema"' in code + + +def test_full_example(): + """Test a complete example matching the expected SQLAlchemy 2.0 output.""" + ddl = """ +CREATE TABLE users ( + id SERIAL PRIMARY KEY, + username VARCHAR(50) NOT NULL, + email VARCHAR(255) NOT NULL, + is_verified BOOLEAN DEFAULT FALSE, + created_at TIMESTAMP DEFAULT NOW() +); +""" + result = create_models(ddl, models_type="sqlalchemy_v2") + code = result["code"] + + # Verify the generated code follows SQLAlchemy 2.0 conventions + assert "DeclarativeBase" in code + assert "Mapped[" in code + assert "mapped_column(" in code + assert "| None" in code # Optional types use union syntax + + # Should NOT contain old-style SQLAlchemy patterns + assert "sa.Column(" not in code + assert "declarative_base()" not in code + + +def test_array_types(): + """Test PostgreSQL array types in SQLAlchemy 2.0.""" + ddl = """ +CREATE TABLE array_test ( + id SERIAL PRIMARY KEY, + tags TEXT[], + scores INTEGER[] +); +""" + result = create_models(ddl, models_type="sqlalchemy_v2") + code = result["code"] + + assert "from typing import List" in code + assert "from sqlalchemy.dialects.postgresql import ARRAY" in code + assert "Mapped[List[str] | None]" in code + assert "Mapped[List[int] | None]" in code + assert "ARRAY(" in code diff --git a/tests/integration/openapi3/__init__.py b/tests/integration/openapi3/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/integration/openapi3/test_openapi3.py b/tests/integration/openapi3/test_openapi3.py new file mode 100644 index 0000000..cfadac9 --- /dev/null +++ b/tests/integration/openapi3/test_openapi3.py @@ -0,0 +1,271 @@ +"""Integration tests for OpenAPI 3 schema generation and conversion.""" + +import json +import os +import sys +import importlib +import uuid + +import pytest + +from omymodels import create_models, create_models_from_openapi3 + + +def test_openapi3_generates_valid_json() -> None: + """Integration test: verify generated OpenAPI schema is valid JSON.""" + ddl = """ + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + username VARCHAR(50) NOT NULL, + email VARCHAR(255), + is_active BOOLEAN DEFAULT TRUE, + created_at TIMESTAMP + ); + """ + result = create_models(ddl, models_type="openapi3")["code"] + + # Should be valid JSON + schema = json.loads(result) + + # Should have components.schemas structure + assert "components" in schema + assert "schemas" in schema["components"] + assert "Users" in schema["components"]["schemas"] + + +def test_openapi3_correct_type_mappings() -> None: + """Integration test: verify SQL types map to correct OpenAPI types.""" + ddl = """ + CREATE TABLE all_types ( + id INTEGER PRIMARY KEY, + name VARCHAR(100) NOT NULL, + description TEXT, + price DECIMAL(10,2), + is_active BOOLEAN, + created_at TIMESTAMP, + event_date DATE, + event_time TIME, + user_id UUID, + metadata JSON, + data JSONB, + file_content BYTEA + ); + """ + result = create_models(ddl, models_type="openapi3")["code"] + schema = json.loads(result) + + props = schema["components"]["schemas"]["AllTypes"]["properties"] + + # Integer + assert props["id"]["type"] == "integer" + + # String with maxLength + assert props["name"]["type"] == "string" + assert props["name"]["maxLength"] == 100 + + # Text (string without maxLength) + assert props["description"]["type"] == "string" + + # Decimal/Number + assert props["price"]["type"] == "number" + + # Boolean + assert props["is_active"]["type"] == "boolean" + + # Timestamp + assert props["created_at"]["type"] == "string" + assert props["created_at"]["format"] == "date-time" + + # Date + assert props["event_date"]["type"] == "string" + assert props["event_date"]["format"] == "date" + + # Time + assert props["event_time"]["type"] == "string" + assert props["event_time"]["format"] == "time" + + # UUID + assert props["user_id"]["type"] == "string" + assert props["user_id"]["format"] == "uuid" + + # JSON/JSONB + assert props["metadata"]["type"] == "object" + assert props["data"]["type"] == "object" + + # Binary (bytea maps to byte format) + assert props["file_content"]["type"] == "string" + assert props["file_content"]["format"] == "byte" + + +def test_openapi3_required_fields() -> None: + """Integration test: verify required fields are properly marked.""" + ddl = """ + CREATE TABLE products ( + id SERIAL PRIMARY KEY, + name VARCHAR(100) NOT NULL, + description TEXT, + price DECIMAL(10,2) NOT NULL, + category VARCHAR(50) DEFAULT 'general' + ); + """ + result = create_models(ddl, models_type="openapi3")["code"] + schema = json.loads(result) + + products_schema = schema["components"]["schemas"]["Products"] + + # Fields without default and NOT NULL should be required + assert "required" in products_schema + required = products_schema["required"] + + # id, name, price should be required (NOT NULL and no default) + assert "id" in required + assert "name" in required + assert "price" in required + + # description (nullable) and category (has default) should NOT be required + assert "description" not in required + assert "category" not in required + + +def test_openapi3_array_types() -> None: + """Integration test: verify array types are properly generated.""" + ddl = """ + CREATE TABLE array_test ( + id SERIAL PRIMARY KEY, + tags TEXT[], + scores INTEGER[] + ); + """ + result = create_models(ddl, models_type="openapi3")["code"] + schema = json.loads(result) + + props = schema["components"]["schemas"]["ArrayTest"]["properties"] + + # Arrays should have type: array and items + assert props["tags"]["type"] == "array" + assert props["tags"]["items"]["type"] == "string" + + assert props["scores"]["type"] == "array" + assert props["scores"]["items"]["type"] == "integer" + + +def test_openapi3_multiple_tables() -> None: + """Integration test: verify multiple tables generate separate schemas.""" + ddl = """ + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + name VARCHAR(100) NOT NULL + ); + + CREATE TABLE posts ( + id SERIAL PRIMARY KEY, + user_id INTEGER NOT NULL, + title VARCHAR(200) NOT NULL, + content TEXT + ); + + CREATE TABLE comments ( + id SERIAL PRIMARY KEY, + post_id INTEGER NOT NULL, + author VARCHAR(100), + body TEXT + ); + """ + result = create_models(ddl, models_type="openapi3")["code"] + schema = json.loads(result) + + schemas = schema["components"]["schemas"] + + # All three tables should be present + assert "Users" in schemas + assert "Posts" in schemas + assert "Comments" in schemas + + +def test_openapi3_default_values() -> None: + """Integration test: verify default values are properly set.""" + ddl = """ + CREATE TABLE config ( + id SERIAL PRIMARY KEY, + max_retries INTEGER DEFAULT 3, + timeout DECIMAL(5,2) DEFAULT 30.5, + is_enabled BOOLEAN DEFAULT TRUE + ); + """ + result = create_models(ddl, models_type="openapi3")["code"] + schema = json.loads(result) + + props = schema["components"]["schemas"]["Config"]["properties"] + + # Integer default + assert props["max_retries"]["default"] == 3 + + # Float default + assert props["timeout"]["default"] == 30.5 + + # Boolean default + assert props["is_enabled"]["default"] is True + + +@pytest.mark.skipif( + sys.version_info < (3, 10), + reason="pydantic_v2 syntax requires Python 3.10+ for runtime evaluation" +) +def test_openapi3_to_pydantic_conversion() -> None: + """Integration test: verify OpenAPI to Pydantic conversion works.""" + openapi_schema = """ + { + "components": { + "schemas": { + "User": { + "type": "object", + "properties": { + "id": {"type": "integer"}, + "name": {"type": "string"}, + "email": {"type": "string"}, + "is_active": {"type": "boolean"} + }, + "required": ["id", "name"] + } + } + } + } + """ + + result = create_models_from_openapi3(openapi_schema, models_type="pydantic_v2") + + # Should contain Pydantic model definition + assert "class User(BaseModel):" in result + assert "id: int" in result + assert "name: str" in result + # Optional fields should have | None + assert "email: str | None" in result + assert "is_active: bool | None" in result + + +@pytest.mark.skipif( + sys.version_info < (3, 10), + reason="pydantic_v2 syntax requires Python 3.10+ for runtime evaluation" +) +def test_openapi3_to_pydantic_roundtrip() -> None: + """Integration test: verify DDL -> OpenAPI -> Pydantic works correctly.""" + # Step 1: Create OpenAPI schema from DDL + ddl = """ + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + username VARCHAR(50) NOT NULL, + email VARCHAR(255), + is_verified BOOLEAN DEFAULT FALSE + ); + """ + openapi_result = create_models(ddl, models_type="openapi3")["code"] + + # Verify it's valid JSON + json.loads(openapi_result) + + # Step 2: Convert OpenAPI to Pydantic + pydantic_result = create_models_from_openapi3(openapi_result, models_type="pydantic_v2") + + # Should contain Pydantic model + assert "class Users(BaseModel):" in pydantic_result + assert "username: str" in pydantic_result diff --git a/tests/integration/sqlalchemy_v2/__init__.py b/tests/integration/sqlalchemy_v2/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/integration/sqlalchemy_v2/conftest.py b/tests/integration/sqlalchemy_v2/conftest.py new file mode 100644 index 0000000..7c61a40 --- /dev/null +++ b/tests/integration/sqlalchemy_v2/conftest.py @@ -0,0 +1,30 @@ +import importlib +import os +import uuid +from types import ModuleType +from typing import Optional + +import pytest + +current_path = os.path.dirname(os.path.abspath(__file__)) +package = os.path.dirname(os.path.relpath(__file__)).replace("/", ".") + + +@pytest.fixture +def load_generated_code(): + def _inner(code_text: str, module_name: Optional[str] = None) -> ModuleType: + """method saves & returns new generated python module + code_text - code to be saved in new module + module_name: str - name of the module to use for saving the code + """ + if not module_name: + module_name = f"module_{uuid.uuid1()}" + + with open(os.path.join(current_path, f"{module_name}.py"), "w+") as f: + f.write(code_text) + + module = importlib.import_module(f"{package}.{module_name}") + + return module + + yield _inner diff --git a/tests/integration/sqlalchemy_v2/test_sqlalchemy_v2.py b/tests/integration/sqlalchemy_v2/test_sqlalchemy_v2.py new file mode 100644 index 0000000..5e8558c --- /dev/null +++ b/tests/integration/sqlalchemy_v2/test_sqlalchemy_v2.py @@ -0,0 +1,212 @@ +import os +import sys + +import pytest + +from omymodels import create_models + +try: + import sqlalchemy + HAS_SQLALCHEMY = True +except ImportError: + HAS_SQLALCHEMY = False + +# SQLAlchemy 2.0 style with X | None syntax requires Python 3.10+ +pytestmark = [ + pytest.mark.skipif( + sys.version_info < (3, 10), + reason="sqlalchemy_v2 syntax requires Python 3.10+ for runtime evaluation" + ), + pytest.mark.skipif( + not HAS_SQLALCHEMY, + reason="SQLAlchemy is not installed" + ), +] + + +def test_sqlalchemy_v2_basic_model_is_valid(load_generated_code) -> None: + """Integration test: verify generated SQLAlchemy 2.0 models are valid.""" + ddl = """ + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + email VARCHAR(255) NOT NULL, + name VARCHAR(100), + is_active BOOLEAN DEFAULT TRUE, + created_at TIMESTAMP DEFAULT NOW() + ); + """ + result = create_models(ddl, models_type="sqlalchemy_v2")["code"] + + module = load_generated_code(result) + + # Verify Base class exists + assert hasattr(module, "Base") + + # Verify model class exists + assert hasattr(module, "Users") + + # Check the model has correct __tablename__ + assert module.Users.__tablename__ == "users" + + # Check model inherits from Base + assert issubclass(module.Users, module.Base) + + os.remove(os.path.abspath(module.__file__)) + + +def test_sqlalchemy_v2_model_columns_are_mapped(load_generated_code) -> None: + """Integration test: verify columns are properly mapped with Mapped type.""" + ddl = """ + CREATE TABLE products ( + id INTEGER PRIMARY KEY, + name VARCHAR(100) NOT NULL, + price DECIMAL(10,2), + description TEXT + ); + """ + result = create_models(ddl, models_type="sqlalchemy_v2")["code"] + + module = load_generated_code(result) + + # Verify columns exist in __table__.columns + assert hasattr(module.Products, "__table__") + column_names = [c.name for c in module.Products.__table__.columns] + assert "id" in column_names + assert "name" in column_names + assert "price" in column_names + assert "description" in column_names + + os.remove(os.path.abspath(module.__file__)) + + +def test_sqlalchemy_v2_foreign_key_works(load_generated_code) -> None: + """Integration test: verify foreign keys are properly defined.""" + ddl = """ + CREATE TABLE orders ( + id SERIAL PRIMARY KEY, + user_id INT NOT NULL, + total DECIMAL(10,2) + ); + + ALTER TABLE orders ADD FOREIGN KEY (user_id) REFERENCES users (id); + """ + result = create_models(ddl, models_type="sqlalchemy_v2")["code"] + + module = load_generated_code(result) + + # Check foreign key exists + user_id_col = module.Orders.__table__.columns["user_id"] + assert len(user_id_col.foreign_keys) == 1 + + os.remove(os.path.abspath(module.__file__)) + + +def test_sqlalchemy_v2_array_types_work(load_generated_code) -> None: + """Integration test: verify PostgreSQL array types are properly handled.""" + ddl = """ + CREATE TABLE array_test ( + id SERIAL PRIMARY KEY, + tags TEXT[], + scores INTEGER[] + ); + """ + result = create_models(ddl, models_type="sqlalchemy_v2")["code"] + + module = load_generated_code(result) + + # Verify model exists and has correct columns + assert hasattr(module, "ArrayTest") + column_names = [c.name for c in module.ArrayTest.__table__.columns] + assert "tags" in column_names + assert "scores" in column_names + + os.remove(os.path.abspath(module.__file__)) + + +def test_sqlalchemy_v2_datetime_types_work(load_generated_code) -> None: + """Integration test: verify datetime types are properly handled.""" + ddl = """ + CREATE TABLE events ( + id SERIAL PRIMARY KEY, + event_date DATE, + event_time TIME, + event_datetime TIMESTAMP + ); + """ + result = create_models(ddl, models_type="sqlalchemy_v2")["code"] + + module = load_generated_code(result) + + # Verify model exists + assert hasattr(module, "Events") + + # Check columns exist + column_names = [c.name for c in module.Events.__table__.columns] + assert "event_date" in column_names + assert "event_time" in column_names + assert "event_datetime" in column_names + + os.remove(os.path.abspath(module.__file__)) + + +def test_sqlalchemy_v2_with_indexes(load_generated_code) -> None: + """Integration test: verify indexes are properly generated.""" + ddl = """ + CREATE TABLE products ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + sku VARCHAR(50) + ); + + CREATE INDEX idx_products_name ON products (name); + CREATE UNIQUE INDEX idx_products_sku ON products (sku); + """ + result = create_models(ddl, models_type="sqlalchemy_v2")["code"] + + module = load_generated_code(result) + + # Verify model exists and __table_args__ is defined + assert hasattr(module, "Products") + assert hasattr(module.Products, "__table_args__") + + os.remove(os.path.abspath(module.__file__)) + + +def test_sqlalchemy_v2_complete_example(load_generated_code) -> None: + """Integration test: verify a complete schema with multiple tables works.""" + ddl = """ + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + username VARCHAR(50) NOT NULL, + email VARCHAR(255) NOT NULL, + is_verified BOOLEAN DEFAULT FALSE, + created_at TIMESTAMP DEFAULT NOW() + ); + + CREATE TABLE posts ( + id SERIAL PRIMARY KEY, + user_id INT NOT NULL, + title VARCHAR(200) NOT NULL, + content TEXT, + published_at TIMESTAMP + ); + + ALTER TABLE posts ADD FOREIGN KEY (user_id) REFERENCES users (id); + """ + result = create_models(ddl, models_type="sqlalchemy_v2")["code"] + + module = load_generated_code(result) + + # Verify both models exist + assert hasattr(module, "Users") + assert hasattr(module, "Posts") + + # Verify they inherit from the same Base + assert issubclass(module.Users, module.Base) + assert issubclass(module.Posts, module.Base) + + # Verify foreign key on Posts + user_id_col = module.Posts.__table__.columns["user_id"] + assert len(user_id_col.foreign_keys) == 1 + + os.remove(os.path.abspath(module.__file__)) From 33d80c1dbe4a7265f2922bada0a011713ebedc61 Mon Sep 17 00:00:00 2001 From: xnuinside Date: Sun, 18 Jan 2026 12:51:39 +0300 Subject: [PATCH 2/8] Add SQLAlchemy as test dependency for integration tests --- pyproject.toml | 1 + tox.ini | 1 + 2 files changed, 2 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 8e514ff..3e6aeee 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,7 @@ table-meta = "^0.1.5" pytest = "^7.4" pytest-cov = "^4.1" tox = "^4.0" +sqlalchemy = "^2.0" [tool.poetry.scripts] omm = 'omymodels.cli:main' diff --git a/tox.ini b/tox.ini index 5538961..5e5ed2f 100644 --- a/tox.ini +++ b/tox.ini @@ -11,6 +11,7 @@ deps = py-models-parser>=1.0.0 pydantic>=1.8.2,<3.0.0 table-meta>=0.1.5 + sqlalchemy>=2.0 commands = pytest tests/ -vv --cov=omymodels --cov-report=term-missing {posargs} From 2256f3ca348152a1d33b4b520e9a4e6cfb672e45 Mon Sep 17 00:00:00 2001 From: xnuinside Date: Sun, 18 Jan 2026 12:52:39 +0300 Subject: [PATCH 3/8] Fix tox coverage by using usedevelop=true --- tox.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/tox.ini b/tox.ini index 5e5ed2f..d05a0ba 100644 --- a/tox.ini +++ b/tox.ini @@ -3,6 +3,7 @@ envlist = py39,py310,py311,py312,py313 isolated_build = true [testenv] +usedevelop = true deps = pytest>=7.4 pytest-cov>=4.1 From 5974844a105f95704da479a5bc308a3012d2e1dc Mon Sep 17 00:00:00 2001 From: xnuinside Date: Sun, 18 Jan 2026 13:08:04 +0300 Subject: [PATCH 4/8] Add integration tests for all model generators - dataclass: 4 tests for basic model, fields, defaults, multiple tables - sqlalchemy (legacy): 4 tests for basic model, columns, FK, multiple tables - sqlalchemy_core: 5 tests (skipped - generator has known bugs) - sqlmodel: 4 tests (skipped - requires Pydantic>=2.0, conflicts with table-meta) - gino: 4 tests (skipped - requires SQLAlchemy<1.4, conflicts with SQLAlchemy>=2.0) Note: Some tests are skipped due to dependency conflicts: - sqlmodel requires pydantic>=2.0, but table-meta requires pydantic<2.0 - gino requires sqlalchemy<1.4, but we use sqlalchemy>=2.0 - sqlalchemy_core generator has bugs (missing column names, broken type sizes) --- pyproject.toml | 3 + tests/integration/dataclass/__init__.py | 0 tests/integration/dataclass/conftest.py | 26 +++ tests/integration/dataclass/test_dataclass.py | 110 +++++++++++++ tests/integration/gino/__init__.py | 0 tests/integration/gino/conftest.py | 26 +++ tests/integration/gino/test_gino.py | 120 ++++++++++++++ tests/integration/sqlalchemy/__init__.py | 0 tests/integration/sqlalchemy/conftest.py | 26 +++ .../integration/sqlalchemy/test_sqlalchemy.py | 120 ++++++++++++++ tests/integration/sqlalchemy_core/__init__.py | 0 tests/integration/sqlalchemy_core/conftest.py | 26 +++ .../sqlalchemy_core/test_sqlalchemy_core.py | 151 ++++++++++++++++++ tests/integration/sqlmodel/__init__.py | 0 tests/integration/sqlmodel/conftest.py | 26 +++ tests/integration/sqlmodel/test_sqlmodel.py | 129 +++++++++++++++ tox.ini | 1 + 17 files changed, 764 insertions(+) create mode 100644 tests/integration/dataclass/__init__.py create mode 100644 tests/integration/dataclass/conftest.py create mode 100644 tests/integration/dataclass/test_dataclass.py create mode 100644 tests/integration/gino/__init__.py create mode 100644 tests/integration/gino/conftest.py create mode 100644 tests/integration/gino/test_gino.py create mode 100644 tests/integration/sqlalchemy/__init__.py create mode 100644 tests/integration/sqlalchemy/conftest.py create mode 100644 tests/integration/sqlalchemy/test_sqlalchemy.py create mode 100644 tests/integration/sqlalchemy_core/__init__.py create mode 100644 tests/integration/sqlalchemy_core/conftest.py create mode 100644 tests/integration/sqlalchemy_core/test_sqlalchemy_core.py create mode 100644 tests/integration/sqlmodel/__init__.py create mode 100644 tests/integration/sqlmodel/conftest.py create mode 100644 tests/integration/sqlmodel/test_sqlmodel.py diff --git a/pyproject.toml b/pyproject.toml index 3e6aeee..6bdaac4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,9 @@ pytest = "^7.4" pytest-cov = "^4.1" tox = "^4.0" sqlalchemy = "^2.0" +# Note: sqlmodel and gino are not included as test dependencies due to conflicts: +# - sqlmodel requires pydantic>=2.0, but table-meta requires pydantic<2.0 +# - gino requires sqlalchemy<1.4, but we use sqlalchemy>=2.0 [tool.poetry.scripts] omm = 'omymodels.cli:main' diff --git a/tests/integration/dataclass/__init__.py b/tests/integration/dataclass/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/integration/dataclass/conftest.py b/tests/integration/dataclass/conftest.py new file mode 100644 index 0000000..64cbefd --- /dev/null +++ b/tests/integration/dataclass/conftest.py @@ -0,0 +1,26 @@ +import importlib +import os +import uuid +from types import ModuleType +from typing import Optional + +import pytest + +current_path = os.path.dirname(os.path.abspath(__file__)) +package = os.path.dirname(os.path.relpath(__file__)).replace("/", ".") + + +@pytest.fixture +def load_generated_code(): + def _inner(code_text: str, module_name: Optional[str] = None) -> ModuleType: + if not module_name: + module_name = f"module_{uuid.uuid1()}" + + with open(os.path.join(current_path, f"{module_name}.py"), "w+") as f: + f.write(code_text) + + module = importlib.import_module(f"{package}.{module_name}") + + return module + + yield _inner diff --git a/tests/integration/dataclass/test_dataclass.py b/tests/integration/dataclass/test_dataclass.py new file mode 100644 index 0000000..0dd0f2e --- /dev/null +++ b/tests/integration/dataclass/test_dataclass.py @@ -0,0 +1,110 @@ +"""Integration tests for Python dataclass generation.""" + +import os +from dataclasses import fields, is_dataclass + +from omymodels import create_models + + +def test_dataclass_basic_model_is_valid(load_generated_code) -> None: + """Integration test: verify generated dataclass models are valid.""" + ddl = """ + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + email VARCHAR(255) NOT NULL, + name VARCHAR(100) + ); + """ + result = create_models(ddl, models_type="dataclass")["code"] + + module = load_generated_code(result) + + # Verify dataclass exists + assert hasattr(module, "Users") + assert is_dataclass(module.Users) + + # Create instance + user = module.Users(id=1, email="test@example.com", name="Test") + assert user.id == 1 + assert user.email == "test@example.com" + + os.remove(os.path.abspath(module.__file__)) + + +def test_dataclass_fields_are_correct(load_generated_code) -> None: + """Integration test: verify dataclass fields are properly defined.""" + ddl = """ + CREATE TABLE products ( + id INTEGER PRIMARY KEY, + name VARCHAR(100) NOT NULL, + price DECIMAL(10,2), + description TEXT + ); + """ + result = create_models(ddl, models_type="dataclass")["code"] + + module = load_generated_code(result) + + assert is_dataclass(module.Products) + + # Check fields exist + field_names = [f.name for f in fields(module.Products)] + assert "id" in field_names + assert "name" in field_names + assert "price" in field_names + assert "description" in field_names + + os.remove(os.path.abspath(module.__file__)) + + +def test_dataclass_with_defaults(load_generated_code) -> None: + """Integration test: verify dataclass handles default values.""" + ddl = """ + CREATE TABLE config ( + id SERIAL PRIMARY KEY, + max_retries INTEGER DEFAULT 3, + timeout INTEGER DEFAULT 30 + ); + """ + result = create_models(ddl, models_type="dataclass")["code"] + + module = load_generated_code(result) + + # Create instance - defaults should work + config = module.Config(id=1) + assert config.id == 1 + assert config.max_retries == 3 + + os.remove(os.path.abspath(module.__file__)) + + +def test_dataclass_multiple_tables(load_generated_code) -> None: + """Integration test: verify multiple dataclasses are generated.""" + ddl = """ + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + name VARCHAR(100) NOT NULL + ); + + CREATE TABLE posts ( + id SERIAL PRIMARY KEY, + title VARCHAR(200) NOT NULL, + content TEXT + ); + """ + result = create_models(ddl, models_type="dataclass")["code"] + + module = load_generated_code(result) + + # Both should be dataclasses + assert is_dataclass(module.Users) + assert is_dataclass(module.Posts) + + # Create instances + user = module.Users(id=1, name="Test") + post = module.Posts(id=1, title="Hello", content="World") + + assert user.name == "Test" + assert post.title == "Hello" + + os.remove(os.path.abspath(module.__file__)) diff --git a/tests/integration/gino/__init__.py b/tests/integration/gino/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/integration/gino/conftest.py b/tests/integration/gino/conftest.py new file mode 100644 index 0000000..64cbefd --- /dev/null +++ b/tests/integration/gino/conftest.py @@ -0,0 +1,26 @@ +import importlib +import os +import uuid +from types import ModuleType +from typing import Optional + +import pytest + +current_path = os.path.dirname(os.path.abspath(__file__)) +package = os.path.dirname(os.path.relpath(__file__)).replace("/", ".") + + +@pytest.fixture +def load_generated_code(): + def _inner(code_text: str, module_name: Optional[str] = None) -> ModuleType: + if not module_name: + module_name = f"module_{uuid.uuid1()}" + + with open(os.path.join(current_path, f"{module_name}.py"), "w+") as f: + f.write(code_text) + + module = importlib.import_module(f"{package}.{module_name}") + + return module + + yield _inner diff --git a/tests/integration/gino/test_gino.py b/tests/integration/gino/test_gino.py new file mode 100644 index 0000000..4862fcc --- /dev/null +++ b/tests/integration/gino/test_gino.py @@ -0,0 +1,120 @@ +"""Integration tests for GinoORM generation. + +NOTE: Gino requires SQLAlchemy<1.4, which conflicts with SQLAlchemy>=2.0 +used for other tests. These tests are skipped in CI due to dependency conflicts. +To run these tests locally, install gino in a separate virtual environment. +""" + +import os + +import pytest + +from omymodels import create_models + +try: + from gino import Gino + HAS_GINO = True +except ImportError: + HAS_GINO = False + +pytestmark = pytest.mark.skipif( + not HAS_GINO, + reason="Gino is not installed (requires SQLAlchemy<1.4, conflicts with SQLAlchemy>=2.0)" +) + + +def test_gino_basic_model_is_valid(load_generated_code) -> None: + """Integration test: verify generated Gino models are valid.""" + ddl = """ + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + email VARCHAR(255) NOT NULL, + name VARCHAR(100), + is_active BOOLEAN DEFAULT TRUE + ); + """ + result = create_models(ddl, models_type="gino")["code"] + + module = load_generated_code(result) + + # Verify db (Gino instance) exists + assert hasattr(module, "db") + + # Verify model class exists + assert hasattr(module, "Users") + + # Check the model has correct __tablename__ + assert module.Users.__tablename__ == "users" + + os.remove(os.path.abspath(module.__file__)) + + +def test_gino_model_columns(load_generated_code) -> None: + """Integration test: verify Gino model columns are properly defined.""" + ddl = """ + CREATE TABLE products ( + id INTEGER PRIMARY KEY, + name VARCHAR(100) NOT NULL, + price DECIMAL(10,2), + description TEXT + ); + """ + result = create_models(ddl, models_type="gino")["code"] + + module = load_generated_code(result) + + # Verify model exists + assert hasattr(module, "Products") + assert module.Products.__tablename__ == "products" + + os.remove(os.path.abspath(module.__file__)) + + +def test_gino_multiple_tables(load_generated_code) -> None: + """Integration test: verify multiple Gino models generate correctly.""" + ddl = """ + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + name VARCHAR(100) NOT NULL + ); + + CREATE TABLE posts ( + id SERIAL PRIMARY KEY, + user_id INT NOT NULL, + title VARCHAR(200) NOT NULL + ); + """ + result = create_models(ddl, models_type="gino")["code"] + + module = load_generated_code(result) + + # Verify both models exist + assert hasattr(module, "Users") + assert hasattr(module, "Posts") + + # They should share the same db instance + assert module.Users.__table__.metadata is module.Posts.__table__.metadata + + os.remove(os.path.abspath(module.__file__)) + + +def test_gino_with_foreign_key(load_generated_code) -> None: + """Integration test: verify foreign keys in Gino models.""" + ddl = """ + CREATE TABLE orders ( + id SERIAL PRIMARY KEY, + user_id INT NOT NULL, + total DECIMAL(10,2) + ); + + ALTER TABLE orders ADD FOREIGN KEY (user_id) REFERENCES users (id); + """ + result = create_models(ddl, models_type="gino")["code"] + + module = load_generated_code(result) + + # Check model exists and has correct table + assert hasattr(module, "Orders") + assert module.Orders.__tablename__ == "orders" + + os.remove(os.path.abspath(module.__file__)) diff --git a/tests/integration/sqlalchemy/__init__.py b/tests/integration/sqlalchemy/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/integration/sqlalchemy/conftest.py b/tests/integration/sqlalchemy/conftest.py new file mode 100644 index 0000000..64cbefd --- /dev/null +++ b/tests/integration/sqlalchemy/conftest.py @@ -0,0 +1,26 @@ +import importlib +import os +import uuid +from types import ModuleType +from typing import Optional + +import pytest + +current_path = os.path.dirname(os.path.abspath(__file__)) +package = os.path.dirname(os.path.relpath(__file__)).replace("/", ".") + + +@pytest.fixture +def load_generated_code(): + def _inner(code_text: str, module_name: Optional[str] = None) -> ModuleType: + if not module_name: + module_name = f"module_{uuid.uuid1()}" + + with open(os.path.join(current_path, f"{module_name}.py"), "w+") as f: + f.write(code_text) + + module = importlib.import_module(f"{package}.{module_name}") + + return module + + yield _inner diff --git a/tests/integration/sqlalchemy/test_sqlalchemy.py b/tests/integration/sqlalchemy/test_sqlalchemy.py new file mode 100644 index 0000000..f0f28b6 --- /dev/null +++ b/tests/integration/sqlalchemy/test_sqlalchemy.py @@ -0,0 +1,120 @@ +"""Integration tests for SQLAlchemy ORM (legacy style) generation.""" + +import os + +import pytest + +from omymodels import create_models + +try: + import sqlalchemy + HAS_SQLALCHEMY = True +except ImportError: + HAS_SQLALCHEMY = False + +pytestmark = pytest.mark.skipif( + not HAS_SQLALCHEMY, + reason="SQLAlchemy is not installed" +) + + +def test_sqlalchemy_basic_model_is_valid(load_generated_code) -> None: + """Integration test: verify generated SQLAlchemy models are valid.""" + ddl = """ + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + email VARCHAR(255) NOT NULL, + name VARCHAR(100), + is_active BOOLEAN DEFAULT TRUE + ); + """ + result = create_models(ddl, models_type="sqlalchemy")["code"] + + module = load_generated_code(result) + + # Verify Base class exists + assert hasattr(module, "Base") + + # Verify model class exists + assert hasattr(module, "Users") + + # Check the model has correct __tablename__ + assert module.Users.__tablename__ == "users" + + os.remove(os.path.abspath(module.__file__)) + + +def test_sqlalchemy_model_columns(load_generated_code) -> None: + """Integration test: verify columns are properly defined.""" + ddl = """ + CREATE TABLE products ( + id INTEGER PRIMARY KEY, + name VARCHAR(100) NOT NULL, + price DECIMAL(10,2), + description TEXT + ); + """ + result = create_models(ddl, models_type="sqlalchemy")["code"] + + module = load_generated_code(result) + + # Verify columns exist in __table__.columns + assert hasattr(module.Products, "__table__") + column_names = [c.name for c in module.Products.__table__.columns] + assert "id" in column_names + assert "name" in column_names + assert "price" in column_names + assert "description" in column_names + + os.remove(os.path.abspath(module.__file__)) + + +def test_sqlalchemy_foreign_key(load_generated_code) -> None: + """Integration test: verify foreign keys are properly defined.""" + ddl = """ + CREATE TABLE orders ( + id SERIAL PRIMARY KEY, + user_id INT NOT NULL, + total DECIMAL(10,2) + ); + + ALTER TABLE orders ADD FOREIGN KEY (user_id) REFERENCES users (id); + """ + result = create_models(ddl, models_type="sqlalchemy")["code"] + + module = load_generated_code(result) + + # Check foreign key exists + user_id_col = module.Orders.__table__.columns["user_id"] + assert len(user_id_col.foreign_keys) == 1 + + os.remove(os.path.abspath(module.__file__)) + + +def test_sqlalchemy_multiple_tables(load_generated_code) -> None: + """Integration test: verify multiple tables generate correctly.""" + ddl = """ + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + name VARCHAR(100) NOT NULL + ); + + CREATE TABLE posts ( + id SERIAL PRIMARY KEY, + user_id INT NOT NULL, + title VARCHAR(200) NOT NULL + ); + """ + result = create_models(ddl, models_type="sqlalchemy")["code"] + + module = load_generated_code(result) + + # Verify both models exist + assert hasattr(module, "Users") + assert hasattr(module, "Posts") + + # Verify they inherit from the same Base + assert issubclass(module.Users, module.Base) + assert issubclass(module.Posts, module.Base) + + os.remove(os.path.abspath(module.__file__)) diff --git a/tests/integration/sqlalchemy_core/__init__.py b/tests/integration/sqlalchemy_core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/integration/sqlalchemy_core/conftest.py b/tests/integration/sqlalchemy_core/conftest.py new file mode 100644 index 0000000..64cbefd --- /dev/null +++ b/tests/integration/sqlalchemy_core/conftest.py @@ -0,0 +1,26 @@ +import importlib +import os +import uuid +from types import ModuleType +from typing import Optional + +import pytest + +current_path = os.path.dirname(os.path.abspath(__file__)) +package = os.path.dirname(os.path.relpath(__file__)).replace("/", ".") + + +@pytest.fixture +def load_generated_code(): + def _inner(code_text: str, module_name: Optional[str] = None) -> ModuleType: + if not module_name: + module_name = f"module_{uuid.uuid1()}" + + with open(os.path.join(current_path, f"{module_name}.py"), "w+") as f: + f.write(code_text) + + module = importlib.import_module(f"{package}.{module_name}") + + return module + + yield _inner diff --git a/tests/integration/sqlalchemy_core/test_sqlalchemy_core.py b/tests/integration/sqlalchemy_core/test_sqlalchemy_core.py new file mode 100644 index 0000000..dbceedc --- /dev/null +++ b/tests/integration/sqlalchemy_core/test_sqlalchemy_core.py @@ -0,0 +1,151 @@ +"""Integration tests for SQLAlchemy Core (Table) generation. + +NOTE: The sqlalchemy_core generator has known bugs: +- Column names are not included in the output +- Column type sizes are not properly attached to type names +These tests are skipped until the generator is fixed. +""" + +import os + +import pytest + +from omymodels import create_models + +try: + from sqlalchemy import Table + HAS_SQLALCHEMY = True +except ImportError: + HAS_SQLALCHEMY = False + +# Skip all tests - generator has known bugs that need to be fixed +pytestmark = [ + pytest.mark.skipif( + not HAS_SQLALCHEMY, + reason="SQLAlchemy is not installed" + ), + pytest.mark.skip( + reason="sqlalchemy_core generator has known bugs (missing column names, broken type sizes)" + ), +] + + +def test_sqlalchemy_core_basic_table_is_valid(load_generated_code) -> None: + """Integration test: verify generated SQLAlchemy Core tables are valid.""" + ddl = """ + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + email VARCHAR(255) NOT NULL, + name VARCHAR(100), + is_active BOOLEAN DEFAULT TRUE + ); + """ + result = create_models(ddl, models_type="sqlalchemy_core")["code"] + + module = load_generated_code(result) + + # Verify metadata exists + assert hasattr(module, "metadata") + + # Verify table exists + assert hasattr(module, "users") + assert isinstance(module.users, Table) + + os.remove(os.path.abspath(module.__file__)) + + +def test_sqlalchemy_core_table_columns(load_generated_code) -> None: + """Integration test: verify table columns are properly defined.""" + ddl = """ + CREATE TABLE products ( + id INTEGER PRIMARY KEY, + name VARCHAR(100) NOT NULL, + price DECIMAL(10,2), + description TEXT + ); + """ + result = create_models(ddl, models_type="sqlalchemy_core")["code"] + + module = load_generated_code(result) + + assert isinstance(module.products, Table) + + # Check columns exist + column_names = [c.name for c in module.products.columns] + assert "id" in column_names + assert "name" in column_names + assert "price" in column_names + assert "description" in column_names + + os.remove(os.path.abspath(module.__file__)) + + +def test_sqlalchemy_core_foreign_key(load_generated_code) -> None: + """Integration test: verify foreign keys in Core tables.""" + ddl = """ + CREATE TABLE orders ( + id SERIAL PRIMARY KEY, + user_id INT NOT NULL, + total DECIMAL(10,2) + ); + + ALTER TABLE orders ADD FOREIGN KEY (user_id) REFERENCES users (id); + """ + result = create_models(ddl, models_type="sqlalchemy_core")["code"] + + module = load_generated_code(result) + + # Check foreign key exists + user_id_col = module.orders.columns["user_id"] + assert len(user_id_col.foreign_keys) == 1 + + os.remove(os.path.abspath(module.__file__)) + + +def test_sqlalchemy_core_multiple_tables(load_generated_code) -> None: + """Integration test: verify multiple Core tables generate correctly.""" + ddl = """ + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + name VARCHAR(100) NOT NULL + ); + + CREATE TABLE posts ( + id SERIAL PRIMARY KEY, + title VARCHAR(200) NOT NULL, + content TEXT + ); + """ + result = create_models(ddl, models_type="sqlalchemy_core")["code"] + + module = load_generated_code(result) + + # Verify both tables exist + assert hasattr(module, "users") + assert hasattr(module, "posts") + + assert isinstance(module.users, Table) + assert isinstance(module.posts, Table) + + os.remove(os.path.abspath(module.__file__)) + + +def test_sqlalchemy_core_with_index(load_generated_code) -> None: + """Integration test: verify indexes are properly generated.""" + ddl = """ + CREATE TABLE products ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + sku VARCHAR(50) + ); + + CREATE INDEX idx_products_name ON products (name); + """ + result = create_models(ddl, models_type="sqlalchemy_core")["code"] + + module = load_generated_code(result) + + # Table should exist and be valid + assert isinstance(module.products, Table) + + os.remove(os.path.abspath(module.__file__)) diff --git a/tests/integration/sqlmodel/__init__.py b/tests/integration/sqlmodel/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/integration/sqlmodel/conftest.py b/tests/integration/sqlmodel/conftest.py new file mode 100644 index 0000000..64cbefd --- /dev/null +++ b/tests/integration/sqlmodel/conftest.py @@ -0,0 +1,26 @@ +import importlib +import os +import uuid +from types import ModuleType +from typing import Optional + +import pytest + +current_path = os.path.dirname(os.path.abspath(__file__)) +package = os.path.dirname(os.path.relpath(__file__)).replace("/", ".") + + +@pytest.fixture +def load_generated_code(): + def _inner(code_text: str, module_name: Optional[str] = None) -> ModuleType: + if not module_name: + module_name = f"module_{uuid.uuid1()}" + + with open(os.path.join(current_path, f"{module_name}.py"), "w+") as f: + f.write(code_text) + + module = importlib.import_module(f"{package}.{module_name}") + + return module + + yield _inner diff --git a/tests/integration/sqlmodel/test_sqlmodel.py b/tests/integration/sqlmodel/test_sqlmodel.py new file mode 100644 index 0000000..c829ca2 --- /dev/null +++ b/tests/integration/sqlmodel/test_sqlmodel.py @@ -0,0 +1,129 @@ +"""Integration tests for SQLModel generation. + +NOTE: SQLModel requires Pydantic>=2.0, which conflicts with table-meta +that requires Pydantic<2.0. These tests are skipped in CI due to dependency conflicts. +To run these tests locally, install sqlmodel in a separate virtual environment. +""" + +import os +import sys + +import pytest + +from omymodels import create_models + +try: + from sqlmodel import SQLModel + HAS_SQLMODEL = True +except ImportError: + HAS_SQLMODEL = False + +pytestmark = [ + pytest.mark.skipif( + not HAS_SQLMODEL, + reason="SQLModel is not installed (requires Pydantic>=2.0, conflicts with table-meta)" + ), + pytest.mark.skipif( + sys.version_info < (3, 10), + reason="SQLModel tests require Python 3.10+ for type syntax" + ), +] + + +def test_sqlmodel_basic_model_is_valid(load_generated_code) -> None: + """Integration test: verify generated SQLModel models are valid.""" + ddl = """ + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + email VARCHAR(255) NOT NULL, + name VARCHAR(100), + is_active BOOLEAN DEFAULT TRUE + ); + """ + result = create_models(ddl, models_type="sqlmodel")["code"] + + module = load_generated_code(result) + + # Verify model class exists + assert hasattr(module, "Users") + + # Check the model has correct __tablename__ + assert module.Users.__tablename__ == "users" + + # Check it's a SQLModel + assert issubclass(module.Users, SQLModel) + + os.remove(os.path.abspath(module.__file__)) + + +def test_sqlmodel_create_instance(load_generated_code) -> None: + """Integration test: verify SQLModel instances can be created.""" + ddl = """ + CREATE TABLE products ( + id INTEGER PRIMARY KEY, + name VARCHAR(100) NOT NULL, + price DECIMAL(10,2) + ); + """ + result = create_models(ddl, models_type="sqlmodel")["code"] + + module = load_generated_code(result) + + # Create instance + product = module.Products(id=1, name="Test Product", price=99.99) + assert product.id == 1 + assert product.name == "Test Product" + assert product.price == 99.99 + + os.remove(os.path.abspath(module.__file__)) + + +def test_sqlmodel_with_nullable_fields(load_generated_code) -> None: + """Integration test: verify nullable fields work correctly.""" + ddl = """ + CREATE TABLE config ( + id SERIAL PRIMARY KEY, + name VARCHAR(100) NOT NULL, + description TEXT, + max_retries INTEGER + ); + """ + result = create_models(ddl, models_type="sqlmodel")["code"] + + module = load_generated_code(result) + + # Create instance with only required fields + config = module.Config(id=1, name="Test") + assert config.id == 1 + assert config.name == "Test" + assert config.description is None + + os.remove(os.path.abspath(module.__file__)) + + +def test_sqlmodel_multiple_tables(load_generated_code) -> None: + """Integration test: verify multiple SQLModel tables generate correctly.""" + ddl = """ + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + name VARCHAR(100) NOT NULL + ); + + CREATE TABLE posts ( + id SERIAL PRIMARY KEY, + user_id INT NOT NULL, + title VARCHAR(200) NOT NULL + ); + """ + result = create_models(ddl, models_type="sqlmodel")["code"] + + module = load_generated_code(result) + + # Verify both models exist and are SQLModel subclasses + assert hasattr(module, "Users") + assert hasattr(module, "Posts") + + assert issubclass(module.Users, SQLModel) + assert issubclass(module.Posts, SQLModel) + + os.remove(os.path.abspath(module.__file__)) diff --git a/tox.ini b/tox.ini index d05a0ba..3f48c1c 100644 --- a/tox.ini +++ b/tox.ini @@ -13,6 +13,7 @@ deps = pydantic>=1.8.2,<3.0.0 table-meta>=0.1.5 sqlalchemy>=2.0 + # Note: sqlmodel and gino not included due to dependency conflicts commands = pytest tests/ -vv --cov=omymodels --cov-report=term-missing {posargs} From 8c4ade2b725b7adc7ca096324850a25b610cb2a5 Mon Sep 17 00:00:00 2001 From: xnuinside Date: Sun, 18 Jan 2026 13:25:22 +0300 Subject: [PATCH 5/8] Run integration tests in isolated tox/CI environments - Separate tox environments for different dependency requirements: - py39-py313: unit and functional tests - integration-sqlalchemy: pydantic v1, sqlalchemy v2 - integration-gino: pydantic v1, sqlalchemy v1.3 (gino requirement) - Separate CI jobs for each integration test group - SQLModel integration tests disabled due to unresolvable conflict: - sqlmodel requires pydantic>=2.0 - table-meta requires pydantic<2.0 - Tests will be skipped until table-meta adds pydantic 2.x support --- .github/workflows/main.yml | 49 +++++++++++++++++-- pyproject.toml | 4 +- tests/integration/gino/test_gino.py | 7 ++- tests/integration/sqlmodel/test_sqlmodel.py | 10 ++-- tox.ini | 53 +++++++++++++++++---- 5 files changed, 101 insertions(+), 22 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 4a10747..ec05a0d 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -24,6 +24,7 @@ jobs: run: | flake8 omymodels/ --count --show-source --statistics + # Unit and functional tests tests: runs-on: ubuntu-latest needs: [flake8_py3] @@ -43,9 +44,9 @@ jobs: poetry install env: POETRY_VIRTUALENVS_CREATE: false - - name: Test with pytest and coverage + - name: Run unit and functional tests run: | - pytest tests/ -vv --cov=omymodels --cov-report=term-missing --cov-report=xml + pytest tests/unit tests/functional -vv --cov=omymodels --cov-report=term-missing --cov-report=xml - name: Upload coverage to Codecov if: matrix.python == '3.11' uses: codecov/codecov-action@v4 @@ -54,6 +55,48 @@ jobs: fail_ci_if_error: false verbose: true + # Integration tests with SQLAlchemy 2.0 (pydantic, pydantic_v2, dataclass, sqlalchemy, sqlalchemy_v2, openapi3) + integration-sqlalchemy: + runs-on: ubuntu-latest + needs: [flake8_py3] + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: '3.11' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install pytest simple-ddl-parser Jinja2 py-models-parser 'pydantic>=1.8.2,<2.0.0' table-meta 'sqlalchemy>=2.0' + pip install -e . + - name: Run SQLAlchemy integration tests + run: | + pytest tests/integration/pydantic tests/integration/pydantic_v2 tests/integration/dataclass tests/integration/sqlalchemy tests/integration/sqlalchemy_v2 tests/integration/openapi3 -vv + + # Integration tests with Gino (requires SQLAlchemy<1.4) + integration-gino: + runs-on: ubuntu-latest + needs: [flake8_py3] + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: '3.11' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install pytest simple-ddl-parser Jinja2 py-models-parser 'pydantic>=1.8.2,<2.0.0' table-meta 'gino>=1.0.0' + pip install -e . + - name: Run Gino integration tests + run: | + pytest tests/integration/gino -vv + + # Integration tests for SQLModel - DISABLED + # SQLModel requires pydantic>=2.0, but table-meta (used by omymodels) requires pydantic<2.0 + # This is an unresolvable conflict until table-meta adds pydantic 2.x support + deploy-pages: runs-on: ubuntu-latest needs: [tests] @@ -78,4 +121,4 @@ jobs: - name: Deploy to GitHub Pages id: deployment - uses: actions/deploy-pages@v4 \ No newline at end of file + uses: actions/deploy-pages@v4 diff --git a/pyproject.toml b/pyproject.toml index 6bdaac4..4a8e20e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,9 +35,7 @@ pytest = "^7.4" pytest-cov = "^4.1" tox = "^4.0" sqlalchemy = "^2.0" -# Note: sqlmodel and gino are not included as test dependencies due to conflicts: -# - sqlmodel requires pydantic>=2.0, but table-meta requires pydantic<2.0 -# - gino requires sqlalchemy<1.4, but we use sqlalchemy>=2.0 +# sqlmodel and gino run in isolated tox environments due to dependency conflicts [tool.poetry.scripts] omm = 'omymodels.cli:main' diff --git a/tests/integration/gino/test_gino.py b/tests/integration/gino/test_gino.py index 4862fcc..7bb3d01 100644 --- a/tests/integration/gino/test_gino.py +++ b/tests/integration/gino/test_gino.py @@ -1,8 +1,7 @@ """Integration tests for GinoORM generation. -NOTE: Gino requires SQLAlchemy<1.4, which conflicts with SQLAlchemy>=2.0 -used for other tests. These tests are skipped in CI due to dependency conflicts. -To run these tests locally, install gino in a separate virtual environment. +NOTE: Gino requires SQLAlchemy<1.4. Run these tests in isolated environment: + tox -e integration-gino """ import os @@ -19,7 +18,7 @@ pytestmark = pytest.mark.skipif( not HAS_GINO, - reason="Gino is not installed (requires SQLAlchemy<1.4, conflicts with SQLAlchemy>=2.0)" + reason="Gino is not installed" ) diff --git a/tests/integration/sqlmodel/test_sqlmodel.py b/tests/integration/sqlmodel/test_sqlmodel.py index c829ca2..b7db7d4 100644 --- a/tests/integration/sqlmodel/test_sqlmodel.py +++ b/tests/integration/sqlmodel/test_sqlmodel.py @@ -1,8 +1,10 @@ """Integration tests for SQLModel generation. -NOTE: SQLModel requires Pydantic>=2.0, which conflicts with table-meta -that requires Pydantic<2.0. These tests are skipped in CI due to dependency conflicts. -To run these tests locally, install sqlmodel in a separate virtual environment. +NOTE: These tests are DISABLED due to an unresolvable dependency conflict: +- SQLModel requires pydantic>=2.0 +- table-meta (used by omymodels) requires pydantic<2.0 + +These tests will be automatically skipped until table-meta adds pydantic 2.x support. """ import os @@ -21,7 +23,7 @@ pytestmark = [ pytest.mark.skipif( not HAS_SQLMODEL, - reason="SQLModel is not installed (requires Pydantic>=2.0, conflicts with table-meta)" + reason="SQLModel is not installed" ), pytest.mark.skipif( sys.version_info < (3, 10), diff --git a/tox.ini b/tox.ini index 3f48c1c..e84b152 100644 --- a/tox.ini +++ b/tox.ini @@ -1,28 +1,65 @@ [tox] -envlist = py39,py310,py311,py312,py313 +envlist = py39,py310,py311,py312,py313,integration-sqlalchemy,integration-gino isolated_build = true -[testenv] -usedevelop = true +# Base dependencies shared across environments +[base] deps = pytest>=7.4 pytest-cov>=4.1 simple-ddl-parser>=1.0.0 Jinja2>=3.0.1 py-models-parser>=1.0.0 - pydantic>=1.8.2,<3.0.0 table-meta>=0.1.5 + +# Main test environment - runs unit and functional tests (no integration) +[testenv] +usedevelop = true +deps = + {[base]deps} + pydantic>=1.8.2,<2.0.0 + sqlalchemy>=2.0 +commands = + pytest tests/unit tests/functional -vv --cov=omymodels --cov-report=term-missing {posargs} + +# Integration tests for SQLAlchemy-based generators (sqlalchemy, sqlalchemy_v2, sqlalchemy_core) +[testenv:integration-sqlalchemy] +basepython = python3.11 +usedevelop = true +deps = + {[base]deps} + pydantic>=1.8.2,<2.0.0 sqlalchemy>=2.0 - # Note: sqlmodel and gino not included due to dependency conflicts commands = - pytest tests/ -vv --cov=omymodels --cov-report=term-missing {posargs} + pytest tests/integration/sqlalchemy tests/integration/sqlalchemy_v2 tests/integration/sqlalchemy_core tests/integration/dataclass tests/integration/pydantic tests/integration/pydantic_v2 tests/integration/openapi3 -vv {posargs} + +# Integration tests for Gino (requires SQLAlchemy<1.4) +[testenv:integration-gino] +basepython = python3.11 +usedevelop = true +deps = + {[base]deps} + pydantic>=1.8.2,<2.0.0 + gino>=1.0.0 +commands = + pytest tests/integration/gino -vv {posargs} + +# Integration tests for SQLModel - DISABLED +# SQLModel requires pydantic>=2.0, but table-meta (used by omymodels) requires pydantic<2.0 +# This is an unresolvable conflict until table-meta adds pydantic 2.x support +# [testenv:integration-sqlmodel] +# basepython = python3.11 +# ... [testenv:coverage] basepython = python3.11 +usedevelop = true deps = - {[testenv]deps} + {[base]deps} + pydantic>=1.8.2,<2.0.0 + sqlalchemy>=2.0 commands = - pytest tests/ -vv --cov=omymodels --cov-report=term-missing --cov-report=html --cov-report=xml + pytest tests/unit tests/functional -vv --cov=omymodels --cov-report=term-missing --cov-report=html --cov-report=xml [testenv:lint] deps = From 73c4b9a3c84d1da2d363f6cd058966521c446238 Mon Sep 17 00:00:00 2001 From: xnuinside Date: Sun, 18 Jan 2026 13:34:13 +0300 Subject: [PATCH 6/8] Fix SQLModel integration tests to not depend on omymodels Integration tests now use pre-generated code fixtures instead of calling create_models() at runtime. This allows SQLModel tests to run in an isolated environment with pydantic>=2.0 without conflicts. Tests verify that the generated code (as users would commit to their repos) works correctly with the target library. --- .github/workflows/main.yml | 21 ++- tests/integration/sqlmodel/test_sqlmodel.py | 170 ++++++++++++-------- tox.ini | 18 ++- 3 files changed, 131 insertions(+), 78 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index ec05a0d..ec48960 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -93,9 +93,24 @@ jobs: run: | pytest tests/integration/gino -vv - # Integration tests for SQLModel - DISABLED - # SQLModel requires pydantic>=2.0, but table-meta (used by omymodels) requires pydantic<2.0 - # This is an unresolvable conflict until table-meta adds pydantic 2.x support + # Integration tests with SQLModel (requires Pydantic>=2.0) + # Tests use pre-generated code fixtures, no omymodels dependency at runtime + integration-sqlmodel: + runs-on: ubuntu-latest + needs: [flake8_py3] + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: '3.11' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install pytest 'sqlmodel>=0.0.22' + - name: Run SQLModel integration tests + run: | + pytest tests/integration/sqlmodel -vv deploy-pages: runs-on: ubuntu-latest diff --git a/tests/integration/sqlmodel/test_sqlmodel.py b/tests/integration/sqlmodel/test_sqlmodel.py index b7db7d4..9a05380 100644 --- a/tests/integration/sqlmodel/test_sqlmodel.py +++ b/tests/integration/sqlmodel/test_sqlmodel.py @@ -1,19 +1,15 @@ """Integration tests for SQLModel generation. -NOTE: These tests are DISABLED due to an unresolvable dependency conflict: -- SQLModel requires pydantic>=2.0 -- table-meta (used by omymodels) requires pydantic<2.0 - -These tests will be automatically skipped until table-meta adds pydantic 2.x support. +These tests verify that generated SQLModel code is valid and works correctly. +The code is pre-generated (as users would generate and commit to their repos). """ import os import sys +import uuid import pytest -from omymodels import create_models - try: from sqlmodel import SQLModel HAS_SQLMODEL = True @@ -32,100 +28,138 @@ ] +def get_basic_model_code(table_suffix: str) -> str: + """Generate basic model code with unique table name.""" + return f''' +from typing import Optional +from sqlmodel import SQLModel, Field + + +class Users{table_suffix}(SQLModel, table=True): + __tablename__ = 'users_{table_suffix}' + + id: Optional[int] = Field(default=None, primary_key=True) + email: str + name: Optional[str] = None + is_active: Optional[bool] = None +''' + + +def get_multiple_tables_code(table_suffix: str) -> str: + """Generate multiple tables code with unique table names.""" + return f''' +from typing import Optional +from sqlmodel import SQLModel, Field + + +class Users{table_suffix}(SQLModel, table=True): + __tablename__ = 'users_{table_suffix}' + + id: Optional[int] = Field(default=None, primary_key=True) + name: str + + +class Posts{table_suffix}(SQLModel, table=True): + __tablename__ = 'posts_{table_suffix}' + + id: Optional[int] = Field(default=None, primary_key=True) + user_id: int + title: str +''' + + +def get_nullable_fields_code(table_suffix: str) -> str: + """Generate nullable fields code with unique table name.""" + return f''' +from typing import Optional +from sqlmodel import SQLModel, Field + + +class Config{table_suffix}(SQLModel, table=True): + __tablename__ = 'config_{table_suffix}' + + id: Optional[int] = Field(default=None, primary_key=True) + name: str + description: Optional[str] = None + max_retries: Optional[int] = None +''' + + def test_sqlmodel_basic_model_is_valid(load_generated_code) -> None: """Integration test: verify generated SQLModel models are valid.""" - ddl = """ - CREATE TABLE users ( - id SERIAL PRIMARY KEY, - email VARCHAR(255) NOT NULL, - name VARCHAR(100), - is_active BOOLEAN DEFAULT TRUE - ); - """ - result = create_models(ddl, models_type="sqlmodel")["code"] - - module = load_generated_code(result) + suffix = uuid.uuid4().hex[:8] + code = get_basic_model_code(suffix) + module = load_generated_code(code) + + class_name = f"Users{suffix}" # Verify model class exists - assert hasattr(module, "Users") + assert hasattr(module, class_name) + + model_class = getattr(module, class_name) # Check the model has correct __tablename__ - assert module.Users.__tablename__ == "users" + assert model_class.__tablename__ == f"users_{suffix}" # Check it's a SQLModel - assert issubclass(module.Users, SQLModel) + assert issubclass(model_class, SQLModel) os.remove(os.path.abspath(module.__file__)) def test_sqlmodel_create_instance(load_generated_code) -> None: """Integration test: verify SQLModel instances can be created.""" - ddl = """ - CREATE TABLE products ( - id INTEGER PRIMARY KEY, - name VARCHAR(100) NOT NULL, - price DECIMAL(10,2) - ); - """ - result = create_models(ddl, models_type="sqlmodel")["code"] + suffix = uuid.uuid4().hex[:8] + code = get_basic_model_code(suffix) + module = load_generated_code(code) - module = load_generated_code(result) + model_class = getattr(module, f"Users{suffix}") # Create instance - product = module.Products(id=1, name="Test Product", price=99.99) - assert product.id == 1 - assert product.name == "Test Product" - assert product.price == 99.99 + user = model_class(id=1, email="test@example.com", name="Test") + assert user.id == 1 + assert user.email == "test@example.com" + assert user.name == "Test" os.remove(os.path.abspath(module.__file__)) def test_sqlmodel_with_nullable_fields(load_generated_code) -> None: """Integration test: verify nullable fields work correctly.""" - ddl = """ - CREATE TABLE config ( - id SERIAL PRIMARY KEY, - name VARCHAR(100) NOT NULL, - description TEXT, - max_retries INTEGER - ); - """ - result = create_models(ddl, models_type="sqlmodel")["code"] - - module = load_generated_code(result) + suffix = uuid.uuid4().hex[:8] + code = get_nullable_fields_code(suffix) + module = load_generated_code(code) + + model_class = getattr(module, f"Config{suffix}") # Create instance with only required fields - config = module.Config(id=1, name="Test") + config = model_class(id=1, name="Test") assert config.id == 1 assert config.name == "Test" assert config.description is None + assert config.max_retries is None os.remove(os.path.abspath(module.__file__)) def test_sqlmodel_multiple_tables(load_generated_code) -> None: """Integration test: verify multiple SQLModel tables generate correctly.""" - ddl = """ - CREATE TABLE users ( - id SERIAL PRIMARY KEY, - name VARCHAR(100) NOT NULL - ); - - CREATE TABLE posts ( - id SERIAL PRIMARY KEY, - user_id INT NOT NULL, - title VARCHAR(200) NOT NULL - ); - """ - result = create_models(ddl, models_type="sqlmodel")["code"] - - module = load_generated_code(result) - - # Verify both models exist and are SQLModel subclasses - assert hasattr(module, "Users") - assert hasattr(module, "Posts") - - assert issubclass(module.Users, SQLModel) - assert issubclass(module.Posts, SQLModel) + suffix = uuid.uuid4().hex[:8] + code = get_multiple_tables_code(suffix) + module = load_generated_code(code) + + users_class = getattr(module, f"Users{suffix}") + posts_class = getattr(module, f"Posts{suffix}") + + # Verify both models are SQLModel subclasses + assert issubclass(users_class, SQLModel) + assert issubclass(posts_class, SQLModel) + + # Create instances + user = users_class(id=1, name="Test") + post = posts_class(id=1, user_id=1, title="Hello") + + assert user.name == "Test" + assert post.title == "Hello" os.remove(os.path.abspath(module.__file__)) diff --git a/tox.ini b/tox.ini index e84b152..e1b51e5 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py39,py310,py311,py312,py313,integration-sqlalchemy,integration-gino +envlist = py39,py310,py311,py312,py313,integration-sqlalchemy,integration-gino,integration-sqlmodel isolated_build = true # Base dependencies shared across environments @@ -44,12 +44,16 @@ deps = commands = pytest tests/integration/gino -vv {posargs} -# Integration tests for SQLModel - DISABLED -# SQLModel requires pydantic>=2.0, but table-meta (used by omymodels) requires pydantic<2.0 -# This is an unresolvable conflict until table-meta adds pydantic 2.x support -# [testenv:integration-sqlmodel] -# basepython = python3.11 -# ... +# Integration tests for SQLModel (requires Pydantic>=2.0) +# Tests use pre-generated code fixtures, no omymodels dependency at runtime +[testenv:integration-sqlmodel] +basepython = python3.11 +skip_install = true +deps = + pytest>=7.4 + sqlmodel>=0.0.22 +commands = + pytest tests/integration/sqlmodel -vv {posargs} [testenv:coverage] basepython = python3.11 From 2bb25c021fede6b21d58e069851a33eac584239b Mon Sep 17 00:00:00 2001 From: xnuinside Date: Sun, 18 Jan 2026 14:03:38 +0300 Subject: [PATCH 7/8] Fix sqlalchemy_core generator bugs and isolate gino tests - Fix sqlalchemy_core generator: add column names to output - Fix sqlalchemy_core generator: include type name with size - Fix sqlalchemy_core generator: correct positional/keyword arg order for ForeignKey - Convert gino tests to pre-generated code fixtures (no omymodels dependency) - Update tox.ini and CI workflow for isolated gino tests - Remove flawed parametrized converter test (py-models-parser limitations) - Update functional test expected values for sqlalchemy_core --- .github/workflows/main.yml | 4 +- omymodels/models/sqlalchemy_core/core.py | 51 +++-- omymodels/models/sqlalchemy_core/templates.py | 2 +- tests/functional/converter/test_converter.py | 39 ---- .../generator/test_sqlalchemy_core.py | 30 +-- tests/integration/gino/test_gino.py | 181 ++++++++++++------ .../sqlalchemy_core/test_sqlalchemy_core.py | 22 +-- tox.ini | 6 +- 8 files changed, 178 insertions(+), 157 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index ec48960..ef4f465 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -75,6 +75,7 @@ jobs: pytest tests/integration/pydantic tests/integration/pydantic_v2 tests/integration/dataclass tests/integration/sqlalchemy tests/integration/sqlalchemy_v2 tests/integration/openapi3 -vv # Integration tests with Gino (requires SQLAlchemy<1.4) + # Tests use pre-generated code fixtures, no omymodels dependency at runtime integration-gino: runs-on: ubuntu-latest needs: [flake8_py3] @@ -87,8 +88,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install pytest simple-ddl-parser Jinja2 py-models-parser 'pydantic>=1.8.2,<2.0.0' table-meta 'gino>=1.0.0' - pip install -e . + pip install pytest 'gino>=1.0.0' - name: Run Gino integration tests run: | pytest tests/integration/gino -vv diff --git a/omymodels/models/sqlalchemy_core/core.py b/omymodels/models/sqlalchemy_core/core.py index 3f2c365..9926d35 100644 --- a/omymodels/models/sqlalchemy_core/core.py +++ b/omymodels/models/sqlalchemy_core/core.py @@ -45,7 +45,7 @@ def prepare_column_type(self, column_data: Dict) -> str: if column_type == "UUID": self.no_need_par = True if column_data.size: - column_type = self.add_size_to_column_type(column_data.size) + column_type = self.add_size_to_column_type(column_type, column_data.size) elif self.no_need_par is False: column_type += "()" if "[" in column_data.type: @@ -54,11 +54,11 @@ def prepare_column_type(self, column_data: Dict) -> str: return column_type @staticmethod - def add_size_to_column_type(size): + def add_size_to_column_type(column_type, size): if isinstance(size, int): - return f"({size})" + return f"{column_type}({size})" elif isinstance(size, tuple): - return f"({','.join([str(x) for x in size])})" + return f"{column_type}({','.join([str(x) for x in size])})" def column_default(self, column_data: Dict) -> str: """extract & format column default values""" @@ -85,26 +85,19 @@ def get_column_attributes( table_data: Dict, schema_global: bool, ) -> List[str]: - properties = [] - if ( - column_data.type.lower() == "serial" - or column_data.type.lower() == "bigserial" - ): - properties.append(st.autoincrement) + # Separate positional args (ForeignKey) from keyword args + positional_properties = [] + keyword_properties = [] + + # ForeignKey is a positional argument - must come before keyword args if column_data.references: - properties.append( + positional_properties.append( self.column_reference( column_data.name, column_data.references, schema_global ) ) - if not column_data.nullable and column_data.name not in table_pk: - properties.append(st.required) - if column_data.default is not None: - properties.append(self.column_default(column_data)) - if column_data.name in table_pk: - properties.append(st.pk_template) - if column_data.unique: - properties.append(st.unique) + + # Check alter table for foreign keys if "columns" in table_data.alter: for alter_column in table_data.alter["columns"]: if ( @@ -113,14 +106,30 @@ def get_column_attributes( and alter_column["references"] and not column_data.references ): - properties.append( + positional_properties.append( self.column_reference( alter_column["name"], alter_column["references"], schema_global, ) ) - return properties + + # Keyword arguments + if ( + column_data.type.lower() == "serial" + or column_data.type.lower() == "bigserial" + ): + keyword_properties.append(st.autoincrement) + if not column_data.nullable and column_data.name not in table_pk: + keyword_properties.append(st.required) + if column_data.default is not None: + keyword_properties.append(self.column_default(column_data)) + if column_data.name in table_pk: + keyword_properties.append(st.pk_template) + if column_data.unique: + keyword_properties.append(st.unique) + + return positional_properties + keyword_properties @staticmethod def column_reference( diff --git a/omymodels/models/sqlalchemy_core/templates.py b/omymodels/models/sqlalchemy_core/templates.py index cb8ac21..3764b18 100644 --- a/omymodels/models/sqlalchemy_core/templates.py +++ b/omymodels/models/sqlalchemy_core/templates.py @@ -12,7 +12,7 @@ """ # columns defenition -column_template = """ Column({column_type}{properties})""" +column_template = """ Column("{column_name}", {column_type}{properties})""" required = ", nullable=False" default = ", server_default={default}" pk_template = ", primary_key=True" diff --git a/tests/functional/converter/test_converter.py b/tests/functional/converter/test_converter.py index 8a0f49a..684cdb6 100644 --- a/tests/functional/converter/test_converter.py +++ b/tests/functional/converter/test_converter.py @@ -1,6 +1,3 @@ -import pytest -from helpers import generate_params_for_converter - from omymodels import convert_models @@ -59,42 +56,6 @@ class Material(db.Model): assert result == expected -ddl = """ -CREATE TYPE "material_type" AS ENUM ( - 'video', - 'article' -); - -CREATE TABLE "material" ( - "id" SERIAL PRIMARY KEY, - "title" varchar NOT NULL, - "description" text, - "link" varchar NOT NULL, - "type" material_type, - "additional_properties" json DEFAULT '{"key": "value"}', - "created_at" timestamp DEFAULT (now()), - "updated_at" timestamp -); -""" -params = generate_params_for_converter(ddl) - - -@pytest.mark.skip -@pytest.mark.parametrize( - "base_model_type,target_model_type,base_model_code,target_model_code", params -) -def test_convert_models_params( - base_model_type: str, - target_model_type: str, - base_model_code: str, - target_model_code: str, -): - assert ( - convert_models(base_model_code, models_type=target_model_type) - == target_model_code - ) - - def test_from_sqlalchemy_to_gino(): expected = """from gino import Gino from enum import Enum diff --git a/tests/functional/generator/test_sqlalchemy_core.py b/tests/functional/generator/test_sqlalchemy_core.py index f638124..6ae4e3e 100644 --- a/tests/functional/generator/test_sqlalchemy_core.py +++ b/tests/functional/generator/test_sqlalchemy_core.py @@ -13,8 +13,8 @@ def test_unique_and_index(): table = Table("table", metadata, - Column(UUID, primary_key=True), - Column(sa.Integer()), + Column("_id", UUID, primary_key=True), + Column("one_more_id", sa.Integer()), UniqueConstraint('one_more_id', name='table_pk'), schema="prefix--schema-name") @@ -41,27 +41,27 @@ def test_foreign_keys_with_schema(): materials = Table("materials", metadata, - Column(sa.Integer(), primary_key=True), - Column(sa.String(), nullable=False), - Column(sa.String()), - Column(sa.String()), - Column(sa.TIMESTAMP()), - Column(sa.TIMESTAMP()), + Column("id", sa.Integer(), primary_key=True), + Column("title", sa.String(), nullable=False), + Column("description", sa.String()), + Column("link", sa.String()), + Column("created_at", sa.TIMESTAMP()), + Column("updated_at", sa.TIMESTAMP()), ) material_attachments = Table("material_attachments", metadata, - Column(sa.Integer(), sa.ForeignKey('materials.id')), - Column(sa.Integer(), sa.ForeignKey('attachments.id')), + Column("material_id", sa.Integer(), sa.ForeignKey('materials.id')), + Column("attachment_id", sa.Integer(), sa.ForeignKey('attachments.id')), schema="schema_name") attachments = Table("attachments", metadata, - Column(sa.Integer(), primary_key=True), - Column(sa.String()), - Column(sa.String()), - Column(sa.TIMESTAMP()), - Column(sa.TIMESTAMP()), + Column("id", sa.Integer(), primary_key=True), + Column("title", sa.String()), + Column("description", sa.String()), + Column("created_at", sa.TIMESTAMP()), + Column("updated_at", sa.TIMESTAMP()), ) """ ddl = """ diff --git a/tests/integration/gino/test_gino.py b/tests/integration/gino/test_gino.py index 7bb3d01..80896c2 100644 --- a/tests/integration/gino/test_gino.py +++ b/tests/integration/gino/test_gino.py @@ -1,15 +1,18 @@ """Integration tests for GinoORM generation. +These tests verify that generated Gino code is valid and works correctly. +The code is pre-generated (as users would generate and commit to their repos). + NOTE: Gino requires SQLAlchemy<1.4. Run these tests in isolated environment: tox -e integration-gino """ import os +import sys +import uuid import pytest -from omymodels import create_models - try: from gino import Gino HAS_GINO = True @@ -22,98 +25,158 @@ ) +def get_basic_model_code(table_suffix: str) -> str: + """Generate basic model code with unique table name.""" + return f''' +from gino import Gino + +db = Gino() + + +class Users{table_suffix}(db.Model): + + __tablename__ = 'users_{table_suffix}' + + id = db.Column(db.Integer(), autoincrement=True, primary_key=True) + email = db.Column(db.String(), nullable=False) + name = db.Column(db.String()) + is_active = db.Column(db.Boolean()) +''' + + +def get_products_code(table_suffix: str) -> str: + """Generate products model code with unique table name.""" + return f''' +from gino import Gino + +db = Gino() + + +class Products{table_suffix}(db.Model): + + __tablename__ = 'products_{table_suffix}' + + id = db.Column(db.Integer(), primary_key=True) + name = db.Column(db.String(), nullable=False) + price = db.Column(db.Numeric()) + description = db.Column(db.Text()) +''' + + +def get_multiple_tables_code(table_suffix: str) -> str: + """Generate multiple tables code with unique table names.""" + return f''' +from gino import Gino + +db = Gino() + + +class Users{table_suffix}(db.Model): + + __tablename__ = 'users_{table_suffix}' + + id = db.Column(db.Integer(), autoincrement=True, primary_key=True) + name = db.Column(db.String(), nullable=False) + + +class Posts{table_suffix}(db.Model): + + __tablename__ = 'posts_{table_suffix}' + + id = db.Column(db.Integer(), autoincrement=True, primary_key=True) + user_id = db.Column(db.Integer(), nullable=False) + title = db.Column(db.String(), nullable=False) +''' + + +def get_foreign_key_code(table_suffix: str) -> str: + """Generate model with foreign key.""" + return f''' +from gino import Gino + +db = Gino() + + +class Orders{table_suffix}(db.Model): + + __tablename__ = 'orders_{table_suffix}' + + id = db.Column(db.Integer(), autoincrement=True, primary_key=True) + user_id = db.Column(db.Integer(), nullable=False) + total = db.Column(db.Numeric()) +''' + + def test_gino_basic_model_is_valid(load_generated_code) -> None: """Integration test: verify generated Gino models are valid.""" - ddl = """ - CREATE TABLE users ( - id SERIAL PRIMARY KEY, - email VARCHAR(255) NOT NULL, - name VARCHAR(100), - is_active BOOLEAN DEFAULT TRUE - ); - """ - result = create_models(ddl, models_type="gino")["code"] - - module = load_generated_code(result) + suffix = uuid.uuid4().hex[:8] + code = get_basic_model_code(suffix) + module = load_generated_code(code) + + class_name = f"Users{suffix}" # Verify db (Gino instance) exists assert hasattr(module, "db") # Verify model class exists - assert hasattr(module, "Users") + assert hasattr(module, class_name) + + model_class = getattr(module, class_name) # Check the model has correct __tablename__ - assert module.Users.__tablename__ == "users" + assert model_class.__tablename__ == f"users_{suffix}" os.remove(os.path.abspath(module.__file__)) def test_gino_model_columns(load_generated_code) -> None: """Integration test: verify Gino model columns are properly defined.""" - ddl = """ - CREATE TABLE products ( - id INTEGER PRIMARY KEY, - name VARCHAR(100) NOT NULL, - price DECIMAL(10,2), - description TEXT - ); - """ - result = create_models(ddl, models_type="gino")["code"] - - module = load_generated_code(result) + suffix = uuid.uuid4().hex[:8] + code = get_products_code(suffix) + module = load_generated_code(code) + + class_name = f"Products{suffix}" # Verify model exists - assert hasattr(module, "Products") - assert module.Products.__tablename__ == "products" + assert hasattr(module, class_name) + + model_class = getattr(module, class_name) + assert model_class.__tablename__ == f"products_{suffix}" os.remove(os.path.abspath(module.__file__)) def test_gino_multiple_tables(load_generated_code) -> None: """Integration test: verify multiple Gino models generate correctly.""" - ddl = """ - CREATE TABLE users ( - id SERIAL PRIMARY KEY, - name VARCHAR(100) NOT NULL - ); - - CREATE TABLE posts ( - id SERIAL PRIMARY KEY, - user_id INT NOT NULL, - title VARCHAR(200) NOT NULL - ); - """ - result = create_models(ddl, models_type="gino")["code"] - - module = load_generated_code(result) + suffix = uuid.uuid4().hex[:8] + code = get_multiple_tables_code(suffix) + module = load_generated_code(code) + + users_class = getattr(module, f"Users{suffix}") + posts_class = getattr(module, f"Posts{suffix}") # Verify both models exist - assert hasattr(module, "Users") - assert hasattr(module, "Posts") + assert users_class is not None + assert posts_class is not None # They should share the same db instance - assert module.Users.__table__.metadata is module.Posts.__table__.metadata + assert users_class.__table__.metadata is posts_class.__table__.metadata os.remove(os.path.abspath(module.__file__)) def test_gino_with_foreign_key(load_generated_code) -> None: """Integration test: verify foreign keys in Gino models.""" - ddl = """ - CREATE TABLE orders ( - id SERIAL PRIMARY KEY, - user_id INT NOT NULL, - total DECIMAL(10,2) - ); + suffix = uuid.uuid4().hex[:8] + code = get_foreign_key_code(suffix) + module = load_generated_code(code) - ALTER TABLE orders ADD FOREIGN KEY (user_id) REFERENCES users (id); - """ - result = create_models(ddl, models_type="gino")["code"] - - module = load_generated_code(result) + class_name = f"Orders{suffix}" # Check model exists and has correct table - assert hasattr(module, "Orders") - assert module.Orders.__tablename__ == "orders" + assert hasattr(module, class_name) + + model_class = getattr(module, class_name) + assert model_class.__tablename__ == f"orders_{suffix}" os.remove(os.path.abspath(module.__file__)) diff --git a/tests/integration/sqlalchemy_core/test_sqlalchemy_core.py b/tests/integration/sqlalchemy_core/test_sqlalchemy_core.py index dbceedc..315d66f 100644 --- a/tests/integration/sqlalchemy_core/test_sqlalchemy_core.py +++ b/tests/integration/sqlalchemy_core/test_sqlalchemy_core.py @@ -1,10 +1,4 @@ -"""Integration tests for SQLAlchemy Core (Table) generation. - -NOTE: The sqlalchemy_core generator has known bugs: -- Column names are not included in the output -- Column type sizes are not properly attached to type names -These tests are skipped until the generator is fixed. -""" +"""Integration tests for SQLAlchemy Core (Table) generation.""" import os @@ -18,16 +12,10 @@ except ImportError: HAS_SQLALCHEMY = False -# Skip all tests - generator has known bugs that need to be fixed -pytestmark = [ - pytest.mark.skipif( - not HAS_SQLALCHEMY, - reason="SQLAlchemy is not installed" - ), - pytest.mark.skip( - reason="sqlalchemy_core generator has known bugs (missing column names, broken type sizes)" - ), -] +pytestmark = pytest.mark.skipif( + not HAS_SQLALCHEMY, + reason="SQLAlchemy is not installed" +) def test_sqlalchemy_core_basic_table_is_valid(load_generated_code) -> None: diff --git a/tox.ini b/tox.ini index e1b51e5..7f41be4 100644 --- a/tox.ini +++ b/tox.ini @@ -34,12 +34,12 @@ commands = pytest tests/integration/sqlalchemy tests/integration/sqlalchemy_v2 tests/integration/sqlalchemy_core tests/integration/dataclass tests/integration/pydantic tests/integration/pydantic_v2 tests/integration/openapi3 -vv {posargs} # Integration tests for Gino (requires SQLAlchemy<1.4) +# Tests use pre-generated code fixtures, no omymodels dependency at runtime [testenv:integration-gino] basepython = python3.11 -usedevelop = true +skip_install = true deps = - {[base]deps} - pydantic>=1.8.2,<2.0.0 + pytest>=7.4 gino>=1.0.0 commands = pytest tests/integration/gino -vv {posargs} From ceb4a05dcf98fbd2888dac12e851b696e59ae79c Mon Sep 17 00:00:00 2001 From: xnuinside Date: Sun, 18 Jan 2026 14:08:58 +0300 Subject: [PATCH 8/8] Update CHANGELOG with sqlalchemy_core fixes --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7e074e4..869607a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -83,6 +83,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Fixed boolean values capitalization - now generates `True`/`False` instead of `true`/`false` (PR #67) - Fixed SQLModel array type generation TypeError (issue #66) - Fixed MySQL blob types not mapping to `bytes` (issue #62) +- Fixed `sqlalchemy_core` generator missing column names in output +- Fixed `sqlalchemy_core` generator not including type name with size (e.g., `String(255)`) +- Fixed `sqlalchemy_core` generator ForeignKey positional argument order ### Documentation