Give pydantic a GraphQL-like assembly experience.

Pydantic-resolve

import asyncio
from pydantic import BaseModel
from pydantic_resolve import resolve

class Student(BaseModel):
    name: str
    greet: str = ''

    async def resolve_greet(self):
        await asyncio.sleep(1)
        return f'hello {self.name}'

async def main():
    students = [Student(name='john')]
    results = await resolve(students)
    print(results)

asyncio.run(main())
# [Student(name='john', greet='hello john')]
  • Pydantic-resolve helps you asynchronously and recursively resolve a pydantic object (or dataclass object); a short dataclass sketch follows this list.

  • Combined with aiodataloader, Pydantic-resolve lets you build nested data structures without triggering N+1 queries (see Feature-2 below).

  • Inspired by GraphQL and graphene
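
Since dataclasses are supported as well, here is a minimal sketch of the same pattern applied to a dataclass; the Car/description names are invented for illustration and are not part of the library:

import asyncio
from dataclasses import dataclass
from pydantic_resolve import resolve

@dataclass
class Car:
    name: str
    description: str = ''

    async def resolve_description(self):
        await asyncio.sleep(1)
        return f'{self.name} is a car'

# cars = [Car(name='bmw')]
# results = await resolve(cars)
# expected: [Car(name='bmw', description='bmw is a car')]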

Install

pip install pydantic-resolve

Imports

from pydantic_resolve import (
    Resolver, LoaderDepend,  # schema with DataLoader
    resolve  # simple resolve
)

Feature-1: Resolve asynchronously

from random import random
from time import time
from typing import Optional

class NodeB(BaseModel):  # sibling fields resolve concurrently
    value_1: float = 0
    async def resolve_value_1(self):
        await asyncio.sleep(1)  # sleep 1
        return random()

    value_2: int = 0
    async def resolve_value_2(self):
        await asyncio.sleep(1)  # sleep 1
        return 12

    value_3: int = 0
    async def resolve_value_3(self):
        await asyncio.sleep(1)  # sleep 1
        return 12

class NodeA(BaseModel):
    node_b_1: Optional[NodeB] = None
    def resolve_node_b_1(self):
        return NodeB()

    node_b_2: Optional[NodeB] = None
    def resolve_node_b_2(self):
        return NodeB()

class Root(BaseModel):
    node_a_1: Optional[NodeA] = None
    def resolve_node_a_1(self):
        return NodeA()

    node_a_2: Optional[NodeA] = None
    def resolve_node_a_2(self):
        return NodeA()

async def main():
    t = time()
    root = Root()
    result = await resolve(root)  # <=== simple resolve
    print(result.json())
    print(time() - t)

# output
{
    "node_a_1": {
        "node_b_1": {
            "value_1": 0.7815090210172618,
            "value_2": 12,
            "value_3": 12
        },
        "node_b_2": {
            "value_1": 0.22252007296099774,
            "value_2": 12,
            "value_3": 12
        }
    },
    "node_a_2": {
        "node_b_1": {
            "value_1": 0.30685697832345826,
            "value_2": 12,
            "value_3": 12
        },
        "node_b_2": {
            "value_1": 0.7664967562984117,
            "value_2": 12,
            "value_3": 12
        }
    }
}
# 1.0116631984710693  (total is ~1 second: the resolver coroutines at each level run concurrently)

Feature-2: Integrate with aiodataloader

pydantic_resolve.Resolver handles the lifecycle and injection of loader instances, so you no longer need to manage them with contextvars yourself.
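
Before the full SQLAlchemy walkthrough below, here is a minimal self-contained sketch of the same mechanism; the Author/Book names and the in-memory data are invented for illustration:

from typing import Tuple
from aiodataloader import DataLoader
from pydantic import BaseModel
from pydantic_resolve import Resolver, LoaderDepend

BOOKS = {1: ['book-a', 'book-b'], 2: ['book-c']}  # fake data, illustration only

class BookLoader(DataLoader):
    async def batch_load_fn(self, author_ids):
        # called once with every requested key; must return one result per key, in order
        return [BOOKS.get(author_id, []) for author_id in author_ids]

class AuthorSchema(BaseModel):
    id: int
    books: Tuple[str, ...] = tuple()
    def resolve_books(self, loader=LoaderDepend(BookLoader)):
        return loader.load(self.id)

# authors = [AuthorSchema(id=1), AuthorSchema(id=2)]
# results = await Resolver().resolve(authors)
# -> books for both authors are fetched in a single batch_load_fn call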

  1. Define loaders
from collections import defaultdict
from aiodataloader import DataLoader
from sqlalchemy import select

# The Feedback / Comment ORM models and async_session come from your own application;
# FeedbackSchema / CommentSchema are defined in step 2 below.
class FeedbackLoader(DataLoader):
    async def batch_load_fn(self, comment_ids):
        async with async_session() as session:
            res = await session.execute(select(Feedback).where(Feedback.comment_id.in_(comment_ids)))
            rows = res.scalars().all()
            dct = defaultdict(list)
            for row in rows:
                dct[row.comment_id].append(FeedbackSchema.from_orm(row))
            # return one list per comment_id, in the same order as the keys
            return [dct.get(k, []) for k in comment_ids]


class CommentLoader(DataLoader):
    async def batch_load_fn(self, task_ids):
        async with async_session() as session:
            res = await session.execute(select(Comment).where(Comment.task_id.in_(task_ids)))
            rows = res.scalars().all()

            dct = defaultdict(list)
            for row in rows:
                dct[row.task_id].append(CommentSchema.from_orm(row))
            return [dct.get(k, []) for k in task_ids]
  2. Define schemas
from typing import Tuple

class FeedbackSchema(BaseModel):
    id: int
    comment_id: int
    content: str

    class Config:
        orm_mode = True

class CommentSchema(BaseModel):
    id: int
    task_id: int
    content: str
    feedbacks: Tuple[FeedbackSchema, ...]  = tuple()
    def resolve_feedbacks(self, feedback_loader = LoaderDepend(FeedbackLoader)):  
        # LoaderDepend will manage contextvars for you
        return feedback_loader.load(self.id)

    class Config:
        orm_mode = True

class TaskSchema(BaseModel):
    id: int
    name: str
    comments: Tuple[CommentSchema, ...]  = tuple()
    def resolve_comments(self, comment_loader = LoaderDepend(CommentLoader)):
        return comment_loader.load(self.id)

    class Config:
        orm_mode = True
  3. Resolve it
tasks = (await session.execute(select(Task))).scalars().all()
tasks = [TaskSchema.from_orm(t) for t in tasks]
results = await Resolver().resolve(tasks)  # <=== resolve schema with DataLoaders

# output
[
    {
        'id': 1,
        'name': 'task-1 xyz',
        'comments': [
            {
                'content': 'comment-1 for task 1 (changes)',
                'feedbacks': [
                    {'comment_id': 1, 'content': 'feedback-1 for comment-1 (changes)', 'id': 1},
                    {'comment_id': 1, 'content': 'feedback-2 for comment-1', 'id': 2},
                    {'comment_id': 1, 'content': 'feedback-3 for comment-1', 'id': 3}
                ],
                'id': 1,
                'task_id': 1
            },
            {
                'content': 'comment-2 for task 1',
                'feedbacks': [
                    {'comment_id': 2, 'content': 'test', 'id': 4},
                ],
                'id': 2,
                'task_id': 1
            }
        ]
    }
]
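
The dict output above is what you get once the resolved pydantic objects are serialized; a small hedged example of doing that explicitly (pydantic v1 style, matching the orm_mode config used here):

print([task.dict() for task in results])  # list of plain dicts, as shown above
# or task.json() for a JSON string per task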

For more examples, please explore the examples folder.

Unittest

poetry run python -m unittest  # or
poetry run pytest  # or
poetry run tox

Coverage

poetry run coverage run -m pytest
poetry run coverage report -m
