implement pydantic model data type #779

Merged: 10 commits on Mar 19, 2022
20 changes: 7 additions & 13 deletions .github/workflows/ci-tests.yml
@@ -46,7 +46,7 @@ jobs:
restore-keys: |
${{ runner.os }}-pip-
- name: Install dependencies
run: python -m pip install nox
run: python -m pip install nox pre_commit
- name: Pip info
run: python -m pip list

@@ -58,20 +58,14 @@ jobs:
--python ${{ matrix.python-version }}
--session requirements-${{ matrix.python-version }}

- name: Check isort and black formatting
run: >
nox
-db virtualenv -r
--non-interactive
--session black isort
- name: Sort Imports
run: pre-commit run isort --all-files

- name: Black
run: pre-commit run black --all-files

- name: Pylint
run: >
nox
-db virtualenv -r
--non-interactive
--python ${{ matrix.python-version }}
--session lint-${{ matrix.python-version }}
run: pre-commit run pylint --all-files

- name: Mypy Type Checking
run: >
2 changes: 0 additions & 2 deletions .pre-commit-config.yaml
@@ -48,8 +48,6 @@ repos:
exclude: (^docs/|^scripts)

- repo: https://github.com/pre-commit/mirrors-mypy
# TODO: in mypy 0.900+ one needs to pip install type stubs separately (i.e. typeshed is no longer included)
# TODO: but pre-commit downloads mypy in a different location (~/.cache)
rev: v0.921
hooks:
- id: mypy
3 changes: 2 additions & 1 deletion .pylintrc
@@ -26,4 +26,5 @@ disable=
inconsistent-return-statements,
protected-access,
too-many-ancestors,
too-many-lines
too-many-lines,
too-few-public-methods
2 changes: 1 addition & 1 deletion environment.yml
@@ -48,7 +48,7 @@ dependencies:
- isort >= 5.7.0
- codecov
- mypy <= 0.921
- pylint = 2.11.1
- pylint = 2.12.2
- pytest
- pytest-cov
- pytest-xdist
41 changes: 0 additions & 41 deletions noxfile.py
@@ -16,9 +16,6 @@

nox.options.sessions = (
"requirements",
"black",
"isort",
"lint",
"mypy",
"tests",
"docs",
@@ -271,44 +268,6 @@ def requirements(session: Session) -> None: # pylint:disable=unused-argument
sys.exit(1)


@nox.session(python=DEFAULT_PYTHON)
def black(session: Session) -> None:
"""Check black style."""
install_from_requirements(session, "black")
args = ["--check"] if CI_RUN else session.posargs
session.run(
"black",
f"--line-length={LINE_LENGTH}",
*args,
*SOURCE_PATHS,
)


@nox.session(python=DEFAULT_PYTHON)
def isort(session: Session) -> None:
"""Check isort style."""
install_from_requirements(session, "isort")
args = ["--check-only"] if CI_RUN else session.posargs
session.run(
"isort",
f"--line-length={LINE_LENGTH}",
*args,
*SOURCE_PATHS,
)


@nox.session(python=PYTHON_VERSIONS)
def lint(session: Session) -> None:
"""Lint using pylint."""
install_extras(session, extra="all")
args = session.posargs or SOURCE_PATHS

if session.python == "3.9":
# https://github.com/PyCQA/pylint/issues/776
args = ["--disable=unsubscriptable-object", *args]
session.run("pylint", *args)


@nox.session(python=PYTHON_VERSIONS)
def mypy(session: Session) -> None:
"""Type-check using mypy."""
2 changes: 1 addition & 1 deletion pandera/checks.py
@@ -285,7 +285,7 @@ def statistics(self, statistics):
def _format_groupby_input(
groupby_obj: GroupbyObject,
groups: Optional[List[str]],
) -> Union[Dict[str, Union[pd.Series, pd.DataFrame]]]:
) -> Dict[str, Union[pd.Series, pd.DataFrame]]:
"""Format groupby object into dict of groups to Series or DataFrame.

:param groupby_obj: a pandas groupby object.
61 changes: 60 additions & 1 deletion pandera/engines/pandas_engine.py
@@ -12,11 +12,12 @@
import inspect
import warnings
from enum import Enum
from typing import Any, Dict, Iterable, List, Optional, Union
from typing import Any, Dict, Iterable, List, Optional, Type, Union

import numpy as np
import pandas as pd
from packaging import version
from pydantic import BaseModel, ValidationError

from .. import dtypes, errors
from ..dtypes import immutable
@@ -84,6 +85,8 @@ def try_coerce(self, data_container: PandasObject) -> PandasObject:
try:
return self.coerce(data_container)
except Exception as exc: # pylint:disable=broad-except
if isinstance(exc, errors.ParserError):
raise
raise errors.ParserError(
f"Could not coerce {type(data_container)} data_container "
f"into type {self.type}",
@@ -702,6 +705,62 @@ class Geometry(DataType):
type = gpd.array.GeometryDtype()


###############################################################################
# pydantic
###############################################################################


@Engine.register_dtype
@dtypes.immutable(init=True)
class PydanticModel(DataType):
"""A pydantic model datatype applying to rows in a dataframe."""

type: Type[BaseModel] = dataclasses.field(default=None, init=False) # type: ignore # noqa

# pylint:disable=super-init-not-called
def __init__(self, model: Type[BaseModel]) -> None:
object.__setattr__(self, "type", model)

def coerce(self, data_container: pd.DataFrame) -> pd.DataFrame:
"""Coerce pandas dataframe with pydantic record model."""

# pylint: disable=import-outside-toplevel
from pandera import error_formatters

def _coerce_row(row):
"""
Coerce each row using pydantic model, keeping track of failure
cases.
"""
try:
# pylint: disable=not-callable
row = pd.Series(self.type(**row).dict())
row["failure_cases"] = np.nan
except ValidationError as exc:
row["failure_cases"] = {
k: row[k] for k in (x["loc"][0] for x in exc.errors())
}

return row

coerced_df = data_container.apply(_coerce_row, axis="columns")

# raise a ParserError with failure cases where each case is a
# dictionary containing the failed elements in the pydantic record
if coerced_df["failure_cases"].any():
failure_cases = coerced_df["failure_cases"][
coerced_df["failure_cases"].notna()
].astype(str)
raise errors.ParserError(
f"Could not coerce {type(data_container)} data_container "
f"into type {self.type}",
failure_cases=error_formatters.reshape_failure_cases(
failure_cases, ignore_na=False
),
)
return coerced_df.drop(["failure_cases"], axis="columns")


class PandasDtype(Enum):
# pylint: disable=line-too-long,invalid-name
"""Enumerate all valid pandas data types.
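To make the behavior of the new dtype concrete, here is a minimal sketch of using it directly at the engine level. The `Record` model and the example data are hypothetical and not part of this PR; only `PydanticModel` and its `coerce` method above are.

```python
import pandas as pd
from pydantic import BaseModel

import pandera as pa
from pandera.engines.pandas_engine import PydanticModel


class Record(BaseModel):
    """Hypothetical pydantic model describing a single dataframe row."""

    name: str
    value: int


dtype = PydanticModel(Record)

# Each row is passed to Record(**row), so pydantic handles the coercion.
coerced = dtype.coerce(pd.DataFrame({"name": ["a", "b"], "value": ["1", "2"]}))
print(coerced)

# Rows that fail pydantic validation are collected and raised as a
# ParserError whose failure_cases lists the offending fields per row.
try:
    dtype.coerce(pd.DataFrame({"name": ["a"], "value": ["not-a-number"]}))
except pa.errors.ParserError as exc:
    print(exc.failure_cases)
```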
5 changes: 2 additions & 3 deletions pandera/error_formatters.py
@@ -5,12 +5,11 @@
import pandas as pd

from . import check_utils
from .checks import _CheckBase


def format_generic_error_message(
parent_schema,
check: _CheckBase,
check,
check_index: int,
) -> str:
"""Construct an error message when a check validator fails.
@@ -27,7 +26,7 @@

def format_vectorized_error_message(
parent_schema,
check: _CheckBase,
check,
check_index: int,
reshaped_failure_cases: pd.DataFrame,
) -> str:
1 change: 1 addition & 0 deletions pandera/model.py
@@ -211,6 +211,7 @@ def to_schema(cls) -> DataFrameSchema:
kwargs = {}
if cls.__config__ is not None:
kwargs = {
"dtype": cls.__config__.dtype,
"coerce": cls.__config__.coerce,
"strict": cls.__config__.strict,
"name": cls.__config__.name,
15 changes: 14 additions & 1 deletion pandera/schemas.py
@@ -223,6 +223,13 @@ def __init__(
# set to True in the case that a schema is created by infer_schema.
self._IS_INFERRED = False

# This restriction can be removed once logical types are introduced:
# https://github.com/pandera-dev/pandera/issues/788
if not coerce and isinstance(self.dtype, pandas_engine.PydanticModel):
raise errors.SchemaInitError(
"Specifying a PydanticModel type requires coerce=True."
)

@property
def coerce(self) -> bool:
"""Whether to coerce series to specified type."""
@@ -382,7 +389,7 @@ def _coerce_dtype(self, obj: pd.DataFrame) -> pd.DataFrame:
obj,
(
f"Error while coercing '{self.name}' to type "
f"{self.dtype}: {exc}"
f"{self.dtype}: {exc}\n{exc.failure_cases}"
),
failure_cases=exc.failure_cases,
check=f"coerce_dtype('{self.dtype}')",
@@ -1754,6 +1761,12 @@ def __init__(
# set to True in the case that a schema is created by infer_schema.
self._IS_INFERRED = False

if isinstance(self.dtype, pandas_engine.PydanticModel):
raise errors.SchemaInitError(
"PydanticModel dtype can only be specified as a "
"DataFrameSchema dtype."
)

# the _is_inferred getter and setter methods are not public
@property
def _is_inferred(self):
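The two schema-level restrictions added here can be summarized with a short, hedged sketch (the `Record` model is the same hypothetical one as in the earlier sketch): a `PydanticModel` dtype is only accepted on a `DataFrameSchema` with `coerce=True`, and coercion failures now surface in the `SchemaError` message together with `failure_cases`.

```python
import pandas as pd
from pydantic import BaseModel

import pandera as pa
from pandera.engines.pandas_engine import PydanticModel


class Record(BaseModel):
    """Hypothetical row model (same as in the previous sketch)."""

    name: str
    value: int


# coerce=True is required; coerce=False (or declaring the dtype on a Column,
# Index, or SeriesSchema) raises SchemaInitError per the __init__ checks above.
schema = pa.DataFrameSchema(dtype=PydanticModel(Record), coerce=True)

try:
    schema.validate(pd.DataFrame({"name": ["a"], "value": ["not-a-number"]}))
except pa.errors.SchemaError as exc:
    # The coercion error message now also includes the failure cases.
    print(exc.failure_cases)
```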
5 changes: 5 additions & 0 deletions pandera/typing/config.py
@@ -2,6 +2,7 @@

from typing import Any, Dict, List, Optional, Union

from ..schemas import PandasDtypeInputTypes
from .formats import Format


@@ -11,6 +12,10 @@ class BaseConfig: # pylint:disable=R0903
*new in 0.5.0*
"""

#: datatype of the dataframe. This overrides the data types specified in
#: any of the fields.
dtype: Optional[PandasDtypeInputTypes] = None

name: Optional[str] = None #: name of schema
title: Optional[str] = None #: human-readable label for schema
description: Optional[str] = None #: arbitrary textual description
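For the class-based API, the new `dtype` config option mirrors the `dtype` argument of `DataFrameSchema` and takes precedence over any per-field types. Below is a short sketch of how it might be used, again with a hypothetical `Record` model; the full version lives in the test file further down.

```python
from pydantic import BaseModel

import pandera as pa
from pandera.engines.pandas_engine import PydanticModel


class Record(BaseModel):
    """Hypothetical row model."""

    name: str
    value: int


class RowSchema(pa.SchemaModel):
    """Validates every row against Record via the dataframe-level dtype."""

    class Config:
        dtype = PydanticModel(Record)
        coerce = True  # required when the dtype is a PydanticModel


@pa.check_types
def transform(df: pa.typing.DataFrame[RowSchema]) -> pa.typing.DataFrame[RowSchema]:
    return df
```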
2 changes: 1 addition & 1 deletion requirements-dev.txt
@@ -27,7 +27,7 @@ black >= 22.1.0
isort >= 5.7.0
codecov
mypy <= 0.921
pylint == 2.11.1
pylint == 2.12.2
pytest
pytest-cov
pytest-xdist
87 changes: 87 additions & 0 deletions tests/core/test_pydantic_dtype.py
@@ -0,0 +1,87 @@
"""Unit tests for pydantic datatype."""

import pandas as pd
import pytest
from pydantic import BaseModel

import pandera as pa
from pandera.engines.pandas_engine import PydanticModel


class Record(BaseModel):
"""Pydantic record model."""

name: str
xcoord: int
ycoord: int


class PydanticSchema(pa.SchemaModel):
"""Pandera schema using the pydantic model."""

class Config:
"""Config with dataframe-level data type."""

dtype = PydanticModel(Record)
coerce = True


class PanderaSchema(pa.SchemaModel):
"""Pandera schema that's equivalent to PydanticSchema."""

name: pa.typing.Series[str]
xcoord: pa.typing.Series[int]
ycoord: pa.typing.Series[int]


def test_pydantic_model():
"""Test that pydantic model correctly validates data."""

@pa.check_types
def func(df: pa.typing.DataFrame[PydanticSchema]):
return df

valid_df = pd.DataFrame(
{
"name": ["foo", "bar", "baz"],
"xcoord": [1.0, 2, 3],
"ycoord": [4, 5.0, 6],
}
)

invalid_df = pd.DataFrame(
{
"name": ["foo", "bar", "baz"],
"xcoord": [1, 2, "c"],
"ycoord": [4, 5, "d"],
}
)

validated = func(valid_df)
PanderaSchema.validate(validated)

expected_failure_cases = pd.DataFrame(
{"index": [2], "failure_case": ["{'xcoord': 'c', 'ycoord': 'd'}"]}
)

try:
func(invalid_df)
except pa.errors.SchemaError as exc:
pd.testing.assert_frame_equal(
exc.failure_cases, expected_failure_cases
)


def test_pydantic_model_init_errors():
"""SchemaInitError should be raised when coerce=False"""
with pytest.raises(pa.errors.SchemaInitError):
pa.DataFrameSchema(dtype=PydanticModel(Record), coerce=False)

with pytest.raises(pa.errors.SchemaInitError):
pa.SeriesSchema(dtype=PydanticModel(Record))

with pytest.raises(pa.errors.SchemaInitError):
pa.Column(dtype=PydanticModel(Record))

with pytest.raises(pa.errors.SchemaInitError):
pa.Index(dtype=PydanticModel(Record))