Skip to content

Commit

Permalink
DEPR: remove use of nan_as_null from callers of __dataframe__
Browse files Browse the repository at this point in the history
No one was using it yet, and it seemed easier to remove it from
libraries than to ask every implementer to add support for it - see
data-apis/dataframe-api#125.
  • Loading branch information
rgommers committed Aug 29, 2023
1 parent 4e0630c commit 4f4a4be
Show file tree
Hide file tree
Showing 2 changed files with 10 additions and 19 deletions.
9 changes: 3 additions & 6 deletions pandas/core/frame.py
Original file line number Diff line number Diff line change
Expand Up @@ -893,8 +893,8 @@ def __dataframe__(
Parameters
----------
nan_as_null : bool, default False
Whether to tell the DataFrame to overwrite null values in the data
with ``NaN`` (or ``NaT``).
`nan_as_null` is DEPRECATED and has no effect. Please avoid using
it; it will be removed in a future release (after Aug 2024).
allow_copy : bool, default True
Whether to allow memory copying when exporting. If set to False
it would cause non-zero-copy exports to fail.
Expand All @@ -909,9 +909,6 @@ def __dataframe__(
Details on the interchange protocol:
https://data-apis.org/dataframe-protocol/latest/index.html
`nan_as_null` currently has no effect; once support for nullable extension
dtypes is added, this value should be propagated to columns.
Examples
--------
>>> df_not_necessarily_pandas = pd.DataFrame({'A': [1, 2], 'B': [3, 4]})
Expand All @@ -931,7 +928,7 @@ def __dataframe__(

from pandas.core.interchange.dataframe import PandasDataFrameXchg

return PandasDataFrameXchg(self, nan_as_null, allow_copy)
return PandasDataFrameXchg(self, allow_copy=allow_copy)

def __dataframe_consortium_standard__(
self, *, api_version: str | None = None
Expand Down
20 changes: 7 additions & 13 deletions pandas/core/interchange/dataframe.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,25 +27,20 @@ class PandasDataFrameXchg(DataFrameXchg):
attributes defined on this class.
"""

def __init__(self, df: DataFrame, allow_copy: bool = True) -> None:
    """
    Constructor - an instance of this (private) class is returned from
    `pd.DataFrame.__dataframe__`.

    Parameters
    ----------
    df : DataFrame
        The pandas DataFrame to expose through the interchange protocol.
    allow_copy : bool, default True
        Whether to allow memory copying when exporting. If set to False,
        non-zero-copy exports are expected to fail.
    """
    # NOTE: the former ``nan_as_null`` parameter was removed; it never had
    # an effect (see data-apis/dataframe-api#125), so it is no longer
    # accepted or stored here.
    self._df = df
    self._allow_copy = allow_copy

def __dataframe__(
    self, nan_as_null: bool = False, allow_copy: bool = True
) -> PandasDataFrameXchg:
    """Return a fresh exchange object wrapping the same DataFrame."""
    # `nan_as_null` is accepted for interchange-protocol compatibility but
    # has no effect; it can be removed here once it's removed from
    # `DataFrame.__dataframe__`.
    return PandasDataFrameXchg(self._df, allow_copy)

@property
def metadata(self) -> dict[str, Index]:
Expand Down Expand Up @@ -84,7 +79,7 @@ def select_columns(self, indices: Sequence[int]) -> PandasDataFrameXchg:
indices = list(indices)

return PandasDataFrameXchg(
self._df.iloc[:, indices], self._nan_as_null, self._allow_copy
self._df.iloc[:, indices], allow_copy=self._allow_copy
)

def select_columns_by_name(self, names: list[str]) -> PandasDataFrameXchg: # type: ignore[override] # noqa: E501
Expand All @@ -94,7 +89,7 @@ def select_columns_by_name(self, names: list[str]) -> PandasDataFrameXchg: # ty
names = list(names)

return PandasDataFrameXchg(
self._df.loc[:, names], self._nan_as_null, self._allow_copy
self._df.loc[:, names], allow_copy=self._allow_copy
)

def get_chunks(self, n_chunks: int | None = None) -> Iterable[PandasDataFrameXchg]:
Expand All @@ -109,8 +104,7 @@ def get_chunks(self, n_chunks: int | None = None) -> Iterable[PandasDataFrameXch
for start in range(0, step * n_chunks, step):
yield PandasDataFrameXchg(
self._df.iloc[start : start + step, :],
self._nan_as_null,
self._allow_copy,
allow_copy=self._allow_copy,
)
else:
yield self

0 comments on commit 4f4a4be

Please sign in to comment.