@@ -0,0 +1,9 @@
from typing import Callable

from pandas._typing import TypeT

class PandasDelegate: ...

def register_dataframe_accessor(name: str) -> Callable[[TypeT], TypeT]: ...
def register_series_accessor(name: str) -> Callable[[TypeT], TypeT]: ...
def register_index_accessor(name: str) -> Callable[[TypeT], TypeT]: ...
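
A quick usage sketch of the accessor-registration API stubbed above (illustrative, not part of the commit). Because the stub types the decorator as `Callable[[TypeT], TypeT]`, the decorated class keeps its own type; the `GeoAccessor` name and the `lat`/`lon` columns are made up for the example:

    import pandas as pd

    @pd.api.extensions.register_dataframe_accessor("geo")
    class GeoAccessor:
        def __init__(self, pandas_obj: pd.DataFrame) -> None:
            self._obj = pandas_obj

        @property
        def center(self) -> tuple[float, float]:
            # assumes "lat"/"lon" columns; purely illustrative
            return (self._obj["lat"].mean(), self._obj["lon"].mean())

    df = pd.DataFrame({"lat": [0.0, 10.0], "lon": [0.0, 20.0]})
    print(df.geo.center)  # (5.0, 10.0)
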
@@ -0,0 +1,80 @@
from typing import (
    Literal,
    overload,
)

import numpy as np
from pandas import (
    Categorical,
    CategoricalIndex,
    Index,
    IntervalIndex,
    PeriodIndex,
    Series,
)
from pandas.api.extensions import ExtensionArray

from pandas._typing import (
    AnyArrayLike,
    IntervalT,
    TakeIndexer,
    np_1darray,
)

# These are type: ignored because the Index types overlap due to inheritance, but indices
# with extension types return the same type while standard types return ndarray

@overload
def unique(  # pyright: ignore[reportOverlappingOverload]
    values: PeriodIndex,
) -> PeriodIndex: ...
@overload
def unique(values: CategoricalIndex) -> CategoricalIndex: ...  # type: ignore[overload-overlap]
@overload
def unique(values: IntervalIndex[IntervalT]) -> IntervalIndex[IntervalT]: ...
@overload
def unique(values: Index) -> np.ndarray: ...
@overload
def unique(values: Categorical) -> Categorical: ...
@overload
def unique(values: Series) -> np.ndarray | ExtensionArray: ...
@overload
def unique(values: np.ndarray) -> np.ndarray: ...
@overload
def unique(values: ExtensionArray) -> ExtensionArray: ...
@overload
def factorize(
    values: np.ndarray,
    sort: bool = ...,
    use_na_sentinel: bool = ...,
    size_hint: int | None = ...,
) -> tuple[np.ndarray, np.ndarray]: ...
@overload
def factorize(
    values: Index | Series,
    sort: bool = ...,
    use_na_sentinel: bool = ...,
    size_hint: int | None = ...,
) -> tuple[np_1darray, Index]: ...
@overload
def factorize(
    values: Categorical,
    sort: bool = ...,
    use_na_sentinel: bool = ...,
    size_hint: int | None = ...,
) -> tuple[np_1darray, Categorical]: ...
def value_counts(
    values: AnyArrayLike | list | tuple,
    sort: bool = True,
    ascending: bool = False,
    normalize: bool = False,
    bins: int | None = None,
    dropna: bool = True,
) -> Series: ...
def take(
    arr,
    indices: TakeIndexer,
    axis: Literal[0, 1] = 0,
    allow_fill: bool = False,
    fill_value=None,
): ...
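
A usage sketch of the two overloaded functions above (illustrative, not part of the commit). The overloads encode that `pd.unique` preserves extension-backed index types while plain indexes fall through to `ndarray`, and that `pd.factorize` returns a `(codes, uniques)` pair:

    import numpy as np
    import pandas as pd

    cat_idx = pd.CategoricalIndex(["a", "b", "a"])
    print(type(pd.unique(cat_idx)))  # CategoricalIndex, as the overloads declare

    codes, uniques = pd.factorize(np.array(["x", "y", "x"]))
    print(codes)    # [0 1 0]
    print(uniques)  # ['x' 'y']
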
78  lib/python3.11/site-packages/pandas-stubs/core/api.pyi  Normal file
@@ -0,0 +1,78 @@
from pandas.core.algorithms import (
    factorize as factorize,
    unique as unique,
    value_counts as value_counts,
)
from pandas.core.arrays import Categorical as Categorical
from pandas.core.arrays.arrow.dtype import ArrowDtype as ArrowDtype
from pandas.core.arrays.boolean import BooleanDtype as BooleanDtype
from pandas.core.arrays.floating import (
    Float32Dtype as Float32Dtype,
    Float64Dtype as Float64Dtype,
)
from pandas.core.arrays.integer import (
    Int8Dtype as Int8Dtype,
    Int16Dtype as Int16Dtype,
    Int32Dtype as Int32Dtype,
    Int64Dtype as Int64Dtype,
    UInt8Dtype as UInt8Dtype,
    UInt16Dtype as UInt16Dtype,
    UInt32Dtype as UInt32Dtype,
    UInt64Dtype as UInt64Dtype,
)
from pandas.core.arrays.string_ import StringDtype as StringDtype
from pandas.core.construction import array as array
from pandas.core.frame import DataFrame as DataFrame
from pandas.core.groupby import (
    Grouper as Grouper,
    NamedAgg as NamedAgg,
)
from pandas.core.indexes.api import (
    CategoricalIndex as CategoricalIndex,
    DatetimeIndex as DatetimeIndex,
    Index as Index,
    IntervalIndex as IntervalIndex,
    MultiIndex as MultiIndex,
    PeriodIndex as PeriodIndex,
    RangeIndex as RangeIndex,
    TimedeltaIndex as TimedeltaIndex,
)
from pandas.core.indexes.datetimes import (
    bdate_range as bdate_range,
    date_range as date_range,
)
from pandas.core.indexes.interval import (
    Interval as Interval,
    interval_range as interval_range,
)
from pandas.core.indexes.period import period_range as period_range
from pandas.core.indexes.timedeltas import timedelta_range as timedelta_range
from pandas.core.indexing import IndexSlice as IndexSlice
from pandas.core.series import Series as Series
from pandas.core.tools.datetimes import to_datetime as to_datetime
from pandas.core.tools.numeric import to_numeric as to_numeric
from pandas.core.tools.timedeltas import to_timedelta as to_timedelta

from pandas._libs import (
    NaT as NaT,
    Period as Period,
    Timedelta as Timedelta,
)
from pandas._libs.missing import NA as NA
from pandas._libs.tslibs import Timestamp as Timestamp

from pandas.core.dtypes.dtypes import (
    CategoricalDtype as CategoricalDtype,
    DatetimeTZDtype as DatetimeTZDtype,
    IntervalDtype as IntervalDtype,
    PeriodDtype as PeriodDtype,
)
from pandas.core.dtypes.missing import (
    isna as isna,
    isnull as isnull,
    notna as notna,
    notnull as notnull,
)

from pandas.io.formats.format import set_eng_float_format as set_eng_float_format
from pandas.tseries.offsets import DateOffset as DateOffset
39  lib/python3.11/site-packages/pandas-stubs/core/arraylike.pyi  Normal file
@@ -0,0 +1,39 @@
from typing import Any

from typing_extensions import Self

class OpsMixin:
    def __eq__(self, other: object) -> Self: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    def __ne__(self, other: object) -> Self: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    def __lt__(self, other: Any) -> Self: ...
    def __le__(self, other: Any) -> Self: ...
    def __gt__(self, other: Any) -> Self: ...
    def __ge__(self, other: Any) -> Self: ...
    # -------------------------------------------------------------
    # Logical Methods
    def __and__(self, other: Any) -> Self: ...
    def __rand__(self, other: Any) -> Self: ...
    def __or__(self, other: Any) -> Self: ...
    def __ror__(self, other: Any) -> Self: ...
    def __xor__(self, other: Any) -> Self: ...
    def __rxor__(self, other: Any) -> Self: ...
    # -------------------------------------------------------------
    # Arithmetic Methods
    def __add__(self, other: Any) -> Self: ...
    def __radd__(self, other: Any) -> Self: ...
    def __sub__(self, other: Any) -> Self: ...
    def __rsub__(self, other: Any) -> Self: ...
    def __mul__(self, other: Any) -> Self: ...
    def __rmul__(self, other: Any) -> Self: ...
    # Handled by subclasses that specify only the valid values
    # that can be passed
    # def __truediv__(self, other: Any) -> Self: ...
    # def __rtruediv__(self, other: Any) -> Self: ...
    # def __floordiv__(self, other: Any) -> Self: ...
    # def __rfloordiv__(self, other: Any) -> Self: ...
    def __mod__(self, other: Any) -> Self: ...
    def __rmod__(self, other: Any) -> Self: ...
    def __divmod__(self, other: Any) -> tuple[Self, Self]: ...
    def __rdivmod__(self, other: Any) -> tuple[Self, Self]: ...
    def __pow__(self, other: Any) -> Self: ...
    def __rpow__(self, other: Any) -> Self: ...
@@ -0,0 +1,15 @@
from pandas.core.arrays.base import (
    ExtensionArray as ExtensionArray,
    ExtensionOpsMixin as ExtensionOpsMixin,
    ExtensionScalarOpsMixin as ExtensionScalarOpsMixin,
)
from pandas.core.arrays.boolean import BooleanArray as BooleanArray
from pandas.core.arrays.categorical import Categorical as Categorical
from pandas.core.arrays.datetimes import DatetimeArray as DatetimeArray
from pandas.core.arrays.integer import IntegerArray as IntegerArray
from pandas.core.arrays.interval import IntervalArray as IntervalArray
from pandas.core.arrays.numpy_ import PandasArray as PandasArray
from pandas.core.arrays.period import PeriodArray as PeriodArray
from pandas.core.arrays.sparse import SparseArray as SparseArray
from pandas.core.arrays.string_ import StringArray as StringArray
from pandas.core.arrays.timedeltas import TimedeltaArray as TimedeltaArray
@@ -0,0 +1,11 @@
import pyarrow as pa

from pandas._libs.missing import NAType

from pandas.core.dtypes.base import StorageExtensionDtype

class ArrowDtype(StorageExtensionDtype):
    pyarrow_dtype: pa.DataType
    def __init__(self, pyarrow_dtype: pa.DataType) -> None: ...
    @property
    def na_value(self) -> NAType: ...
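
A usage sketch of the `ArrowDtype` wrapper stubbed above (illustrative, not part of the commit; requires pyarrow to be installed). Per the stub, its `na_value` is `pd.NA`:

    import pandas as pd
    import pyarrow as pa

    s = pd.Series([1, None, 3], dtype=pd.ArrowDtype(pa.int64()))
    print(s.dtype)        # int64[pyarrow]
    print(s[1] is pd.NA)  # True
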
@@ -0,0 +1,81 @@
from collections.abc import Iterator
from typing import (
    Any,
    overload,
)

import numpy as np
from typing_extensions import Self

from pandas._typing import (
    ArrayLike,
    Scalar,
    ScalarIndexer,
    SequenceIndexer,
    TakeIndexer,
    np_1darray,
    npt,
)

from pandas.core.dtypes.dtypes import ExtensionDtype as ExtensionDtype

class ExtensionArray:
    @overload
    def __getitem__(self, item: ScalarIndexer) -> Any: ...
    @overload
    def __getitem__(self, item: SequenceIndexer) -> Self: ...
    def __setitem__(self, key: int | slice | np.ndarray, value: Any) -> None: ...
    def __len__(self) -> int: ...
    def __iter__(self) -> Iterator[Any]: ...
    def __contains__(self, item: object) -> bool | np.bool_: ...
    def to_numpy(
        self,
        dtype: npt.DTypeLike | None = ...,
        copy: bool = False,
        na_value: Scalar = ...,
    ) -> np_1darray[Any]: ...
    @property
    def dtype(self) -> ExtensionDtype: ...
    @property
    def shape(self) -> tuple[int, ...]: ...
    @property
    def ndim(self) -> int: ...
    @property
    def nbytes(self) -> int: ...
    def astype(self, dtype, copy: bool = True): ...
    def isna(self) -> ArrayLike: ...
    def argsort(
        self, *, ascending: bool = ..., kind: str = ..., **kwargs
    ) -> np_1darray: ...
    def fillna(self, value=..., method=None, limit=None): ...
    def dropna(self): ...
    def shift(self, periods: int = 1, fill_value: object = ...) -> Self: ...
    def unique(self): ...
    def searchsorted(self, value, side: str = ..., sorter=...): ...
    def factorize(self, use_na_sentinel: bool = True) -> tuple[np_1darray, Self]: ...
    def repeat(self, repeats, axis=...): ...
    def take(
        self,
        indexer: TakeIndexer,
        *,
        allow_fill: bool = ...,
        fill_value=...,
    ) -> Self: ...
    def copy(self) -> Self: ...
    def view(self, dtype=...) -> Self | np_1darray: ...
    def ravel(self, order="C") -> Self: ...
    def tolist(self) -> list: ...
    def _reduce(
        self, name: str, *, skipna: bool = ..., keepdims: bool = ..., **kwargs
    ) -> object: ...
    def _accumulate(self, name: str, *, skipna: bool = ..., **kwargs) -> Self: ...

class ExtensionOpsMixin:
    @classmethod
    def _add_arithmetic_ops(cls) -> None: ...
    @classmethod
    def _add_comparison_ops(cls) -> None: ...
    @classmethod
    def _add_logical_ops(cls) -> None: ...

class ExtensionScalarOpsMixin(ExtensionOpsMixin): ...
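
A usage sketch of the `ExtensionArray` protocol stubbed above (illustrative, not part of the commit), using a nullable integer array as a concrete instance — note `take(..., allow_fill=True)` filling with NA and `factorize()` returning codes plus a `Self`-typed uniques array:

    import pandas as pd

    arr = pd.array([1, 2, None, 2], dtype="Int64")  # an ExtensionArray
    print(arr.take([0, -1], allow_fill=True))       # [1, <NA>]
    codes, uniques = arr.factorize()
    print(codes)    # [ 0  1 -1  1]
    print(uniques)  # [1, 2]
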
@@ -0,0 +1,25 @@
import numpy as np
from pandas.core.arrays.masked import BaseMaskedArray as BaseMaskedArray

from pandas._libs.missing import NAType
from pandas._typing import type_t

from pandas.core.dtypes.base import ExtensionDtype as ExtensionDtype

class BooleanDtype(ExtensionDtype):
    @property
    def na_value(self) -> NAType: ...
    @classmethod
    def construct_array_type(cls) -> type_t[BooleanArray]: ...

class BooleanArray(BaseMaskedArray):
    def __init__(
        self, values: np.ndarray, mask: np.ndarray, copy: bool = ...
    ) -> None: ...
    @property
    def dtype(self): ...
    def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): ...
    def __setitem__(self, key, value) -> None: ...
    def astype(self, dtype, copy: bool = True): ...
    def any(self, *, skipna: bool = ..., **kwargs): ...
    def all(self, *, skipna: bool = ..., **kwargs): ...
@@ -0,0 +1,161 @@
from collections.abc import (
    Callable,
    Sequence,
)
from typing import (
    Any,
    overload,
)

import numpy as np
from pandas import Series
from pandas.core.accessor import PandasDelegate as PandasDelegate
from pandas.core.arrays.base import ExtensionArray as ExtensionArray
from pandas.core.base import NoNewAttributesMixin as NoNewAttributesMixin
from pandas.core.indexes.base import Index
from typing_extensions import Self

from pandas._typing import (
    ArrayLike,
    Dtype,
    ListLike,
    Ordered,
    PositionalIndexerTuple,
    Scalar,
    ScalarIndexer,
    SequenceIndexer,
    TakeIndexer,
    np_1darray,
)

from pandas.core.dtypes.dtypes import CategoricalDtype as CategoricalDtype

def contains(cat, key, container): ...

class Categorical(ExtensionArray):
    __array_priority__: int = ...
    def __init__(
        self,
        values: ListLike,
        categories=...,
        ordered: bool | None = ...,
        dtype: CategoricalDtype | None = ...,
        fastpath: bool = ...,
    ) -> None: ...
    @property
    def categories(self): ...
    @property
    def ordered(self) -> Ordered: ...
    @property
    def dtype(self) -> CategoricalDtype: ...
    def astype(self, dtype: Dtype, copy: bool = True) -> ArrayLike: ...
    def size(self) -> int: ...
    def tolist(self) -> list[Scalar]: ...
    to_list = ...
    @classmethod
    def from_codes(
        cls,
        codes: Sequence[int],
        categories: Index | None = ...,
        ordered: bool | None = ...,
        dtype: CategoricalDtype | None = ...,
        fastpath: bool = ...,
    ) -> Categorical: ...
    @property
    def codes(self) -> np_1darray[np.signedinteger]: ...
    def set_ordered(self, value) -> Categorical: ...
    def as_ordered(self) -> Categorical: ...
    def as_unordered(self) -> Categorical: ...
    def set_categories(
        self,
        new_categories,
        ordered: bool | None = False,
        rename: bool = False,
    ) -> Categorical: ...
    def rename_categories(self, new_categories) -> Categorical: ...
    def reorder_categories(
        self, new_categories, ordered: bool | None = ...
    ) -> Categorical: ...
    def add_categories(self, new_categories) -> Categorical: ...
    def remove_categories(self, removals) -> Categorical: ...
    def remove_unused_categories(self) -> Categorical: ...
    def map(self, mapper): ...
    def __eq__(self, other) -> bool: ...
    def __ne__(self, other) -> bool: ...
    def __lt__(self, other) -> bool: ...
    def __gt__(self, other) -> bool: ...
    def __le__(self, other) -> bool: ...
    def __ge__(self, other) -> bool: ...
    @property
    def shape(self): ...
    def shift(self, periods=1, fill_value=...): ...
    def __array__(self, dtype=...) -> np_1darray: ...
    def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): ...
    @property
    def T(self): ...
    @property
    def nbytes(self) -> int: ...
    def memory_usage(self, deep: bool = ...): ...
    def searchsorted(self, value, side: str = ..., sorter=...): ...
    def isna(self) -> np_1darray[np.bool]: ...
    def isnull(self) -> np_1darray[np.bool]: ...
    def notna(self) -> np_1darray[np.bool]: ...
    def notnull(self) -> np_1darray[np.bool]: ...
    def dropna(self): ...
    def value_counts(self, dropna: bool = True): ...
    def check_for_ordered(self, op) -> None: ...
    def argsort(self, *, ascending: bool = ..., kind: str = ..., **kwargs): ...
    def sort_values(
        self, *, inplace: bool = ..., ascending: bool = ..., na_position: str = ...
    ): ...
    def view(self, dtype=...): ...
    def fillna(self, value=..., method=None, limit=None): ...
    def take(
        self, indexer: TakeIndexer, *, allow_fill: bool = ..., fill_value=...
    ) -> Categorical: ...
    def __len__(self) -> int: ...
    def __iter__(self): ...
    def __contains__(self, key) -> bool: ...
    @overload
    def __getitem__(self, key: ScalarIndexer) -> Any: ...
    @overload
    def __getitem__(
        self,
        key: SequenceIndexer | PositionalIndexerTuple,
    ) -> Self: ...
    def __setitem__(self, key, value) -> None: ...
    def min(self, *, skipna: bool = ...): ...
    def max(self, *, skipna: bool = ...): ...
    def unique(self): ...
    def equals(self, other): ...
    def describe(self): ...
    def repeat(self, repeats, axis=...): ...
    def isin(self, values): ...

class CategoricalAccessor(PandasDelegate, NoNewAttributesMixin):
    def __init__(self, data) -> None: ...
    @property
    def codes(self) -> Series[int]: ...
    @property
    def categories(self) -> Index: ...
    @property
    def ordered(self) -> bool | None: ...
    def rename_categories(
        self, new_categories: ListLike | dict[Any, Any] | Callable[[Any], Any]
    ) -> Series: ...
    def reorder_categories(
        self,
        new_categories: ListLike,
        ordered: bool = ...,
    ) -> Series: ...
    def add_categories(self, new_categories: Scalar | ListLike) -> Series: ...
    def remove_categories(self, removals: Scalar | ListLike) -> Series: ...
    def remove_unused_categories(self) -> Series: ...
    def set_categories(
        self,
        new_categories: ListLike,
        ordered: bool | None = False,
        rename: bool = False,
    ) -> Series: ...
    def as_ordered(self) -> Series: ...
    def as_unordered(self) -> Series: ...
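
A usage sketch tying together `Categorical.from_codes` and the Series-level `.cat` accessor stubbed above (illustrative, not part of the commit):

    import pandas as pd

    cat = pd.Categorical.from_codes(
        [0, 1, 0], categories=pd.Index(["low", "high"]), ordered=True
    )
    s = pd.Series(cat)
    print(s.cat.codes.tolist())        # [0, 1, 0]
    print(s.cat.as_unordered().dtype)  # category, ordered=False
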
@@ -0,0 +1,114 @@
from collections.abc import Sequence
from typing import overload

import numpy as np
from pandas.core.arrays.base import (
    ExtensionArray,
    ExtensionOpsMixin,
)
from typing_extensions import (
    Self,
    TypeAlias,
)

from pandas._libs import (
    NaT as NaT,
    NaTType as NaTType,
)
from pandas._typing import (
    DatetimeLikeScalar,
    PositionalIndexerTuple,
    ScalarIndexer,
    SequenceIndexer,
    TimeAmbiguous,
    TimeNonexistent,
    TimeUnit,
)

DTScalarOrNaT: TypeAlias = DatetimeLikeScalar | NaTType

class DatelikeOps:
    def strftime(self, date_format): ...

class TimelikeOps:
    @property
    def unit(self) -> TimeUnit: ...
    def as_unit(self, unit: TimeUnit) -> Self: ...
    def round(
        self,
        freq,
        ambiguous: TimeAmbiguous = "raise",
        nonexistent: TimeNonexistent = "raise",
    ): ...
    def floor(
        self,
        freq,
        ambiguous: TimeAmbiguous = "raise",
        nonexistent: TimeNonexistent = "raise",
    ): ...
    def ceil(
        self,
        freq,
        ambiguous: TimeAmbiguous = "raise",
        nonexistent: TimeNonexistent = "raise",
    ): ...

class DatetimeLikeArrayMixin(ExtensionOpsMixin, ExtensionArray):
    @property
    def ndim(self) -> int: ...
    @property
    def shape(self): ...
    def reshape(self, *args, **kwargs): ...
    def ravel(self, *args, **kwargs): ...  # pyrefly: ignore
    def __iter__(self): ...
    @property
    def asi8(self) -> np.ndarray: ...
    @property
    def nbytes(self): ...
    def __array__(self, dtype=...) -> np.ndarray: ...
    @property
    def size(self) -> int: ...
    def __len__(self) -> int: ...
    @overload
    def __getitem__(self, key: ScalarIndexer) -> DTScalarOrNaT: ...
    @overload
    def __getitem__(
        self,
        key: SequenceIndexer | PositionalIndexerTuple,
    ) -> Self: ...
    def __setitem__(  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
        self, key: int | Sequence[int] | Sequence[bool] | slice, value
    ) -> None: ...
    def astype(self, dtype, copy: bool = True): ...
    def view(self, dtype=...): ...
    def unique(self): ...
    def copy(self): ...
    def shift(self, periods: int = 1, fill_value=..., axis: int = ...): ...
    def searchsorted(self, value, side: str = ..., sorter=...): ...
    def repeat(self, repeats, *args, **kwargs): ...  # pyrefly: ignore
    def value_counts(self, dropna: bool = True): ...
    def map(self, mapper): ...
    def isna(self): ...
    def fillna(self, value=..., method=None, limit=None): ...
    @property
    def freq(self): ...
    @freq.setter
    def freq(self, value) -> None: ...
    @property
    def freqstr(self): ...
    @property
    def inferred_freq(self): ...
    @property
    def resolution(self): ...
    __pow__ = ...
    __rpow__ = ...
    __rmul__ = ...
    def __add__(self, other): ...
    def __radd__(self, other): ...
    def __sub__(self, other): ...
    def __rsub__(self, other): ...
    def __iadd__(self, other): ...
    def __isub__(self, other): ...
    def min(self, *, axis=..., skipna: bool = ..., **kwargs): ...
    def max(self, *, axis=..., skipna: bool = ..., **kwargs): ...
    def mean(self, *, skipna: bool = ...): ...
@@ -0,0 +1,85 @@
from datetime import tzinfo as _tzinfo

import numpy as np
from pandas.core.arrays.datetimelike import (
    DatelikeOps,
    DatetimeLikeArrayMixin,
    TimelikeOps,
)

from pandas._typing import (
    TimeAmbiguous,
    TimeNonexistent,
    TimeZones,
)

from pandas.core.dtypes.dtypes import DatetimeTZDtype as DatetimeTZDtype

class DatetimeArray(DatetimeLikeArrayMixin, TimelikeOps, DatelikeOps):
    __array_priority__: int = ...
    def __init__(self, values, dtype=..., freq=..., copy: bool = ...) -> None: ...
    # ignore in dtype() is from the pandas source
    @property
    def dtype(self) -> np.dtype | DatetimeTZDtype: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    @property
    def tz(self): ...
    @tz.setter
    def tz(self, value) -> None: ...
    @property
    def tzinfo(self) -> _tzinfo | None: ...
    @property
    def is_normalized(self): ...
    def __array__(self, dtype=...) -> np.ndarray: ...
    def __iter__(self): ...
    def astype(self, dtype, copy: bool = True): ...
    def tz_convert(self, tz: TimeZones): ...
    def tz_localize(
        self,
        tz: TimeZones,
        ambiguous: TimeAmbiguous = "raise",
        nonexistent: TimeNonexistent = "raise",
    ): ...
    def to_pydatetime(self): ...
    def normalize(self): ...
    def to_period(self, freq=...): ...
    def to_perioddelta(self, freq): ...
    def month_name(self, locale=...): ...
    def day_name(self, locale=...): ...
    @property
    def time(self): ...
    @property
    def timetz(self): ...
    @property
    def date(self): ...
    year = ...
    month = ...
    day = ...
    hour = ...
    minute = ...
    second = ...
    microsecond = ...
    nanosecond = ...
    dayofweek = ...
    weekday = ...
    dayofyear = ...
    quarter = ...
    days_in_month = ...
    daysinmonth = ...
    is_month_start = ...
    is_month_end = ...
    is_quarter_start = ...
    is_quarter_end = ...
    is_year_start = ...
    is_year_end = ...
    is_leap_year = ...
    def to_julian_date(self): ...

def objects_to_datetime64ns(
    data,
    dayfirst,
    yearfirst,
    utc: bool = ...,
    errors: str = ...,
    require_iso8601: bool = ...,
    allow_object: bool = ...,
): ...
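
A usage sketch of the `tz_localize`/`tz_convert` pair stubbed above (illustrative, not part of the commit): localizing attaches a timezone to naive values, converting translates between zones:

    import pandas as pd

    naive = pd.Series(pd.date_range("2024-01-01", periods=2, freq="D"))
    utc = naive.dt.tz_localize("UTC")
    print(utc.dt.tz_convert("US/Eastern").iloc[0])
    # 2023-12-31 19:00:00-05:00
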
@@ -0,0 +1,4 @@
from pandas.core.arrays.numeric import NumericDtype

class Float32Dtype(NumericDtype): ...
class Float64Dtype(NumericDtype): ...
@@ -0,0 +1,31 @@
from pandas.core.arrays.masked import BaseMaskedArray

from pandas._libs.missing import NAType

from pandas.core.dtypes.base import ExtensionDtype as ExtensionDtype

class _IntegerDtype(ExtensionDtype):
    base: None
    @property
    def na_value(self) -> NAType: ...
    @property
    def itemsize(self) -> int: ...
    @classmethod
    def construct_array_type(cls) -> type[IntegerArray]: ...

class IntegerArray(BaseMaskedArray):
    @property
    def dtype(self) -> _IntegerDtype: ...
    def __init__(self, values, mask, copy: bool = ...) -> None: ...
    def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): ...
    def __setitem__(self, key, value) -> None: ...
    def astype(self, dtype, copy: bool = True): ...

class Int8Dtype(_IntegerDtype): ...
class Int16Dtype(_IntegerDtype): ...
class Int32Dtype(_IntegerDtype): ...
class Int64Dtype(_IntegerDtype): ...
class UInt8Dtype(_IntegerDtype): ...
class UInt16Dtype(_IntegerDtype): ...
class UInt32Dtype(_IntegerDtype): ...
class UInt64Dtype(_IntegerDtype): ...
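
A usage sketch of the nullable integer dtypes stubbed above (illustrative, not part of the commit): their `na_value` is `pd.NA` rather than NaN, and NA propagates through arithmetic:

    import pandas as pd

    s = pd.Series([1, None, 3], dtype="Int64")
    print(s.dtype)           # Int64
    print(s[1] is pd.NA)     # True
    print((s + 1).tolist())  # [2, <NA>, 4]
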
@@ -0,0 +1,112 @@
from typing import overload

import numpy as np
from pandas import (
    Index,
    Series,
)
from pandas.core.arrays.base import ExtensionArray as ExtensionArray
from typing_extensions import (
    Self,
    TypeAlias,
)

from pandas._libs.interval import (
    Interval as Interval,
    IntervalMixin as IntervalMixin,
)
from pandas._typing import (
    Axis,
    Scalar,
    ScalarIndexer,
    SequenceIndexer,
    TakeIndexer,
    np_1darray,
)

IntervalOrNA: TypeAlias = Interval | float

class IntervalArray(IntervalMixin, ExtensionArray):
    can_hold_na: bool = ...
    def __new__(
        cls, data, closed=..., dtype=..., copy: bool = ..., verify_integrity: bool = ...
    ): ...
    @classmethod
    def from_breaks(
        cls,
        breaks,
        closed: str = "right",
        copy: bool = False,
        dtype=None,
    ): ...
    @classmethod
    def from_arrays(
        cls,
        left,
        right,
        closed: str = "right",
        copy: bool = False,
        dtype=...,
    ): ...
    @classmethod
    def from_tuples(
        cls,
        data,
        closed: str = "right",
        copy: bool = False,
        dtype=None,
    ): ...
    def __iter__(self): ...
    def __len__(self) -> int: ...
    @overload
    def __getitem__(self, key: ScalarIndexer) -> IntervalOrNA: ...
    @overload
    def __getitem__(self, key: SequenceIndexer) -> Self: ...
    def __setitem__(self, key, value) -> None: ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...
    def fillna(self, value=..., method=None, limit=None): ...
    @property
    def dtype(self): ...
    def astype(self, dtype, copy: bool = True): ...
    def copy(self): ...
    def isna(self): ...
    @property
    def nbytes(self) -> int: ...
    @property
    def size(self) -> int: ...
    def shift(self, periods: int = 1, fill_value: object = ...) -> IntervalArray: ...
    def take(  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
        self: Self,
        indices: TakeIndexer,
        *,
        allow_fill: bool = ...,
        fill_value=...,
        axis=...,
        **kwargs,
    ) -> Self: ...
    def value_counts(self, dropna: bool = True): ...
    @property
    def left(self) -> Index: ...
    @property
    def right(self) -> Index: ...
    @property
    def closed(self) -> bool: ...
    def set_closed(self, closed): ...
    @property
    def length(self) -> Index: ...
    @property
    def mid(self) -> Index: ...
    @property
    def is_non_overlapping_monotonic(self) -> bool: ...
    def __array__(self, dtype=...) -> np_1darray: ...
    def __arrow_array__(self, type=...): ...
    def to_tuples(self, na_tuple: bool = True): ...
    def repeat(self, repeats, axis: Axis | None = ...): ...
    @overload
    def contains(self, other: Series) -> Series[bool]: ...
    @overload
    def contains(
        self, other: Scalar | ExtensionArray | Index | np.ndarray
    ) -> np_1darray[np.bool]: ...
    def overlaps(self, other: Interval) -> bool: ...
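
A usage sketch of the `IntervalArray` constructors and predicates stubbed above (illustrative, not part of the commit):

    import pandas as pd

    ia = pd.arrays.IntervalArray.from_breaks([0, 1, 2], closed="right")
    print(ia)                                  # [(0, 1], (1, 2]]
    print(ia.contains(0.5))                    # [ True False]
    print(ia.overlaps(pd.Interval(0.5, 1.5)))  # [ True  True]
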
@@ -0,0 +1,41 @@
from typing import (
    Any,
    overload,
)

import numpy as np
from pandas.core.arrays import (
    ExtensionArray as ExtensionArray,
    ExtensionOpsMixin,
)
from typing_extensions import Self

from pandas._typing import (
    Scalar,
    ScalarIndexer,
    SequenceIndexer,
    npt,
)

class BaseMaskedArray(ExtensionArray, ExtensionOpsMixin):
    @overload
    def __getitem__(self, item: ScalarIndexer) -> Any: ...
    @overload
    def __getitem__(self, item: SequenceIndexer) -> Self: ...
    def __iter__(self): ...
    def __len__(self) -> int: ...
    def __invert__(self): ...
    def to_numpy(
        self,
        dtype: npt.DTypeLike | None = ...,
        copy: bool = False,
        na_value: Scalar = ...,
    ) -> np.ndarray: ...
    __array_priority__: int = ...
    def __array__(self, dtype=...) -> np.ndarray: ...
    def __arrow_array__(self, type=...): ...
    def isna(self): ...
    @property
    def nbytes(self) -> int: ...
    def copy(self): ...
    def value_counts(self, dropna: bool = True): ...
@@ -0,0 +1,3 @@
from pandas.core.dtypes.dtypes import BaseMaskedDtype

class NumericDtype(BaseMaskedDtype): ...
@@ -0,0 +1,17 @@
import numpy as np
from numpy.lib.mixins import NDArrayOperatorsMixin
from pandas.core.arrays.base import (
    ExtensionArray,
    ExtensionOpsMixin,
)

from pandas.core.dtypes.dtypes import ExtensionDtype

class PandasDtype(ExtensionDtype):
    @property
    def numpy_dtype(self) -> np.dtype: ...
    @property
    def itemsize(self) -> int: ...

class PandasArray(ExtensionArray, ExtensionOpsMixin, NDArrayOperatorsMixin):
    def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): ...
@@ -0,0 +1,42 @@
import numpy as np
from pandas import PeriodDtype
from pandas.core.arrays.datetimelike import (
    DatelikeOps,
    DatetimeLikeArrayMixin,
)

from pandas._libs.tslibs import Timestamp
from pandas._libs.tslibs.period import Period

class PeriodArray(DatetimeLikeArrayMixin, DatelikeOps):
    __array_priority__: int = ...
    def __init__(self, values, freq=..., dtype=..., copy: bool = ...) -> None: ...
    @property
    def dtype(self) -> PeriodDtype: ...
    def __array__(self, dtype=...) -> np.ndarray: ...
    def __arrow_array__(self, type=...): ...
    year: int = ...
    month: int = ...
    day: int = ...
    hour: int = ...
    minute: int = ...
    second: int = ...
    weekofyear: int = ...
    week: int = ...
    dayofweek: int = ...
    weekday: int = ...
    dayofyear: int = ...
    day_of_year = ...
    quarter: int = ...
    qyear: int = ...
    days_in_month: int = ...
    daysinmonth: int = ...
    @property
    def is_leap_year(self) -> bool: ...
    @property
    def start_time(self) -> Timestamp: ...
    @property
    def end_time(self) -> Timestamp: ...
    def to_timestamp(self, freq: str | None = ..., how: str = ...) -> Timestamp: ...
    def asfreq(self, freq: str | None = ..., how: str = "E") -> Period: ...
    def astype(self, dtype, copy: bool = True): ...
@@ -0,0 +1,6 @@
from pandas.core.arrays.sparse.accessor import (
    SparseAccessor as SparseAccessor,
    SparseFrameAccessor as SparseFrameAccessor,
)
from pandas.core.arrays.sparse.array import SparseArray as SparseArray
from pandas.core.arrays.sparse.dtype import SparseDtype as SparseDtype
@@ -0,0 +1,19 @@
from pandas import Series
from pandas.core.accessor import PandasDelegate

class BaseAccessor:
    def __init__(self, data=...) -> None: ...

class SparseAccessor(BaseAccessor, PandasDelegate):
    @classmethod
    def from_coo(cls, A, dense_index: bool = False) -> Series: ...
    def to_coo(self, row_levels=..., column_levels=..., sort_labels: bool = False): ...
    def to_dense(self): ...

class SparseFrameAccessor(BaseAccessor, PandasDelegate):
    @classmethod
    def from_spmatrix(cls, data, index=..., columns=...): ...
    def to_dense(self): ...
    def to_coo(self): ...
    @property
    def density(self) -> float: ...
@@ -0,0 +1,82 @@
from enum import Enum
from typing import (
    Any,
    final,
    overload,
)

import numpy as np
from pandas.core.arrays import (
    ExtensionArray,
    ExtensionOpsMixin,
)
from typing_extensions import Self

from pandas._typing import (
    ScalarIndexer,
    SequenceIndexer,
)

@final
class ellipsis(Enum):
    Ellipsis = "..."

class SparseArray(ExtensionArray, ExtensionOpsMixin):
    def __init__(
        self,
        data,
        sparse_index=...,
        fill_value=...,
        kind: str = ...,
        dtype=...,
        copy: bool = ...,
    ) -> None: ...
    @classmethod
    def from_spmatrix(cls, data): ...
    def __array__(self, dtype=..., copy=...) -> np.ndarray: ...
    def __setitem__(self, key, value) -> None: ...
    @property
    def sp_index(self): ...
    @property
    def sp_values(self): ...
    @property
    def dtype(self): ...
    @property
    def fill_value(self): ...
    @fill_value.setter
    def fill_value(self, value) -> None: ...
    @property
    def kind(self) -> str: ...
    def __len__(self) -> int: ...
    @property
    def nbytes(self) -> int: ...
    @property
    def density(self): ...
    @property
    def npoints(self) -> int: ...
    def isna(self): ...
    def fillna(self, value=..., method=..., limit=...): ...
    def shift(self, periods: int = 1, fill_value=...): ...
    def unique(self): ...
    def value_counts(self, dropna: bool = True): ...
    @overload
    def __getitem__(self, key: ScalarIndexer) -> Any: ...
    @overload
    def __getitem__(
        self,
        key: SequenceIndexer | tuple[int | ellipsis, ...],
    ) -> Self: ...
    def copy(self): ...
    def astype(self, dtype=..., copy: bool = True): ...
    def map(self, mapper): ...
    def to_dense(self): ...
    def nonzero(self): ...
    def all(self, axis=..., *args, **kwargs): ...
    def any(self, axis: int = ..., *args, **kwargs): ...
    def sum(self, axis: int = 0, *args, **kwargs): ...
    def cumsum(self, axis: int = ..., *args, **kwargs): ...
    def mean(self, axis: int = ..., *args, **kwargs): ...
    @property
    def T(self): ...
    def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): ...
    def __abs__(self): ...
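
A usage sketch of the `SparseArray` properties stubbed above (illustrative, not part of the commit): only values different from `fill_value` are materialized, and `density`/`npoints` report how much is stored:

    import pandas as pd

    sp = pd.arrays.SparseArray([0, 0, 1, 2], fill_value=0)
    print(sp.npoints)     # 2
    print(sp.density)     # 0.5
    print(sp.to_dense())  # [0 0 1 2]
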
@@ -0,0 +1,17 @@
from pandas._typing import (
    Dtype,
    Scalar,
    npt,
)

from pandas.core.dtypes.base import ExtensionDtype
from pandas.core.dtypes.dtypes import (
    register_extension_dtype as register_extension_dtype,
)

class SparseDtype(ExtensionDtype):
    def __init__(
        self, dtype: Dtype | npt.DTypeLike = ..., fill_value: Scalar | None = ...
    ) -> None: ...
    @property
    def fill_value(self) -> Scalar | None: ...
@@ -0,0 +1,20 @@
from typing import Literal

from pandas.core.arrays import PandasArray

from pandas._libs.missing import NAType

from pandas.core.dtypes.base import ExtensionDtype

class StringDtype(ExtensionDtype):
    def __init__(self, storage: Literal["python", "pyarrow"] | None = None) -> None: ...
    @property
    def na_value(self) -> NAType: ...

class StringArray(PandasArray):
    def __init__(self, values, copy: bool = ...) -> None: ...
    def __arrow_array__(self, type=...): ...
    def __setitem__(self, key, value) -> None: ...
    def fillna(self, value=..., method=None, limit=None): ...
    def astype(self, dtype, copy: bool = True): ...
    def value_counts(self, dropna: bool = True): ...
@@ -0,0 +1,65 @@
from collections.abc import Sequence
from datetime import timedelta

from pandas.core.arrays.datetimelike import (
    DatetimeLikeArrayMixin,
    TimelikeOps,
)

class TimedeltaArray(DatetimeLikeArrayMixin, TimelikeOps):
    __array_priority__: int = ...
    @property
    def dtype(self): ...
    def __init__(self, values, dtype=..., freq=..., copy: bool = ...) -> None: ...
    def astype(self, dtype, copy: bool = True): ...
    def sum(
        self,
        *,
        axis=...,
        dtype=...,
        out=...,
        keepdims: bool = ...,
        initial=...,
        skipna: bool = ...,
        min_count: int = ...,
    ): ...
    def std(
        self,
        *,
        axis=...,
        dtype=...,
        out=...,
        ddof: int = ...,
        keepdims: bool = ...,
        skipna: bool = ...,
    ): ...
    def median(
        self,
        *,
        axis=...,
        out=...,
        overwrite_input: bool = ...,
        keepdims: bool = ...,
        skipna: bool = ...,
    ): ...
    def __mul__(self, other): ...
    __rmul__ = ...
    def __truediv__(self, other): ...
    def __rtruediv__(self, other): ...
    def __floordiv__(self, other): ...
    def __rfloordiv__(self, other): ...
    def __mod__(self, other): ...
    def __rmod__(self, other): ...
    def __divmod__(self, other): ...
    def __rdivmod__(self, other): ...
    def __neg__(self): ...
    def __pos__(self): ...
    def __abs__(self): ...
    def total_seconds(self) -> int: ...
    def to_pytimedelta(self) -> Sequence[timedelta]: ...
    days: int = ...
    seconds: int = ...
    microseconds: int = ...
    nanoseconds: int = ...
    @property
    def components(self) -> int: ...
142  lib/python3.11/site-packages/pandas-stubs/core/base.pyi  Normal file
@@ -0,0 +1,142 @@
from collections.abc import (
    Hashable,
    Iterator,
)
from typing import (
    Any,
    Generic,
    Literal,
    final,
    overload,
)

import numpy as np
from pandas import (
    Index,
    Series,
)
from pandas.core.arraylike import OpsMixin
from pandas.core.arrays import ExtensionArray
from pandas.core.arrays.categorical import Categorical
from typing_extensions import Self

from pandas._typing import (
    S1,
    AxisIndex,
    DropKeep,
    DTypeLike,
    GenericT,
    GenericT_co,
    NDFrameT,
    Scalar,
    SupportsDType,
    np_1darray,
)
from pandas.util._decorators import cache_readonly

class NoNewAttributesMixin:
    def __setattr__(self, key: str, value: Any) -> None: ...

class SelectionMixin(Generic[NDFrameT]):
    obj: NDFrameT
    exclusions: frozenset[Hashable]
    @final
    @cache_readonly
    def ndim(self) -> int: ...
    def __getitem__(self, key): ...
    def aggregate(self, func, *args, **kwargs): ...

class IndexOpsMixin(OpsMixin, Generic[S1, GenericT_co]):
    __array_priority__: int = ...
    @property
    def T(self) -> Self: ...
    @property
    def shape(self) -> tuple: ...
    @property
    def ndim(self) -> int: ...
    def item(self) -> S1: ...
    @property
    def nbytes(self) -> int: ...
    @property
    def size(self) -> int: ...
    @property
    def array(self) -> ExtensionArray: ...
    @overload
    def to_numpy(
        self,
        dtype: None = None,
        copy: bool = False,
        na_value: Scalar = ...,
        **kwargs,
    ) -> np_1darray[GenericT_co]: ...
    @overload
    def to_numpy(
        self,
        dtype: np.dtype[GenericT] | SupportsDType[GenericT] | type[GenericT],
        copy: bool = False,
        na_value: Scalar = ...,
        **kwargs,
    ) -> np_1darray[GenericT]: ...
    @overload
    def to_numpy(
        self,
        dtype: DTypeLike,
        copy: bool = False,
        na_value: Scalar = ...,
        **kwargs,
    ) -> np_1darray: ...
    @property
    def empty(self) -> bool: ...
    def max(self, axis=..., skipna: bool = ..., **kwargs): ...
    def min(self, axis=..., skipna: bool = ..., **kwargs): ...
    def argmax(
        self,
        axis: AxisIndex | None = ...,
        skipna: bool = True,
        *args,
        **kwargs,
    ) -> np.int64: ...
    def argmin(
        self,
        axis: AxisIndex | None = ...,
        skipna: bool = True,
        *args,
        **kwargs,
    ) -> np.int64: ...
    def tolist(self) -> list[S1]: ...
    def to_list(self) -> list[S1]: ...
    def __iter__(self) -> Iterator[S1]: ...
    @property
    def hasnans(self) -> bool: ...
    @overload
    def value_counts(
        self,
        normalize: Literal[False] = ...,
        sort: bool = ...,
        ascending: bool = ...,
        bins=...,
        dropna: bool = ...,
    ) -> Series[int]: ...
    @overload
    def value_counts(
        self,
        normalize: Literal[True],
        sort: bool = ...,
        ascending: bool = ...,
        bins=...,
        dropna: bool = ...,
    ) -> Series[float]: ...
    def nunique(self, dropna: bool = True) -> int: ...
    @property
    def is_unique(self) -> bool: ...
    @property
    def is_monotonic_decreasing(self) -> bool: ...
    @property
    def is_monotonic_increasing(self) -> bool: ...
    def factorize(
        self, sort: bool = False, use_na_sentinel: bool = True
    ) -> tuple[np_1darray, np_1darray | Index | Categorical]: ...
    def searchsorted(
        self, value, side: Literal["left", "right"] = ..., sorter=...
    ) -> int | list[int]: ...
    def drop_duplicates(self, *, keep: DropKeep = ...) -> Self: ...
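
A usage sketch of the `value_counts` overloads stubbed above (illustrative, not part of the commit): `normalize` switches the declared result from integer counts to float frequencies:

    import pandas as pd

    s = pd.Series(["a", "b", "a", "a"])
    print(s.value_counts().to_dict())                # {'a': 3, 'b': 1}
    print(s.value_counts(normalize=True).to_dict())  # {'a': 0.75, 'b': 0.25}
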
@@ -0,0 +1 @@
from pandas.core.computation.eval import eval as eval
@@ -0,0 +1,17 @@
import abc

class AbstractEngine(metaclass=abc.ABCMeta):
    has_neg_frac: bool = ...
    expr = ...
    aligned_axes = ...
    result_type = ...
    def __init__(self, expr) -> None: ...
    def convert(self) -> str: ...
    def evaluate(self) -> object: ...

class NumExprEngine(AbstractEngine):
    has_neg_frac: bool = ...

class PythonEngine(AbstractEngine):
    has_neg_frac: bool = ...
    def evaluate(self): ...
@@ -0,0 +1,28 @@
from collections.abc import Mapping
from typing import (
    Any,
    Literal,
)

from pandas import (
    DataFrame,
    Series,
)
from pandas.core.computation.ops import BinOp

from pandas._typing import (
    Scalar,
    npt,
)

def eval(
    expr: str | BinOp,
    parser: Literal["pandas", "python"] = "pandas",
    engine: Literal["python", "numexpr"] | None = ...,
    local_dict: dict[str, Any] | None = None,
    global_dict: dict[str, Any] | None = None,
    resolvers: list[Mapping] | None = ...,
    level: int = 0,
    target: object | None = None,
    inplace: bool = False,
) -> npt.NDArray | Scalar | DataFrame | Series | None: ...
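
A usage sketch of `pd.eval` with the signature stubbed above (illustrative, not part of the commit). `engine=None` lets pandas pick an engine; numexpr is optional:

    import pandas as pd

    df = pd.DataFrame({"a": [1, 2], "b": [10, 20]})
    print(pd.eval("df.a + df.b", engine="python").tolist())  # [11, 22]
    # assignment expressions return a modified copy of target when inplace=False
    print(pd.eval("c = df.a * 2", target=df.copy())["c"].tolist())  # [2, 4]
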
@@ -0,0 +1,64 @@
import ast

from pandas.core.computation.ops import Term as Term
from pandas.core.computation.scope import Scope as Scope

class BaseExprVisitor(ast.NodeVisitor):
    const_type = ...
    term_type = ...
    binary_ops = ...
    binary_op_nodes = ...
    binary_op_nodes_map = ...
    unary_ops = ...
    unary_op_nodes = ...
    unary_op_nodes_map = ...
    rewrite_map = ...
    env = ...
    engine = ...
    parser = ...
    preparser = ...
    assigner = ...
    def __init__(self, env, engine, parser, preparser=...) -> None: ...
    def visit(self, node, **kwargs): ...
    def visit_Module(self, node, **kwargs): ...
    def visit_Expr(self, node, **kwargs): ...
    def visit_BinOp(self, node, **kwargs): ...
    def visit_Div(self, node, **kwargs): ...
    def visit_UnaryOp(self, node, **kwargs): ...
    def visit_Name(self, node, **kwargs): ...
    def visit_NameConstant(self, node, **kwargs): ...
    def visit_Num(self, node, **kwargs): ...
    def visit_Constant(self, node, **kwargs): ...
    def visit_Str(self, node, **kwargs): ...
    def visit_List(self, node, **kwargs): ...
    def visit_Index(self, node, **kwargs): ...
    def visit_Subscript(self, node, **kwargs): ...
    def visit_Slice(self, node, **kwargs): ...
    def visit_Assign(self, node, **kwargs): ...
    def visit_Attribute(self, node, **kwargs): ...
    def visit_Call(self, node, side=..., **kwargs): ...
    def translate_In(self, op): ...
    def visit_Compare(self, node, **kwargs): ...
    def visit_BoolOp(self, node, **kwargs): ...

class Expr:
    env: Scope
    engine: str
    parser: str
    expr = ...
    terms = ...
    def __init__(
        self,
        expr,
        engine: str = ...,
        parser: str = ...,
        env: Scope | None = ...,
        level: int = ...,
    ) -> None: ...
    @property
    def assigner(self): ...
    def __call__(self): ...
    def __len__(self) -> int: ...
    def parse(self): ...
    @property
    def names(self): ...
@@ -0,0 +1,88 @@
import numpy as np

class UndefinedVariableError(NameError):
    def __init__(self, name, is_local: bool = ...) -> None: ...

class Term:
    def __new__(cls, name, env, side=..., encoding=...): ...
    is_local: bool
    env = ...
    side = ...
    encoding = ...
    def __init__(self, name, env, side=..., encoding=...) -> None: ...
    @property
    def local_name(self) -> str: ...
    def __call__(self, *args, **kwargs): ...
    def evaluate(self, *args, **kwargs): ...
    def update(self, value) -> None: ...
    @property
    def is_scalar(self) -> bool: ...
    @property
    def type(self): ...
    return_type = ...
    @property
    def raw(self) -> str: ...
    @property
    def is_datetime(self) -> bool: ...
    @property
    def value(self): ...
    @value.setter
    def value(self, new_value) -> None: ...
    @property
    def name(self): ...
    @property
    def ndim(self) -> int: ...

class Constant(Term):
    @property
    def name(self): ...

class Op:
    op: str
    operands = ...
    encoding = ...
    def __init__(self, op: str, operands, *args, **kwargs) -> None: ...
    def __iter__(self): ...
    @property
    def return_type(self): ...
    @property
    def has_invalid_return_type(self) -> bool: ...
    @property
    def operand_types(self): ...
    @property
    def is_scalar(self) -> bool: ...
    @property
    def is_datetime(self) -> bool: ...

class BinOp(Op):
    lhs = ...
    rhs = ...
    func = ...
    def __init__(self, op: str, lhs, rhs, **kwargs) -> None: ...
    def __call__(self, env): ...
    def evaluate(self, env, engine: str, parser, term_type, eval_in_python): ...
    def convert_values(self): ...

def isnumeric(dtype) -> bool: ...

class Div(BinOp):
    def __init__(self, lhs, rhs, **kwargs) -> None: ...

class UnaryOp(Op):
    operand = ...
    func = ...
    def __init__(self, op: str, operand) -> None: ...
    def __call__(self, env): ...
    @property
    def return_type(self) -> np.dtype: ...

class MathCall(Op):
    func = ...
    def __init__(self, func, args) -> None: ...
    def __call__(self, env): ...

class FuncNode:
    name = ...
    func = ...
    def __init__(self, name: str) -> None: ...
    def __call__(self, *args): ...
@@ -0,0 +1,108 @@
from typing import Any

from pandas.core.computation import (
    expr as expr,
    ops as ops,
    scope as _scope,
)
from pandas.core.computation.expr import BaseExprVisitor as BaseExprVisitor
from pandas.core.indexes.base import Index

class PyTablesScope(_scope.Scope):
    queryables: dict[str, Any]
    def __init__(
        self,
        level: int,
        global_dict=...,
        local_dict=...,
        queryables: dict[str, Any] | None = ...,
    ) -> None: ...

class Term(ops.Term):
    env = ...
    def __new__(cls, name, env, side=..., encoding=...): ...
    def __init__(self, name, env: PyTablesScope, side=..., encoding=...) -> None: ...
    @property
    def value(self): ...
    @value.setter
    def value(self, new_value) -> None: ...

class Constant(Term):
    def __init__(self, name, env: PyTablesScope, side=..., encoding=...) -> None: ...

class BinOp(ops.BinOp):
    op: str
    queryables: dict[str, Any]
    encoding = ...
    condition = ...
    def __init__(
        self, op: str, lhs, rhs, queryables: dict[str, Any], encoding
    ) -> None: ...
    def prune(self, klass): ...
    def conform(self, rhs): ...
    @property
    def is_valid(self) -> bool: ...
    @property
    def is_in_table(self) -> bool: ...
    @property
    def kind(self): ...
    @property
    def meta(self): ...
    @property
    def metadata(self): ...
    def generate(self, v) -> str: ...
    def convert_value(self, v) -> TermValue: ...
    def convert_values(self) -> None: ...

class FilterBinOp(BinOp):
    filter: tuple[Any, Any, Index] | None = ...
    def invert(self): ...
    def format(self): ...
    def generate_filter_op(self, invert: bool = ...): ...

class JointFilterBinOp(FilterBinOp):
    def format(self) -> None: ...

class ConditionBinOp(BinOp):
    def invert(self) -> None: ...
    def format(self): ...
    condition = ...

class JointConditionBinOp(ConditionBinOp):
    condition = ...

class UnaryOp(ops.UnaryOp):
    def prune(self, klass): ...

class PyTablesExprVisitor(BaseExprVisitor):
    const_type = ...
    term_type = ...
    def __init__(self, env, engine, parser, **kwargs) -> None: ...
    def visit_UnaryOp(self, node, **kwargs): ...
    def visit_Index(self, node, **kwargs): ...
    def visit_Assign(self, node, **kwargs): ...
    def visit_Subscript(self, node, **kwargs): ...
    def visit_Attribute(self, node, **kwargs): ...
    def translate_In(self, op): ...

class PyTablesExpr(expr.Expr):
    encoding = ...
    condition = ...
    filter = ...
    terms = ...
    expr = ...
    def __init__(
        self,
        where,
        queryables: dict[str, Any] | None = ...,
        encoding=...,
        scope_level: int = ...,
    ) -> None: ...
    def evaluate(self): ...

class TermValue:
    value = ...
    converted = ...
    kind = ...
    def __init__(self, value, converted, kind: str) -> None: ...
    def tostring(self, encoding) -> str: ...
@@ -0,0 +1,18 @@
class Scope:
    level = ...
    scope = ...
    target = ...
    resolvers = ...
    temps = ...
    def __init__(
        self, level, global_dict=..., local_dict=..., resolvers=..., target=...
    ) -> None: ...
    @property
    def has_resolvers(self) -> bool: ...
    def resolve(self, key: str, is_local: bool): ...
    def swapkey(self, old_key: str, new_key: str, new_value=...): ...
    def add_tmp(self, value) -> str: ...
    @property
    def ntemps(self) -> int: ...
    @property
    def full_scope(self): ...
@@ -0,0 +1,51 @@
from typing import Literal

use_bottleneck_doc: str = ...

use_numexpr_doc: str = ...

pc_precision_doc: str = ...
pc_colspace_doc: str = ...
pc_max_rows_doc: str = ...
pc_min_rows_doc: str = ...
pc_max_cols_doc: str = ...
pc_max_categories_doc: str = ...
pc_max_info_cols_doc: str = ...
pc_nb_repr_h_doc: str = ...
pc_pprint_nest_depth: str = ...
pc_multi_sparse_doc: str = ...
float_format_doc: str = ...
max_colwidth_doc: str = ...
colheader_justify_doc: str = ...
pc_expand_repr_doc: str = ...
pc_show_dimensions_doc: str = ...
pc_east_asian_width_doc: str = ...
pc_ambiguous_as_wide_doc: str = ...
pc_latex_repr_doc: str = ...
pc_table_schema_doc: str = ...
pc_html_border_doc: str = ...
pc_html_use_mathjax_doc: str = ...
pc_width_doc: str = ...
pc_chop_threshold_doc: str = ...
pc_max_seq_items: str = ...
pc_max_info_rows_doc: str = ...
pc_large_repr_doc: str = ...
pc_memory_usage_doc: str = ...
pc_latex_escape: str = ...
pc_latex_longtable: str = ...
pc_latex_multicolumn: str = ...
pc_latex_multicolumn_format: str = ...
pc_latex_multirow: str = ...

max_cols: int = ...
tc_sim_interactive_doc: str = ...
use_inf_as_null_doc: str = ...
use_inf_as_na_doc: str = ...

chained_assignment: Literal["warn", "raise"] | None
reader_engine_doc: str = ...
writer_engine_doc: str = ...
parquet_engine_doc: str = ...
plotting_backend_doc: str = ...

register_converter_doc: str = ...
@@ -0,0 +1,12 @@
from collections.abc import Sequence

import numpy as np
from pandas.core.arrays.base import ExtensionArray

from pandas.core.dtypes.dtypes import ExtensionDtype

def array(
    data: Sequence[object],
    dtype: str | np.dtype | ExtensionDtype | None = None,
    copy: bool = True,
) -> ExtensionArray: ...
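
A usage sketch of `pd.array` as stubbed above (illustrative, not part of the commit): when `dtype` is omitted, an extension type is inferred, matching the declared `ExtensionArray` return:

    import pandas as pd

    print(pd.array([1, 2, None]))       # IntegerArray: [1, 2, <NA>]
    print(pd.array(["x", None]).dtype)  # a string extension dtype is inferred
    print(pd.array([1.5], dtype="Float64"))
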
@@ -0,0 +1,34 @@
from pandas.core.dtypes.common import (
    is_any_real_numeric_dtype as is_any_real_numeric_dtype,
    is_bool as is_bool,
    is_bool_dtype as is_bool_dtype,
    is_complex as is_complex,
    is_complex_dtype as is_complex_dtype,
    is_datetime64_any_dtype as is_datetime64_any_dtype,
    is_datetime64_dtype as is_datetime64_dtype,
    is_datetime64_ns_dtype as is_datetime64_ns_dtype,
    is_dict_like as is_dict_like,
    is_dtype_equal as is_dtype_equal,
    is_extension_array_dtype as is_extension_array_dtype,
    is_file_like as is_file_like,
    is_float as is_float,
    is_float_dtype as is_float_dtype,
    is_hashable as is_hashable,
    is_integer as is_integer,
    is_integer_dtype as is_integer_dtype,
    is_iterator as is_iterator,
    is_list_like as is_list_like,
    is_named_tuple as is_named_tuple,
    is_number as is_number,
    is_numeric_dtype as is_numeric_dtype,
    is_object_dtype as is_object_dtype,
    is_re as is_re,
    is_re_compilable as is_re_compilable,
    is_scalar as is_scalar,
    is_signed_integer_dtype as is_signed_integer_dtype,
    is_string_dtype as is_string_dtype,
    is_timedelta64_dtype as is_timedelta64_dtype,
    is_timedelta64_ns_dtype as is_timedelta64_ns_dtype,
    is_unsigned_integer_dtype as is_unsigned_integer_dtype,
    pandas_dtype as pandas_dtype,
)
@ -0,0 +1,35 @@
from typing import (
    ClassVar,
    Literal,
    TypeVar,
)

from pandas.core.arrays import ExtensionArray

from pandas._typing import type_t

class ExtensionDtype:
    type: ClassVar[type_t]
    name: ClassVar[str]

    @property
    def na_value(self) -> object: ...
    @property
    def kind(
        self,
    ) -> Literal["b", "i", "u", "f", "c", "m", "M", "O", "S", "U", "V", "T"]: ...
    @property
    def names(self) -> list[str] | None: ...
    def empty(self, size: int | tuple[int, ...]) -> type_t[ExtensionArray]: ...
    @classmethod
    def construct_array_type(cls) -> type_t[ExtensionArray]: ...
    @classmethod
    def construct_from_string(cls, string: str) -> ExtensionDtype: ...
    @classmethod
    def is_dtype(cls, dtype: object) -> bool: ...

class StorageExtensionDtype(ExtensionDtype): ...

_ExtensionDtypeT = TypeVar("_ExtensionDtypeT", bound=ExtensionDtype)

def register_extension_dtype(cls: type[_ExtensionDtypeT]) -> type[_ExtensionDtypeT]: ...
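In application code, `register_extension_dtype` is used as a class decorator so that string-based dtype lookup can find a custom dtype by its `name`. A minimal sketch (not part of the stubs); a real dtype must implement many more members than shown here:

import pandas as pd
from pandas.api.extensions import ExtensionDtype, register_extension_dtype

@register_extension_dtype
class MyDtype(ExtensionDtype):
    # A complete implementation must also provide type,
    # construct_array_type(), and the rest of the abstract API.
    name = "my_dtype"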
@ -0,0 +1,50 @@
import pandas as pd
from pandas.api.extensions import ExtensionDtype
from typing_extensions import TypeAlias

from pandas._typing import (
    ArrayLike,
    Dtype,
    DtypeObj,
    npt,
)

from pandas.core.dtypes.inference import (
    is_bool as is_bool,
    is_complex as is_complex,
    is_dict_like as is_dict_like,
    is_file_like as is_file_like,
    is_float as is_float,
    is_hashable as is_hashable,
    is_integer as is_integer,
    is_iterator as is_iterator,
    is_list_like as is_list_like,
    is_named_tuple as is_named_tuple,
    is_number as is_number,
    is_re as is_re,
    is_re_compilable as is_re_compilable,
    is_scalar as is_scalar,
)

_ArrayOrDtype: TypeAlias = (
    ArrayLike | npt.DTypeLike | pd.Series | pd.DataFrame | pd.Index | ExtensionDtype
)

def is_object_dtype(arr_or_dtype: _ArrayOrDtype) -> bool: ...
def is_datetime64_dtype(arr_or_dtype: _ArrayOrDtype) -> bool: ...
def is_timedelta64_dtype(arr_or_dtype: _ArrayOrDtype) -> bool: ...
def is_dtype_equal(source: Dtype, target: Dtype) -> bool: ...
def is_string_dtype(arr_or_dtype: _ArrayOrDtype) -> bool: ...
def is_integer_dtype(arr_or_dtype: _ArrayOrDtype) -> bool: ...
def is_signed_integer_dtype(arr_or_dtype: _ArrayOrDtype) -> bool: ...
def is_unsigned_integer_dtype(arr_or_dtype: _ArrayOrDtype) -> bool: ...
def is_datetime64_any_dtype(arr_or_dtype: _ArrayOrDtype) -> bool: ...
def is_datetime64_ns_dtype(arr_or_dtype: _ArrayOrDtype) -> bool: ...
def is_timedelta64_ns_dtype(arr_or_dtype: _ArrayOrDtype) -> bool: ...
def is_numeric_dtype(arr_or_dtype: _ArrayOrDtype) -> bool: ...
def is_float_dtype(arr_or_dtype: _ArrayOrDtype) -> bool: ...
def is_bool_dtype(arr_or_dtype: _ArrayOrDtype) -> bool: ...
def is_extension_array_dtype(arr_or_dtype: _ArrayOrDtype) -> bool: ...
def is_complex_dtype(arr_or_dtype: _ArrayOrDtype) -> bool: ...
def is_any_real_numeric_dtype(arr_or_dtype: _ArrayOrDtype) -> bool: ...
def pandas_dtype(dtype: object) -> DtypeObj: ...
@ -0,0 +1,15 @@
from typing import TypeVar

from pandas import (
    Categorical,
    CategoricalIndex,
    Series,
)

_CatT = TypeVar("_CatT", bound=Categorical | CategoricalIndex | Series)

def union_categoricals(
    to_union: list[_CatT],
    sort_categories: bool = False,
    ignore_order: bool = False,
) -> Categorical: ...
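The bound on `_CatT` mirrors what the function accepts at runtime: a list of categoricals, categorical indexes, or categorical-dtype Series, always combined into a plain `Categorical`. For example:

import pandas as pd
from pandas.api.types import union_categoricals

a = pd.Categorical(["b", "c"])
b = pd.Categorical(["a", "b"])

# Categories are unioned; pass sort_categories=True to sort them.
union_categoricals([a, b])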
@ -0,0 +1,63 @@
import datetime as dt
from typing import (
    Any,
    Literal,
)

import numpy as np
from pandas.core.indexes.base import Index
from pandas.core.series import Series

from pandas._libs import NaTType
from pandas._libs.tslibs import BaseOffset
from pandas._libs.tslibs.offsets import (
    RelativeDeltaOffset,
    SingleConstructorOffset,
)
from pandas._typing import (
    Ordered,
    TimeZones,
    npt,
)

from pandas.core.dtypes.base import (
    ExtensionDtype as ExtensionDtype,
    register_extension_dtype as register_extension_dtype,
)

class BaseMaskedDtype(ExtensionDtype): ...
class PandasExtensionDtype(ExtensionDtype): ...

class CategoricalDtype(PandasExtensionDtype, ExtensionDtype):
    def __init__(
        self,
        categories: Series | Index | list[Any] | None = ...,
        ordered: Ordered = ...,
    ) -> None: ...
    @property
    def categories(self) -> Index: ...
    @property
    def ordered(self) -> Ordered: ...

class DatetimeTZDtype(PandasExtensionDtype):
    def __init__(self, unit: Literal["ns"] = ..., tz: TimeZones = ...) -> None: ...
    @property
    def unit(self) -> Literal["ns"]: ...
    @property
    def tz(self) -> dt.tzinfo: ...
    @property
    def na_value(self) -> NaTType: ...

class PeriodDtype(PandasExtensionDtype):
    def __init__(
        self, freq: str | SingleConstructorOffset | RelativeDeltaOffset = ...
    ) -> None: ...
    @property
    def freq(self) -> BaseOffset: ...
    @property
    def na_value(self) -> NaTType: ...

class IntervalDtype(PandasExtensionDtype):
    def __init__(self, subtype: str | npt.DTypeLike | None = ...) -> None: ...
    @property
    def subtype(self) -> np.dtype | None: ...
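These parametrized dtypes can be constructed directly and passed wherever a dtype is accepted. A short illustration in plain pandas:

import pandas as pd

cat = pd.CategoricalDtype(categories=["low", "high"], ordered=True)
tz = pd.DatetimeTZDtype(tz="UTC")  # unit defaults to "ns"

s = pd.Series(["low", "high"], dtype=cat)
t = pd.Series(pd.to_datetime(["2024-01-01"]), dtype=tz)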
@ -0,0 +1,6 @@
from pandas import Series
from pandas.core.arrays import ExtensionArray
from typing_extensions import TypeAlias

ABCSeries: TypeAlias = type[Series]
ABCExtensionArray: TypeAlias = type[ExtensionArray]
@ -0,0 +1,17 @@
from pandas._libs import lib

is_bool = lib.is_bool
is_integer = lib.is_integer
is_float = lib.is_float
is_complex = lib.is_complex
is_scalar = lib.is_scalar
is_list_like = lib.is_list_like
is_iterator = lib.is_iterator

def is_number(obj: object) -> bool: ...
def is_file_like(obj: object) -> bool: ...
def is_re(obj: object) -> bool: ...
def is_re_compilable(obj: object) -> bool: ...
def is_dict_like(obj: object) -> bool: ...
def is_named_tuple(obj: object) -> bool: ...
def is_hashable(obj: object) -> bool: ...
@ -0,0 +1,59 @@
from typing import (
    Any,
    overload,
)

import numpy as np
from pandas import (
    DataFrame,
    Index,
    Series,
)
from pandas.core.arrays import ExtensionArray
from typing_extensions import TypeIs

from pandas._libs.missing import NAType
from pandas._libs.tslibs import NaTType
from pandas._typing import (
    Scalar,
    ScalarT,
    ShapeT,
    np_1darray,
    np_ndarray,
    np_ndarray_bool,
)

isposinf_scalar = ...
isneginf_scalar = ...

@overload
def isna(obj: DataFrame) -> DataFrame: ...
@overload
def isna(obj: Series) -> Series[bool]: ...
@overload
def isna(obj: Index | ExtensionArray | list[ScalarT]) -> np_1darray[np.bool]: ...
@overload
def isna(obj: np_ndarray[ShapeT]) -> np_ndarray[ShapeT, np.bool]: ...
@overload
def isna(obj: list[Any]) -> np_ndarray_bool: ...
@overload
def isna(
    obj: Scalar | NaTType | NAType | None,
) -> TypeIs[NaTType | NAType | None]: ...

isnull = isna

@overload
def notna(obj: DataFrame) -> DataFrame: ...
@overload
def notna(obj: Series) -> Series[bool]: ...
@overload
def notna(obj: Index | ExtensionArray | list[ScalarT]) -> np_1darray[np.bool]: ...
@overload
def notna(obj: np_ndarray[ShapeT]) -> np_ndarray[ShapeT, np.bool]: ...
@overload
def notna(obj: list[Any]) -> np_ndarray_bool: ...
@overload
def notna(obj: ScalarT | NaTType | NAType | None) -> TypeIs[ScalarT]: ...

notnull = notna
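The scalar overloads return `TypeIs` rather than plain `bool`, so a checker narrows the argument's type in both branches. A sketch of the narrowing this enables:

import pandas as pd

def describe(value: float | None) -> str:
    if pd.isna(value):
        return "missing"   # value is narrowed to None here
    return f"{value:.2f}"  # and to float here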
2651
lib/python3.11/site-packages/pandas-stubs/core/frame.pyi
Normal file
File diff suppressed because it is too large
435
lib/python3.11/site-packages/pandas-stubs/core/generic.pyi
Normal file
@ -0,0 +1,435 @@
from builtins import (
    bool as _bool,
    str as _str,
)
from collections.abc import (
    Callable,
    Hashable,
    Iterable,
    Mapping,
    Sequence,
)
import datetime as dt
import sqlite3
from typing import (
    Any,
    ClassVar,
    Literal,
    final,
    overload,
)

import numpy as np
from pandas import Index
import pandas.core.indexing as indexing
from pandas.core.resample import DatetimeIndexResampler
from pandas.core.series import (
    Series,
)
import sqlalchemy.engine
from typing_extensions import (
    Concatenate,
    Self,
)

from pandas._libs.lib import _NoDefaultDoNotUse
from pandas._typing import (
    Axis,
    CompressionOptions,
    CSVQuoting,
    DtypeArg,
    DtypeBackend,
    ExcelWriterMergeCells,
    FilePath,
    FileWriteMode,
    Frequency,
    HashableT1,
    HashableT2,
    HDFCompLib,
    IgnoreRaise,
    IndexLabel,
    Level,
    OpenFileErrors,
    P,
    StorageOptions,
    T,
    TakeIndexer,
    TimedeltaConvertibleTypes,
    TimeGrouperOrigin,
    TimestampConvertibleTypes,
    WriteBuffer,
)

from pandas.io.pytables import HDFStore
from pandas.io.sql import SQLTable

class NDFrame(indexing.IndexingMixin):
    __hash__: ClassVar[None]  # type: ignore[assignment] # pyright: ignore[reportIncompatibleMethodOverride]

    @final
    def set_flags(
        self,
        *,
        copy: _bool = ...,
        allows_duplicate_labels: _bool | None = ...,
    ) -> Self: ...
    @property
    def attrs(self) -> dict[Hashable | None, Any]: ...
    @attrs.setter
    def attrs(self, value: Mapping[Hashable | None, Any]) -> None: ...
    @property
    def shape(self) -> tuple[int, ...]: ...
    @property
    def ndim(self) -> int: ...
    @property
    def size(self) -> int: ...
    def equals(self, other: Series) -> _bool: ...
    @final
    def __neg__(self) -> Self: ...
    @final
    def __pos__(self) -> Self: ...
    @final
    def __nonzero__(self) -> None: ...
    @final
    def bool(self) -> _bool: ...
    def __abs__(self) -> Self: ...
    @final
    def __round__(self, decimals: int = ...) -> Self: ...
    @final
    def __contains__(self, key) -> _bool: ...
    @property
    def empty(self) -> _bool: ...
    __array_priority__: int = ...
    def __array__(self, dtype=...) -> np.ndarray: ...
    @final
    def to_excel(
        self,
        excel_writer,
        sheet_name: _str = "Sheet1",
        na_rep: _str = "",
        float_format: _str | None = ...,
        columns: _str | Sequence[_str] | None = ...,
        header: _bool | list[_str] = True,
        index: _bool = True,
        index_label: _str | Sequence[_str] | None = ...,
        startrow: int = 0,
        startcol: int = 0,
        engine: _str | None = ...,
        merge_cells: ExcelWriterMergeCells = True,
        inf_rep: _str = "inf",
        freeze_panes: tuple[int, int] | None = ...,
    ) -> None: ...
    @final
    def to_hdf(
        self,
        path_or_buf: FilePath | HDFStore,
        *,
        key: _str,
        mode: Literal["a", "w", "r+"] = ...,
        complevel: int | None = ...,
        complib: HDFCompLib | None = ...,
        append: _bool = ...,
        format: Literal["t", "table", "f", "fixed"] | None = ...,
        index: _bool = ...,
        min_itemsize: int | dict[HashableT1, int] | None = ...,
        nan_rep: _str | None = ...,
        dropna: _bool | None = ...,
        data_columns: Literal[True] | list[HashableT2] | None = ...,
        errors: OpenFileErrors = ...,
        encoding: _str = ...,
    ) -> None: ...
    @overload
    def to_markdown(
        self,
        buf: FilePath | WriteBuffer[str],
        *,
        mode: FileWriteMode = ...,
        index: _bool = ...,
        storage_options: StorageOptions = ...,
        **kwargs: Any,
    ) -> None: ...
    @overload
    def to_markdown(
        self,
        buf: None = ...,
        *,
        mode: FileWriteMode | None = ...,
        index: _bool = ...,
        storage_options: StorageOptions = ...,
        **kwargs: Any,
    ) -> _str: ...
    @final
    def to_sql(
        self,
        name: _str,
        con: str | sqlalchemy.engine.Connectable | sqlite3.Connection,
        schema: _str | None = ...,
        if_exists: Literal["fail", "replace", "append"] = "fail",
        index: _bool = True,
        index_label: IndexLabel = None,
        chunksize: int | None = ...,
        dtype: DtypeArg | None = ...,
        method: (
            Literal["multi"]
            | Callable[
                [SQLTable, Any, list[str], Iterable[tuple[Any, ...]]],
                int | None,
            ]
            | None
        ) = ...,
    ) -> int | None: ...
    @final
    def to_pickle(
        self,
        path: FilePath | WriteBuffer[bytes],
        compression: CompressionOptions = "infer",
        protocol: int = 5,
        storage_options: StorageOptions = ...,
    ) -> None: ...
    @final
    def to_clipboard(
        self,
        excel: _bool = True,
        sep: _str | None = None,
        *,
        na_rep: _str = ...,
        float_format: _str | Callable[[object], _str] | None = ...,
        columns: list[HashableT1] | None = ...,
        header: _bool | list[_str] = ...,
        index: _bool = ...,
        index_label: Literal[False] | _str | list[HashableT2] | None = ...,
        mode: FileWriteMode = ...,
        encoding: _str | None = ...,
        compression: CompressionOptions = ...,
        quoting: CSVQuoting = ...,
        quotechar: _str = ...,
        lineterminator: _str | None = ...,
        chunksize: int | None = ...,
        date_format: _str | None = ...,
        doublequote: _bool = ...,
        escapechar: _str | None = ...,
        decimal: _str = ...,
        errors: _str = ...,
        storage_options: StorageOptions = ...,
    ) -> None: ...
    @overload
    def to_latex(
        self,
        buf: FilePath | WriteBuffer[str],
        columns: list[_str] | None = ...,
        header: _bool | list[_str] = ...,
        index: _bool = ...,
        na_rep: _str = ...,
        formatters=...,
        float_format=...,
        sparsify: _bool | None = ...,
        index_names: _bool = ...,
        bold_rows: _bool = ...,
        column_format: _str | None = ...,
        longtable: _bool | None = ...,
        escape: _bool | None = ...,
        encoding: _str | None = ...,
        decimal: _str = ...,
        multicolumn: _bool | None = ...,
        multicolumn_format: _str | None = ...,
        multirow: _bool | None = ...,
        caption: _str | tuple[_str, _str] | None = ...,
        label: _str | None = ...,
        position: _str | None = ...,
    ) -> None: ...
    @overload
    def to_latex(
        self,
        buf: None = ...,
        columns: list[_str] | None = ...,
        header: _bool | list[_str] = ...,
        index: _bool = ...,
        na_rep: _str = ...,
        formatters=...,
        float_format=...,
        sparsify: _bool | None = ...,
        index_names: _bool = ...,
        bold_rows: _bool = ...,
        column_format: _str | None = ...,
        longtable: _bool | None = ...,
        escape: _bool | None = ...,
        encoding: _str | None = ...,
        decimal: _str = ...,
        multicolumn: _bool | None = ...,
        multicolumn_format: _str | None = ...,
        multirow: _bool | None = ...,
        caption: _str | tuple[_str, _str] | None = ...,
        label: _str | None = ...,
        position: _str | None = ...,
    ) -> _str: ...
    @overload
    def to_csv(
        self,
        path_or_buf: FilePath | WriteBuffer[bytes] | WriteBuffer[str],
        sep: _str = ...,
        na_rep: _str = ...,
        float_format: _str | Callable[[object], _str] | None = ...,
        columns: list[HashableT1] | None = ...,
        header: _bool | list[_str] = ...,
        index: _bool = ...,
        index_label: Literal[False] | _str | list[HashableT2] | None = ...,
        mode: FileWriteMode = ...,
        encoding: _str | None = ...,
        compression: CompressionOptions = ...,
        quoting: CSVQuoting = ...,
        quotechar: _str = ...,
        lineterminator: _str | None = ...,
        chunksize: int | None = ...,
        date_format: _str | None = ...,
        doublequote: _bool = ...,
        escapechar: _str | None = ...,
        decimal: _str = ...,
        errors: OpenFileErrors = ...,
        storage_options: StorageOptions = ...,
    ) -> None: ...
    @overload
    def to_csv(
        self,
        path_or_buf: None = ...,
        sep: _str = ...,
        na_rep: _str = ...,
        float_format: _str | Callable[[object], _str] | None = ...,
        columns: list[HashableT1] | None = ...,
        header: _bool | list[_str] = ...,
        index: _bool = ...,
        index_label: Literal[False] | _str | list[HashableT2] | None = ...,
        mode: FileWriteMode = ...,
        encoding: _str | None = ...,
        compression: CompressionOptions = ...,
        quoting: CSVQuoting = ...,
        quotechar: _str = ...,
        lineterminator: _str | None = ...,
        chunksize: int | None = ...,
        date_format: _str | None = ...,
        doublequote: _bool = ...,
        escapechar: _str | None = ...,
        decimal: _str = ...,
        errors: OpenFileErrors = ...,
        storage_options: StorageOptions = ...,
    ) -> _str: ...
    @final
    def __delitem__(self, idx: Hashable) -> None: ...
    @overload
    def drop(
        self,
        labels: None = ...,
        *,
        axis: Axis = ...,
        index: Hashable | Sequence[Hashable] | Index = ...,
        columns: Hashable | Iterable[Hashable],
        level: Level | None = ...,
        inplace: Literal[True],
        errors: IgnoreRaise = ...,
    ) -> None: ...
    @overload
    def drop(
        self,
        labels: None = ...,
        *,
        axis: Axis = ...,
        index: Hashable | Sequence[Hashable] | Index,
        columns: Hashable | Iterable[Hashable] = ...,
        level: Level | None = ...,
        inplace: Literal[True],
        errors: IgnoreRaise = ...,
    ) -> None: ...
    @overload
    def drop(
        self,
        labels: Hashable | Sequence[Hashable] | Index,
        *,
        axis: Axis = ...,
        index: None = ...,
        columns: None = ...,
        level: Level | None = ...,
        inplace: Literal[True],
        errors: IgnoreRaise = ...,
    ) -> None: ...
    @overload
    def drop(
        self,
        labels: None = ...,
        *,
        axis: Axis = ...,
        index: Hashable | Sequence[Hashable] | Index = ...,
        columns: Hashable | Iterable[Hashable],
        level: Level | None = ...,
        inplace: Literal[False] = ...,
        errors: IgnoreRaise = ...,
    ) -> Self: ...
    @overload
    def drop(
        self,
        labels: None = ...,
        *,
        axis: Axis = ...,
        index: Hashable | Sequence[Hashable] | Index,
        columns: Hashable | Iterable[Hashable] = ...,
        level: Level | None = ...,
        inplace: Literal[False] = ...,
        errors: IgnoreRaise = ...,
    ) -> Self: ...
    @overload
    def drop(
        self,
        labels: Hashable | Sequence[Hashable] | Index,
        *,
        axis: Axis = ...,
        index: None = ...,
        columns: None = ...,
        level: Level | None = ...,
        inplace: Literal[False] = ...,
        errors: IgnoreRaise = ...,
    ) -> Self: ...
    @overload
    def pipe(
        self,
        func: Callable[Concatenate[Self, P], T],
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> T: ...
    @overload
    def pipe(
        self,
        func: tuple[Callable[..., T], str],
        *args: Any,
        **kwargs: Any,
    ) -> T: ...
    @final
    def __finalize__(self, other, method=..., **kwargs) -> Self: ...
    @final
    def __setattr__(self, name: _str, value) -> None: ...
    @final
    def __copy__(self, deep: _bool = ...) -> Self: ...
    @final
    def __deepcopy__(self, memo=...) -> Self: ...
    @final
    def convert_dtypes(
        self,
        infer_objects: _bool = True,
        convert_string: _bool = True,
        convert_integer: _bool = True,
        convert_boolean: _bool = True,
        convert_floating: _bool = True,
        dtype_backend: DtypeBackend = "numpy_nullable",
    ) -> Self: ...
    @final
    def resample(
        self,
        rule: Frequency | dt.timedelta,
        axis: Axis | _NoDefaultDoNotUse = 0,
        closed: Literal["right", "left"] | None = None,
        label: Literal["right", "left"] | None = None,
        on: Level | None = None,
        level: Level | None = None,
        origin: TimeGrouperOrigin | TimestampConvertibleTypes = "start_day",
        offset: TimedeltaConvertibleTypes | None = None,
        group_keys: _bool = False,
    ) -> DatetimeIndexResampler[Self]: ...
    @final
    def take(self, indices: TakeIndexer, axis: Axis = 0, **kwargs: Any) -> Self: ...
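The paired writer overloads above encode a pandas convention: methods such as `to_csv` and `to_markdown` return `None` when given a target and return the rendered string when the buffer argument is omitted. A minimal sketch of what the stubs let a type checker infer:

import pandas as pd

df = pd.DataFrame({"a": [1, 2]})

text = df.to_csv()    # path_or_buf=None -> inferred as str
df.to_csv("out.csv")  # a target is given -> inferred as None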
@ -0,0 +1,15 @@
from pandas.core.groupby.generic import (
    DataFrameGroupBy as DataFrameGroupBy,
    NamedAgg as NamedAgg,
    SeriesGroupBy as SeriesGroupBy,
)
from pandas.core.groupby.groupby import GroupBy as GroupBy
from pandas.core.groupby.grouper import Grouper as Grouper

__all__ = [
    "DataFrameGroupBy",
    "NamedAgg",
    "SeriesGroupBy",
    "GroupBy",
    "Grouper",
]
@ -0,0 +1,56 @@
from collections.abc import Hashable
import dataclasses
from typing import (
    Literal,
    TypeAlias,
)

@dataclasses.dataclass(order=True, frozen=True)
class OutputKey:
    label: Hashable
    position: int

ReductionKernelType: TypeAlias = Literal[
    "all",
    "any",
    "corrwith",
    "count",
    "first",
    "idxmax",
    "idxmin",
    "last",
    "max",
    "mean",
    "median",
    "min",
    "nunique",
    "prod",
    # as long as `quantile`'s signature accepts only
    # a single quantile value, it's a reduction.
    # GH#27526 might change that.
    "quantile",
    "sem",
    "size",
    "skew",
    "std",
    "sum",
    "var",
]

TransformationKernelType: TypeAlias = Literal[
    "bfill",
    "cumcount",
    "cummax",
    "cummin",
    "cumprod",
    "cumsum",
    "diff",
    "ffill",
    "fillna",
    "ngroup",
    "pct_change",
    "rank",
    "shift",
]

TransformReductionListType: TypeAlias = ReductionKernelType | TransformationKernelType
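The two kernel families differ in output shape: reduction kernels collapse each group to a single row, while transformation kernels return an object indexed like the input. A quick illustration in plain pandas (not part of the stubs):

import pandas as pd

df = pd.DataFrame({"g": ["a", "a", "b"], "x": [1, 2, 3]})

df.groupby("g")["x"].sum()     # reduction kernel: one row per group
df.groupby("g")["x"].cumsum()  # transformation kernel: same length as df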
@ -0,0 +1,466 @@
from collections.abc import (
    Callable,
    Hashable,
    Iterable,
    Iterator,
    Sequence,
)
from typing import (
    Any,
    Concatenate,
    Generic,
    Literal,
    NamedTuple,
    Protocol,
    TypeVar,
    final,
    overload,
)

from matplotlib.axes import Axes as PlotAxes
import numpy as np
from pandas.core.frame import DataFrame
from pandas.core.groupby.base import TransformReductionListType
from pandas.core.groupby.groupby import (
    GroupBy,
    GroupByPlot,
)
from pandas.core.series import Series
from typing_extensions import (
    Self,
    TypeAlias,
)

from pandas._libs.tslibs.timestamps import Timestamp
from pandas._typing import (
    S2,
    S3,
    AggFuncTypeBase,
    AggFuncTypeFrame,
    ByT,
    CorrelationMethod,
    Dtype,
    IndexLabel,
    Level,
    ListLike,
    NsmallestNlargestKeep,
    P,
    Scalar,
    TakeIndexer,
    WindowingEngine,
    WindowingEngineKwargs,
)

AggScalar: TypeAlias = str | Callable[..., Any]

class NamedAgg(NamedTuple):
    column: str
    aggfunc: AggScalar

class SeriesGroupBy(GroupBy[Series[S2]], Generic[S2, ByT]):
    @overload
    def aggregate(  # pyrefly: ignore
        self,
        func: Callable[Concatenate[Series[S2], P], S3],
        /,
        *args,
        engine: WindowingEngine = ...,
        engine_kwargs: WindowingEngineKwargs = ...,
        **kwargs,
    ) -> Series[S3]: ...
    @overload
    def aggregate(
        self,
        func: Callable[[Series], S3],
        *args,
        engine: WindowingEngine = ...,
        engine_kwargs: WindowingEngineKwargs = ...,
        **kwargs,
    ) -> Series[S3]: ...
    @overload
    def aggregate(
        self,
        func: list[AggFuncTypeBase],
        /,
        *args,
        engine: WindowingEngine = ...,
        engine_kwargs: WindowingEngineKwargs = ...,
        **kwargs,
    ) -> DataFrame: ...
    @overload
    def aggregate(
        self,
        func: AggFuncTypeBase | None = ...,
        /,
        *args,
        engine: WindowingEngine = ...,
        engine_kwargs: WindowingEngineKwargs = ...,
        **kwargs,
    ) -> Series: ...
    agg = aggregate
    @overload
    def transform(
        self,
        func: Callable[Concatenate[Series[S2], P], Series[S3]],
        /,
        *args: Any,
        engine: WindowingEngine = ...,
        engine_kwargs: WindowingEngineKwargs = ...,
        **kwargs: Any,
    ) -> Series[S3]: ...
    @overload
    def transform(
        self,
        func: Callable,
        *args: Any,
        **kwargs: Any,
    ) -> Series: ...
    @overload
    def transform(
        self, func: TransformReductionListType, *args, **kwargs
    ) -> Series: ...
    def filter(
        self, func: Callable | str, dropna: bool = ..., *args, **kwargs
    ) -> Series: ...
    def nunique(self, dropna: bool = ...) -> Series[int]: ...
    # describe delegates to super() method but here it has keyword-only parameters
    def describe(  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
        self,
        *,
        percentiles: Iterable[float] | None = ...,
        include: Literal["all"] | list[Dtype] | None = ...,
        exclude: list[Dtype] | None = ...,
    ) -> DataFrame: ...
    @overload
    def value_counts(
        self,
        normalize: Literal[False] = ...,
        sort: bool = ...,
        ascending: bool = ...,
        bins: int | Sequence[int] | None = ...,
        dropna: bool = ...,
    ) -> Series[int]: ...
    @overload
    def value_counts(
        self,
        normalize: Literal[True],
        sort: bool = ...,
        ascending: bool = ...,
        bins: int | Sequence[int] | None = ...,
        dropna: bool = ...,
    ) -> Series[float]: ...
    def take(
        self,
        indices: TakeIndexer,
        **kwargs,
    ) -> Series[S2]: ...
    def skew(
        self,
        skipna: bool = True,
        numeric_only: bool = False,
        **kwargs,
    ) -> Series: ...
    @property
    def plot(self) -> GroupByPlot[Self]: ...
    def nlargest(
        self, n: int = 5, keep: NsmallestNlargestKeep = "first"
    ) -> Series[S2]: ...
    def nsmallest(
        self, n: int = 5, keep: NsmallestNlargestKeep = "first"
    ) -> Series[S2]: ...
    def idxmin(self, skipna: bool = True) -> Series: ...
    def idxmax(self, skipna: bool = True) -> Series: ...
    def corr(
        self,
        other: Series,
        method: CorrelationMethod = ...,
        min_periods: int | None = ...,
    ) -> Series: ...
    def cov(
        self,
        other: Series,
        min_periods: int | None = None,
        ddof: int | None = 1,
    ) -> Series: ...
    @property
    def is_monotonic_increasing(self) -> Series[bool]: ...
    @property
    def is_monotonic_decreasing(self) -> Series[bool]: ...
    def hist(
        self,
        by: IndexLabel | None = None,
        ax: PlotAxes | None = None,
        grid: bool = True,
        xlabelsize: float | str | None = None,
        xrot: float | None = None,
        ylabelsize: float | str | None = None,
        yrot: float | None = None,
        figsize: tuple[float, float] | None = None,
        bins: int | Sequence[int] = 10,
        backend: str | None = None,
        legend: bool = False,
        **kwargs,
    ) -> Series: ...  # Series[Axes] but this is not allowed
    @property
    def dtype(self) -> Series: ...
    def unique(self) -> Series: ...
    # Overrides that provide more precise return types over the GroupBy class
    @final  # type: ignore[misc]
    def __iter__(  # pyright: ignore[reportIncompatibleMethodOverride]
        self,
    ) -> Iterator[tuple[ByT, Series[S2]]]: ...

_TT = TypeVar("_TT", bound=Literal[True, False])

# ty ignore needed because of https://github.com/astral-sh/ty/issues/157#issuecomment-3017337945
class DFCallable1(Protocol[P]):  # ty: ignore[invalid-argument-type]
    def __call__(
        self, df: DataFrame, /, *args: P.args, **kwargs: P.kwargs
    ) -> Scalar | list | dict: ...

class DFCallable2(Protocol[P]):  # ty: ignore[invalid-argument-type]
    def __call__(
        self, df: DataFrame, /, *args: P.args, **kwargs: P.kwargs
    ) -> DataFrame | Series: ...

class DFCallable3(Protocol[P]):  # ty: ignore[invalid-argument-type]
    def __call__(self, df: Iterable, /, *args: P.args, **kwargs: P.kwargs) -> float: ...

class DataFrameGroupBy(GroupBy[DataFrame], Generic[ByT, _TT]):
    # error: Overload 3 for "apply" will never be used because its parameters overlap overload 1
    @overload  # type: ignore[override]
    def apply(
        self,
        func: DFCallable1[P],
        /,
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> Series: ...
    @overload
    def apply(
        self,
        func: DFCallable2[P],
        /,
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> DataFrame: ...
    @overload
    def apply(
        self,
        func: DFCallable3[P],
        /,
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> DataFrame: ...
    # error: overload 1 overlaps overload 2 because of different return types
    @overload
    def aggregate(self, func: Literal["size"]) -> Series: ...  # type: ignore[overload-overlap]
    @overload
    def aggregate(
        self,
        func: AggFuncTypeFrame | None = ...,
        *args,
        engine: WindowingEngine = ...,
        engine_kwargs: WindowingEngineKwargs = ...,
        **kwargs,
    ) -> DataFrame: ...
    @overload
    def aggregate(
        self,
        func: AggFuncTypeFrame | None = None,
        /,
        **kwargs,
    ) -> DataFrame: ...
    agg = aggregate
    @overload
    def transform(
        self,
        func: Callable[Concatenate[DataFrame, P], DataFrame],
        *args: Any,
        engine: WindowingEngine = ...,
        engine_kwargs: WindowingEngineKwargs = ...,
        **kwargs: Any,
    ) -> DataFrame: ...
    @overload
    def transform(
        self,
        func: Callable,
        *args: Any,
        **kwargs: Any,
    ) -> DataFrame: ...
    @overload
    def transform(
        self, func: TransformReductionListType, *args, **kwargs
    ) -> DataFrame: ...
    def filter(
        self, func: Callable, dropna: bool = ..., *args, **kwargs
    ) -> DataFrame: ...
    @overload
    def __getitem__(self, key: Scalar) -> SeriesGroupBy[Any, ByT]: ...  # type: ignore[overload-overlap] # pyright: ignore[reportOverlappingOverload]
    @overload
    def __getitem__(  # pyright: ignore[reportIncompatibleMethodOverride]
        self, key: Iterable[Hashable]
    ) -> DataFrameGroupBy[ByT, _TT]: ...
    def nunique(self, dropna: bool = True) -> DataFrame: ...
    def idxmax(
        self,
        skipna: bool = True,
        numeric_only: bool = False,
    ) -> DataFrame: ...
    def idxmin(
        self,
        skipna: bool = True,
        numeric_only: bool = False,
    ) -> DataFrame: ...
    @overload
    def boxplot(
        self,
        subplots: Literal[True] = ...,
        column: IndexLabel | None = ...,
        fontsize: float | str | None = ...,
        rot: float = ...,
        grid: bool = ...,
        ax: PlotAxes | None = ...,
        figsize: tuple[float, float] | None = ...,
        layout: tuple[int, int] | None = ...,
        sharex: bool = ...,
        sharey: bool = ...,
        backend: str | None = ...,
        **kwargs,
    ) -> Series: ...  # Series[PlotAxes] but this is not allowed
    @overload
    def boxplot(
        self,
        subplots: Literal[False],
        column: IndexLabel | None = ...,
        fontsize: float | str | None = ...,
        rot: float = ...,
        grid: bool = ...,
        ax: PlotAxes | None = ...,
        figsize: tuple[float, float] | None = ...,
        layout: tuple[int, int] | None = ...,
        sharex: bool = ...,
        sharey: bool = ...,
        backend: str | None = ...,
        **kwargs,
    ) -> PlotAxes: ...
    @overload
    def boxplot(
        self,
        subplots: bool,
        column: IndexLabel | None = ...,
        fontsize: float | str | None = ...,
        rot: float = ...,
        grid: bool = ...,
        ax: PlotAxes | None = ...,
        figsize: tuple[float, float] | None = ...,
        layout: tuple[int, int] | None = ...,
        sharex: bool = ...,
        sharey: bool = ...,
        backend: str | None = ...,
        **kwargs,
    ) -> PlotAxes | Series: ...  # Series[PlotAxes]
    @overload
    def value_counts(
        self: DataFrameGroupBy[ByT, Literal[True]],
        subset: ListLike | None = ...,
        normalize: Literal[False] = ...,
        sort: bool = ...,
        ascending: bool = ...,
        dropna: bool = ...,
    ) -> Series[int]: ...
    @overload
    def value_counts(
        self: DataFrameGroupBy[ByT, Literal[True]],
        subset: ListLike | None,
        normalize: Literal[True],
        sort: bool = ...,
        ascending: bool = ...,
        dropna: bool = ...,
    ) -> Series[float]: ...
    @overload
    def value_counts(
        self: DataFrameGroupBy[ByT, Literal[False]],
        subset: ListLike | None = ...,
        normalize: Literal[False] = ...,
        sort: bool = ...,
        ascending: bool = ...,
        dropna: bool = ...,
    ) -> DataFrame: ...
    @overload
    def value_counts(
        self: DataFrameGroupBy[ByT, Literal[False]],
        subset: ListLike | None,
        normalize: Literal[True],
        sort: bool = ...,
        ascending: bool = ...,
        dropna: bool = ...,
    ) -> DataFrame: ...
    def take(self, indices: TakeIndexer, **kwargs) -> DataFrame: ...
    @overload
    def skew(
        self,
        skipna: bool = ...,
        numeric_only: bool = ...,
        *,
        level: Level,
        **kwargs,
    ) -> DataFrame: ...
    @overload
    def skew(
        self,
        skipna: bool = ...,
        numeric_only: bool = ...,
        *,
        level: None = ...,
        **kwargs,
    ) -> Series: ...
    @property
    def plot(self) -> GroupByPlot[Self]: ...
    def corr(
        self,
        method: str | Callable[[np.ndarray, np.ndarray], float] = ...,
        min_periods: int = ...,
        numeric_only: bool = False,
    ) -> DataFrame: ...
    def cov(
        self,
        min_periods: int | None = ...,
        ddof: int | None = 1,
        numeric_only: bool = False,
    ) -> DataFrame: ...
    def hist(
        self,
        column: IndexLabel | None = None,
        by: IndexLabel | None = None,
        grid: bool = True,
        xlabelsize: float | str | None = None,
        xrot: float | None = None,
        ylabelsize: float | str | None = None,
        yrot: float | None = None,
        ax: PlotAxes | None = None,
        sharex: bool = False,
        sharey: bool = False,
        figsize: tuple[float, float] | None = None,
        layout: tuple[int, int] | None = None,
        bins: int | Sequence[int] = 10,
        backend: str | None = None,
        legend: bool = False,
        **kwargs,
    ) -> Series: ...  # Series[Axes] but this is not allowed
    @property
    def dtypes(self) -> Series: ...
    def __getattr__(self, name: str) -> SeriesGroupBy[Any, ByT]: ...
    # Overrides that provide more precise return types over the GroupBy class
    @final  # type: ignore[misc]
    def __iter__(  # pyright: ignore[reportIncompatibleMethodOverride]
        self,
    ) -> Iterator[tuple[ByT, DataFrame]]: ...
    @overload
    def size(self: DataFrameGroupBy[ByT, Literal[True]]) -> Series[int]: ...
    @overload
    def size(self: DataFrameGroupBy[ByT, Literal[False]]) -> DataFrame: ...
    @overload
    def size(self: DataFrameGroupBy[Timestamp, Literal[True]]) -> Series[int]: ...
    @overload
    def size(self: DataFrameGroupBy[Timestamp, Literal[False]]) -> DataFrame: ...
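The `_TT` parameter tracks the groupby's `as_index` flag as a literal, which is what lets `size` and `value_counts` switch between `Series` and `DataFrame` return types. In plain pandas terms:

import pandas as pd

df = pd.DataFrame({"g": ["a", "a", "b"], "x": [1, 2, 3]})

df.groupby("g").size()                  # as_index=True  -> Series[int]
df.groupby("g", as_index=False).size()  # as_index=False -> DataFrame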
@ -0,0 +1,393 @@
from collections.abc import (
    Callable,
    Hashable,
    Iterable,
    Iterator,
    Sequence,
)
import datetime as dt
from typing import (
    Any,
    Generic,
    Literal,
    TypeVar,
    final,
    overload,
)

import numpy as np
from pandas.core.base import SelectionMixin
from pandas.core.frame import DataFrame
from pandas.core.groupby import (
    generic,
)
from pandas.core.groupby.indexing import (
    GroupByIndexingMixin,
    GroupByNthSelector,
)
from pandas.core.indexers import BaseIndexer
from pandas.core.indexes.api import Index
from pandas.core.resample import (
    DatetimeIndexResamplerGroupby,
    PeriodIndexResamplerGroupby,
    TimedeltaIndexResamplerGroupby,
)
from pandas.core.series import Series
from pandas.core.window import (
    ExpandingGroupby,
    ExponentialMovingWindowGroupby,
    RollingGroupby,
)
from typing_extensions import (
    Concatenate,
    Self,
    TypeAlias,
)

from pandas._libs.lib import _NoDefaultDoNotUse
from pandas._libs.tslibs import BaseOffset
from pandas._typing import (
    S1,
    AnyArrayLike,
    Axis,
    AxisInt,
    CalculationMethod,
    Dtype,
    Frequency,
    IndexLabel,
    IntervalClosedType,
    MaskType,
    NDFrameT,
    P,
    RandomState,
    Scalar,
    T,
    TimedeltaConvertibleTypes,
    TimeGrouperOrigin,
    TimestampConvention,
    TimestampConvertibleTypes,
    WindowingEngine,
    WindowingEngineKwargs,
    npt,
)

from pandas.plotting import PlotAccessor

_ResamplerGroupBy: TypeAlias = (
    DatetimeIndexResamplerGroupby[NDFrameT]  # ty: ignore[invalid-argument-type]
    | PeriodIndexResamplerGroupby[NDFrameT]  # ty: ignore[invalid-argument-type]
    | TimedeltaIndexResamplerGroupby[NDFrameT]  # ty: ignore[invalid-argument-type]
)

class GroupBy(BaseGroupBy[NDFrameT]):
    def __getattr__(self, attr: str) -> Any: ...
    def apply(self, func: Callable | str, *args, **kwargs) -> NDFrameT: ...
    @final
    @overload
    def any(self: GroupBy[Series], skipna: bool = ...) -> Series[bool]: ...
    @overload
    def any(self: GroupBy[DataFrame], skipna: bool = ...) -> DataFrame: ...
    @final
    @overload
    def all(self: GroupBy[Series], skipna: bool = ...) -> Series[bool]: ...
    @overload
    def all(self: GroupBy[DataFrame], skipna: bool = ...) -> DataFrame: ...
    @final
    @overload
    def count(self: GroupBy[Series]) -> Series[int]: ...
    @overload
    def count(self: GroupBy[DataFrame]) -> DataFrame: ...
    @final
    def mean(
        self,
        numeric_only: bool = False,
        engine: WindowingEngine = None,
        engine_kwargs: WindowingEngineKwargs = None,
    ) -> NDFrameT: ...
    @final
    def median(self, numeric_only: bool = False) -> NDFrameT: ...
    @final
    @overload
    def std(
        self: GroupBy[Series],
        ddof: int = ...,
        engine: WindowingEngine = ...,
        engine_kwargs: WindowingEngineKwargs = ...,
        numeric_only: bool = ...,
    ) -> Series[float]: ...
    @overload
    def std(
        self: GroupBy[DataFrame],
        ddof: int = ...,
        engine: WindowingEngine = ...,
        engine_kwargs: WindowingEngineKwargs = ...,
        numeric_only: bool = ...,
    ) -> DataFrame: ...
    @final
    @overload
    def var(
        self: GroupBy[Series],
        ddof: int = ...,
        engine: WindowingEngine = ...,
        engine_kwargs: WindowingEngineKwargs = ...,
        numeric_only: bool = ...,
    ) -> Series[float]: ...
    @overload
    def var(
        self: GroupBy[DataFrame],
        ddof: int = ...,
        engine: WindowingEngine = ...,
        engine_kwargs: WindowingEngineKwargs = ...,
        numeric_only: bool = ...,
    ) -> DataFrame: ...
    @final
    @overload
    def sem(
        self: GroupBy[Series], ddof: int = ..., numeric_only: bool = ...
    ) -> Series[float]: ...
    @overload
    def sem(
        self: GroupBy[DataFrame], ddof: int = ..., numeric_only: bool = ...
    ) -> DataFrame: ...
    def size(self: GroupBy[Series]) -> Series[int]: ...
    @final
    def sum(
        self,
        numeric_only: bool = False,
        min_count: int = 0,
        engine: WindowingEngine = None,
        engine_kwargs: WindowingEngineKwargs = None,
    ) -> NDFrameT: ...
    @final
    def prod(self, numeric_only: bool = False, min_count: int = 0) -> NDFrameT: ...
    @final
    def min(
        self,
        numeric_only: bool = False,
        min_count: int = -1,
        engine: WindowingEngine = None,
        engine_kwargs: WindowingEngineKwargs = None,
    ) -> NDFrameT: ...
    @final
    def max(
        self,
        numeric_only: bool = False,
        min_count: int = -1,
        engine: WindowingEngine = None,
        engine_kwargs: WindowingEngineKwargs = None,
    ) -> NDFrameT: ...
    @final
    def first(
        self, numeric_only: bool = False, min_count: int = -1, skipna: bool = True
    ) -> NDFrameT: ...
    @final
    def last(
        self, numeric_only: bool = False, min_count: int = -1, skipna: bool = True
    ) -> NDFrameT: ...
    @final
    def ohlc(self) -> DataFrame: ...
    def describe(
        self,
        percentiles: Iterable[float] | None = ...,
        include: Literal["all"] | list[Dtype] | None = ...,
        exclude: list[Dtype] | None = ...,
    ) -> DataFrame: ...
    @final
    def resample(
        self,
        rule: Frequency | dt.timedelta,
        how: str | None = ...,
        fill_method: str | None = ...,
        limit: int | None = ...,
        kind: str | None = ...,
        on: Hashable | None = ...,
        *,
        closed: Literal["left", "right"] | None = ...,
        label: Literal["left", "right"] | None = ...,
        axis: Axis = ...,
        convention: TimestampConvention | None = ...,
        origin: TimeGrouperOrigin | TimestampConvertibleTypes = ...,
        offset: TimedeltaConvertibleTypes | None = ...,
        group_keys: bool = ...,
        **kwargs,
    ) -> _ResamplerGroupBy[NDFrameT]: ...
    @final
    def rolling(
        self,
        window: int | dt.timedelta | str | BaseOffset | BaseIndexer | None = ...,
        min_periods: int | None = None,
        center: bool | None = False,
        win_type: str | None = None,
        axis: Axis = 0,
        on: str | Index | None = None,
        closed: IntervalClosedType | None = None,
        method: CalculationMethod = "single",
        *,
        selection: IndexLabel | None = None,
    ) -> RollingGroupby[NDFrameT]: ...
    @final
    def expanding(
        self,
        min_periods: int = ...,
        axis: Axis = ...,
        method: CalculationMethod = ...,
        selection: IndexLabel | None = ...,
    ) -> ExpandingGroupby[NDFrameT]: ...
    @final
    def ewm(
        self,
        com: float | None = ...,
        span: float | None = ...,
        halflife: TimedeltaConvertibleTypes | None = ...,
        alpha: float | None = ...,
        min_periods: int | None = ...,
        adjust: bool = ...,
        ignore_na: bool = ...,
        axis: Axis = ...,
        times: str | np.ndarray | Series | np.timedelta64 | None = ...,
        method: CalculationMethod = ...,
        *,
        selection: IndexLabel | None = ...,
    ) -> ExponentialMovingWindowGroupby[NDFrameT]: ...
    @final
    def ffill(self, limit: int | None = ...) -> NDFrameT: ...
    @final
    def bfill(self, limit: int | None = ...) -> NDFrameT: ...
    @final
    @property
    def nth(self) -> GroupByNthSelector[Self]: ...
    @final
    def quantile(
        self,
        q: float | AnyArrayLike = 0.5,
        interpolation: str = "linear",
        numeric_only: bool = False,
    ) -> NDFrameT: ...
    @final
    def ngroup(self, ascending: bool = True) -> Series[int]: ...
    @final
    def cumcount(self, ascending: bool = True) -> Series[int]: ...
    @final
    def rank(
        self,
        method: str = "average",
        ascending: bool = True,
        na_option: str = "keep",
        pct: bool = False,
        axis: AxisInt | _NoDefaultDoNotUse = 0,
    ) -> NDFrameT: ...
    @final
    def cumprod(
        self, axis: Axis | _NoDefaultDoNotUse = ..., *args, **kwargs
    ) -> NDFrameT: ...
    @final
    def cumsum(
        self, axis: Axis | _NoDefaultDoNotUse = ..., *args, **kwargs
    ) -> NDFrameT: ...
    @final
    def cummin(
        self,
        axis: AxisInt | _NoDefaultDoNotUse = ...,
        numeric_only: bool = ...,
        **kwargs,
    ) -> NDFrameT: ...
    @final
    def cummax(
        self,
        axis: AxisInt | _NoDefaultDoNotUse = ...,
        numeric_only: bool = ...,
        **kwargs,
    ) -> NDFrameT: ...
    @final
    def shift(
        self,
        periods: int | Sequence[int] = 1,
        freq: Frequency | None = ...,
        axis: Axis | _NoDefaultDoNotUse = 0,
        fill_value=...,
        suffix: str | None = ...,
    ) -> NDFrameT: ...
    @final
    def diff(
        self, periods: int = 1, axis: AxisInt | _NoDefaultDoNotUse = 0
    ) -> NDFrameT: ...
    @final
    def pct_change(
        self,
        periods: int = ...,
        fill_method: Literal["bfill", "ffill"] | None | _NoDefaultDoNotUse = ...,
        limit: int | None | _NoDefaultDoNotUse = ...,
        freq=...,
        axis: Axis | _NoDefaultDoNotUse = ...,
    ) -> NDFrameT: ...
    @final
    def head(self, n: int = ...) -> NDFrameT: ...
    @final
    def tail(self, n: int = ...) -> NDFrameT: ...
    @final
    def sample(
        self,
        n: int | None = None,
        frac: float | None = None,
        replace: bool = False,
        weights: Sequence | Series | None = ...,
        random_state: RandomState | None = ...,
    ) -> NDFrameT: ...

_GroupByT = TypeVar("_GroupByT", bound=GroupBy)

# GroupByPlot does not really inherit from PlotAccessor but it delegates
# to it using __call__ and __getattr__. We lie here to avoid repeating the
# whole stub of PlotAccessor
@final
class GroupByPlot(PlotAccessor, Generic[_GroupByT]):
    def __init__(self, groupby: _GroupByT) -> None: ...
    # The following methods are inherited from the fake parent class PlotAccessor
    # def __call__(self, *args, **kwargs): ...
    # def __getattr__(self, name: str): ...

class BaseGroupBy(SelectionMixin[NDFrameT], GroupByIndexingMixin):
    @final
    def __len__(self) -> int: ...
    @final
    def __repr__(self) -> str: ...  # noqa: PYI029 __repr__ here is final
    @final
    @property
    def groups(self) -> dict[Hashable, Index]: ...
    @final
    @property
    def ngroups(self) -> int: ...
    @final
    @property
    def indices(self) -> dict[Hashable, Index | npt.NDArray[np.int_] | list[int]]: ...
    @overload
    def pipe(
        self,
        func: Callable[Concatenate[Self, P], T],
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> T: ...
    @overload
    def pipe(
        self,
        func: tuple[Callable[..., T], str],
        *args: Any,
        **kwargs: Any,
    ) -> T: ...
    @final
    def get_group(self, name) -> NDFrameT: ...
    @final
    def __iter__(self) -> Iterator[tuple[Hashable, NDFrameT]]: ...
    @overload
    def __getitem__(self: BaseGroupBy[DataFrame], key: Scalar) -> generic.SeriesGroupBy: ...  # type: ignore[overload-overlap] # pyright: ignore[reportOverlappingOverload]
    @overload
    def __getitem__(
        self: BaseGroupBy[DataFrame], key: Iterable[Hashable]
    ) -> generic.DataFrameGroupBy: ...
    @overload
    def __getitem__(
        self: BaseGroupBy[Series[S1]],
        idx: list[str] | Index | Series[S1] | MaskType | tuple[Hashable | slice, ...],
    ) -> generic.SeriesGroupBy: ...
    @overload
    def __getitem__(self: BaseGroupBy[Series[S1]], idx: Scalar) -> S1: ...
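`pipe` is typed with `Concatenate[Self, P]`, so the checker threads the groupby object through as the first argument and infers the result from the callable's return type. A small usage sketch:

import pandas as pd

df = pd.DataFrame({"g": ["a", "a", "b"], "x": [1, 2, 3]})

# The lambda receives the DataFrameGroupBy itself; the result type
# (here DataFrame) is inferred from the callable's return type.
means = df.groupby("g").pipe(lambda gb: gb.mean(numeric_only=True))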
@ -0,0 +1,74 @@
from collections.abc import (
    Hashable,
    Iterator,
)
from typing import (
    final,
    overload,
)

import numpy as np
from pandas import (
    DataFrame,
    Index,
    Series,
)
from pandas.core.resample import TimeGrouper
from typing_extensions import Self

from pandas._libs.lib import _NoDefaultDoNotUse
from pandas._typing import (
    ArrayLike,
    Axis,
    Frequency,
    Incomplete,
    KeysArgType,
    Level,
    ListLikeHashable,
    npt,
)
from pandas.util._decorators import cache_readonly

class Grouper:
    key: KeysArgType | None
    level: Level | ListLikeHashable[Level] | None
    freq: Frequency | None
    axis: Axis
    sort: bool
    dropna: bool
    binner: Incomplete
    @overload
    def __new__(
        cls,
        key: KeysArgType | None = ...,
        level: Level | ListLikeHashable[Level] | None = ...,
        axis: Axis | _NoDefaultDoNotUse = ...,
        sort: bool = ...,
        dropna: bool = ...,
    ) -> Self: ...
    @overload
    def __new__(cls, *args, freq: Frequency, **kwargs) -> TimeGrouper: ...
    @final
    def __repr__(self) -> str: ...  # noqa: PYI029 __repr__ here is final

@final
class Grouping:
    level: Level | None
    obj: DataFrame | Series | None
    in_axis: bool
    grouping_vector: Incomplete
    def __iter__(self) -> Iterator[Hashable]: ...
    @cache_readonly
    def name(self) -> Hashable: ...
    @cache_readonly
    def ngroups(self) -> int: ...
    @cache_readonly
    def indices(self) -> dict[Hashable, npt.NDArray[np.intp]]: ...
    @property
    def codes(self) -> npt.NDArray[np.signedinteger]: ...
    @cache_readonly
    def group_arraylike(self) -> ArrayLike: ...
    @cache_readonly
    def result_index(self) -> Index: ...
    @cache_readonly
    def group_index(self) -> Index: ...
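The two `__new__` overloads capture a runtime quirk: passing `freq` to `pd.Grouper` actually constructs a `TimeGrouper`. A usage sketch:

import pandas as pd

df = pd.DataFrame(
    {"x": [1, 2, 3, 4]},
    index=pd.date_range("2024-01-01", periods=4, freq="D"),
)

# No freq: a plain Grouper (here keyed on the index level).
pd.Grouper(level=0)

# With freq: the second overload applies and a TimeGrouper is returned.
df.groupby(pd.Grouper(freq="2D")).sum()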
@ -0,0 +1,32 @@
from typing import (
    Any,
    Generic,
    Literal,
    TypeVar,
)

from pandas import (
    DataFrame,
    Series,
)
from pandas.core.groupby import groupby

from pandas._typing import PositionalIndexer

_GroupByT = TypeVar("_GroupByT", bound=groupby.GroupBy[Any])

class GroupByIndexingMixin: ...

class GroupByPositionalSelector:
    groupby_object: groupby.GroupBy
    def __getitem__(self, arg: PositionalIndexer | tuple) -> DataFrame | Series: ...

class GroupByNthSelector(Generic[_GroupByT]):
    groupby_object: _GroupByT

    def __call__(
        self,
        n: PositionalIndexer | tuple,
        dropna: Literal["any", "all", None] = ...,
    ) -> DataFrame | Series: ...
    def __getitem__(self, n: PositionalIndexer | tuple) -> DataFrame | Series: ...
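`GroupByNthSelector` declares both `__call__` and `__getitem__` because `GroupBy.nth` supports both spellings at runtime. For instance:

import pandas as pd

df = pd.DataFrame({"g": ["a", "a", "b"], "x": [1, 2, 3]})
gb = df.groupby("g")

gb.nth(0)    # callable form, also accepts dropna=...
gb.nth[0]    # subscript form
gb.nth[0:2]  # positional slices work too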
103
lib/python3.11/site-packages/pandas-stubs/core/groupby/ops.pyi
Normal file
@ -0,0 +1,103 @@
from collections.abc import (
    Callable,
    Hashable,
    Iterator,
)
from typing import (
    Generic,
    final,
)

import numpy as np
from pandas import (
    Index,
    Series,
)
from pandas.core.groupby import grouper

from pandas._typing import (
    ArrayLike,
    AxisInt,
    Incomplete,
    NDFrameT,
    Shape,
    T,
    npt,
)
from pandas.util._decorators import cache_readonly

class BaseGrouper:
    axis: Index
    dropna: bool
    @property
    def groupings(self) -> list[grouper.Grouping]: ...
    @property
    def shape(self) -> Shape: ...
    def __iter__(self) -> Iterator: ...
    @property
    def nkeys(self) -> int: ...
    def get_iterator(
        self, data: NDFrameT, axis: AxisInt = ...
    ) -> Iterator[tuple[Hashable, NDFrameT]]: ...
    @final
    @cache_readonly
    def group_keys_seq(self): ...
    @cache_readonly
    def indices(self) -> dict[Hashable, npt.NDArray[np.intp]]: ...
    @final
    def result_ilocs(self) -> npt.NDArray[np.intp]: ...
    @final
    @property
    def codes(self) -> list[npt.NDArray[np.signedinteger]]: ...
    @property
    def levels(self) -> list[Index]: ...
    @property
    def names(self) -> list: ...
    @final
    def size(self) -> Series: ...
    @cache_readonly
    def groups(self) -> dict[Hashable, np.ndarray]: ...
    @final
    @cache_readonly
    def is_monotonic(self) -> bool: ...
    @final
    @cache_readonly
    def has_dropped_na(self) -> bool: ...
    @cache_readonly
    def group_info(self) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.intp], int]: ...
    @cache_readonly
    def codes_info(self) -> npt.NDArray[np.intp]: ...
    @final
    @cache_readonly
    def ngroups(self) -> int: ...
    @property
    def reconstructed_codes(self) -> list[npt.NDArray[np.intp]]: ...
    @cache_readonly
    def result_index(self) -> Index: ...
    @final
    def get_group_levels(self) -> list[ArrayLike]: ...
    @final
    def agg_series(
        self,
        obj: Series,
        func: Callable[[Series], object],
        preserve_dtype: bool = ...,
    ) -> ArrayLike: ...
    @final
    def apply_groupwise(
        self, f: Callable[[NDFrameT], T], data: NDFrameT, axis: AxisInt = ...
    ) -> tuple[list[T], bool]: ...

class BinGrouper(BaseGrouper):
    bins: npt.NDArray[np.int64]
    binlabels: Index
    indexer: npt.NDArray[np.intp]
    @cache_readonly
    def indices(self) -> dict[Incomplete, list[int]]: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]

class DataSplitter(Generic[NDFrameT]):
    data: NDFrameT
    labels: npt.NDArray[np.intp]
    ngroups: int
    axis: AxisInt
    def __iter__(self) -> Iterator[NDFrameT]: ...
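BaseGrouper is internal plumbing, but its `groups` and `indices` mappings are what the public GroupBy attributes of the same names expose. A small sketch (made-up data):

import pandas as pd

df = pd.DataFrame({"key": ["a", "b", "a"], "val": [1, 2, 3]})
gb = df.groupby("key")

gb.indices  # {'a': array([0, 2]), 'b': array([1])} -- integer positions per group
gb.groups   # mapping of group label -> index labels per group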
44
lib/python3.11/site-packages/pandas-stubs/core/indexers.pyi
Normal file
@ -0,0 +1,44 @@
import numpy as np

from pandas._typing import ArrayLike

def check_array_indexer(array: ArrayLike, indexer): ...

class BaseIndexer:
    def __init__(
        self,
        index_array: np.ndarray | None = ...,
        window_size: int = ...,
        **kwargs,
    ) -> None: ...
    def get_window_bounds(
        self,
        num_values: int = ...,
        min_periods: int | None = ...,
        center: bool | None = ...,
        closed: str | None = ...,
    ) -> tuple[np.ndarray, np.ndarray]: ...

class VariableOffsetWindowIndexer(BaseIndexer):
    def __init__(
        self,
        index_array: np.ndarray | None = ...,
        window_size: int = ...,
        index=...,
        offset=...,
        **kwargs,
    ) -> None: ...
    def get_window_bounds(
        self,
        num_values: int = ...,
        min_periods: int | None = ...,
        center: bool | None = ...,
        closed: str | None = ...,
    ) -> tuple[np.ndarray, np.ndarray]: ...

class FixedForwardWindowIndexer(BaseIndexer):
    def get_window_bounds(
        self,
        num_values: int = ...,
        min_periods: int | None = ...,
        center: bool | None = ...,
        closed: str | None = ...,
    ) -> tuple[np.ndarray, np.ndarray]: ...
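These indexer classes are the public pandas.api.indexers API for custom rolling windows: `get_window_bounds` returns the start/end arrays that rolling consumes. A minimal sketch (made-up data):

import pandas as pd
from pandas.api.indexers import FixedForwardWindowIndexer

s = pd.Series([1.0, 2.0, 3.0, 4.0])
indexer = FixedForwardWindowIndexer(window_size=2)
# Each window looks forward two slots: sums are [3.0, 5.0, 7.0, 4.0]
s.rolling(indexer, min_periods=1).sum()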
@ -0,0 +1,428 @@
import datetime as dt
from datetime import (
    timedelta,
    tzinfo as _tzinfo,
)
from typing import (
    Generic,
    Literal,
    TypeVar,
)

import numpy as np
from pandas import (
    DatetimeIndex,
    Index,
    PeriodIndex,
    Timedelta,
    TimedeltaIndex,
)
from pandas.core.accessor import PandasDelegate
from pandas.core.arrays import (
    DatetimeArray,
    PeriodArray,
)
from pandas.core.base import NoNewAttributesMixin
from pandas.core.frame import DataFrame
from pandas.core.series import (
    PeriodSeries,
    Series,
    TimedeltaSeries,
    TimestampSeries,
)

from pandas._libs.tslibs import BaseOffset
from pandas._libs.tslibs.offsets import DateOffset
from pandas._typing import (
    TimeAmbiguous,
    TimeNonexistent,
    TimestampConvention,
    TimeUnit,
    TimeZones,
    np_1darray,
    np_ndarray_bool,
)

class Properties(PandasDelegate, NoNewAttributesMixin): ...

_DTFieldOpsReturnType = TypeVar("_DTFieldOpsReturnType", bound=Series[int] | Index[int])

class _DayLikeFieldOps(Generic[_DTFieldOpsReturnType]):
    @property
    def year(self) -> _DTFieldOpsReturnType: ...
    @property
    def month(self) -> _DTFieldOpsReturnType: ...
    @property
    def day(self) -> _DTFieldOpsReturnType: ...
    @property
    def hour(self) -> _DTFieldOpsReturnType: ...
    @property
    def minute(self) -> _DTFieldOpsReturnType: ...
    @property
    def second(self) -> _DTFieldOpsReturnType: ...
    @property
    def weekday(self) -> _DTFieldOpsReturnType: ...
    @property
    def dayofweek(self) -> _DTFieldOpsReturnType: ...
    @property
    def day_of_week(self) -> _DTFieldOpsReturnType: ...
    @property
    def dayofyear(self) -> _DTFieldOpsReturnType: ...
    @property
    def day_of_year(self) -> _DTFieldOpsReturnType: ...
    @property
    def quarter(self) -> _DTFieldOpsReturnType: ...
    @property
    def days_in_month(self) -> _DTFieldOpsReturnType: ...
    @property
    def daysinmonth(self) -> _DTFieldOpsReturnType: ...

class _MiniSeconds(Generic[_DTFieldOpsReturnType]):
    @property
    def microsecond(self) -> _DTFieldOpsReturnType: ...
    @property
    def nanosecond(self) -> _DTFieldOpsReturnType: ...

class _DatetimeFieldOps(
    _DayLikeFieldOps[_DTFieldOpsReturnType], _MiniSeconds[_DTFieldOpsReturnType]
): ...

_DTBoolOpsReturnType = TypeVar(
    "_DTBoolOpsReturnType", bound=Series[bool] | np_1darray[np.bool]
)

class _IsLeapYearProperty(Generic[_DTBoolOpsReturnType]):
    @property
    def is_leap_year(self) -> _DTBoolOpsReturnType: ...

class _DatetimeBoolOps(
    _IsLeapYearProperty[_DTBoolOpsReturnType], Generic[_DTBoolOpsReturnType]
):
    @property
    def is_month_start(self) -> _DTBoolOpsReturnType: ...
    @property
    def is_month_end(self) -> _DTBoolOpsReturnType: ...
    @property
    def is_quarter_start(self) -> _DTBoolOpsReturnType: ...
    @property
    def is_quarter_end(self) -> _DTBoolOpsReturnType: ...
    @property
    def is_year_start(self) -> _DTBoolOpsReturnType: ...
    @property
    def is_year_end(self) -> _DTBoolOpsReturnType: ...

_DTFreqReturnType = TypeVar("_DTFreqReturnType", bound=str | BaseOffset)

class _FreqProperty(Generic[_DTFreqReturnType]):
    @property
    def freq(self) -> _DTFreqReturnType | None: ...

class _TZProperty:
    @property
    def tz(self) -> _tzinfo | None: ...

class _DatetimeObjectOps(
    _FreqProperty[_DTFreqReturnType], _TZProperty, Generic[_DTFreqReturnType]
): ...

_DTOtherOpsDateReturnType = TypeVar(
    "_DTOtherOpsDateReturnType", bound=Series[dt.date] | np_1darray[np.object_]
)
_DTOtherOpsTimeReturnType = TypeVar(
    "_DTOtherOpsTimeReturnType", bound=Series[dt.time] | np_1darray[np.object_]
)

class _DatetimeOtherOps(Generic[_DTOtherOpsDateReturnType, _DTOtherOpsTimeReturnType]):
    @property
    def date(self) -> _DTOtherOpsDateReturnType: ...
    @property
    def time(self) -> _DTOtherOpsTimeReturnType: ...
    @property
    def timetz(self) -> _DTOtherOpsTimeReturnType: ...

class _DatetimeLikeOps(
    _DatetimeFieldOps[_DTFieldOpsReturnType],
    _DatetimeObjectOps[_DTFreqReturnType],
    _DatetimeBoolOps[_DTBoolOpsReturnType],
    _DatetimeOtherOps[_DTOtherOpsDateReturnType, _DTOtherOpsTimeReturnType],
    Generic[
        _DTFieldOpsReturnType,
        _DTBoolOpsReturnType,
        _DTOtherOpsDateReturnType,
        _DTOtherOpsTimeReturnType,
        _DTFreqReturnType,
    ],
): ...

# Ideally, the rounding methods would return TimestampSeries when `Series.dt.method`
# is invoked, but because of how Series.dt is hooked in and that we may not know the
# type of the series, we don't know which kind of series was passed
# in to the dt accessor

_DTTimestampTimedeltaReturnType = TypeVar(
    "_DTTimestampTimedeltaReturnType",
    bound=Series | TimestampSeries | TimedeltaSeries | DatetimeIndex | TimedeltaIndex,
)

class _DatetimeRoundingMethods(Generic[_DTTimestampTimedeltaReturnType]):
    def round(
        self,
        freq: str | BaseOffset | None,
        ambiguous: Literal["raise", "infer", "NaT"] | bool | np_ndarray_bool = ...,
        nonexistent: (
            Literal["shift_forward", "shift_backward", "NaT", "raise"]
            | timedelta
            | Timedelta
        ) = ...,
    ) -> _DTTimestampTimedeltaReturnType: ...
    def floor(
        self,
        freq: str | BaseOffset | None,
        ambiguous: Literal["raise", "infer", "NaT"] | bool | np_ndarray_bool = ...,
        nonexistent: (
            Literal["shift_forward", "shift_backward", "NaT", "raise"]
            | timedelta
            | Timedelta
        ) = ...,
    ) -> _DTTimestampTimedeltaReturnType: ...
    def ceil(
        self,
        freq: str | BaseOffset | None,
        ambiguous: Literal["raise", "infer", "NaT"] | bool | np_ndarray_bool = ...,
        nonexistent: (
            Literal["shift_forward", "shift_backward", "NaT", "raise"]
            | timedelta
            | Timedelta
        ) = ...,
    ) -> _DTTimestampTimedeltaReturnType: ...

_DTNormalizeReturnType = TypeVar(
    "_DTNormalizeReturnType", TimestampSeries, DatetimeIndex
)
_DTStrKindReturnType = TypeVar("_DTStrKindReturnType", bound=Series[str] | Index)
_DTToPeriodReturnType = TypeVar(
    "_DTToPeriodReturnType", bound=PeriodSeries | PeriodIndex
)

class _DatetimeLikeNoTZMethods(
    _DatetimeRoundingMethods[_DTTimestampTimedeltaReturnType],
    Generic[
        _DTTimestampTimedeltaReturnType,
        _DTNormalizeReturnType,
        _DTStrKindReturnType,
        _DTToPeriodReturnType,
    ],
):
    def to_period(
        self, freq: str | BaseOffset | None = ...
    ) -> _DTToPeriodReturnType: ...
    def tz_localize(
        self,
        tz: TimeZones,
        ambiguous: TimeAmbiguous = ...,
        nonexistent: TimeNonexistent = ...,
    ) -> _DTNormalizeReturnType: ...
    def tz_convert(self, tz: TimeZones) -> _DTNormalizeReturnType: ...
    def normalize(self) -> _DTNormalizeReturnType: ...
    def strftime(self, date_format: str) -> _DTStrKindReturnType: ...
    def month_name(self, locale: str | None = ...) -> _DTStrKindReturnType: ...
    def day_name(self, locale: str | None = ...) -> _DTStrKindReturnType: ...

class _DatetimeNoTZProperties(
    _DatetimeLikeOps[
        _DTFieldOpsReturnType,
        _DTBoolOpsReturnType,
        _DTOtherOpsDateReturnType,
        _DTOtherOpsTimeReturnType,
        _DTFreqReturnType,
    ],
    _DatetimeLikeNoTZMethods[
        _DTTimestampTimedeltaReturnType,
        _DTNormalizeReturnType,
        _DTStrKindReturnType,
        _DTToPeriodReturnType,
    ],
    Generic[
        _DTFieldOpsReturnType,
        _DTBoolOpsReturnType,
        _DTTimestampTimedeltaReturnType,
        _DTOtherOpsDateReturnType,
        _DTOtherOpsTimeReturnType,
        _DTFreqReturnType,
        _DTNormalizeReturnType,
        _DTStrKindReturnType,
        _DTToPeriodReturnType,
    ],
): ...

class DatetimeProperties(
    Properties,
    _DatetimeNoTZProperties[
        _DTFieldOpsReturnType,
        _DTBoolOpsReturnType,
        _DTTimestampTimedeltaReturnType,
        _DTOtherOpsDateReturnType,
        _DTOtherOpsTimeReturnType,
        _DTFreqReturnType,
        _DTNormalizeReturnType,
        _DTStrKindReturnType,
        _DTToPeriodReturnType,
    ],
    Generic[
        _DTFieldOpsReturnType,
        _DTBoolOpsReturnType,
        _DTTimestampTimedeltaReturnType,
        _DTOtherOpsDateReturnType,
        _DTOtherOpsTimeReturnType,
        _DTFreqReturnType,
        _DTNormalizeReturnType,
        _DTStrKindReturnType,
        _DTToPeriodReturnType,
    ],
):
    def to_pydatetime(self) -> np_1darray[np.object_]: ...
    def isocalendar(self) -> DataFrame: ...
    @property
    def unit(self) -> TimeUnit: ...
    def as_unit(self, unit: TimeUnit) -> _DTTimestampTimedeltaReturnType: ...

_TDNoRoundingMethodReturnType = TypeVar(
    "_TDNoRoundingMethodReturnType", bound=Series[int] | Index
)
_TDTotalSecondsReturnType = TypeVar(
    "_TDTotalSecondsReturnType", bound=Series[float] | Index
)

class _TimedeltaPropertiesNoRounding(
    Generic[_TDNoRoundingMethodReturnType, _TDTotalSecondsReturnType]
):
    def to_pytimedelta(self) -> np_1darray[np.object_]: ...
    @property
    def components(self) -> DataFrame: ...
    @property
    def days(self) -> _TDNoRoundingMethodReturnType: ...
    @property
    def seconds(self) -> _TDNoRoundingMethodReturnType: ...
    @property
    def microseconds(self) -> _TDNoRoundingMethodReturnType: ...
    @property
    def nanoseconds(self) -> _TDNoRoundingMethodReturnType: ...
    def total_seconds(self) -> _TDTotalSecondsReturnType: ...

class TimedeltaProperties(
    Properties,
    _TimedeltaPropertiesNoRounding[Series[int], Series[float]],
    _DatetimeRoundingMethods[TimedeltaSeries],
):
    @property
    def unit(self) -> TimeUnit: ...
    def as_unit(self, unit: TimeUnit) -> TimedeltaSeries: ...

_PeriodDTReturnTypes = TypeVar(
    "_PeriodDTReturnTypes", bound=TimestampSeries | DatetimeIndex
)
_PeriodIntReturnTypes = TypeVar("_PeriodIntReturnTypes", bound=Series[int] | Index[int])
_PeriodStrReturnTypes = TypeVar("_PeriodStrReturnTypes", bound=Series[str] | Index)
_PeriodDTAReturnTypes = TypeVar(
    "_PeriodDTAReturnTypes", bound=DatetimeArray | DatetimeIndex
)
_PeriodPAReturnTypes = TypeVar("_PeriodPAReturnTypes", bound=PeriodArray | PeriodIndex)

class _PeriodProperties(
    Generic[
        _PeriodDTReturnTypes,
        _PeriodIntReturnTypes,
        _PeriodStrReturnTypes,
        _PeriodDTAReturnTypes,
        _PeriodPAReturnTypes,
    ]
):
    @property
    def start_time(self) -> _PeriodDTReturnTypes: ...
    @property
    def end_time(self) -> _PeriodDTReturnTypes: ...
    @property
    def qyear(self) -> _PeriodIntReturnTypes: ...
    def strftime(self, date_format: str) -> _PeriodStrReturnTypes: ...
    def to_timestamp(
        self,
        freq: str | DateOffset | None = ...,
        how: TimestampConvention = ...,
    ) -> _PeriodDTAReturnTypes: ...
    def asfreq(
        self,
        freq: str | DateOffset | None = ...,
        how: Literal["E", "END", "FINISH", "S", "START", "BEGIN"] = ...,
    ) -> _PeriodPAReturnTypes: ...

class PeriodIndexFieldOps(
    _DayLikeFieldOps[Index[int]],
    _PeriodProperties[DatetimeIndex, Index[int], Index, DatetimeIndex, PeriodIndex],
): ...

class PeriodProperties(
    Properties,
    _PeriodProperties[
        TimestampSeries, Series[int], Series[str], DatetimeArray, PeriodArray
    ],
    _DatetimeFieldOps[Series[int]],
    _IsLeapYearProperty,
    _FreqProperty[BaseOffset],
): ...

class CombinedDatetimelikeProperties(
    DatetimeProperties[
        Series[int],
        Series[bool],
        Series,
        Series[dt.date],
        Series[dt.time],
        str,
        TimestampSeries,
        Series[str],
        PeriodSeries,
    ],
    _TimedeltaPropertiesNoRounding[Series[int], Series[float]],
    _PeriodProperties,
): ...

class TimestampProperties(
    DatetimeProperties[
        Series[int],
        Series[bool],
        TimestampSeries,
        Series[dt.date],
        Series[dt.time],
        str,
        TimestampSeries,
        Series[str],
        PeriodSeries,
    ]
): ...

class DatetimeIndexProperties(
    Properties,
    _DatetimeNoTZProperties[
        Index[int],
        np_1darray[np.bool],
        DatetimeIndex,
        np_1darray[np.object_],
        np_1darray[np.object_],
        BaseOffset,
        DatetimeIndex,
        Index,
        PeriodIndex,
    ],
    _TZProperty,
):
    @property
    def is_normalized(self) -> bool: ...
    @property
    def tzinfo(self) -> _tzinfo | None: ...
    def to_pydatetime(self) -> np_1darray[np.object_]: ...
    def std(
        self, axis: int | None = ..., ddof: int = ..., skipna: bool = ...
    ) -> Timedelta: ...

class TimedeltaIndexProperties(
    Properties,
    _TimedeltaPropertiesNoRounding[Index, Index],
    _DatetimeRoundingMethods[TimedeltaIndex],
): ...
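The generics above are what give `Series.dt` precise per-property types. A sketch of the user-facing effect (values made up; see the comment above for why the rounding methods fall back to a plain Series):

import pandas as pd

s = pd.Series(pd.to_datetime(["2024-01-01 10:30", "2024-06-15 22:05"]))
s.dt.year            # checked as Series[int] via _DayLikeFieldOps
s.dt.is_month_start  # checked as Series[bool] via _DatetimeBoolOps
s.dt.round("D")      # checked as Series, per the comment above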
@ -0,0 +1,8 @@
from pandas.core.indexes.base import Index as Index
from pandas.core.indexes.category import CategoricalIndex as CategoricalIndex
from pandas.core.indexes.datetimes import DatetimeIndex as DatetimeIndex
from pandas.core.indexes.interval import IntervalIndex as IntervalIndex
from pandas.core.indexes.multi import MultiIndex as MultiIndex
from pandas.core.indexes.period import PeriodIndex as PeriodIndex
from pandas.core.indexes.range import RangeIndex as RangeIndex
from pandas.core.indexes.timedeltas import TimedeltaIndex as TimedeltaIndex
535
lib/python3.11/site-packages/pandas-stubs/core/indexes/base.pyi
Normal file
@ -0,0 +1,535 @@
from builtins import str as _str
from collections.abc import (
    Callable,
    Hashable,
    Iterable,
    Iterator,
    Sequence,
)
from datetime import (
    datetime,
    timedelta,
)
from typing import (
    Any,
    ClassVar,
    Generic,
    Literal,
    final,
    overload,
    type_check_only,
)

import numpy as np
from pandas import (
    DataFrame,
    DatetimeIndex,
    Interval,
    IntervalIndex,
    MultiIndex,
    Period,
    PeriodDtype,
    PeriodIndex,
    Series,
    TimedeltaIndex,
)
from pandas.core.arrays import ExtensionArray
from pandas.core.base import IndexOpsMixin
from pandas.core.strings.accessor import StringMethods
from typing_extensions import (
    Never,
    Self,
)

from pandas._libs.interval import _OrderableT
from pandas._typing import (
    C2,
    S1,
    AnyAll,
    ArrayLike,
    AxesData,
    DropKeep,
    Dtype,
    DtypeArg,
    DTypeLike,
    DtypeObj,
    GenericT,
    GenericT_co,
    HashableT,
    IgnoreRaise,
    Label,
    Level,
    MaskType,
    NaPosition,
    ReindexMethod,
    Scalar,
    SequenceNotStr,
    SliceType,
    SupportsDType,
    TimedeltaDtypeArg,
    TimestampDtypeArg,
    np_1darray,
    np_ndarray_anyint,
    np_ndarray_complex,
    np_ndarray_float,
    type_t,
)

class InvalidIndexError(Exception): ...

class Index(IndexOpsMixin[S1]):
    __hash__: ClassVar[None]  # type: ignore[assignment]
    # overloads with additional dtypes
    @overload
    def __new__(  # pyright: ignore[reportOverlappingOverload]
        cls,
        data: Sequence[int | np.integer] | IndexOpsMixin[int] | np_ndarray_anyint,
        *,
        dtype: Literal["int"] | type_t[int | np.integer] = ...,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> Index[int]: ...
    @overload
    def __new__(
        cls,
        data: AxesData,
        *,
        dtype: Literal["int"] | type_t[int | np.integer],
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> Index[int]: ...
    @overload
    def __new__(
        cls,
        data: Sequence[float | np.floating] | IndexOpsMixin[float] | np_ndarray_float,
        *,
        dtype: Literal["float"] | type_t[float | np.floating] = ...,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> Index[float]: ...
    @overload
    def __new__(
        cls,
        data: AxesData,
        *,
        dtype: Literal["float"] | type_t[float | np.floating],
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> Index[float]: ...
    @overload
    def __new__(
        cls,
        data: (
            Sequence[complex | np.complexfloating]
            | IndexOpsMixin[complex]
            | np_ndarray_complex
        ),
        *,
        dtype: Literal["complex"] | type_t[complex | np.complexfloating] = ...,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> Index[complex]: ...
    @overload
    def __new__(
        cls,
        data: AxesData,
        *,
        dtype: Literal["complex"] | type_t[complex | np.complexfloating],
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> Index[complex]: ...
    # special overloads with dedicated Index-subclasses
    @overload
    def __new__(
        cls,
        data: Sequence[np.datetime64 | datetime] | IndexOpsMixin[datetime],
        *,
        dtype: TimestampDtypeArg = ...,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> DatetimeIndex: ...
    @overload
    def __new__(
        cls,
        data: AxesData,
        *,
        dtype: TimestampDtypeArg,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> DatetimeIndex: ...
    @overload
    def __new__(
        cls,
        data: Sequence[Period] | IndexOpsMixin[Period],
        *,
        dtype: PeriodDtype = ...,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> PeriodIndex: ...
    @overload
    def __new__(
        cls,
        data: AxesData,
        *,
        dtype: PeriodDtype,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> PeriodIndex: ...
    @overload
    def __new__(
        cls,
        data: Sequence[np.timedelta64 | timedelta] | IndexOpsMixin[timedelta],
        *,
        dtype: TimedeltaDtypeArg = ...,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> TimedeltaIndex: ...
    @overload
    def __new__(
        cls,
        data: AxesData,
        *,
        dtype: TimedeltaDtypeArg,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> TimedeltaIndex: ...
    @overload
    def __new__(
        cls,
        data: Sequence[Interval[_OrderableT]] | IndexOpsMixin[Interval[_OrderableT]],
        *,
        dtype: Literal["Interval"] = ...,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> IntervalIndex[Interval[_OrderableT]]: ...
    @overload
    def __new__(
        cls,
        data: AxesData,
        *,
        dtype: Literal["Interval"],
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> IntervalIndex[Interval[Any]]: ...
    # generic overloads
    @overload
    def __new__(
        cls,
        data: Iterable[S1] | IndexOpsMixin[S1],
        *,
        dtype: type[S1] = ...,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> Self: ...
    @overload
    def __new__(
        cls,
        data: AxesData = ...,
        *,
        dtype: type[S1],
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> Self: ...
    # fallback overload
    @overload
    def __new__(
        cls,
        data: AxesData,
        *,
        dtype: Dtype = ...,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> Self: ...
    @property
    def str(
        self,
    ) -> StringMethods[
        Self,
        MultiIndex,
        np_1darray[np.bool],
        Index[list[_str]],
        Index[int],
        Index[bytes],
        Index[_str],
        Index,
    ]: ...
    @final
    def is_(self, other) -> bool: ...
    def __len__(self) -> int: ...
    def __array__(
        self, dtype: _str | np.dtype = ..., copy: bool | None = ...
    ) -> np_1darray: ...
    def __array_wrap__(self, result, context=...): ...
    @property
    def dtype(self) -> DtypeObj: ...
    @final
    def ravel(self, order: _str = ...): ...
    def view(self, cls=...): ...
    def astype(self, dtype: DtypeArg, copy: bool = True) -> Index: ...
    def take(
        self,
        indices,
        axis: int = 0,
        allow_fill: bool = True,
        fill_value: Scalar | None = None,
        **kwargs,
    ): ...
    def repeat(self, repeats, axis=...): ...
    def copy(self, name: Hashable = ..., deep: bool = False) -> Self: ...
    @final
    def __copy__(self, **kwargs): ...
    @final
    def __deepcopy__(self, memo=...): ...
    def format(
        self, name: bool = ..., formatter: Callable | None = ..., na_rep: _str = ...
    ) -> list[_str]: ...
    def to_flat_index(self): ...
    def to_series(self, index=..., name: Hashable = ...) -> Series: ...
    def to_frame(self, index: bool = True, name=...) -> DataFrame: ...
    @property
    def name(self) -> Hashable | None: ...
    @name.setter
    def name(self, value: Hashable) -> None: ...
    @property
    def names(self) -> list[Hashable | None]: ...
    @names.setter
    def names(self, names: SequenceNotStr[Hashable | None]) -> None: ...
    def set_names(self, names, *, level=..., inplace: bool = ...): ...
    @overload
    def rename(self, name, *, inplace: Literal[False] = False) -> Self: ...
    @overload
    def rename(self, name, *, inplace: Literal[True]) -> None: ...
    @property
    def nlevels(self) -> int: ...
    def get_level_values(self, level: int | _str) -> Index: ...
    def droplevel(self, level: Level | list[Level] = 0): ...
    @property
    def is_monotonic_increasing(self) -> bool: ...
    @property
    def is_monotonic_decreasing(self) -> bool: ...
    @property
    def is_unique(self) -> bool: ...
    @property
    def has_duplicates(self) -> bool: ...
    @property
    def inferred_type(self) -> _str: ...
    def __reduce__(self): ...
    @property
    def hasnans(self) -> bool: ...
    @final
    def isna(self): ...
    isnull = ...
    @final
    def notna(self): ...
    notnull = ...
    def fillna(self, value=...): ...
    def dropna(self, how: AnyAll = "any") -> Self: ...
    def unique(self, level=...) -> Self: ...
    def drop_duplicates(self, *, keep: DropKeep = ...) -> Self: ...
    def duplicated(self, keep: DropKeep = "first") -> np_1darray[np.bool]: ...
    def __and__(self, other: Never) -> Never: ...
    def __rand__(self, other: Never) -> Never: ...
    def __or__(self, other: Never) -> Never: ...
    def __ror__(self, other: Never) -> Never: ...
    def __xor__(self, other: Never) -> Never: ...
    def __rxor__(self, other: Never) -> Never: ...
    def __neg__(self) -> Self: ...
    @final
    def __nonzero__(self) -> None: ...
    __bool__ = ...
    def union(
        self, other: list[HashableT] | Self, sort: bool | None = None
    ) -> Index: ...
    def intersection(
        self, other: list[S1] | Self, sort: bool | None = False
    ) -> Self: ...
    def difference(self, other: list | Self, sort: bool | None = None) -> Self: ...
    def symmetric_difference(
        self,
        other: list[S1] | Self,
        result_name: Hashable = ...,
        sort: bool | None = None,
    ) -> Self: ...
    def get_loc(self, key: Label) -> int | slice | np_1darray[np.bool]: ...
    def get_indexer(
        self, target, method: ReindexMethod | None = ..., limit=..., tolerance=...
    ): ...
    def reindex(
        self,
        target,
        method: ReindexMethod | None = ...,
        level=...,
        limit=...,
        tolerance=...,
    ): ...
    def join(
        self,
        other,
        *,
        how: _str = ...,
        level=...,
        return_indexers: bool = ...,
        sort: bool = ...,
    ): ...
    @property
    def values(self) -> np_1darray: ...
    @property
    def array(self) -> ExtensionArray: ...
    def memory_usage(self, deep: bool = False): ...
    def where(self, cond, other: Scalar | ArrayLike | None = None): ...
    def __contains__(self, key) -> bool: ...
    @final
    def __setitem__(self, key, value) -> None: ...
    @overload
    def __getitem__(
        self,
        idx: slice | np_ndarray_anyint | Sequence[int] | Index | MaskType,
    ) -> Self: ...
    @overload
    def __getitem__(self, idx: int | tuple[np_ndarray_anyint, ...]) -> S1: ...
    @overload
    def append(
        self: Index[C2], other: Index[C2] | Sequence[Index[C2]]
    ) -> Index[C2]: ...
    @overload
    def append(self, other: Index | Sequence[Index]) -> Index: ...
    def putmask(self, mask, value): ...
    def equals(self, other) -> bool: ...
    @final
    def identical(self, other) -> bool: ...
    @final
    def asof(self, label): ...
    def asof_locs(self, where, mask): ...
    def sort_values(
        self,
        *,
        return_indexer: bool = ...,
        ascending: bool = ...,
        na_position: NaPosition = ...,
        key: Callable[[Index], Index] | None = None,
    ): ...
    @final
    def sort(self, *args, **kwargs) -> None: ...
    def argsort(self, *args, **kwargs): ...
    def get_indexer_non_unique(self, target): ...
    @final
    def get_indexer_for(self, target, **kwargs): ...
    @final
    def groupby(self, values) -> dict[Hashable, np.ndarray]: ...
    def map(self, mapper, na_action=...) -> Index: ...
    def isin(self, values, level=...) -> np_1darray[np.bool]: ...
    def slice_indexer(
        self,
        start: Label | None = None,
        end: Label | None = None,
        step: int | None = None,
    ): ...
    def get_slice_bound(self, label, side): ...
    def slice_locs(
        self, start: SliceType = None, end: SliceType = None, step: int | None = None
    ): ...
    def delete(self, loc) -> Self: ...
    def insert(self, loc, item) -> Self: ...
    def drop(self, labels, errors: IgnoreRaise = "raise") -> Self: ...
    @property
    def shape(self) -> tuple[int, ...]: ...
    # Extra methods from old stubs
    def __eq__(self, other: object) -> np_1darray[np.bool]: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    def __iter__(self) -> Iterator[S1]: ...
    def __ne__(self, other: object) -> np_1darray[np.bool]: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    def __le__(self, other: Self | S1) -> np_1darray[np.bool]: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    def __ge__(self, other: Self | S1) -> np_1darray[np.bool]: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    def __lt__(self, other: Self | S1) -> np_1darray[np.bool]: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    def __gt__(self, other: Self | S1) -> np_1darray[np.bool]: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    # overwrite inherited methods from OpsMixin
    @overload
    def __mul__(
        self: Index[int] | Index[float], other: timedelta
    ) -> TimedeltaIndex: ...
    @overload
    def __mul__(self, other: Any) -> Self: ...
    def __floordiv__(
        self,
        other: (
            float
            | IndexOpsMixin[int]
            | IndexOpsMixin[float]
            | Sequence[int]
            | Sequence[float]
        ),
    ) -> Self: ...
    def __rfloordiv__(
        self,
        other: (
            float
            | IndexOpsMixin[int]
            | IndexOpsMixin[float]
            | Sequence[int]
            | Sequence[float]
        ),
    ) -> Self: ...
    def __truediv__(
        self,
        other: (
            float
            | IndexOpsMixin[int]
            | IndexOpsMixin[float]
            | Sequence[int]
            | Sequence[float]
        ),
    ) -> Self: ...
    def __rtruediv__(
        self,
        other: (
            float
            | IndexOpsMixin[int]
            | IndexOpsMixin[float]
            | Sequence[int]
            | Sequence[float]
        ),
    ) -> Self: ...
    def infer_objects(self, copy: bool = True) -> Self: ...

@type_check_only
class _IndexSubclassBase(Index[S1], Generic[S1, GenericT_co]):
    @overload
    def to_numpy(  # pyrefly: ignore
        self,
        dtype: None = None,
        copy: bool = False,
        na_value: Scalar = ...,
        **kwargs,
    ) -> np_1darray[GenericT_co]: ...
    @overload
    def to_numpy(
        self,
        dtype: np.dtype[GenericT] | SupportsDType[GenericT] | type[GenericT],
        copy: bool = False,
        na_value: Scalar = ...,
        **kwargs,
    ) -> np_1darray[GenericT]: ...
    @overload
    def to_numpy(
        self,
        dtype: DTypeLike,
        copy: bool = False,
        na_value: Scalar = ...,
        **kwargs,
    ) -> np_1darray: ...
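The overload ladder on `Index.__new__` lets the constructor's static return type follow the element type. A sketch of what a type checker infers from these stubs (data made up):

import pandas as pd

idx_int = pd.Index([1, 2, 3])                      # inferred as Index[int]
idx_float = pd.Index([1, 2, 3], dtype="float")     # inferred as Index[float]
idx_dt = pd.Index(pd.to_datetime(["2024-01-01"]))  # inferred as DatetimeIndex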
@ -0,0 +1,53 @@
from collections.abc import (
    Hashable,
    Iterable,
)
from typing import (
    final,
)

import numpy as np
from pandas.core import accessor
from pandas.core.indexes.base import Index
from pandas.core.indexes.extension import ExtensionIndex
from typing_extensions import Self

from pandas._typing import (
    S1,
    DtypeArg,
)

class CategoricalIndex(ExtensionIndex[S1], accessor.PandasDelegate):
    codes: np.ndarray = ...
    categories: Index = ...
    def __new__(
        cls,
        data: Iterable[S1] = ...,
        categories=...,
        ordered=...,
        dtype=...,
        copy: bool = ...,
        name: Hashable = ...,
    ) -> Self: ...
    def equals(self, other): ...
    @property
    def inferred_type(self) -> str: ...
    @property
    def values(self): ...
    def __contains__(self, key) -> bool: ...
    def __array__(
        self, dtype: DtypeArg = ..., copy: bool | None = ...
    ) -> np.ndarray: ...
    @property
    def is_unique(self) -> bool: ...
    @property
    def is_monotonic_increasing(self) -> bool: ...
    @property
    def is_monotonic_decreasing(self) -> bool: ...
    def unique(self, level=...): ...
    def reindex(self, target, method=..., level=..., limit=..., tolerance=...): ...
    @final
    def get_indexer(self, target, method=..., limit=..., tolerance=...): ...
    def get_indexer_non_unique(self, target): ...
    def delete(self, loc): ...
    def insert(self, loc, item): ...
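A short sketch of the class these stubs annotate (values made up):

import pandas as pd

ci = pd.CategoricalIndex(["a", "b", "a"], categories=["a", "b"], ordered=True)
ci.codes       # np.ndarray of category codes, e.g. array([0, 1, 0], dtype=int8)
ci.categories  # Index(['a', 'b'], dtype='object')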
@ -0,0 +1,40 @@
import numpy as np
from pandas.core.indexes.extension import ExtensionIndex
from pandas.core.indexes.timedeltas import TimedeltaIndex
from typing_extensions import Self

from pandas._libs.tslibs import BaseOffset
from pandas._typing import (
    S1,
    AxisIndex,
    GenericT_co,
    TimeUnit,
)

class DatetimeIndexOpsMixin(ExtensionIndex[S1, GenericT_co]):
    @property
    def freq(self) -> BaseOffset | None: ...
    @property
    def freqstr(self) -> str | None: ...
    @property
    def is_all_dates(self) -> bool: ...
    def min(
        self, axis: AxisIndex | None = None, skipna: bool = True, *args, **kwargs
    ) -> S1: ...
    def argmin(
        self, axis: AxisIndex | None = None, skipna: bool = True, *args, **kwargs
    ) -> np.int64: ...
    def max(
        self, axis: AxisIndex | None = None, skipna: bool = True, *args, **kwargs
    ) -> S1: ...
    def argmax(
        self, axis: AxisIndex | None = None, skipna: bool = True, *args, **kwargs
    ) -> np.int64: ...
    def __rsub__(  # type: ignore[override]
        self, other: DatetimeIndexOpsMixin
    ) -> TimedeltaIndex: ...

class DatetimeTimedeltaMixin(DatetimeIndexOpsMixin[S1, GenericT_co]):
    @property
    def unit(self) -> TimeUnit: ...
    def as_unit(self, unit: TimeUnit) -> Self: ...
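`DatetimeTimedeltaMixin` is what gives both DatetimeIndex and TimedeltaIndex their `unit`/`as_unit` resolution API, with `as_unit` returning Self. A sketch:

import pandas as pd

dti = pd.DatetimeIndex(["2024-01-01"])
dti.unit          # 'ns' on a default build
dti.as_unit("s")  # returns Self, i.e. a DatetimeIndex with second resolution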
@ -0,0 +1,177 @@
from collections.abc import (
    Hashable,
    Sequence,
)
from datetime import (
    datetime,
    timedelta,
    tzinfo as _tzinfo,
)
from typing import (
    final,
    overload,
)

import numpy as np
from pandas import (
    DataFrame,
    Index,
    Timedelta,
    TimedeltaIndex,
    Timestamp,
)
from pandas.core.indexes.accessors import DatetimeIndexProperties
from pandas.core.indexes.datetimelike import DatetimeTimedeltaMixin
from pandas.core.series import (
    TimedeltaSeries,
    TimestampSeries,
)
from typing_extensions import Self

from pandas._libs.tslibs.offsets import DateOffset
from pandas._typing import (
    AxesData,
    DateAndDatetimeLike,
    Dtype,
    Frequency,
    IntervalClosedType,
    TimeUnit,
    TimeZones,
)

from pandas.core.dtypes.dtypes import DatetimeTZDtype

from pandas.tseries.offsets import BaseOffset

class DatetimeIndex(
    DatetimeTimedeltaMixin[Timestamp, np.datetime64], DatetimeIndexProperties
):
    def __new__(
        cls,
        data: AxesData,
        freq: Frequency = ...,
        tz: TimeZones = ...,
        ambiguous: str = ...,
        dayfirst: bool = ...,
        yearfirst: bool = ...,
        dtype: Dtype = ...,
        copy: bool = ...,
        name: Hashable = ...,
    ) -> Self: ...
    def __reduce__(self): ...
    # various ignores needed for mypy, as we do want to restrict what can be used in
    # arithmetic for these types
    @overload
    def __add__(self, other: TimedeltaSeries) -> TimestampSeries: ...
    @overload
    def __add__(
        self, other: timedelta | Timedelta | TimedeltaIndex | BaseOffset
    ) -> DatetimeIndex: ...
    @overload
    def __sub__(self, other: TimedeltaSeries) -> TimestampSeries: ...
    @overload
    def __sub__(
        self, other: timedelta | Timedelta | TimedeltaIndex | BaseOffset
    ) -> DatetimeIndex: ...
    @overload
    def __sub__(
        self, other: datetime | Timestamp | DatetimeIndex
    ) -> TimedeltaIndex: ...
    @final
    def to_series(self, index=..., name: Hashable = ...) -> TimestampSeries: ...
    def snap(self, freq: str = ...): ...
    def slice_indexer(self, start=..., end=..., step=...): ...
    def searchsorted(self, value, side: str = ..., sorter=...): ...
    @property
    def inferred_type(self) -> str: ...
    def indexer_at_time(self, time, asof: bool = ...): ...
    def indexer_between_time(
        self,
        start_time: datetime | str,
        end_time: datetime | str,
        include_start: bool = True,
        include_end: bool = True,
    ): ...
    def to_julian_date(self) -> Index[float]: ...
    def isocalendar(self) -> DataFrame: ...
    @property
    def tzinfo(self) -> _tzinfo | None: ...
    @property
    def dtype(self) -> np.dtype | DatetimeTZDtype: ...
    def shift(
        self, periods: int = 1, freq: DateOffset | Timedelta | str | None = None
    ) -> Self: ...

@overload
def date_range(
    start: str | DateAndDatetimeLike,
    end: str | DateAndDatetimeLike,
    freq: str | timedelta | Timedelta | BaseOffset | None = None,
    tz: TimeZones = None,
    normalize: bool = False,
    name: Hashable | None = None,
    inclusive: IntervalClosedType = "both",
    unit: TimeUnit | None = None,
) -> DatetimeIndex: ...
@overload
def date_range(
    start: str | DateAndDatetimeLike,
    end: str | DateAndDatetimeLike,
    periods: int,
    tz: TimeZones = None,
    normalize: bool = False,
    name: Hashable | None = None,
    inclusive: IntervalClosedType = "both",
    unit: TimeUnit | None = None,
) -> DatetimeIndex: ...
@overload
def date_range(
    start: str | DateAndDatetimeLike,
    *,
    periods: int,
    freq: str | timedelta | Timedelta | BaseOffset | None = None,
    tz: TimeZones = None,
    normalize: bool = False,
    name: Hashable | None = None,
    inclusive: IntervalClosedType = "both",
    unit: TimeUnit | None = None,
) -> DatetimeIndex: ...
@overload
def date_range(
    *,
    end: str | DateAndDatetimeLike,
    periods: int,
    freq: str | timedelta | Timedelta | BaseOffset | None = None,
    tz: TimeZones = None,
    normalize: bool = False,
    name: Hashable | None = None,
    inclusive: IntervalClosedType = "both",
    unit: TimeUnit | None = None,
) -> DatetimeIndex: ...
@overload
def bdate_range(
    start: str | DateAndDatetimeLike | None = ...,
    end: str | DateAndDatetimeLike | None = ...,
    periods: int | None = ...,
    freq: str | timedelta | Timedelta | BaseOffset = ...,
    tz: TimeZones = ...,
    normalize: bool = ...,
    name: Hashable | None = ...,
    weekmask: str | None = ...,
    holidays: None = ...,
    inclusive: IntervalClosedType = ...,
) -> DatetimeIndex: ...
@overload
def bdate_range(
    start: str | DateAndDatetimeLike | None = ...,
    end: str | DateAndDatetimeLike | None = ...,
    periods: int | None = ...,
    *,
    freq: str | timedelta | Timedelta | BaseOffset,
    tz: TimeZones = ...,
    normalize: bool = ...,
    name: Hashable | None = ...,
    weekmask: str | None = ...,
    holidays: Sequence[str | DateAndDatetimeLike],
    inclusive: IntervalClosedType = ...,
) -> DatetimeIndex: ...
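The four `date_range` overloads encode which combinations of start/end/periods/freq are accepted. A sketch of calls matching each overload (dates made up):

import pandas as pd

pd.date_range("2024-01-01", "2024-01-31")             # start + end (daily default)
pd.date_range("2024-01-01", "2024-01-31", periods=5)  # start + end + periods
pd.date_range("2024-01-01", periods=5, freq="D")      # start + periods + freq
pd.date_range(end="2024-01-31", periods=5, freq="D")  # end + periods + freq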
@ -0,0 +1,8 @@
from pandas.core.indexes.base import _IndexSubclassBase

from pandas._typing import (
    S1,
    GenericT_co,
)

class ExtensionIndex(_IndexSubclassBase[S1, GenericT_co]): ...
@ -0,0 +1,9 @@
class FrozenList(list):
    def union(self, other) -> FrozenList: ...
    def difference(self, other) -> FrozenList: ...
    def __getitem__(self, n): ...
    def __radd__(self, other): ...
    def __eq__(self, other) -> bool: ...
    def __mul__(self, other): ...
    def __reduce__(self): ...
    def __hash__(self) -> int: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleVariableOverride]
@ -0,0 +1,367 @@
from collections.abc import (
    Hashable,
    Sequence,
)
import datetime as dt
from typing import (
    Literal,
    final,
    overload,
)

import numpy as np
import pandas as pd
from pandas import Index
from pandas.core.indexes.extension import ExtensionIndex
from pandas.core.series import (
    TimedeltaSeries,
    TimestampSeries,
)
from typing_extensions import TypeAlias

from pandas._libs.interval import (
    Interval as Interval,
    IntervalMixin,
)
from pandas._libs.tslibs.offsets import BaseOffset
from pandas._typing import (
    DatetimeLike,
    DtypeArg,
    FillnaOptions,
    IntervalClosedType,
    IntervalT,
    Label,
    MaskType,
    np_1darray,
    np_ndarray_anyint,
    np_ndarray_bool,
    npt,
)

from pandas.core.dtypes.dtypes import IntervalDtype as IntervalDtype

_EdgesInt: TypeAlias = (
    Sequence[int]
    | npt.NDArray[np.int64]
    | npt.NDArray[np.int32]
    | npt.NDArray[np.intp]
    | pd.Series[int]
    | Index[int]
)
_EdgesFloat: TypeAlias = (
    Sequence[float] | npt.NDArray[np.float64] | pd.Series[float] | Index[float]
)
_EdgesTimestamp: TypeAlias = (
    Sequence[DatetimeLike]
    | npt.NDArray[np.datetime64]
    | TimestampSeries
    | pd.DatetimeIndex
)
_EdgesTimedelta: TypeAlias = (
    Sequence[pd.Timedelta]
    | npt.NDArray[np.timedelta64]
    | TimedeltaSeries
    | pd.TimedeltaIndex
)
_TimestampLike: TypeAlias = pd.Timestamp | np.datetime64 | dt.datetime
_TimedeltaLike: TypeAlias = pd.Timedelta | np.timedelta64 | dt.timedelta

class IntervalIndex(ExtensionIndex[IntervalT, np.object_], IntervalMixin):
    closed: IntervalClosedType

    def __new__(
        cls,
        data: Sequence[IntervalT],
        closed: IntervalClosedType = ...,
        dtype: IntervalDtype | None = ...,
        copy: bool = ...,
        name: Hashable = ...,
        verify_integrity: bool = ...,
    ) -> IntervalIndex[IntervalT]: ...
    @overload
    @classmethod
    def from_breaks(  # pyright: ignore[reportOverlappingOverload]
        cls,
        breaks: _EdgesInt,
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[Interval[int]]: ...
    @overload
    @classmethod
    def from_breaks(
        cls,
        breaks: _EdgesFloat,
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[Interval[float]]: ...
    @overload
    @classmethod
    def from_breaks(
        cls,
        breaks: _EdgesTimestamp,
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[Interval[pd.Timestamp]]: ...
    @overload
    @classmethod
    def from_breaks(
        cls,
        breaks: _EdgesTimedelta,
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[Interval[pd.Timedelta]]: ...
    @overload
    @classmethod
    def from_arrays(  # pyright: ignore[reportOverlappingOverload]
        cls,
        left: _EdgesInt,
        right: _EdgesInt,
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[Interval[int]]: ...
    @overload
    @classmethod
    def from_arrays(
        cls,
        left: _EdgesFloat,
        right: _EdgesFloat,
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[Interval[float]]: ...
    @overload
    @classmethod
    def from_arrays(
        cls,
        left: _EdgesTimestamp,
        right: _EdgesTimestamp,
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[Interval[pd.Timestamp]]: ...
    @overload
    @classmethod
    def from_arrays(
        cls,
        left: _EdgesTimedelta,
        right: _EdgesTimedelta,
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[Interval[pd.Timedelta]]: ...
    @overload
    @classmethod
    def from_tuples(  # pyright: ignore[reportOverlappingOverload]
        cls,
        data: Sequence[tuple[int, int]],
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[pd.Interval[int]]: ...
    # Ignore misc here due to intentional overlap between int and float
    @overload
    @classmethod
    def from_tuples(
        cls,
        data: Sequence[tuple[float, float]],
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[pd.Interval[float]]: ...
    @overload
    @classmethod
    def from_tuples(
        cls,
        data: Sequence[
            tuple[pd.Timestamp, pd.Timestamp]
            | tuple[dt.datetime, dt.datetime]
            | tuple[np.datetime64, np.datetime64]
        ],
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[pd.Interval[pd.Timestamp]]: ...
    @overload
    @classmethod
    def from_tuples(
        cls,
        data: Sequence[
            tuple[pd.Timedelta, pd.Timedelta]
            | tuple[dt.timedelta, dt.timedelta]
            | tuple[np.timedelta64, np.timedelta64]
        ],
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[pd.Interval[pd.Timedelta]]: ...
    def to_tuples(self, na_tuple: bool = True) -> pd.Index: ...
    @overload
    def __contains__(self, key: IntervalT) -> bool: ...  # type: ignore[overload-overlap] # pyright: ignore[reportOverlappingOverload]
    @overload
    def __contains__(self, key: object) -> Literal[False]: ...
    def astype(self, dtype: DtypeArg, copy: bool = True) -> IntervalIndex: ...
    @property
    def inferred_type(self) -> str: ...
    def memory_usage(self, deep: bool = False) -> int: ...
    @property
    def is_overlapping(self) -> bool: ...
    def get_loc(self, key: Label) -> int | slice | np_1darray[np.bool]: ...
    @final
    def get_indexer(
        self,
        target: Index,
        method: FillnaOptions | Literal["nearest"] | None = ...,
        limit: int | None = ...,
        tolerance=...,
    ) -> npt.NDArray[np.intp]: ...
    def get_indexer_non_unique(
        self, target: Index
    ) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]]: ...
    @property
    def left(self) -> Index: ...
    @property
    def right(self) -> Index: ...
    @property
    def mid(self) -> Index: ...
    @property
    def length(self) -> Index: ...
    @overload  # type: ignore[override]
    def __getitem__(
        self,
        idx: (
            slice
            | np_ndarray_anyint
            | Sequence[int]
            | Index
            | MaskType
            | np_ndarray_bool
        ),
    ) -> IntervalIndex[IntervalT]: ...
    @overload
    def __getitem__(  # pyright: ignore[reportIncompatibleMethodOverride]
        self, idx: int
    ) -> IntervalT: ...
    @overload  # type: ignore[override]
    def __gt__(
        self, other: IntervalT | IntervalIndex[IntervalT]
    ) -> np_1darray[np.bool]: ...
    @overload
    def __gt__(  # pyright: ignore[reportIncompatibleMethodOverride]
        self, other: pd.Series[IntervalT]
    ) -> pd.Series[bool]: ...
    @overload  # type: ignore[override]
    def __ge__(
        self, other: IntervalT | IntervalIndex[IntervalT]
    ) -> np_1darray[np.bool]: ...
    @overload
    def __ge__(  # pyright: ignore[reportIncompatibleMethodOverride]
        self, other: pd.Series[IntervalT]
    ) -> pd.Series[bool]: ...
    @overload  # type: ignore[override]
    def __le__(
        self, other: IntervalT | IntervalIndex[IntervalT]
    ) -> np_1darray[np.bool]: ...
    @overload
    def __le__(  # pyright: ignore[reportIncompatibleMethodOverride]
        self, other: pd.Series[IntervalT]
    ) -> pd.Series[bool]: ...
    @overload  # type: ignore[override]
    def __lt__(
        self, other: IntervalT | IntervalIndex[IntervalT]
    ) -> np_1darray[np.bool]: ...
    @overload
    def __lt__(  # pyright: ignore[reportIncompatibleMethodOverride]
        self, other: pd.Series[IntervalT]
    ) -> pd.Series[bool]: ...
    @overload  # type: ignore[override]
    def __eq__(self, other: IntervalT | IntervalIndex[IntervalT]) -> np_1darray[np.bool]: ...  # type: ignore[overload-overlap] # pyright: ignore[reportOverlappingOverload]
    @overload
    def __eq__(self, other: pd.Series[IntervalT]) -> pd.Series[bool]: ...  # type: ignore[overload-overlap]
    @overload
    def __eq__(  # pyright: ignore[reportIncompatibleMethodOverride]
        self, other: object
    ) -> Literal[False]: ...
    @overload  # type: ignore[override]
    def __ne__(self, other: IntervalT | IntervalIndex[IntervalT]) -> np_1darray[np.bool]: ...  # type: ignore[overload-overlap] # pyright: ignore[reportOverlappingOverload]
    @overload
    def __ne__(self, other: pd.Series[IntervalT]) -> pd.Series[bool]: ...  # type: ignore[overload-overlap]
    @overload
    def __ne__(  # pyright: ignore[reportIncompatibleMethodOverride]
        self, other: object
    ) -> Literal[True]: ...

# misc here because int and float overlap but interval has distinct types
# int gets hit first and so the correct type is returned
@overload
def interval_range(  # pyright: ignore[reportOverlappingOverload]
    start: int | None = ...,
    end: int | None = ...,
    periods: int | None = ...,
    freq: int | None = ...,
    name: Hashable = ...,
    closed: IntervalClosedType = ...,
) -> IntervalIndex[Interval[int]]: ...
@overload
def interval_range(
    start: float | None = ...,
    end: float | None = ...,
    periods: int | None = ...,
    freq: int | None = ...,
    name: Hashable = ...,
    closed: IntervalClosedType = ...,
) -> IntervalIndex[Interval[float]]: ...
@overload
def interval_range(
    start: _TimestampLike,
    end: _TimestampLike | None = ...,
    periods: int | None = ...,
    freq: str | BaseOffset | pd.Timedelta | dt.timedelta | None = ...,
    name: Hashable = ...,
    closed: IntervalClosedType = ...,
) -> IntervalIndex[Interval[pd.Timestamp]]: ...
@overload
def interval_range(
    *,
    start: None = ...,
    end: _TimestampLike,
    periods: int | None = ...,
    freq: str | BaseOffset | pd.Timedelta | dt.timedelta | None = ...,
    name: Hashable = ...,
    closed: IntervalClosedType = ...,
) -> IntervalIndex[Interval[pd.Timestamp]]: ...
@overload
def interval_range(
    start: _TimedeltaLike,
    end: _TimedeltaLike | None = ...,
    periods: int | None = ...,
    freq: str | BaseOffset | pd.Timedelta | dt.timedelta | None = ...,
    name: Hashable = ...,
    closed: IntervalClosedType = ...,
) -> IntervalIndex[Interval[pd.Timedelta]]: ...
@overload
def interval_range(
    *,
    start: None = ...,
    end: _TimedeltaLike,
    periods: int | None = ...,
    freq: str | BaseOffset | pd.Timedelta | dt.timedelta | None = ...,
    name: Hashable = ...,
    closed: IntervalClosedType = ...,
) -> IntervalIndex[Interval[pd.Timedelta]]: ...
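The interval_range overloads pick the Interval type parameter from the endpoint types. A sketch (values made up):

import pandas as pd

pd.interval_range(start=0, end=4)        # IntervalIndex[Interval[int]]
pd.interval_range(start=0.0, periods=3)  # IntervalIndex[Interval[float]]
pd.interval_range(
    start=pd.Timestamp("2024-01-01"), periods=3, freq="D"
)                                        # IntervalIndex[Interval[pd.Timestamp]]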
164
lib/python3.11/site-packages/pandas-stubs/core/indexes/multi.pyi
Normal file
164
lib/python3.11/site-packages/pandas-stubs/core/indexes/multi.pyi
Normal file
@ -0,0 +1,164 @@
|
||||
from collections.abc import (
    Callable,
    Hashable,
    Iterable,
    Sequence,
)
from typing import (
    final,
    overload,
)

import numpy as np
import pandas as pd
from pandas.core.indexes.base import Index
from typing_extensions import Self

from pandas._typing import (
    AnyAll,
    Axes,
    DropKeep,
    Dtype,
    HashableT,
    IndexLabel,
    Level,
    MaskType,
    NaPosition,
    SequenceNotStr,
    np_1darray,
    np_ndarray_anyint,
)

class MultiIndex(Index):
    def __new__(
        cls,
        levels: Sequence[SequenceNotStr[Hashable]] = ...,
        codes: Sequence[Sequence[int]] = ...,
        sortorder: int | None = ...,
        names: SequenceNotStr[Hashable] = ...,
        copy: bool = ...,
        name: SequenceNotStr[Hashable] = ...,
        verify_integrity: bool = ...,
    ) -> Self: ...
    @classmethod
    def from_arrays(
        cls,
        arrays: Sequence[Axes],
        sortorder: int | None = ...,
        names: SequenceNotStr[Hashable] = ...,
    ) -> Self: ...
    @classmethod
    def from_tuples(
        cls,
        tuples: Iterable[tuple[Hashable, ...]],
        sortorder: int | None = ...,
        names: SequenceNotStr[Hashable] = ...,
    ) -> Self: ...
    @classmethod
    def from_product(
        cls,
        iterables: Sequence[SequenceNotStr[Hashable] | pd.Series | pd.Index | range],
        sortorder: int | None = ...,
        names: SequenceNotStr[Hashable] = ...,
    ) -> Self: ...
    @classmethod
    def from_frame(
        cls,
        df: pd.DataFrame,
        sortorder: int | None = ...,
        names: SequenceNotStr[Hashable] = ...,
    ) -> Self: ...
    @property
    def shape(self): ...
    @property  # Should be read-only
    def levels(self) -> list[Index]: ...
    def set_levels(self, levels, *, level=..., verify_integrity: bool = ...): ...
    @property
    def codes(self): ...
    def set_codes(self, codes, *, level=..., verify_integrity: bool = ...): ...
    def copy(  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride] # pyrefly: ignore
        self, names: SequenceNotStr[Hashable] = ..., deep: bool = False
    ) -> Self: ...
    def view(self, cls=...): ...
    def __contains__(self, key) -> bool: ...
    @property
    def dtype(self) -> np.dtype: ...
    @property
    def dtypes(self) -> pd.Series[Dtype]: ...
    def memory_usage(self, deep: bool = False) -> int: ...
    @property
    def nbytes(self) -> int: ...
    def format(
        self,
        name: bool | None = ...,
        formatter: Callable | None = ...,
        na_rep: str | None = ...,
        names: bool = ...,
        space: int = ...,
        sparsify: bool | None = ...,
        adjoin: bool = ...,
    ) -> list: ...
    def __len__(self) -> int: ...
    @property
    def values(self): ...
    @property
    def is_monotonic_increasing(self) -> bool: ...
    @property
    def is_monotonic_decreasing(self) -> bool: ...
    def duplicated(self, keep: DropKeep = "first"): ...
    def dropna(self, how: AnyAll = "any") -> Self: ...
    def get_level_values(self, level: str | int) -> Index: ...
    def unique(self, level=...): ...
    def to_frame(  # pyrefly: ignore
        self,
        index: bool = True,
        name: list[HashableT] = ...,
        allow_duplicates: bool = False,
    ) -> pd.DataFrame: ...
    def to_flat_index(self): ...
    def remove_unused_levels(self): ...
    @property
    def nlevels(self) -> int: ...
    @property
    def levshape(self): ...
    def __reduce__(self): ...
    @overload  # type: ignore[override]
    def __getitem__(
        self,
        idx: slice | np_ndarray_anyint | Sequence[int] | Index | MaskType,
    ) -> Self: ...
    @overload
    def __getitem__(  # pyright: ignore[reportIncompatibleMethodOverride]
        self, key: int
    ) -> tuple: ...
    def append(self, other): ...  # pyrefly: ignore
    def repeat(self, repeats, axis=...): ...
    def drop(self, codes, level: Level | None = None, errors: str = "raise") -> Self: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    def swaplevel(self, i: int = -2, j: int = -1): ...
    def reorder_levels(self, order): ...
    def sortlevel(
        self,
        level: Level | Sequence[Level] = 0,
        ascending: bool = True,
        sort_remaining: bool = True,
        na_position: NaPosition = "first",
    ): ...
    @final
    def get_indexer(self, target, method=..., limit=..., tolerance=...): ...
    def get_indexer_non_unique(self, target): ...
    def reindex(self, target, method=..., level=..., limit=..., tolerance=...): ...
    def get_slice_bound(
        self, label: Hashable | Sequence[Hashable], side: str
    ) -> int: ...
    def get_loc_level(
        self, key, level: Level | list[Level] | None = None, drop_level: bool = True
    ): ...
    def get_locs(self, seq): ...
    def truncate(
        self, before: IndexLabel | None = None, after: IndexLabel | None = None
    ): ...
    def equals(self, other) -> bool: ...
    def equal_levels(self, other): ...
    def insert(self, loc, item): ...
    def delete(self, loc): ...
    def isin(self, values, level=...) -> np_1darray[np.bool]: ...
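
# Illustrative usage sketch (not part of the stubs): the constructors and the
# two __getitem__ overloads above, exercised at runtime.
import pandas as pd

mi = pd.MultiIndex.from_product([["a", "b"], [1, 2]], names=["letter", "number"])
assert mi[0] == ("a", 1)                  # int key -> tuple overload
assert isinstance(mi[:2], pd.MultiIndex)  # slice key -> Self overload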
@ -0,0 +1,80 @@
from collections.abc import Hashable
import datetime
from typing import (
    Any,
    overload,
)

import numpy as np
import pandas as pd
from pandas import Index
from pandas.core.indexes.accessors import PeriodIndexFieldOps
from pandas.core.indexes.datetimelike import DatetimeIndexOpsMixin
from pandas.core.indexes.timedeltas import TimedeltaIndex
from typing_extensions import Self

from pandas._libs.tslibs import (
    NaTType,
    Period,
)
from pandas._libs.tslibs.period import _PeriodAddSub
from pandas._typing import (
    AxesData,
    Dtype,
    Frequency,
    np_1darray,
)

class PeriodIndex(DatetimeIndexOpsMixin[pd.Period, np.object_], PeriodIndexFieldOps):
    def __new__(
        cls,
        data: AxesData[Any] | None = None,
        freq: Frequency | None = None,
        dtype: Dtype | None = None,
        copy: bool = False,
        name: Hashable | None = None,
    ) -> Self: ...
    @property
    def values(self) -> np_1darray[np.object_]: ...
    @overload
    def __sub__(self, other: Period) -> Index: ...
    @overload
    def __sub__(self, other: Self) -> Index: ...
    @overload
    def __sub__(self, other: _PeriodAddSub) -> Self: ...
    @overload
    def __sub__(self, other: NaTType) -> NaTType: ...
    @overload
    def __sub__(self, other: TimedeltaIndex | pd.Timedelta) -> Self: ...
    @overload  # type: ignore[override]
    def __rsub__(self, other: Period) -> Index: ...
    @overload
    def __rsub__(self, other: Self) -> Index: ...
    @overload
    def __rsub__(  # pyright: ignore[reportIncompatibleMethodOverride]
        self, other: NaTType
    ) -> NaTType: ...
    def asof_locs(
        self,
        where: pd.DatetimeIndex | PeriodIndex,
        mask: np_1darray[np.bool_],
    ) -> np_1darray[np.intp]: ...
    @property
    def is_full(self) -> bool: ...
    @property
    def inferred_type(self) -> str: ...
    @property
    def freqstr(self) -> str: ...
    def shift(self, periods: int = 1, freq: Frequency | None = None) -> Self: ...

def period_range(
    start: (
        str | datetime.datetime | datetime.date | pd.Timestamp | pd.Period | None
    ) = None,
    end: (
        str | datetime.datetime | datetime.date | pd.Timestamp | pd.Period | None
    ) = None,
    periods: int | None = None,
    freq: Frequency | None = None,
    name: Hashable | None = None,
) -> PeriodIndex: ...
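
# Illustrative usage sketch (not part of the stubs): per the __sub__ overloads
# above, a Period operand yields a plain Index (of offsets), while an integer
# shift keeps the PeriodIndex type at runtime.
import pandas as pd

pi = pd.period_range("2024-01", periods=3, freq="M")
offsets = pi - pi[0]  # Index (Period operand -> Index overload)
shifted = pi - 1      # PeriodIndex, shifted back one period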
@ -0,0 +1,94 @@
from collections.abc import (
    Hashable,
    Sequence,
)
from typing import (
    final,
    overload,
)

import numpy as np
from pandas.core.indexes.base import (
    Index,
    _IndexSubclassBase,
)

from pandas._typing import (
    HashableT,
    MaskType,
    np_1darray,
    np_ndarray_anyint,
)

class RangeIndex(_IndexSubclassBase[int, np.int64]):
    def __new__(
        cls,
        start: int | RangeIndex | range = ...,
        stop: int = ...,
        step: int = ...,
        dtype=...,
        copy: bool = ...,
        name: Hashable = ...,
    ): ...
    @classmethod
    def from_range(cls, data, name: Hashable = ..., dtype=...): ...
    def __reduce__(self): ...
    @property
    def start(self) -> int: ...
    @property
    def stop(self) -> int: ...
    @property
    def step(self) -> int: ...
    @property
    def nbytes(self) -> int: ...
    def memory_usage(self, deep: bool = ...) -> int: ...
    @property
    def dtype(self) -> np.dtype: ...
    @property
    def is_unique(self) -> bool: ...
    @property
    def is_monotonic_increasing(self) -> bool: ...
    @property
    def is_monotonic_decreasing(self) -> bool: ...
    @property
    def has_duplicates(self) -> bool: ...
    def __contains__(self, key: int | np.integer) -> bool: ...
    @final
    def get_indexer(self, target, method=..., limit=..., tolerance=...): ...
    def tolist(self): ...
    def min(self, axis=..., skipna: bool = ..., *args, **kwargs): ...
    def max(self, axis=..., skipna: bool = ..., *args, **kwargs): ...
    def argsort(self, *args, **kwargs): ...
    def factorize(
        self, sort: bool = False, use_na_sentinel: bool = True
    ) -> tuple[np_1darray[np.intp], RangeIndex]: ...
    def equals(self, other): ...
    @final
    def join(
        self,
        other,
        *,
        how: str = ...,
        level=...,
        return_indexers: bool = ...,
        sort: bool = ...,
    ): ...
    def __len__(self) -> int: ...
    @property
    def size(self) -> int: ...
    def __floordiv__(self, other): ...
    def all(self, *args, **kwargs) -> bool: ...
    def any(self, *args, **kwargs) -> bool: ...
    @final
    def union(  # pyrefly: ignore
        self, other: list[HashableT] | Index, sort: bool | None = None
    ) -> Index | Index[int] | RangeIndex: ...
    @overload  # type: ignore[override]
    def __getitem__(
        self,
        idx: slice | np_ndarray_anyint | Sequence[int] | Index | MaskType,
    ) -> Index: ...
    @overload
    def __getitem__(  # pyright: ignore[reportIncompatibleMethodOverride]
        self, idx: int
    ) -> int: ...
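
# Illustrative usage sketch (not part of the stubs): RangeIndex stores only
# start/stop/step, so an integer key computes its value lazily and returns a
# plain int per the second __getitem__ overload.
import pandas as pd

ri = pd.RangeIndex(start=0, stop=1_000_000, step=2)
assert ri[10] == 20  # int key -> int overload
window = ri[:5]      # slice key -> Index overload (a RangeIndex at runtime)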
@ -0,0 +1,121 @@
from collections.abc import (
    Hashable,
    Sequence,
)
import datetime as dt
from typing import (
    Literal,
    final,
    overload,
)

import numpy as np
from pandas import (
    DateOffset,
    Index,
    Period,
)
from pandas.core.indexes.accessors import TimedeltaIndexProperties
from pandas.core.indexes.datetimelike import DatetimeTimedeltaMixin
from pandas.core.indexes.datetimes import DatetimeIndex
from pandas.core.indexes.period import PeriodIndex
from pandas.core.series import TimedeltaSeries
from typing_extensions import Self

from pandas._libs import (
    Timedelta,
    Timestamp,
)
from pandas._libs.tslibs import BaseOffset
from pandas._typing import (
    AxesData,
    TimedeltaConvertibleTypes,
    num,
)

class TimedeltaIndex(
    DatetimeTimedeltaMixin[Timedelta, np.timedelta64], TimedeltaIndexProperties
):
    def __new__(
        cls,
        data: (
            Sequence[dt.timedelta | Timedelta | np.timedelta64 | float] | AxesData
        ) = ...,
        freq: str | BaseOffset = ...,
        closed: object = ...,
        dtype: Literal["<m8[ns]"] = ...,
        copy: bool = ...,
        name: str = ...,
    ) -> Self: ...
    # various ignores needed for mypy, as we do want to restrict what can be used in
    # arithmetic for these types
    @overload
    def __add__(self, other: Period) -> PeriodIndex: ...
    @overload
    def __add__(self, other: DatetimeIndex) -> DatetimeIndex: ...
    @overload
    def __add__(self, other: dt.timedelta | Timedelta | Self) -> Self: ...
    def __radd__(self, other: dt.datetime | Timestamp | DatetimeIndex) -> DatetimeIndex: ...  # type: ignore[override]
    def __sub__(self, other: dt.timedelta | Timedelta | Self) -> Self: ...
    def __mul__(self, other: num) -> Self: ...
    @overload  # type: ignore[override]
    def __truediv__(self, other: num | Sequence[float]) -> Self: ...
    @overload
    def __truediv__(  # pyright: ignore[reportIncompatibleMethodOverride]
        self, other: dt.timedelta | Sequence[dt.timedelta]
    ) -> Index[float]: ...
    def __rtruediv__(self, other: dt.timedelta | Sequence[dt.timedelta]) -> Index[float]: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    @overload  # type: ignore[override]
    def __floordiv__(self, other: num | Sequence[float]) -> Self: ...
    @overload
    def __floordiv__(  # pyright: ignore[reportIncompatibleMethodOverride]
        self, other: dt.timedelta | Sequence[dt.timedelta]
    ) -> Index[int]: ...
    def __rfloordiv__(self, other: dt.timedelta | Sequence[dt.timedelta]) -> Index[int]: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    def searchsorted(self, value, side: str = ..., sorter=...): ...
    @property
    def inferred_type(self) -> str: ...
    @final
    def to_series(self, index=..., name: Hashable = ...) -> TimedeltaSeries: ...
    def shift(self, periods: int = 1, freq=...) -> Self: ...

@overload
def timedelta_range(
    start: TimedeltaConvertibleTypes,
    end: TimedeltaConvertibleTypes,
    *,
    freq: str | DateOffset | Timedelta | dt.timedelta | None = None,
    name: Hashable | None = None,
    closed: Literal["left", "right"] | None = None,
    unit: None | str = ...,
) -> TimedeltaIndex: ...
@overload
def timedelta_range(
    *,
    end: TimedeltaConvertibleTypes,
    periods: int,
    freq: str | DateOffset | Timedelta | dt.timedelta | None = None,
    name: Hashable | None = None,
    closed: Literal["left", "right"] | None = None,
    unit: None | str = ...,
) -> TimedeltaIndex: ...
@overload
def timedelta_range(
    start: TimedeltaConvertibleTypes,
    *,
    periods: int,
    freq: str | DateOffset | Timedelta | dt.timedelta | None = None,
    name: Hashable | None = None,
    closed: Literal["left", "right"] | None = None,
    unit: None | str = ...,
) -> TimedeltaIndex: ...
@overload
def timedelta_range(
    start: TimedeltaConvertibleTypes,
    end: TimedeltaConvertibleTypes,
    periods: int,
    *,
    name: Hashable | None = None,
    closed: Literal["left", "right"] | None = None,
    unit: None | str = ...,
) -> TimedeltaIndex: ...
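
# Illustrative usage sketch (not part of the stubs): the division overloads
# above in action -- dividing by a number keeps TimedeltaIndex, while dividing
# by a timedelta produces a float-valued Index of ratios.
import pandas as pd

tdi = pd.timedelta_range(start="1 day", periods=3, freq="D")
scaled = tdi * 2                      # TimedeltaIndex (num operand -> Self)
ratios = tdi / pd.Timedelta("1 day")  # Index[float]: [1.0, 2.0, 3.0]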
55
lib/python3.11/site-packages/pandas-stubs/core/indexing.pyi
Normal file
@ -0,0 +1,55 @@
from typing import TypeVar

from pandas.core.indexes.api import Index
from typing_extensions import TypeAlias

from pandas._libs.indexing import _NDFrameIndexerBase
from pandas._typing import (
    MaskType,
    Scalar,
    ScalarT,
)

_IndexSliceTuple: TypeAlias = tuple[
    Index | MaskType | Scalar | list[ScalarT] | slice | tuple[Scalar, ...], ...
]

_IndexSliceUnion: TypeAlias = slice | _IndexSliceTuple

_IndexSliceUnionT = TypeVar(
    "_IndexSliceUnionT", bound=_IndexSliceUnion  # pyrefly: ignore
)

class _IndexSlice:
    def __getitem__(self, arg: _IndexSliceUnionT) -> _IndexSliceUnionT: ...

IndexSlice: _IndexSlice

class IndexingMixin:
    @property
    def iloc(self) -> _iLocIndexer: ...
    @property
    def loc(self) -> _LocIndexer: ...
    @property
    def at(self) -> _AtIndexer: ...
    @property
    def iat(self) -> _iAtIndexer: ...

class _NDFrameIndexer(_NDFrameIndexerBase):
    axis = ...
    def __call__(self, axis=...): ...
    def __getitem__(self, key): ...
    def __setitem__(self, key, value) -> None: ...

class _LocationIndexer(_NDFrameIndexer):
    def __getitem__(self, key): ...

class _LocIndexer(_LocationIndexer): ...
class _iLocIndexer(_LocationIndexer): ...

class _ScalarAccessIndexer(_NDFrameIndexerBase):
    def __getitem__(self, key): ...
    def __setitem__(self, key, value) -> None: ...

class _AtIndexer(_ScalarAccessIndexer): ...
class _iAtIndexer(_ScalarAccessIndexer): ...
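
# Illustrative usage sketch (not part of the stubs): _IndexSlice merely echoes
# its key back with the same type, which is what lets pd.IndexSlice spell
# multi-level .loc selections.
import pandas as pd

mi = pd.MultiIndex.from_product([["a", "b"], [1, 2]])
s = pd.Series(range(4), index=mi)
idx = pd.IndexSlice
second_level_2 = s.loc[idx[:, 2]]  # rows whose second level equals 2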
@ -0,0 +1,120 @@
import abc
from abc import (
    ABC,
    abstractmethod,
)
from collections.abc import (
    Iterable,
    Sequence,
)
import enum
from typing import (
    Any,
    TypedDict,
    cast,
)

class DlpackDeviceType(enum.IntEnum):
    CPU = cast(int, ...)
    CUDA = cast(int, ...)
    CPU_PINNED = cast(int, ...)
    OPENCL = cast(int, ...)
    VULKAN = cast(int, ...)
    METAL = cast(int, ...)
    VPI = cast(int, ...)
    ROCM = cast(int, ...)

class DtypeKind(enum.IntEnum):
    INT = cast(int, ...)
    UINT = cast(int, ...)
    FLOAT = cast(int, ...)
    BOOL = cast(int, ...)
    STRING = cast(int, ...)
    DATETIME = cast(int, ...)
    CATEGORICAL = cast(int, ...)

class ColumnNullType(enum.IntEnum):
    NON_NULLABLE = cast(int, ...)
    USE_NAN = cast(int, ...)
    USE_SENTINEL = cast(int, ...)
    USE_BITMASK = cast(int, ...)
    USE_BYTEMASK = cast(int, ...)

class ColumnBuffers(TypedDict):
    data: tuple[Buffer, Any]
    validity: tuple[Buffer, Any] | None
    offsets: tuple[Buffer, Any] | None

class CategoricalDescription(TypedDict):
    is_ordered: bool
    is_dictionary: bool
    categories: Column | None

class Buffer(ABC, metaclass=abc.ABCMeta):
    @property
    @abstractmethod
    def bufsize(self) -> int: ...
    @property
    @abstractmethod
    def ptr(self) -> int: ...
    @abstractmethod
    def __dlpack__(self): ...
    @abstractmethod
    def __dlpack_device__(self) -> tuple[DlpackDeviceType, int | None]: ...

class Column(ABC, metaclass=abc.ABCMeta):
    @property
    @abstractmethod
    def size(self) -> int: ...
    @property
    @abstractmethod
    def offset(self) -> int: ...
    @property
    @abstractmethod
    def dtype(self) -> tuple[DtypeKind, int, str, str]: ...
    @property
    @abstractmethod
    def describe_categorical(self) -> CategoricalDescription: ...
    @property
    @abstractmethod
    def describe_null(self) -> tuple[ColumnNullType, Any]: ...
    @property
    @abstractmethod
    def null_count(self) -> int | None: ...
    @property
    @abstractmethod
    def metadata(self) -> dict[str, Any]: ...
    @abstractmethod
    def num_chunks(self) -> int: ...
    @abstractmethod
    def get_chunks(self, n_chunks: int | None = ...) -> Iterable[Column]: ...
    @abstractmethod
    def get_buffers(self) -> ColumnBuffers: ...

class DataFrame(ABC, metaclass=abc.ABCMeta):
    version: int
    @abstractmethod
    def __dataframe__(self, nan_as_null: bool = ..., allow_copy: bool = ...): ...
    @property
    @abstractmethod
    def metadata(self) -> dict[str, Any]: ...
    @abstractmethod
    def num_columns(self) -> int: ...
    @abstractmethod
    def num_rows(self) -> int | None: ...
    @abstractmethod
    def num_chunks(self) -> int: ...
    @abstractmethod
    def column_names(self) -> Iterable[str]: ...
    @abstractmethod
    def get_column(self, i: int) -> Column: ...
    @abstractmethod
    def get_column_by_name(self, name: str) -> Column: ...
    @abstractmethod
    def get_columns(self) -> Iterable[Column]: ...
    @abstractmethod
    def select_columns(self, indices: Sequence[int]) -> DataFrame: ...
    @abstractmethod
    def select_columns_by_name(self, names: Sequence[str]) -> DataFrame: ...
    @abstractmethod
    def get_chunks(self, n_chunks: int | None = ...) -> Iterable[DataFrame]: ...
@ -0,0 +1,3 @@
import pandas as pd

def from_dataframe(df, allow_copy: bool = True) -> pd.DataFrame: ...
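
# Illustrative usage sketch (not part of the stubs): round-tripping a frame
# through the interchange protocol stubbed above.
import pandas as pd
from pandas.api.interchange import from_dataframe

df = pd.DataFrame({"x": [1, 2], "y": ["a", "b"]})
xdf = df.__dataframe__()         # interchange object (implements the DataFrame ABC)
assert xdf.num_columns() == 2
roundtrip = from_dataframe(xdf)  # back to a regular pandas DataFrame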
209
lib/python3.11/site-packages/pandas-stubs/core/resample.pyi
Normal file
@ -0,0 +1,209 @@
from collections.abc import (
    Callable,
    Hashable,
    Mapping,
)
from typing import (
    Literal,
    final,
    overload,
)

import numpy as np
from pandas import (
    DataFrame,
    Series,
    Timedelta,
)
from pandas.core.groupby.generic import SeriesGroupBy
from pandas.core.groupby.groupby import BaseGroupBy
from pandas.core.groupby.grouper import Grouper
from typing_extensions import (
    Self,
    TypeAlias,
)

from pandas._typing import (
    S1,
    Axis,
    InterpolateOptions,
    NDFrameT,
    Scalar,
    TimeGrouperOrigin,
    TimestampConvention,
    npt,
)

_FrameGroupByFunc: TypeAlias = (
    Callable[[DataFrame], Scalar]
    | Callable[[DataFrame], Series]
    | Callable[[DataFrame], DataFrame]
    | np.ufunc
)
_FrameGroupByFuncTypes: TypeAlias = (
    _FrameGroupByFunc | str | list[_FrameGroupByFunc | str]
)
_FrameGroupByFuncArgs: TypeAlias = (
    _FrameGroupByFuncTypes | Mapping[Hashable, _FrameGroupByFuncTypes]
)

_SeriesGroupByFunc: TypeAlias = (
    Callable[[Series], Scalar] | Callable[[Series], Series] | np.ufunc
)
_SeriesGroupByFuncTypes: TypeAlias = (
    _SeriesGroupByFunc | str | list[_SeriesGroupByFunc | str]
)
_SeriesGroupByFuncArgs: TypeAlias = (
    _SeriesGroupByFuncTypes | Mapping[Hashable, _SeriesGroupByFunc | str]
)

class Resampler(BaseGroupBy[NDFrameT]):
    def __getattr__(self, attr: str) -> SeriesGroupBy: ...
    @overload
    def aggregate(
        self: Resampler[DataFrame],
        func: _FrameGroupByFuncArgs | None = ...,
        *args,
        **kwargs,
    ) -> DataFrame: ...
    @overload
    def aggregate(
        self: Resampler[Series],
        func: _SeriesGroupByFuncArgs | None = ...,
        *args,
        **kwargs,
    ) -> Series | DataFrame: ...
    agg = aggregate
    apply = aggregate
    @overload
    def transform(
        self: Resampler[Series], arg: Callable[[Series], Series[S1]], *args, **kwargs
    ) -> Series[S1]: ...
    @overload
    def transform(
        self: Resampler[DataFrame], arg: Callable[[Series], Series[S1]], *args, **kwargs
    ) -> DataFrame: ...
    @final
    def ffill(self, limit: int | None = ...) -> NDFrameT: ...
    @final
    def nearest(self, limit: int | None = ...) -> NDFrameT: ...
    @final
    def bfill(self, limit: int | None = ...) -> NDFrameT: ...
    @overload
    def interpolate(
        self,
        method: InterpolateOptions = ...,
        *,
        axis: Axis = ...,
        limit: int | None = ...,
        inplace: Literal[True],
        limit_direction: Literal["forward", "backward", "both"] = ...,
        limit_area: Literal["inside", "outside"] | None = ...,
        **kwargs,
    ) -> None: ...
    @overload
    def interpolate(
        self,
        method: InterpolateOptions = ...,
        *,
        axis: Axis = ...,
        limit: int | None = ...,
        inplace: Literal[False] = ...,
        limit_direction: Literal["forward", "backward", "both"] = ...,
        limit_area: Literal["inside", "outside"] | None = ...,
        **kwargs,
    ) -> NDFrameT: ...
    @final
    def asfreq(self, fill_value: Scalar | None = ...) -> NDFrameT: ...
    @final
    def sum(self, numeric_only: bool = False, min_count: int = 0) -> NDFrameT: ...
    @final
    def prod(self, numeric_only: bool = False, min_count: int = 0) -> NDFrameT: ...
    @final
    def min(self, numeric_only: bool = ..., min_count: int = ...) -> NDFrameT: ...
    @final
    def max(self, numeric_only: bool = ..., min_count: int = ...) -> NDFrameT: ...
    @final
    def first(
        self,
        numeric_only: bool = False,
        min_count: int = -1,
        skipna: bool = True,
    ) -> NDFrameT: ...
    @final
    def last(
        self,
        numeric_only: bool = False,
        min_count: int = -1,
        skipna: bool = True,
    ) -> NDFrameT: ...
    @final
    def median(self, numeric_only: bool = False) -> NDFrameT: ...
    @final
    def mean(self, numeric_only: bool = False) -> NDFrameT: ...
    @final
    def std(self, ddof: int = 1, numeric_only: bool = False) -> NDFrameT: ...
    @final
    def var(self, ddof: int = 1, numeric_only: bool = False) -> NDFrameT: ...
    @final
    def sem(self, ddof: int = 1, numeric_only: bool = False) -> NDFrameT: ...
    @final
    def ohlc(self) -> DataFrame: ...
    @overload
    def nunique(self: Resampler[Series]) -> Series[int]: ...
    @overload
    def nunique(self: Resampler[DataFrame]) -> DataFrame: ...
    @final
    def size(self) -> Series[int]: ...
    @overload
    def count(self: Resampler[Series]) -> Series[int]: ...
    @overload
    def count(self: Resampler[DataFrame]) -> DataFrame: ...
    @final
    def quantile(
        self,
        q: float | list[float] | npt.NDArray[np.double] | Series[float] = 0.5,
        **kwargs,
    ) -> NDFrameT: ...

# We lie about inheriting from Resampler because at runtime it inherits all
# Resampler attributes via setattr
class _GroupByMixin(Resampler[NDFrameT]):
    key: str | list[str] | None
    def __getitem__(self, key) -> Self: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]

class DatetimeIndexResampler(Resampler[NDFrameT]): ...

class DatetimeIndexResamplerGroupby(
    _GroupByMixin[NDFrameT], DatetimeIndexResampler[NDFrameT]
):
    @final
    def __getattr__(self, attr: str) -> Self: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]

class PeriodIndexResampler(DatetimeIndexResampler[NDFrameT]): ...

class PeriodIndexResamplerGroupby(
    _GroupByMixin[NDFrameT], PeriodIndexResampler[NDFrameT]
):
    @final
    def __getattr__(self, attr: str) -> Self: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]

class TimedeltaIndexResampler(DatetimeIndexResampler[NDFrameT]): ...

class TimedeltaIndexResamplerGroupby(
    _GroupByMixin[NDFrameT], TimedeltaIndexResampler[NDFrameT]
):
    @final
    def __getattr__(self, attr: str) -> Self: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]

class TimeGrouper(Grouper):
    closed: Literal["left", "right"]
    label: Literal["left", "right"]
    kind: str | None
    convention: TimestampConvention
    how: str
    fill_method: str | None
    limit: int | None
    group_keys: bool
    origin: TimeGrouperOrigin
    offset: Timedelta | None
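
# Illustrative usage sketch (not part of the stubs): the aggregate overloads
# above dispatch on self, so resampling a Series yields Series results and
# resampling a DataFrame yields DataFrame results.
import pandas as pd

s = pd.Series(range(6), index=pd.date_range("2024-01-01", periods=6, freq="h"))
two_hourly = s.resample("2h").sum()                     # Series
frame_counts = s.to_frame("v").resample("2h").count()   # DataFrame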
@ -0,0 +1,24 @@
from pandas.core.reshape.concat import concat as concat
from pandas.core.reshape.encoding import (
    from_dummies as from_dummies,
    get_dummies as get_dummies,
)
from pandas.core.reshape.melt import (
    lreshape as lreshape,
    melt as melt,
    wide_to_long as wide_to_long,
)
from pandas.core.reshape.merge import (
    merge as merge,
    merge_asof as merge_asof,
    merge_ordered as merge_ordered,
)
from pandas.core.reshape.pivot import (
    crosstab as crosstab,
    pivot as pivot,
    pivot_table as pivot_table,
)
from pandas.core.reshape.tile import (
    cut as cut,
    qcut as qcut,
)
@ -0,0 +1,179 @@
from collections.abc import (
    Iterable,
    Mapping,
    Sequence,
)
from typing import (
    Literal,
    overload,
)

from pandas import (
    DataFrame,
    Series,
)
from typing_extensions import Never

from pandas._typing import (
    S2,
    Axis,
    AxisIndex,
    HashableT1,
    HashableT2,
    HashableT3,
    HashableT4,
)

@overload
def concat(  # type: ignore[overload-overlap]
    objs: Iterable[DataFrame] | Mapping[HashableT1, DataFrame],
    *,
    axis: Axis = ...,
    join: Literal["inner", "outer"] = ...,
    ignore_index: bool = ...,
    keys: Iterable[HashableT2] | None = ...,
    levels: Sequence[list[HashableT3] | tuple[HashableT3, ...]] | None = ...,
    names: list[HashableT4] | None = ...,
    verify_integrity: bool = ...,
    sort: bool = ...,
    copy: bool = ...,
) -> DataFrame: ...
@overload
def concat(  # pyright: ignore[reportOverlappingOverload]
    objs: Iterable[Series[S2]],
    *,
    axis: AxisIndex = ...,
    join: Literal["inner", "outer"] = ...,
    ignore_index: bool = ...,
    keys: Iterable[HashableT2] | None = ...,
    levels: Sequence[list[HashableT3] | tuple[HashableT3, ...]] | None = ...,
    names: list[HashableT4] | None = ...,
    verify_integrity: bool = ...,
    sort: bool = ...,
    copy: bool = ...,
) -> Series[S2]: ...
@overload
def concat(  # type: ignore[overload-overlap]
    objs: Iterable[Series] | Mapping[HashableT1, Series],
    *,
    axis: AxisIndex = ...,
    join: Literal["inner", "outer"] = ...,
    ignore_index: bool = ...,
    keys: Iterable[HashableT2] | None = ...,
    levels: Sequence[list[HashableT3] | tuple[HashableT3, ...]] | None = ...,
    names: list[HashableT4] | None = ...,
    verify_integrity: bool = ...,
    sort: bool = ...,
    copy: bool = ...,
) -> Series: ...
@overload
def concat(  # type: ignore[overload-overlap] # pyright: ignore[reportOverlappingOverload]
    objs: Iterable[Series | DataFrame] | Mapping[HashableT1, Series | DataFrame],
    *,
    axis: Axis = ...,
    join: Literal["inner", "outer"] = ...,
    ignore_index: bool = ...,
    keys: Iterable[HashableT2] | None = ...,
    levels: Sequence[list[HashableT3] | tuple[HashableT3, ...]] | None = ...,
    names: list[HashableT4] | None = ...,
    verify_integrity: bool = ...,
    sort: bool = ...,
    copy: bool = ...,
) -> DataFrame: ...
@overload
def concat(
    objs: Iterable[None] | Mapping[HashableT1, None],
    *,
    axis: Axis = ...,
    join: Literal["inner", "outer"] = ...,
    ignore_index: bool = ...,
    keys: Iterable[HashableT2] | None = ...,
    levels: Sequence[list[HashableT3] | tuple[HashableT3, ...]] | None = ...,
    names: list[HashableT4] | None = ...,
    verify_integrity: bool = ...,
    sort: bool = ...,
    copy: bool = ...,
) -> Never: ...
@overload
def concat(  # type: ignore[overload-overlap]
    objs: Iterable[DataFrame | None] | Mapping[HashableT1, DataFrame | None],
    *,
    axis: Axis = ...,
    join: Literal["inner", "outer"] = ...,
    ignore_index: bool = ...,
    keys: Iterable[HashableT2] | None = ...,
    levels: Sequence[list[HashableT3] | tuple[HashableT3, ...]] | None = ...,
    names: list[HashableT4] | None = ...,
    verify_integrity: bool = ...,
    sort: bool = ...,
    copy: bool = ...,
) -> DataFrame: ...
@overload
def concat(  # type: ignore[overload-overlap]
    objs: Iterable[Series | None] | Mapping[HashableT1, Series | None],
    *,
    axis: AxisIndex = ...,
    join: Literal["inner", "outer"] = ...,
    ignore_index: bool = ...,
    keys: Iterable[HashableT2] | None = ...,
    levels: Sequence[list[HashableT3] | tuple[HashableT3, ...]] | None = ...,
    names: list[HashableT4] | None = ...,
    verify_integrity: bool = ...,
    sort: bool = ...,
    copy: bool = ...,
) -> Series: ...
@overload
def concat(
    objs: (
        Iterable[Series | DataFrame | None]
        | Mapping[HashableT1, Series | DataFrame | None]
    ),
    *,
    axis: Axis = ...,
    join: Literal["inner", "outer"] = ...,
    ignore_index: bool = ...,
    keys: Iterable[HashableT2] | None = ...,
    levels: Sequence[list[HashableT3] | tuple[HashableT3, ...]] | None = ...,
    names: list[HashableT4] | None = ...,
    verify_integrity: bool = ...,
    sort: bool = ...,
    copy: bool = ...,
) -> DataFrame: ...

# Including either of the next 2 overloads causes mypy to complain about
# test_pandas.py:test_types_concat() in assert_type(pd.concat([s, s2]), pd.Series)
# It thinks that pd.concat([s, s2]) is Any. This may be due to Series being
# Generic, or the axis argument being unspecified, and then there is partial
# overlap with the first 2 overloads.
#
# @overload
# def concat(
#     objs: Union[
#         Iterable[Union[Series, DataFrame]], Mapping[HashableT, Union[Series, DataFrame]]
#     ],
#     axis: Literal[0, "index"] = ...,
#     join: str = ...,
#     ignore_index: bool = ...,
#     keys=...,
#     levels=...,
#     names=...,
#     verify_integrity: bool = ...,
#     sort: bool = ...,
#     copy: bool = ...,
# ) -> Union[DataFrame, Series]: ...

# @overload
# def concat(
#     objs: Union[
#         Iterable[Union[Series, DataFrame]], Mapping[HashableT, Union[Series, DataFrame]]
#     ],
#     axis: Axis = ...,
#     join: str = ...,
#     ignore_index: bool = ...,
#     keys=...,
#     levels=...,
#     names=...,
#     verify_integrity: bool = ...,
#     sort: bool = ...,
#     copy: bool = ...,
# ) -> Union[DataFrame, Series]: ...
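
# Illustrative usage sketch (not part of the stubs): how the live overloads
# above play out at runtime.
import pandas as pd

s1 = pd.Series([1, 2])
s2 = pd.Series([3, 4])
stacked = pd.concat([s1, s2])                                     # Series
side_by_side = pd.concat([s1.to_frame(), s2.to_frame()], axis=1)  # DataFrame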
@ -0,0 +1,29 @@
from collections.abc import (
    Hashable,
    Iterable,
)

from pandas import DataFrame

from pandas._typing import (
    AnyArrayLike,
    Dtype,
    HashableT1,
    HashableT2,
)

def get_dummies(
    data: AnyArrayLike | DataFrame,
    prefix: str | Iterable[str] | dict[HashableT1, str] | None = None,
    prefix_sep: str = "_",
    dummy_na: bool = False,
    columns: list[HashableT2] | None = None,
    sparse: bool = False,
    drop_first: bool = False,
    dtype: Dtype | None = None,
) -> DataFrame: ...
def from_dummies(
    data: DataFrame,
    sep: str | None = None,
    default_category: Hashable | dict[str, Hashable] | None = None,
) -> DataFrame: ...
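
# Illustrative usage sketch (not part of the stubs): with a separator,
# from_dummies undoes get_dummies.
import pandas as pd

df = pd.DataFrame({"color": ["red", "blue", "red"]})
dummies = pd.get_dummies(df, prefix_sep="_")  # columns color_blue, color_red
restored = pd.from_dummies(dummies, sep="_")  # back to a single "color" column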
@ -0,0 +1,29 @@
from collections.abc import Hashable

import numpy as np
from pandas.core.frame import DataFrame

from pandas._typing import HashableT

def melt(
    frame: DataFrame,
    id_vars: tuple | list | np.ndarray | None = None,
    value_vars: tuple | list | np.ndarray | None = None,
    var_name: str | None = None,
    value_name: Hashable = "value",
    col_level: int | str | None = None,
    ignore_index: bool = True,
) -> DataFrame: ...
def lreshape(
    data: DataFrame,
    groups: dict[HashableT, list[HashableT]],
    dropna: bool = True,
) -> DataFrame: ...
def wide_to_long(
    df: DataFrame,
    stubnames: str | list[str],
    i: str | list[str],
    j: str,
    sep: str = "",
    suffix: str = "\\d+",
) -> DataFrame: ...
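
# Illustrative usage sketch (not part of the stubs): melt with the defaults
# stubbed above turns wide columns into (variable, value) rows.
import pandas as pd

wide = pd.DataFrame({"id": [1, 2], "x": [10, 20], "y": [30, 40]})
long = pd.melt(wide, id_vars=["id"], value_vars=["x", "y"])
# long has columns: id, variable, value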
@ -0,0 +1,93 @@
from datetime import timedelta
from typing import (
    Literal,
    overload,
)

from pandas import (
    DataFrame,
    Series,
    Timedelta,
)

from pandas._typing import (
    AnyArrayLike,
    HashableT,
    JoinHow,
    Label,
    MergeHow,
    Suffixes,
    ValidationOptions,
)

def merge(
    left: DataFrame | Series,
    right: DataFrame | Series,
    how: MergeHow = "inner",
    on: Label | list[HashableT] | AnyArrayLike | None = None,
    left_on: Label | list[HashableT] | AnyArrayLike | None = None,
    right_on: Label | list[HashableT] | AnyArrayLike | None = None,
    left_index: bool = False,
    right_index: bool = False,
    sort: bool = False,
    suffixes: Suffixes = ...,
    indicator: bool | str = False,
    validate: ValidationOptions | None = None,
) -> DataFrame: ...
@overload
def merge_ordered(
    left: DataFrame,
    right: DataFrame,
    on: Label | list[HashableT] | None = ...,
    left_on: Label | list[HashableT] | None = ...,
    right_on: Label | list[HashableT] | None = ...,
    left_by: Label | list[HashableT] | None = ...,
    right_by: Label | list[HashableT] | None = ...,
    fill_method: Literal["ffill"] | None = ...,
    suffixes: Suffixes = ...,
    how: JoinHow = ...,
) -> DataFrame: ...
@overload
def merge_ordered(
    left: Series,
    right: DataFrame | Series,
    on: Label | list[HashableT] | None = ...,
    left_on: Label | list[HashableT] | None = ...,
    right_on: Label | list[HashableT] | None = ...,
    left_by: None = ...,
    right_by: None = ...,
    fill_method: Literal["ffill"] | None = ...,
    suffixes: (
        list[str | None] | tuple[str, str] | tuple[None, str] | tuple[str, None]
    ) = ...,
    how: JoinHow = ...,
) -> DataFrame: ...
@overload
def merge_ordered(
    left: DataFrame | Series,
    right: Series,
    on: Label | list[HashableT] | None = ...,
    left_on: Label | list[HashableT] | None = ...,
    right_on: Label | list[HashableT] | None = ...,
    left_by: None = ...,
    right_by: None = ...,
    fill_method: Literal["ffill"] | None = ...,
    suffixes: Suffixes = ...,
    how: JoinHow = ...,
) -> DataFrame: ...
def merge_asof(
    left: DataFrame | Series,
    right: DataFrame | Series,
    on: Label | None = None,
    left_on: Label | None = None,
    right_on: Label | None = None,
    left_index: bool = False,
    right_index: bool = False,
    by: Label | list[HashableT] | None = None,
    left_by: Label | list[HashableT] | None = None,
    right_by: Label | list[HashableT] | None = None,
    suffixes: Suffixes = ...,
    tolerance: int | timedelta | Timedelta | None = None,
    allow_exact_matches: bool = True,
    direction: Literal["backward", "forward", "nearest"] = "backward",
) -> DataFrame: ...
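
# Illustrative usage sketch (not part of the stubs): merge_asof pairs each left
# row with the closest earlier right row (direction="backward" by default);
# both inputs must be sorted on the key.
import pandas as pd

trades = pd.DataFrame(
    {"time": pd.to_datetime(["10:00:01", "10:00:03"]), "qty": [5, 7]}
)
quotes = pd.DataFrame(
    {"time": pd.to_datetime(["10:00:00", "10:00:02"]), "px": [99.0, 100.0]}
)
matched = pd.merge_asof(trades, quotes, on="time")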
149
lib/python3.11/site-packages/pandas-stubs/core/reshape/pivot.pyi
Normal file
@ -0,0 +1,149 @@
from collections.abc import (
    Callable,
    Hashable,
    Mapping,
    Sequence,
)
import datetime
from typing import (
    Literal,
    overload,
)

import numpy as np
import pandas as pd
from pandas.core.frame import DataFrame
from pandas.core.groupby.grouper import Grouper
from pandas.core.indexes.base import Index
from pandas.core.series import Series
from typing_extensions import TypeAlias

from pandas._typing import (
    AnyArrayLike,
    ArrayLike,
    HashableT1,
    HashableT2,
    HashableT3,
    Label,
    Scalar,
    ScalarT,
    npt,
)

_PivotAggCallable: TypeAlias = Callable[[Series], ScalarT]

_PivotAggFunc: TypeAlias = (
    _PivotAggCallable
    | np.ufunc
    | Literal["mean", "sum", "count", "min", "max", "median", "std", "var"]
)

_NonIterableHashable: TypeAlias = (
    str
    | datetime.date
    | datetime.datetime
    | datetime.timedelta
    | bool
    | int
    | float
    | complex
    | pd.Timestamp
    | pd.Timedelta
)

_PivotTableIndexTypes: TypeAlias = (
    Label | Sequence[HashableT1] | Series | Grouper | None
)
_PivotTableColumnsTypes: TypeAlias = (
    Label | Sequence[HashableT2] | Series | Grouper | None
)
_PivotTableValuesTypes: TypeAlias = Label | Sequence[HashableT3] | None

_ExtendedAnyArrayLike: TypeAlias = AnyArrayLike | ArrayLike

@overload
def pivot_table(
    data: DataFrame,
    values: _PivotTableValuesTypes = ...,
    index: _PivotTableIndexTypes = ...,
    columns: _PivotTableColumnsTypes = ...,
    aggfunc: (
        _PivotAggFunc | Sequence[_PivotAggFunc] | Mapping[Hashable, _PivotAggFunc]
    ) = ...,
    fill_value: Scalar | None = ...,
    margins: bool = ...,
    dropna: bool = ...,
    margins_name: str = ...,
    observed: bool = ...,
    sort: bool = ...,
) -> DataFrame: ...

# Can only use Index or ndarray when index or columns is a Grouper
@overload
def pivot_table(
    data: DataFrame,
    values: _PivotTableValuesTypes = ...,
    *,
    index: Grouper,
    columns: _PivotTableColumnsTypes | Index | npt.NDArray = ...,
    aggfunc: (
        _PivotAggFunc | Sequence[_PivotAggFunc] | Mapping[Hashable, _PivotAggFunc]
    ) = ...,
    fill_value: Scalar | None = ...,
    margins: bool = ...,
    dropna: bool = ...,
    margins_name: str = ...,
    observed: bool = ...,
    sort: bool = ...,
) -> DataFrame: ...
@overload
def pivot_table(
    data: DataFrame,
    values: _PivotTableValuesTypes = ...,
    index: _PivotTableIndexTypes | Index | npt.NDArray = ...,
    *,
    columns: Grouper,
    aggfunc: (
        _PivotAggFunc | Sequence[_PivotAggFunc] | Mapping[Hashable, _PivotAggFunc]
    ) = ...,
    fill_value: Scalar | None = ...,
    margins: bool = ...,
    dropna: bool = ...,
    margins_name: str = ...,
    observed: bool = ...,
    sort: bool = ...,
) -> DataFrame: ...
def pivot(
    data: DataFrame,
    *,
    index: _NonIterableHashable | Sequence[HashableT1] = ...,
    columns: _NonIterableHashable | Sequence[HashableT2] = ...,
    values: _NonIterableHashable | Sequence[HashableT3] = ...,
) -> DataFrame: ...
@overload
def crosstab(
    index: list | _ExtendedAnyArrayLike | list[Sequence | _ExtendedAnyArrayLike],
    columns: list | _ExtendedAnyArrayLike | list[Sequence | _ExtendedAnyArrayLike],
    values: list | _ExtendedAnyArrayLike,
    rownames: list[HashableT1] | None = ...,
    colnames: list[HashableT2] | None = ...,
    *,
    aggfunc: str | np.ufunc | Callable[[Series], float],
    margins: bool = ...,
    margins_name: str = ...,
    dropna: bool = ...,
    normalize: bool | Literal[0, 1, "all", "index", "columns"] = ...,
) -> DataFrame: ...
@overload
def crosstab(
    index: list | _ExtendedAnyArrayLike | list[Sequence | _ExtendedAnyArrayLike],
    columns: list | _ExtendedAnyArrayLike | list[Sequence | _ExtendedAnyArrayLike],
    values: None = ...,
    rownames: list[HashableT1] | None = ...,
    colnames: list[HashableT2] | None = ...,
    aggfunc: None = ...,
    margins: bool = ...,
    margins_name: str = ...,
    dropna: bool = ...,
    normalize: bool | Literal[0, 1, "all", "index", "columns"] = ...,
) -> DataFrame: ...
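
# Illustrative usage sketch (not part of the stubs): per the comment above, a
# Grouper index is what licenses an Index/ndarray for columns (and vice versa).
import pandas as pd

df = pd.DataFrame(
    {
        "when": pd.date_range("2024-01-01", periods=4, freq="D"),
        "cat": ["a", "b", "a", "b"],
        "val": [1, 2, 3, 4],
    }
)
table = pd.pivot_table(
    df,
    values="val",
    index=pd.Grouper(key="when", freq="2D"),
    columns="cat",
    aggfunc="sum",
)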
273
lib/python3.11/site-packages/pandas-stubs/core/reshape/tile.pyi
Normal file
@ -0,0 +1,273 @@
from collections.abc import Sequence
from typing import (
    Literal,
    overload,
)

import numpy as np
from pandas import (
    Categorical,
    CategoricalDtype,
    DatetimeIndex,
    Index,
    Interval,
    IntervalIndex,
    Series,
    Timestamp,
)
from pandas.core.series import TimestampSeries

from pandas._typing import (
    IntervalT,
    Label,
    npt,
)

@overload
def cut(
    x: Index | npt.NDArray | Sequence[int] | Sequence[float],
    bins: int | Series | Index[int] | Index[float] | Sequence[int] | Sequence[float],
    right: bool = ...,
    *,
    labels: Literal[False],
    retbins: Literal[True],
    precision: int = ...,
    include_lowest: bool = ...,
    duplicates: Literal["raise", "drop"] = ...,
    ordered: bool = ...,
) -> tuple[npt.NDArray[np.intp], npt.NDArray]: ...
@overload
def cut(
    x: Index | npt.NDArray | Sequence[int] | Sequence[float],
    bins: IntervalIndex[IntervalT],
    right: bool = ...,
    *,
    labels: Literal[False],
    retbins: Literal[True],
    precision: int = ...,
    include_lowest: bool = ...,
    duplicates: Literal["raise", "drop"] = ...,
    ordered: bool = ...,
) -> tuple[npt.NDArray[np.intp], IntervalIndex[IntervalT]]: ...
@overload
def cut(  # pyright: ignore[reportOverlappingOverload]
    x: TimestampSeries,
    bins: (
        int
        | TimestampSeries
        | DatetimeIndex
        | Sequence[Timestamp]
        | Sequence[np.datetime64]
    ),
    right: bool = ...,
    labels: Literal[False] | Sequence[Label] | None = ...,
    *,
    retbins: Literal[True],
    precision: int = ...,
    include_lowest: bool = ...,
    duplicates: Literal["raise", "drop"] = ...,
    ordered: bool = ...,
) -> tuple[Series, DatetimeIndex]: ...
@overload
def cut(
    x: TimestampSeries,
    bins: IntervalIndex[Interval[Timestamp]],
    right: bool = ...,
    labels: Sequence[Label] | None = ...,
    *,
    retbins: Literal[True],
    precision: int = ...,
    include_lowest: bool = ...,
    duplicates: Literal["raise", "drop"] = ...,
    ordered: bool = ...,
) -> tuple[Series, DatetimeIndex]: ...
@overload
def cut(
    x: Series,
    bins: int | Series | Index[int] | Index[float] | Sequence[int] | Sequence[float],
    right: bool = ...,
    labels: Literal[False] | Sequence[Label] | None = ...,
    *,
    retbins: Literal[True],
    precision: int = ...,
    include_lowest: bool = ...,
    duplicates: Literal["raise", "drop"] = ...,
    ordered: bool = ...,
) -> tuple[Series, npt.NDArray]: ...
@overload
def cut(
    x: Series,
    bins: IntervalIndex[Interval[int]] | IntervalIndex[Interval[float]],
    right: bool = ...,
    labels: Sequence[Label] | None = ...,
    *,
    retbins: Literal[True],
    precision: int = ...,
    include_lowest: bool = ...,
    duplicates: Literal["raise", "drop"] = ...,
    ordered: bool = ...,
) -> tuple[Series, IntervalIndex]: ...
@overload
def cut(
    x: Index | npt.NDArray | Sequence[int] | Sequence[float],
    bins: int | Series | Index[int] | Index[float] | Sequence[int] | Sequence[float],
    right: bool = ...,
    labels: Sequence[Label] | None = ...,
    *,
    retbins: Literal[True],
    precision: int = ...,
    include_lowest: bool = ...,
    duplicates: Literal["raise", "drop"] = ...,
    ordered: bool = ...,
) -> tuple[Categorical, npt.NDArray]: ...
@overload
def cut(
    x: Index | npt.NDArray | Sequence[int] | Sequence[float],
    bins: IntervalIndex[IntervalT],
    right: bool = ...,
    labels: Sequence[Label] | None = ...,
    *,
    retbins: Literal[True],
    precision: int = ...,
    include_lowest: bool = ...,
    duplicates: Literal["raise", "drop"] = ...,
    ordered: bool = ...,
) -> tuple[Categorical, IntervalIndex[IntervalT]]: ...
@overload
def cut(
    x: Index | npt.NDArray | Sequence[int] | Sequence[float],
    bins: (
        int
        | Series
        | Index[int]
        | Index[float]
        | Sequence[int]
        | Sequence[float]
        | IntervalIndex
    ),
    right: bool = ...,
    *,
    labels: Literal[False],
    retbins: Literal[False] = ...,
    precision: int = ...,
    include_lowest: bool = ...,
    duplicates: Literal["raise", "drop"] = ...,
    ordered: bool = ...,
) -> npt.NDArray[np.intp]: ...
@overload
def cut(
    x: TimestampSeries,
    bins: (
        int
        | TimestampSeries
        | DatetimeIndex
        | Sequence[Timestamp]
        | Sequence[np.datetime64]
        | IntervalIndex[Interval[Timestamp]]
    ),
    right: bool = ...,
    labels: Literal[False] | Sequence[Label] | None = ...,
    retbins: Literal[False] = ...,
    precision: int = ...,
    include_lowest: bool = ...,
    duplicates: Literal["raise", "drop"] = ...,
    ordered: bool = ...,
) -> Series[CategoricalDtype]: ...
@overload
def cut(
    x: Series,
    bins: (
        int
        | Series
        | Index[int]
        | Index[float]
        | Sequence[int]
        | Sequence[float]
        | IntervalIndex
    ),
    right: bool = ...,
    labels: Literal[False] | Sequence[Label] | None = ...,
    retbins: Literal[False] = ...,
    precision: int = ...,
    include_lowest: bool = ...,
    duplicates: Literal["raise", "drop"] = ...,
    ordered: bool = ...,
) -> Series: ...
@overload
def cut(
    x: Index | npt.NDArray | Sequence[int] | Sequence[float],
    bins: (
        int
        | Series
        | Index[int]
        | Index[float]
        | Sequence[int]
        | Sequence[float]
        | IntervalIndex
    ),
    right: bool = ...,
    labels: Sequence[Label] | None = ...,
    retbins: Literal[False] = ...,
    precision: int = ...,
    include_lowest: bool = ...,
    duplicates: Literal["raise", "drop"] = ...,
    ordered: bool = ...,
) -> Categorical: ...
@overload
def qcut(
    x: Index | npt.NDArray | Sequence[int] | Sequence[float],
    q: int | Sequence[float] | Series[float] | Index[float] | npt.NDArray,
    *,
    labels: Literal[False],
    retbins: Literal[False] = ...,
    precision: int = ...,
    duplicates: Literal["raise", "drop"] = ...,
) -> npt.NDArray[np.intp]: ...
@overload
def qcut(
    x: Index | npt.NDArray | Sequence[int] | Sequence[float],
    q: int | Sequence[float] | Series[float] | Index[float] | npt.NDArray,
    labels: Sequence[Label] | None = ...,
    retbins: Literal[False] = ...,
    precision: int = ...,
    duplicates: Literal["raise", "drop"] = ...,
) -> Categorical: ...
@overload
def qcut(
    x: Series,
    q: int | Sequence[float] | Series[float] | Index[float] | npt.NDArray,
    labels: Literal[False] | Sequence[Label] | None = ...,
    retbins: Literal[False] = ...,
    precision: int = ...,
    duplicates: Literal["raise", "drop"] = ...,
) -> Series: ...
@overload
def qcut(
    x: Index | npt.NDArray | Sequence[int] | Sequence[float],
    q: int | Sequence[float] | Series[float] | Index[float] | npt.NDArray,
    *,
    labels: Literal[False],
    retbins: Literal[True],
    precision: int = ...,
    duplicates: Literal["raise", "drop"] = ...,
) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.double]]: ...
@overload
def qcut(
    x: Series,
    q: int | Sequence[float] | Series[float] | Index[float] | npt.NDArray,
    labels: Literal[False] | Sequence[Label] | None = ...,
    *,
    retbins: Literal[True],
    precision: int = ...,
    duplicates: Literal["raise", "drop"] = ...,
) -> tuple[Series, npt.NDArray[np.double]]: ...
@overload
def qcut(
    x: Index | npt.NDArray | Sequence[int] | Sequence[float],
    q: int | Sequence[float] | Series[float] | Index[float] | npt.NDArray,
    labels: Sequence[Label] | None = ...,
    *,
    retbins: Literal[True],
    precision: int = ...,
    duplicates: Literal["raise", "drop"] = ...,
) -> tuple[Categorical, npt.NDArray[np.double]]: ...
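
# Illustrative usage sketch (not part of the stubs): the cut overloads keyed on
# labels/retbins. labels=False yields integer bin codes; retbins=True also
# returns the computed bin edges.
import pandas as pd

codes = pd.cut([1, 5, 9], bins=3, labels=False)        # ndarray of intp codes
cats, edges = pd.cut([1, 5, 9], bins=3, retbins=True)  # Categorical + edges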
@ -0,0 +1 @@
def cartesian_product(X): ...
3939
lib/python3.11/site-packages/pandas-stubs/core/series.pyi
Normal file
File diff suppressed because it is too large
@ -0,0 +1,234 @@
# pyright: strict
from builtins import slice as _slice
from collections.abc import (
    Callable,
    Hashable,
    Mapping,
    Sequence,
)
import re
from typing import (
    Generic,
    Literal,
    TypeVar,
    overload,
)

import numpy as np
import numpy.typing as npt
import pandas as pd
from pandas import (
    DataFrame,
    Index,
    MultiIndex,
    Series,
)
from pandas.core.base import NoNewAttributesMixin

from pandas._libs.tslibs.nattype import NaTType
from pandas._typing import (
    AlignJoin,
    DtypeObj,
    Scalar,
    T,
    np_1darray,
)

# Used for the result of str.split with expand=True
_T_EXPANDING = TypeVar("_T_EXPANDING", bound=DataFrame | MultiIndex)
# Used for the result of str.split with expand=False
_T_LIST_STR = TypeVar("_T_LIST_STR", bound=Series[list[str]] | Index[list[str]])
# Used for the result of str.match
_T_BOOL = TypeVar("_T_BOOL", bound=Series[bool] | np_1darray[np.bool])
# Used for the result of str.index / str.find
_T_INT = TypeVar("_T_INT", bound=Series[int] | Index[int])
# Used for the result of str.encode
_T_BYTES = TypeVar("_T_BYTES", bound=Series[bytes] | Index[bytes])
# Used for the result of str.decode
_T_STR = TypeVar("_T_STR", bound=Series[str] | Index[str])
# Used for the result of str.partition
_T_OBJECT = TypeVar("_T_OBJECT", bound=Series | Index)
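
# Illustrative usage sketch (not part of the stubs): the type parameters above
# bind once per accessor, so Series.str and Index.str pin every return type
# up front.
import pandas as pd

s = pd.Series(["a_b", "c_d"])
parts = s.str.split("_")       # Series of lists  (_T_LIST_STR for Series)
flags = s.str.startswith("a")  # Series[bool]     (_T_BOOL for Series)
ix = pd.Index(["a_b", "c_d"]).str.split("_")  # Index of lists (_T_LIST_STR)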
class StringMethods(
    NoNewAttributesMixin,
    Generic[T, _T_EXPANDING, _T_BOOL, _T_LIST_STR, _T_INT, _T_BYTES, _T_STR, _T_OBJECT],
):
    def __init__(self, data: T) -> None: ...
    def __getitem__(self, key: slice | int) -> _T_STR: ...
    def __iter__(self) -> _T_STR: ...
    @overload
    def cat(
        self,
        others: None = None,
        sep: str | None = None,
        na_rep: str | None = None,
        join: AlignJoin = "left",
    ) -> str: ...
    @overload
    def cat(
        self,
        others: (
            Series[str] | Index[str] | pd.DataFrame | npt.NDArray[np.str_] | list[str]
        ),
        sep: str | None = None,
        na_rep: str | None = None,
        join: AlignJoin = "left",
    ) -> _T_STR: ...
    @overload
    def split(
        self,
        pat: str | re.Pattern[str] | None = None,
        *,
        n: int = -1,
        expand: Literal[True],
        regex: bool | None = None,
    ) -> _T_EXPANDING: ...
    @overload
    def split(
        self,
        pat: str | re.Pattern[str] | None = None,
        *,
        n: int = -1,
        expand: Literal[False] = False,
        regex: bool | None = None,
    ) -> _T_LIST_STR: ...
    @overload
    def rsplit(
        self, pat: str | None = None, *, n: int = -1, expand: Literal[True]
    ) -> _T_EXPANDING: ...
    @overload
    def rsplit(
        self, pat: str | None = None, *, n: int = -1, expand: Literal[False] = False
    ) -> _T_LIST_STR: ...
    @overload  # expand=True
    def partition(
        self, sep: str = " ", expand: Literal[True] = True
    ) -> _T_EXPANDING: ...
    @overload  # expand=False (positional argument)
    def partition(self, sep: str, expand: Literal[False]) -> _T_OBJECT: ...
    @overload  # expand=False (keyword argument)
    def partition(self, sep: str = " ", *, expand: Literal[False]) -> _T_OBJECT: ...
    @overload  # expand=True
    def rpartition(
        self, sep: str = " ", expand: Literal[True] = True
    ) -> _T_EXPANDING: ...
    @overload  # expand=False (positional argument)
    def rpartition(self, sep: str, expand: Literal[False]) -> _T_OBJECT: ...
    @overload  # expand=False (keyword argument)
    def rpartition(self, sep: str = " ", *, expand: Literal[False]) -> _T_OBJECT: ...
    def get(self, i: int | Hashable) -> _T_STR: ...
    def join(self, sep: str) -> _T_STR: ...
    def contains(
        self,
        pat: str | re.Pattern[str],
        case: bool = True,
        flags: int = 0,
        na: Scalar | NaTType | None = ...,
        regex: bool = True,
    ) -> _T_BOOL: ...
    def match(
        self,
        pat: str | re.Pattern[str],
        case: bool = True,
        flags: int = 0,
        na: Scalar | NaTType | None = ...,
    ) -> _T_BOOL: ...
    def fullmatch(
        self,
        pat: str | re.Pattern[str],
        case: bool = True,
        flags: int = 0,
        na: Scalar | NaTType | None = ...,
    ) -> _T_BOOL: ...
    def replace(
        self,
        pat: str | re.Pattern[str],
        repl: str | Callable[[re.Match[str]], str],
        n: int = -1,
        case: bool | None = None,
        flags: int = 0,
        regex: bool = False,
    ) -> _T_STR: ...
    def repeat(self, repeats: int | Sequence[int]) -> _T_STR: ...
    def pad(
        self,
        width: int,
        side: Literal["left", "right", "both"] = "left",
        fillchar: str = " ",
    ) -> _T_STR: ...
    def center(self, width: int, fillchar: str = " ") -> _T_STR: ...
    def ljust(self, width: int, fillchar: str = " ") -> _T_STR: ...
    def rjust(self, width: int, fillchar: str = " ") -> _T_STR: ...
    def zfill(self, width: int) -> _T_STR: ...
    def slice(
        self, start: int | None = None, stop: int | None = None, step: int | None = None
    ) -> T: ...
    def slice_replace(
        self, start: int | None = None, stop: int | None = None, repl: str | None = None
    ) -> _T_STR: ...
    def decode(
        self, encoding: str, errors: str = "strict", dtype: str | DtypeObj | None = None
    ) -> _T_STR: ...
    def encode(self, encoding: str, errors: str = "strict") -> _T_BYTES: ...
    def strip(self, to_strip: str | None = None) -> _T_STR: ...
    def lstrip(self, to_strip: str | None = None) -> _T_STR: ...
    def rstrip(self, to_strip: str | None = None) -> _T_STR: ...
    def removeprefix(self, prefix: str) -> _T_STR: ...
    def removesuffix(self, suffix: str) -> _T_STR: ...
    def wrap(
        self,
        width: int,
        *,
        # kwargs passed to textwrap.TextWrapper
        expand_tabs: bool = True,
        replace_whitespace: bool = True,
        drop_whitespace: bool = True,
        break_long_words: bool = True,
        break_on_hyphens: bool = True,
    ) -> _T_STR: ...
    def get_dummies(self, sep: str = "|") -> _T_EXPANDING: ...
    def translate(self, table: Mapping[int, int | str | None] | None) -> _T_STR: ...
    def count(self, pat: str, flags: int = 0) -> _T_INT: ...
    def startswith(
        self, pat: str | tuple[str, ...], na: Scalar | NaTType | None = ...
    ) -> _T_BOOL: ...
    def endswith(
        self, pat: str | tuple[str, ...], na: Scalar | NaTType | None = ...
    ) -> _T_BOOL: ...
    def findall(self, pat: str | re.Pattern[str], flags: int = 0) -> _T_LIST_STR: ...
    @overload  # expand=True
    def extract(
        self, pat: str | re.Pattern[str], flags: int = 0, expand: Literal[True] = True
    ) -> pd.DataFrame: ...
    @overload  # expand=False (positional argument)
    def extract(
        self, pat: str | re.Pattern[str], flags: int, expand: Literal[False]
    ) -> _T_OBJECT: ...
    @overload  # expand=False (keyword argument)
    def extract(
        self, pat: str | re.Pattern[str], flags: int = 0, *, expand: Literal[False]
    ) -> _T_OBJECT: ...
    def extractall(
        self, pat: str | re.Pattern[str], flags: int = 0
    ) -> pd.DataFrame: ...
    def find(self, sub: str, start: int = 0, end: int | None = None) -> _T_INT: ...
    def rfind(self, sub: str, start: int = 0, end: int | None = None) -> _T_INT: ...
    def normalize(self, form: Literal["NFC", "NFKC", "NFD", "NFKD"]) -> _T_STR: ...
    def index(self, sub: str, start: int = 0, end: int | None = None) -> _T_INT: ...
    def rindex(self, sub: str, start: int = 0, end: int | None = None) -> _T_INT: ...
    def len(self) -> _T_INT: ...
    def lower(self) -> _T_STR: ...
    def upper(self) -> _T_STR: ...
    def title(self) -> _T_STR: ...
    def capitalize(self) -> _T_STR: ...
    def swapcase(self) -> _T_STR: ...
    def casefold(self) -> _T_STR: ...
    def isalnum(self) -> _T_BOOL: ...
    def isalpha(self) -> _T_BOOL: ...
    def isdigit(self) -> _T_BOOL: ...
    def isspace(self) -> _T_BOOL: ...
    def islower(self) -> _T_BOOL: ...
    def isupper(self) -> _T_BOOL: ...
    def istitle(self) -> _T_BOOL: ...
    def isnumeric(self) -> _T_BOOL: ...
    def isdecimal(self) -> _T_BOOL: ...
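
# Illustrative sketch (not part of the diff): the expand overloads above are
# what let a checker distinguish the two result shapes of split/partition.
import pandas as pd

s = pd.Series(["a_b", "c_d"])
wide = s.str.split("_", expand=True)    # Literal[True] overload -> DataFrame
parts = s.str.split("_", expand=False)  # Literal[False] overload -> Series of list[str]
triple = s.str.partition("_")           # default expand=True -> DataFrame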
@ -0,0 +1,119 @@
from collections.abc import Sequence
from datetime import (
    date,
    datetime,
)
from typing import (
    Literal,
    TypedDict,
    overload,
)

import numpy as np
from pandas import (
    Index,
    Timestamp,
)
from pandas.core.arrays import ExtensionArray
from pandas.core.indexes.datetimes import DatetimeIndex
from pandas.core.series import (
    Series,
    TimestampSeries,
)
from typing_extensions import TypeAlias

from pandas._libs.tslibs import NaTType
from pandas._typing import (
    AnyArrayLike,
    DictConvertible,
    IgnoreRaise,
    RaiseCoerce,
    TimestampConvertibleTypes,
    npt,
)

ArrayConvertible: TypeAlias = list | tuple | AnyArrayLike
Scalar: TypeAlias = float | str
DatetimeScalar: TypeAlias = Scalar | datetime | np.datetime64 | date

DatetimeScalarOrArrayConvertible: TypeAlias = DatetimeScalar | ArrayConvertible

DatetimeDictArg: TypeAlias = list[Scalar] | tuple[Scalar, ...] | AnyArrayLike

class YearMonthDayDict(TypedDict, total=True):
    year: DatetimeDictArg
    month: DatetimeDictArg
    day: DatetimeDictArg

class FulldatetimeDict(YearMonthDayDict, total=False):
    hour: DatetimeDictArg
    hours: DatetimeDictArg
    minute: DatetimeDictArg
    minutes: DatetimeDictArg
    second: DatetimeDictArg
    seconds: DatetimeDictArg
    ms: DatetimeDictArg
    us: DatetimeDictArg
    ns: DatetimeDictArg

@overload
def to_datetime(
    arg: DatetimeScalar,
    errors: IgnoreRaise = ...,
    dayfirst: bool = ...,
    yearfirst: bool = ...,
    utc: bool = ...,
    format: str | None = ...,
    exact: bool = ...,
    unit: str | None = ...,
    origin: Literal["julian", "unix"] | TimestampConvertibleTypes = ...,
    cache: bool = ...,
) -> Timestamp: ...
@overload
def to_datetime(
    arg: DatetimeScalar,
    errors: Literal["coerce"],
    dayfirst: bool = ...,
    yearfirst: bool = ...,
    utc: bool = ...,
    format: str | None = ...,
    exact: bool = ...,
    unit: str | None = ...,
    origin: Literal["julian", "unix"] | TimestampConvertibleTypes = ...,
    cache: bool = ...,
) -> Timestamp | NaTType: ...
@overload
def to_datetime(
    arg: Series | DictConvertible,
    errors: RaiseCoerce = ...,
    dayfirst: bool = ...,
    yearfirst: bool = ...,
    utc: bool = ...,
    format: str | None = ...,
    exact: bool = ...,
    unit: str | None = ...,
    origin: Literal["julian", "unix"] | TimestampConvertibleTypes = ...,
    cache: bool = ...,
) -> TimestampSeries: ...
@overload
def to_datetime(
    arg: (
        Sequence[float | date]
        | list[str]
        | tuple[float | str | date, ...]
        | npt.NDArray[np.datetime64]
        | npt.NDArray[np.str_]
        | npt.NDArray[np.int_]
        | Index
        | ExtensionArray
    ),
    errors: RaiseCoerce = ...,
    dayfirst: bool = ...,
    yearfirst: bool = ...,
    utc: bool = ...,
    format: str | None = ...,
    exact: bool = ...,
    unit: str | None = ...,
    origin: Literal["julian", "unix"] | TimestampConvertibleTypes = ...,
    cache: bool = ...,
) -> DatetimeIndex: ...
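
# Illustrative sketch (not part of the diff): expected overload resolution,
# with made-up input values.
import pandas as pd

ts = pd.to_datetime("2024-01-02")                   # Timestamp
maybe = pd.to_datetime("oops", errors="coerce")     # Timestamp | NaTType
ser = pd.to_datetime(pd.Series(["2024-01-02"]))     # TimestampSeries
idx = pd.to_datetime(["2024-01-02", "2024-01-03"])  # DatetimeIndex
# A FulldatetimeDict-shaped mapping takes the DictConvertible path:
assembled = pd.to_datetime({"year": [2024], "month": [1], "day": [2]})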
@ -0,0 +1,40 @@
from typing import (
    Literal,
    overload,
)

import numpy as np
import pandas as pd
from typing_extensions import TypeAlias

from pandas._libs.lib import _NoDefaultDoNotUse
from pandas._typing import (
    DtypeBackend,
    RaiseCoerce,
    Scalar,
    npt,
)

_Downcast: TypeAlias = Literal["integer", "signed", "unsigned", "float"] | None

@overload
def to_numeric(
    arg: Scalar,
    errors: Literal["raise", "coerce"] = ...,
    downcast: _Downcast = ...,
    dtype_backend: DtypeBackend | _NoDefaultDoNotUse = ...,
) -> float: ...
@overload
def to_numeric(
    arg: list | tuple | np.ndarray,
    errors: RaiseCoerce = ...,
    downcast: _Downcast = ...,
    dtype_backend: DtypeBackend | _NoDefaultDoNotUse = ...,
) -> npt.NDArray: ...
@overload
def to_numeric(
    arg: pd.Series,
    errors: RaiseCoerce = ...,
    downcast: _Downcast = ...,
    dtype_backend: DtypeBackend | _NoDefaultDoNotUse = ...,
) -> pd.Series: ...
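
# Illustrative sketch (not part of the diff): expected overload resolution.
import pandas as pd

x = pd.to_numeric("1.5")                                     # float
arr = pd.to_numeric(["1", "2"], downcast="integer")          # numpy array
ser = pd.to_numeric(pd.Series(["1", "x"]), errors="coerce")  # Series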
@ -0,0 +1,44 @@
from collections.abc import Sequence
from datetime import timedelta
from typing import overload

from pandas import Index
from pandas.core.indexes.timedeltas import TimedeltaIndex
from pandas.core.series import (
    Series,
    TimedeltaSeries,
)

from pandas._libs.tslibs import Timedelta
from pandas._libs.tslibs.timedeltas import TimeDeltaUnitChoices
from pandas._typing import (
    ArrayLike,
    RaiseCoerce,
    SequenceNotStr,
)

@overload
def to_timedelta(
    arg: str | float | timedelta,
    unit: TimeDeltaUnitChoices | None = ...,
    errors: RaiseCoerce = ...,
) -> Timedelta: ...
@overload
def to_timedelta(
    arg: Series,
    unit: TimeDeltaUnitChoices | None = ...,
    errors: RaiseCoerce = ...,
) -> TimedeltaSeries: ...
@overload
def to_timedelta(
    arg: (
        SequenceNotStr
        | Sequence[float | timedelta]
        | tuple[str | float | timedelta, ...]
        | range
        | ArrayLike
        | Index
    ),
    unit: TimeDeltaUnitChoices | None = ...,
    errors: RaiseCoerce = ...,
) -> TimedeltaIndex: ...
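
# Illustrative sketch (not part of the diff): expected overload resolution.
import pandas as pd

td = pd.to_timedelta("1h")                          # Timedelta
ser = pd.to_timedelta(pd.Series([1, 2]), unit="s")  # TimedeltaSeries
idx = pd.to_timedelta(range(3), unit="D")           # TimedeltaIndex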
@ -0,0 +1,25 @@
import numpy as np
from pandas import (
    DataFrame,
    Index,
    Series,
)

from pandas._typing import (
    ArrayLike,
    npt,
)

def hash_pandas_object(
    obj: Index | Series | DataFrame,
    index: bool = True,
    encoding: str = "utf8",
    hash_key: str | None = "0123456789123456",
    categorize: bool = True,
) -> Series: ...
def hash_array(
    vals: ArrayLike,
    encoding: str = "utf8",
    hash_key: str = "0123456789123456",
    categorize: bool = True,
) -> npt.NDArray[np.uint64]: ...
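
# Illustrative sketch (not part of the diff), via the public pd.util wrappers:
# row hashes come back as a uint64 Series aligned with the input.
import numpy as np
import pandas as pd

df = pd.DataFrame({"a": [1, 2]})
row_hashes = pd.util.hash_pandas_object(df)            # Series of uint64
arr_hashes = pd.util.hash_array(np.array(["x", "y"]))  # ndarray of uint64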
@ -0,0 +1,13 @@
from pandas.core.window.ewm import (
    ExponentialMovingWindow as ExponentialMovingWindow,
    ExponentialMovingWindowGroupby as ExponentialMovingWindowGroupby,
)
from pandas.core.window.expanding import (
    Expanding as Expanding,
    ExpandingGroupby as ExpandingGroupby,
)
from pandas.core.window.rolling import (
    Rolling as Rolling,
    RollingGroupby as RollingGroupby,
    Window as Window,
)
@ -0,0 +1,69 @@
from pandas import (
    DataFrame,
    Series,
)
from pandas.core.window.rolling import (
    BaseWindow,
    BaseWindowGroupby,
)

from pandas._typing import (
    NDFrameT,
    WindowingEngine,
    WindowingEngineKwargs,
)

class ExponentialMovingWindow(BaseWindow[NDFrameT]):
    def mean(
        self,
        numeric_only: bool = False,
        engine: WindowingEngine = None,
        engine_kwargs: WindowingEngineKwargs = None,
    ) -> NDFrameT: ...
    def sum(
        self,
        numeric_only: bool = False,
        engine: WindowingEngine = None,
        engine_kwargs: WindowingEngineKwargs = None,
    ) -> NDFrameT: ...
    def std(self, bias: bool = False, numeric_only: bool = False) -> NDFrameT: ...
    def var(self, bias: bool = False, numeric_only: bool = False) -> NDFrameT: ...
    def cov(
        self,
        other: DataFrame | Series | None = None,
        pairwise: bool | None = None,
        bias: bool = False,
        numeric_only: bool = False,
    ) -> NDFrameT: ...
    def corr(
        self,
        other: DataFrame | Series | None = None,
        pairwise: bool | None = None,
        numeric_only: bool = False,
    ) -> NDFrameT: ...

class ExponentialMovingWindowGroupby(
    BaseWindowGroupby[NDFrameT], ExponentialMovingWindow[NDFrameT]
): ...

class OnlineExponentialMovingWindow(ExponentialMovingWindow[NDFrameT]):
    def reset(self) -> None: ...
    def aggregate(self, func, *args, **kwargs): ...
    def std(self, bias: bool = False, *args, **kwargs): ...  # pyrefly: ignore
    def corr(
        self,
        other: DataFrame | Series | None = None,
        pairwise: bool | None = None,
        numeric_only: bool = False,
    ): ...
    def cov(
        self,
        other: DataFrame | Series | None = None,
        pairwise: bool | None = None,
        bias: bool = False,
        numeric_only: bool = False,
    ): ...
    def var(self, bias: bool = False, numeric_only: bool = False): ...
    def mean(  # pyrefly: ignore
        self, *args, update: NDFrameT | None = ..., update_times: None = ..., **kwargs
    ) -> NDFrameT: ...
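
# Illustrative sketch (not part of the diff): ExponentialMovingWindow is
# reached through .ewm(), and NDFrameT preserves the calling container.
import pandas as pd

s = pd.Series([1.0, 2.0, 3.0])
smoothed = s.ewm(span=2).mean()  # Series in, Series out
df = pd.DataFrame({"a": [1.0, 2.0], "b": [2.0, 1.0]})
pairwise_cov = df.ewm(alpha=0.5).cov()  # DataFrame in, DataFrame out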
@ -0,0 +1,9 @@
from pandas.core.window.rolling import (
    BaseWindowGroupby,
    RollingAndExpandingMixin,
)

from pandas._typing import NDFrameT

class Expanding(RollingAndExpandingMixin[NDFrameT]): ...
class ExpandingGroupby(BaseWindowGroupby[NDFrameT], Expanding[NDFrameT]): ...
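
# Illustrative sketch (not part of the diff): Expanding adds no methods of its
# own; everything is inherited from RollingAndExpandingMixin, parameterized by
# the calling container via NDFrameT.
import pandas as pd

running_max = pd.Series([3, 1, 4]).expanding().max()  # Series in, Series out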