This commit is contained in:
2025-09-07 22:09:54 +02:00
parent e1b817252c
commit 2fc0d000b6
7796 changed files with 2159515 additions and 933 deletions

View File

@ -0,0 +1,428 @@
import datetime as dt
from datetime import (
timedelta,
tzinfo as _tzinfo,
)
from typing import (
Generic,
Literal,
TypeVar,
)
import numpy as np
from pandas import (
DatetimeIndex,
Index,
PeriodIndex,
Timedelta,
TimedeltaIndex,
)
from pandas.core.accessor import PandasDelegate
from pandas.core.arrays import (
DatetimeArray,
PeriodArray,
)
from pandas.core.base import NoNewAttributesMixin
from pandas.core.frame import DataFrame
from pandas.core.series import (
PeriodSeries,
Series,
TimedeltaSeries,
TimestampSeries,
)
from pandas._libs.tslibs import BaseOffset
from pandas._libs.tslibs.offsets import DateOffset
from pandas._typing import (
TimeAmbiguous,
TimeNonexistent,
TimestampConvention,
TimeUnit,
TimeZones,
np_1darray,
np_ndarray_bool,
)
# Common base for all `.dt` accessor namespaces: delegates attribute access to
# the underlying array and locks the accessor against new attribute creation.
class Properties(PandasDelegate, NoNewAttributesMixin): ...
# Result type of integer datetime fields: Series[int] via `.dt`, Index[int] on indexes.
_DTFieldOpsReturnType = TypeVar("_DTFieldOpsReturnType", bound=Series[int] | Index[int])
class _DayLikeFieldOps(Generic[_DTFieldOpsReturnType]):
    """Integer calendar fields shared by datetime-like and period accessors."""

    @property
    def year(self) -> _DTFieldOpsReturnType: ...
    @property
    def month(self) -> _DTFieldOpsReturnType: ...
    @property
    def day(self) -> _DTFieldOpsReturnType: ...
    @property
    def hour(self) -> _DTFieldOpsReturnType: ...
    @property
    def minute(self) -> _DTFieldOpsReturnType: ...
    @property
    def second(self) -> _DTFieldOpsReturnType: ...
    @property
    def weekday(self) -> _DTFieldOpsReturnType: ...
    @property
    def dayofweek(self) -> _DTFieldOpsReturnType: ...
    @property
    def day_of_week(self) -> _DTFieldOpsReturnType: ...
    @property
    def dayofyear(self) -> _DTFieldOpsReturnType: ...
    @property
    def day_of_year(self) -> _DTFieldOpsReturnType: ...
    @property
    def quarter(self) -> _DTFieldOpsReturnType: ...
    @property
    def days_in_month(self) -> _DTFieldOpsReturnType: ...
    @property
    def daysinmonth(self) -> _DTFieldOpsReturnType: ...
class _MiniSeconds(Generic[_DTFieldOpsReturnType]):
    """Sub-second integer fields (microsecond/nanosecond); datetime-only."""

    @property
    def microsecond(self) -> _DTFieldOpsReturnType: ...
    @property
    def nanosecond(self) -> _DTFieldOpsReturnType: ...
# Full set of integer fields for datetime-like data: day-level plus sub-second.
class _DatetimeFieldOps(
    _DayLikeFieldOps[_DTFieldOpsReturnType], _MiniSeconds[_DTFieldOpsReturnType]
): ...
# Result type of boolean fields: Series[bool] via `.dt`, bool ndarray on indexes.
_DTBoolOpsReturnType = TypeVar(
    "_DTBoolOpsReturnType", bound=Series[bool] | np_1darray[np.bool]
)
class _IsLeapYearProperty(Generic[_DTBoolOpsReturnType]):
    """`is_leap_year`, kept separate because periods also expose it."""

    @property
    def is_leap_year(self) -> _DTBoolOpsReturnType: ...
class _DatetimeBoolOps(
    _IsLeapYearProperty[_DTBoolOpsReturnType], Generic[_DTBoolOpsReturnType]
):
    """Boolean month/quarter/year boundary flags for datetime-like data."""

    @property
    def is_month_start(self) -> _DTBoolOpsReturnType: ...
    @property
    def is_month_end(self) -> _DTBoolOpsReturnType: ...
    @property
    def is_quarter_start(self) -> _DTBoolOpsReturnType: ...
    @property
    def is_quarter_end(self) -> _DTBoolOpsReturnType: ...
    @property
    def is_year_start(self) -> _DTBoolOpsReturnType: ...
    @property
    def is_year_end(self) -> _DTBoolOpsReturnType: ...
# `freq` is reported as a string via `.dt` but as a BaseOffset on indexes.
_DTFreqReturnType = TypeVar("_DTFreqReturnType", bound=str | BaseOffset)
class _FreqProperty(Generic[_DTFreqReturnType]):
    """Read-only `freq`; None when no frequency is set."""

    @property
    def freq(self) -> _DTFreqReturnType | None: ...
class _TZProperty:
    """Read-only `tz`; None for timezone-naive data."""

    @property
    def tz(self) -> _tzinfo | None: ...
# Object-valued attributes (freq + tz) shared by datetime accessors.
class _DatetimeObjectOps(
    _FreqProperty[_DTFreqReturnType], _TZProperty, Generic[_DTFreqReturnType]
): ...
# Result types of `.date` / `.time` / `.timetz`: object-dtype containers holding
# stdlib `datetime.date` / `datetime.time` values.
_DTOtherOpsDateReturnType = TypeVar(
    "_DTOtherOpsDateReturnType", bound=Series[dt.date] | np_1darray[np.object_]
)
_DTOtherOpsTimeReturnType = TypeVar(
    "_DTOtherOpsTimeReturnType", bound=Series[dt.time] | np_1darray[np.object_]
)
class _DatetimeOtherOps(Generic[_DTOtherOpsDateReturnType, _DTOtherOpsTimeReturnType]):
    """Conversions to plain `date`/`time` objects (timetz keeps tzinfo)."""

    @property
    def date(self) -> _DTOtherOpsDateReturnType: ...
    @property
    def time(self) -> _DTOtherOpsTimeReturnType: ...
    @property
    def timetz(self) -> _DTOtherOpsTimeReturnType: ...
# Aggregation of all property-style datetime accessor mixins; the five type
# parameters select Series- vs Index-flavoured return types.
class _DatetimeLikeOps(
    _DatetimeFieldOps[_DTFieldOpsReturnType],
    _DatetimeObjectOps[_DTFreqReturnType],
    _DatetimeBoolOps[_DTBoolOpsReturnType],
    _DatetimeOtherOps[_DTOtherOpsDateReturnType, _DTOtherOpsTimeReturnType],
    Generic[
        _DTFieldOpsReturnType,
        _DTBoolOpsReturnType,
        _DTOtherOpsDateReturnType,
        _DTOtherOpsTimeReturnType,
        _DTFreqReturnType,
    ],
): ...
# Ideally, the rounding methods would return TimestampSeries when `Series.dt.method`
# is invoked, but because of how Series.dt is hooked in and that we may not know the
# type of the series, we don't know which kind of series was passed
# in to the dt accessor
# Result type of the rounding methods (round/floor/ceil) and `as_unit`.
_DTTimestampTimedeltaReturnType = TypeVar(
    "_DTTimestampTimedeltaReturnType",
    bound=Series | TimestampSeries | TimedeltaSeries | DatetimeIndex | TimedeltaIndex,
)
class _DatetimeRoundingMethods(Generic[_DTTimestampTimedeltaReturnType]):
    """Frequency rounding (round/floor/ceil) for datetime- and timedelta-likes.

    `ambiguous`/`nonexistent` control DST-transition handling; they are only
    meaningful for tz-aware datetime data.
    """

    def round(
        self,
        freq: str | BaseOffset | None,
        ambiguous: Literal["raise", "infer", "NaT"] | bool | np_ndarray_bool = ...,
        nonexistent: (
            Literal["shift_forward", "shift_backward", "NaT", "raise"]
            | timedelta
            | Timedelta
        ) = ...,
    ) -> _DTTimestampTimedeltaReturnType: ...
    def floor(
        self,
        freq: str | BaseOffset | None,
        ambiguous: Literal["raise", "infer", "NaT"] | bool | np_ndarray_bool = ...,
        nonexistent: (
            Literal["shift_forward", "shift_backward", "NaT", "raise"]
            | timedelta
            | Timedelta
        ) = ...,
    ) -> _DTTimestampTimedeltaReturnType: ...
    def ceil(
        self,
        freq: str | BaseOffset | None,
        ambiguous: Literal["raise", "infer", "NaT"] | bool | np_ndarray_bool = ...,
        nonexistent: (
            Literal["shift_forward", "shift_backward", "NaT", "raise"]
            | timedelta
            | Timedelta
        ) = ...,
    ) -> _DTTimestampTimedeltaReturnType: ...
# Constrained (not bound) TypeVar: tz_localize/tz_convert/normalize return
# exactly TimestampSeries or DatetimeIndex depending on the accessor.
_DTNormalizeReturnType = TypeVar(
    "_DTNormalizeReturnType", TimestampSeries, DatetimeIndex
)
# strftime/month_name/day_name result; to_period result.
_DTStrKindReturnType = TypeVar("_DTStrKindReturnType", bound=Series[str] | Index)
_DTToPeriodReturnType = TypeVar(
    "_DTToPeriodReturnType", bound=PeriodSeries | PeriodIndex
)
class _DatetimeLikeNoTZMethods(
    _DatetimeRoundingMethods[_DTTimestampTimedeltaReturnType],
    Generic[
        _DTTimestampTimedeltaReturnType,
        _DTNormalizeReturnType,
        _DTStrKindReturnType,
        _DTToPeriodReturnType,
    ],
):
    """Method-style datetime operations that do not require tz-awareness."""

    def to_period(
        self, freq: str | BaseOffset | None = ...
    ) -> _DTToPeriodReturnType: ...
    def tz_localize(
        self,
        tz: TimeZones,
        ambiguous: TimeAmbiguous = ...,
        nonexistent: TimeNonexistent = ...,
    ) -> _DTNormalizeReturnType: ...
    def tz_convert(self, tz: TimeZones) -> _DTNormalizeReturnType: ...
    def normalize(self) -> _DTNormalizeReturnType: ...
    def strftime(self, date_format: str) -> _DTStrKindReturnType: ...
    def month_name(self, locale: str | None = ...) -> _DTStrKindReturnType: ...
    def day_name(self, locale: str | None = ...) -> _DTStrKindReturnType: ...
# Everything a datetime accessor exposes except tz-specific extras; shared by
# DatetimeProperties (Series.dt) and DatetimeIndexProperties.
class _DatetimeNoTZProperties(
    _DatetimeLikeOps[
        _DTFieldOpsReturnType,
        _DTBoolOpsReturnType,
        _DTOtherOpsDateReturnType,
        _DTOtherOpsTimeReturnType,
        _DTFreqReturnType,
    ],
    _DatetimeLikeNoTZMethods[
        _DTTimestampTimedeltaReturnType,
        _DTNormalizeReturnType,
        _DTStrKindReturnType,
        _DTToPeriodReturnType,
    ],
    Generic[
        _DTFieldOpsReturnType,
        _DTBoolOpsReturnType,
        _DTTimestampTimedeltaReturnType,
        _DTOtherOpsDateReturnType,
        _DTOtherOpsTimeReturnType,
        _DTFreqReturnType,
        _DTNormalizeReturnType,
        _DTStrKindReturnType,
        _DTToPeriodReturnType,
    ],
): ...
class DatetimeProperties(
    Properties,
    _DatetimeNoTZProperties[
        _DTFieldOpsReturnType,
        _DTBoolOpsReturnType,
        _DTTimestampTimedeltaReturnType,
        _DTOtherOpsDateReturnType,
        _DTOtherOpsTimeReturnType,
        _DTFreqReturnType,
        _DTNormalizeReturnType,
        _DTStrKindReturnType,
        _DTToPeriodReturnType,
    ],
    Generic[
        _DTFieldOpsReturnType,
        _DTBoolOpsReturnType,
        _DTTimestampTimedeltaReturnType,
        _DTOtherOpsDateReturnType,
        _DTOtherOpsTimeReturnType,
        _DTFreqReturnType,
        _DTNormalizeReturnType,
        _DTStrKindReturnType,
        _DTToPeriodReturnType,
    ],
):
    """`Series.dt` accessor for datetime-valued series."""

    def to_pydatetime(self) -> np_1darray[np.object_]: ...
    def isocalendar(self) -> DataFrame: ...
    @property
    def unit(self) -> TimeUnit: ...
    def as_unit(self, unit: TimeUnit) -> _DTTimestampTimedeltaReturnType: ...
# Timedelta accessor return types: integer components and float total_seconds.
_TDNoRoundingMethodReturnType = TypeVar(
    "_TDNoRoundingMethodReturnType", bound=Series[int] | Index
)
_TDTotalSecondsReturnType = TypeVar(
    "_TDTotalSecondsReturnType", bound=Series[float] | Index
)
class _TimedeltaPropertiesNoRounding(
    Generic[_TDNoRoundingMethodReturnType, _TDTotalSecondsReturnType]
):
    """Timedelta component accessors, shared by `.dt` and TimedeltaIndex."""

    def to_pytimedelta(self) -> np_1darray[np.object_]: ...
    @property
    def components(self) -> DataFrame: ...
    @property
    def days(self) -> _TDNoRoundingMethodReturnType: ...
    @property
    def seconds(self) -> _TDNoRoundingMethodReturnType: ...
    @property
    def microseconds(self) -> _TDNoRoundingMethodReturnType: ...
    @property
    def nanoseconds(self) -> _TDNoRoundingMethodReturnType: ...
    def total_seconds(self) -> _TDTotalSecondsReturnType: ...
class TimedeltaProperties(
    Properties,
    _TimedeltaPropertiesNoRounding[Series[int], Series[float]],
    _DatetimeRoundingMethods[TimedeltaSeries],
):
    """`Series.dt` accessor for timedelta-valued series."""

    @property
    def unit(self) -> TimeUnit: ...
    def as_unit(self, unit: TimeUnit) -> TimedeltaSeries: ...
# Period accessor return types, parameterized over Series vs Index flavours.
_PeriodDTReturnTypes = TypeVar(
    "_PeriodDTReturnTypes", bound=TimestampSeries | DatetimeIndex
)
_PeriodIntReturnTypes = TypeVar("_PeriodIntReturnTypes", bound=Series[int] | Index[int])
_PeriodStrReturnTypes = TypeVar("_PeriodStrReturnTypes", bound=Series[str] | Index)
_PeriodDTAReturnTypes = TypeVar(
    "_PeriodDTAReturnTypes", bound=DatetimeArray | DatetimeIndex
)
_PeriodPAReturnTypes = TypeVar("_PeriodPAReturnTypes", bound=PeriodArray | PeriodIndex)
class _PeriodProperties(
    Generic[
        _PeriodDTReturnTypes,
        _PeriodIntReturnTypes,
        _PeriodStrReturnTypes,
        _PeriodDTAReturnTypes,
        _PeriodPAReturnTypes,
    ]
):
    """Period-specific accessor members shared by `.dt` and PeriodIndex."""

    @property
    def start_time(self) -> _PeriodDTReturnTypes: ...
    @property
    def end_time(self) -> _PeriodDTReturnTypes: ...
    @property
    def qyear(self) -> _PeriodIntReturnTypes: ...
    def strftime(self, date_format: str) -> _PeriodStrReturnTypes: ...
    def to_timestamp(
        self,
        freq: str | DateOffset | None = ...,
        how: TimestampConvention = ...,
    ) -> _PeriodDTAReturnTypes: ...
    def asfreq(
        self,
        freq: str | DateOffset | None = ...,
        how: Literal["E", "END", "FINISH", "S", "START", "BEGIN"] = ...,
    ) -> _PeriodPAReturnTypes: ...
# Field ops as exposed directly on PeriodIndex (Index-flavoured returns).
class PeriodIndexFieldOps(
    _DayLikeFieldOps[Index[int]],
    _PeriodProperties[DatetimeIndex, Index[int], Index, DatetimeIndex, PeriodIndex],
): ...
# `Series.dt` accessor for period-valued series (Series-flavoured returns).
class PeriodProperties(
    Properties,
    _PeriodProperties[
        TimestampSeries, Series[int], Series[str], DatetimeArray, PeriodArray
    ],
    _DatetimeFieldOps[Series[int]],
    _IsLeapYearProperty,
    _FreqProperty[BaseOffset],
): ...
# Type of `Series.dt` when the series dtype is unknown: union of datetime,
# timedelta and period accessor members with generic `Series` returns.
class CombinedDatetimelikeProperties(
    DatetimeProperties[
        Series[int],
        Series[bool],
        Series,
        Series[dt.date],
        Series[dt.time],
        str,
        TimestampSeries,
        Series[str],
        PeriodSeries,
    ],
    _TimedeltaPropertiesNoRounding[Series[int], Series[float]],
    _PeriodProperties,
): ...
# `.dt` accessor when the series is statically known to hold timestamps;
# rounding methods then return TimestampSeries rather than plain Series.
class TimestampProperties(
    DatetimeProperties[
        Series[int],
        Series[bool],
        TimestampSeries,
        Series[dt.date],
        Series[dt.time],
        str,
        TimestampSeries,
        Series[str],
        PeriodSeries,
    ]
): ...
class DatetimeIndexProperties(
    Properties,
    _DatetimeNoTZProperties[
        Index[int],
        np_1darray[np.bool],
        DatetimeIndex,
        np_1darray[np.object_],
        np_1darray[np.object_],
        BaseOffset,
        DatetimeIndex,
        Index,
        PeriodIndex,
    ],
    _TZProperty,
):
    """Datetime accessor members as exposed directly on DatetimeIndex."""

    @property
    def is_normalized(self) -> bool: ...
    @property
    def tzinfo(self) -> _tzinfo | None: ...
    def to_pydatetime(self) -> np_1darray[np.object_]: ...
    def std(
        self, axis: int | None = ..., ddof: int = ..., skipna: bool = ...
    ) -> Timedelta: ...
# Timedelta accessor members as exposed directly on TimedeltaIndex.
class TimedeltaIndexProperties(
    Properties,
    _TimedeltaPropertiesNoRounding[Index, Index],
    _DatetimeRoundingMethods[TimedeltaIndex],
): ...

View File

@ -0,0 +1,8 @@
from pandas.core.indexes.base import Index as Index
from pandas.core.indexes.category import CategoricalIndex as CategoricalIndex
from pandas.core.indexes.datetimes import DatetimeIndex as DatetimeIndex
from pandas.core.indexes.interval import IntervalIndex as IntervalIndex
from pandas.core.indexes.multi import MultiIndex as MultiIndex
from pandas.core.indexes.period import PeriodIndex as PeriodIndex
from pandas.core.indexes.range import RangeIndex as RangeIndex
from pandas.core.indexes.timedeltas import TimedeltaIndex as TimedeltaIndex

View File

@ -0,0 +1,535 @@
from builtins import str as _str
from collections.abc import (
Callable,
Hashable,
Iterable,
Iterator,
Sequence,
)
from datetime import (
datetime,
timedelta,
)
from typing import (
Any,
ClassVar,
Generic,
Literal,
final,
overload,
type_check_only,
)
import numpy as np
from pandas import (
DataFrame,
DatetimeIndex,
Interval,
IntervalIndex,
MultiIndex,
Period,
PeriodDtype,
PeriodIndex,
Series,
TimedeltaIndex,
)
from pandas.core.arrays import ExtensionArray
from pandas.core.base import IndexOpsMixin
from pandas.core.strings.accessor import StringMethods
from typing_extensions import (
Never,
Self,
)
from pandas._libs.interval import _OrderableT
from pandas._typing import (
C2,
S1,
AnyAll,
ArrayLike,
AxesData,
DropKeep,
Dtype,
DtypeArg,
DTypeLike,
DtypeObj,
GenericT,
GenericT_co,
HashableT,
IgnoreRaise,
Label,
Level,
MaskType,
NaPosition,
ReindexMethod,
Scalar,
SequenceNotStr,
SliceType,
SupportsDType,
TimedeltaDtypeArg,
TimestampDtypeArg,
np_1darray,
np_ndarray_anyint,
np_ndarray_complex,
np_ndarray_float,
type_t,
)
# Raised when an indexer cannot be used with the given Index.
class InvalidIndexError(Exception): ...
class Index(IndexOpsMixin[S1]):
    """Immutable sequence used for indexing and alignment (type stub).

    The ``__new__`` overloads map the element type / ``dtype`` of ``data`` to
    the matching specialized index class (``DatetimeIndex``, ``PeriodIndex``,
    ``TimedeltaIndex``, ``IntervalIndex``) or to a parameterized ``Index[...]``.
    """

    # Indexes are unhashable even though they are immutable.
    __hash__: ClassVar[None]  # type: ignore[assignment]
    # overloads with additional dtypes
    @overload
    def __new__(  # pyright: ignore[reportOverlappingOverload]
        cls,
        data: Sequence[int | np.integer] | IndexOpsMixin[int] | np_ndarray_anyint,
        *,
        dtype: Literal["int"] | type_t[int | np.integer] = ...,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> Index[int]: ...
    @overload
    def __new__(
        cls,
        data: AxesData,
        *,
        dtype: Literal["int"] | type_t[int | np.integer],
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> Index[int]: ...
    @overload
    def __new__(
        cls,
        data: Sequence[float | np.floating] | IndexOpsMixin[float] | np_ndarray_float,
        *,
        dtype: Literal["float"] | type_t[float | np.floating] = ...,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> Index[float]: ...
    @overload
    def __new__(
        cls,
        data: AxesData,
        *,
        dtype: Literal["float"] | type_t[float | np.floating],
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> Index[float]: ...
    @overload
    def __new__(
        cls,
        data: (
            Sequence[complex | np.complexfloating]
            | IndexOpsMixin[complex]
            | np_ndarray_complex
        ),
        *,
        dtype: Literal["complex"] | type_t[complex | np.complexfloating] = ...,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> Index[complex]: ...
    @overload
    def __new__(
        cls,
        data: AxesData,
        *,
        dtype: Literal["complex"] | type_t[complex | np.complexfloating],
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> Index[complex]: ...
    # special overloads with dedicated Index-subclasses
    @overload
    def __new__(
        cls,
        data: Sequence[np.datetime64 | datetime] | IndexOpsMixin[datetime],
        *,
        dtype: TimestampDtypeArg = ...,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> DatetimeIndex: ...
    @overload
    def __new__(
        cls,
        data: AxesData,
        *,
        dtype: TimestampDtypeArg,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> DatetimeIndex: ...
    @overload
    def __new__(
        cls,
        data: Sequence[Period] | IndexOpsMixin[Period],
        *,
        dtype: PeriodDtype = ...,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> PeriodIndex: ...
    @overload
    def __new__(
        cls,
        data: AxesData,
        *,
        dtype: PeriodDtype,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> PeriodIndex: ...
    @overload
    def __new__(
        cls,
        data: Sequence[np.timedelta64 | timedelta] | IndexOpsMixin[timedelta],
        *,
        dtype: TimedeltaDtypeArg = ...,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> TimedeltaIndex: ...
    @overload
    def __new__(
        cls,
        data: AxesData,
        *,
        dtype: TimedeltaDtypeArg,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> TimedeltaIndex: ...
    @overload
    def __new__(
        cls,
        data: Sequence[Interval[_OrderableT]] | IndexOpsMixin[Interval[_OrderableT]],
        *,
        dtype: Literal["Interval"] = ...,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> IntervalIndex[Interval[_OrderableT]]: ...
    @overload
    def __new__(
        cls,
        data: AxesData,
        *,
        dtype: Literal["Interval"],
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> IntervalIndex[Interval[Any]]: ...
    # generic overloads
    @overload
    def __new__(
        cls,
        data: Iterable[S1] | IndexOpsMixin[S1],
        *,
        dtype: type[S1] = ...,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> Self: ...
    @overload
    def __new__(
        cls,
        data: AxesData = ...,
        *,
        dtype: type[S1],
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> Self: ...
    # fallback overload
    @overload
    def __new__(
        cls,
        data: AxesData,
        *,
        dtype: Dtype = ...,
        copy: bool = ...,
        name: Hashable = ...,
        tupleize_cols: bool = ...,
    ) -> Self: ...
    # `.str` accessor; type parameters select Index-flavoured return types.
    @property
    def str(
        self,
    ) -> StringMethods[
        Self,
        MultiIndex,
        np_1darray[np.bool],
        Index[list[_str]],
        Index[int],
        Index[bytes],
        Index[_str],
        Index,
    ]: ...
    @final
    def is_(self, other) -> bool: ...
    def __len__(self) -> int: ...
    def __array__(
        self, dtype: _str | np.dtype = ..., copy: bool | None = ...
    ) -> np_1darray: ...
    def __array_wrap__(self, result, context=...): ...
    @property
    def dtype(self) -> DtypeObj: ...
    @final
    def ravel(self, order: _str = ...): ...
    def view(self, cls=...): ...
    def astype(self, dtype: DtypeArg, copy: bool = True) -> Index: ...
    def take(
        self,
        indices,
        axis: int = 0,
        allow_fill: bool = True,
        fill_value: Scalar | None = None,
        **kwargs,
    ): ...
    def repeat(self, repeats, axis=...): ...
    def copy(self, name: Hashable = ..., deep: bool = False) -> Self: ...
    @final
    def __copy__(self, **kwargs): ...
    @final
    def __deepcopy__(self, memo=...): ...
    def format(
        self, name: bool = ..., formatter: Callable | None = ..., na_rep: _str = ...
    ) -> list[_str]: ...
    def to_flat_index(self): ...
    def to_series(self, index=..., name: Hashable = ...) -> Series: ...
    def to_frame(self, index: bool = True, name=...) -> DataFrame: ...
    @property
    def name(self) -> Hashable | None: ...
    @name.setter
    def name(self, value: Hashable) -> None: ...
    @property
    def names(self) -> list[Hashable | None]: ...
    @names.setter
    def names(self, names: SequenceNotStr[Hashable | None]) -> None: ...
    def set_names(self, names, *, level=..., inplace: bool = ...): ...
    @overload
    def rename(self, name, *, inplace: Literal[False] = False) -> Self: ...
    @overload
    def rename(self, name, *, inplace: Literal[True]) -> None: ...
    @property
    def nlevels(self) -> int: ...
    def get_level_values(self, level: int | _str) -> Index: ...
    def droplevel(self, level: Level | list[Level] = 0): ...
    @property
    def is_monotonic_increasing(self) -> bool: ...
    @property
    def is_monotonic_decreasing(self) -> bool: ...
    @property
    def is_unique(self) -> bool: ...
    @property
    def has_duplicates(self) -> bool: ...
    @property
    def inferred_type(self) -> _str: ...
    def __reduce__(self): ...
    @property
    def hasnans(self) -> bool: ...
    @final
    def isna(self): ...
    # Alias of isna, kept for API compatibility.
    isnull = ...
    @final
    def notna(self): ...
    # Alias of notna, kept for API compatibility.
    notnull = ...
    def fillna(self, value=...): ...
    def dropna(self, how: AnyAll = "any") -> Self: ...
    def unique(self, level=...) -> Self: ...
    def drop_duplicates(self, *, keep: DropKeep = ...) -> Self: ...
    def duplicated(self, keep: DropKeep = "first") -> np_1darray[np.bool]: ...
    # Logical operators are disallowed on a bare Index (Never parameters).
    def __and__(self, other: Never) -> Never: ...
    def __rand__(self, other: Never) -> Never: ...
    def __or__(self, other: Never) -> Never: ...
    def __ror__(self, other: Never) -> Never: ...
    def __xor__(self, other: Never) -> Never: ...
    def __rxor__(self, other: Never) -> Never: ...
    def __neg__(self) -> Self: ...
    @final
    def __nonzero__(self) -> None: ...
    # Truthiness of an Index is ambiguous and raises at runtime.
    __bool__ = ...
    def union(
        self, other: list[HashableT] | Self, sort: bool | None = None
    ) -> Index: ...
    def intersection(
        self, other: list[S1] | Self, sort: bool | None = False
    ) -> Self: ...
    def difference(self, other: list | Self, sort: bool | None = None) -> Self: ...
    def symmetric_difference(
        self,
        other: list[S1] | Self,
        result_name: Hashable = ...,
        sort: bool | None = None,
    ) -> Self: ...
    def get_loc(self, key: Label) -> int | slice | np_1darray[np.bool]: ...
    def get_indexer(
        self, target, method: ReindexMethod | None = ..., limit=..., tolerance=...
    ): ...
    def reindex(
        self,
        target,
        method: ReindexMethod | None = ...,
        level=...,
        limit=...,
        tolerance=...,
    ): ...
    def join(
        self,
        other,
        *,
        how: _str = ...,
        level=...,
        return_indexers: bool = ...,
        sort: bool = ...,
    ): ...
    @property
    def values(self) -> np_1darray: ...
    @property
    def array(self) -> ExtensionArray: ...
    def memory_usage(self, deep: bool = False): ...
    def where(self, cond, other: Scalar | ArrayLike | None = None): ...
    def __contains__(self, key) -> bool: ...
    @final
    def __setitem__(self, key, value) -> None: ...
    @overload
    def __getitem__(
        self,
        idx: slice | np_ndarray_anyint | Sequence[int] | Index | MaskType,
    ) -> Self: ...
    @overload
    def __getitem__(self, idx: int | tuple[np_ndarray_anyint, ...]) -> S1: ...
    @overload
    def append(
        self: Index[C2], other: Index[C2] | Sequence[Index[C2]]
    ) -> Index[C2]: ...
    @overload
    def append(self, other: Index | Sequence[Index]) -> Index: ...
    def putmask(self, mask, value): ...
    def equals(self, other) -> bool: ...
    @final
    def identical(self, other) -> bool: ...
    @final
    def asof(self, label): ...
    def asof_locs(self, where, mask): ...
    def sort_values(
        self,
        *,
        return_indexer: bool = ...,
        ascending: bool = ...,
        na_position: NaPosition = ...,
        key: Callable[[Index], Index] | None = None,
    ): ...
    @final
    def sort(self, *args, **kwargs) -> None: ...
    def argsort(self, *args, **kwargs): ...
    def get_indexer_non_unique(self, target): ...
    @final
    def get_indexer_for(self, target, **kwargs): ...
    @final
    def groupby(self, values) -> dict[Hashable, np.ndarray]: ...
    def map(self, mapper, na_action=...) -> Index: ...
    def isin(self, values, level=...) -> np_1darray[np.bool]: ...
    def slice_indexer(
        self,
        start: Label | None = None,
        end: Label | None = None,
        step: int | None = None,
    ): ...
    def get_slice_bound(self, label, side): ...
    def slice_locs(
        self, start: SliceType = None, end: SliceType = None, step: int | None = None
    ): ...
    def delete(self, loc) -> Self: ...
    def insert(self, loc, item) -> Self: ...
    def drop(self, labels, errors: IgnoreRaise = "raise") -> Self: ...
    @property
    def shape(self) -> tuple[int, ...]: ...
    # Extra methods from old stubs
    def __eq__(self, other: object) -> np_1darray[np.bool]: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    def __iter__(self) -> Iterator[S1]: ...
    def __ne__(self, other: object) -> np_1darray[np.bool]: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    def __le__(self, other: Self | S1) -> np_1darray[np.bool]: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    def __ge__(self, other: Self | S1) -> np_1darray[np.bool]: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    def __lt__(self, other: Self | S1) -> np_1darray[np.bool]: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    def __gt__(self, other: Self | S1) -> np_1darray[np.bool]: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    # overwrite inherited methods from OpsMixin
    @overload
    def __mul__(
        self: Index[int] | Index[float], other: timedelta
    ) -> TimedeltaIndex: ...
    @overload
    def __mul__(self, other: Any) -> Self: ...
    def __floordiv__(
        self,
        other: (
            float
            | IndexOpsMixin[int]
            | IndexOpsMixin[float]
            | Sequence[int]
            | Sequence[float]
        ),
    ) -> Self: ...
    def __rfloordiv__(
        self,
        other: (
            float
            | IndexOpsMixin[int]
            | IndexOpsMixin[float]
            | Sequence[int]
            | Sequence[float]
        ),
    ) -> Self: ...
    def __truediv__(
        self,
        other: (
            float
            | IndexOpsMixin[int]
            | IndexOpsMixin[float]
            | Sequence[int]
            | Sequence[float]
        ),
    ) -> Self: ...
    def __rtruediv__(
        self,
        other: (
            float
            | IndexOpsMixin[int]
            | IndexOpsMixin[float]
            | Sequence[int]
            | Sequence[float]
        ),
    ) -> Self: ...
    def infer_objects(self, copy: bool = True) -> Self: ...
@type_check_only
class _IndexSubclassBase(Index[S1], Generic[S1, GenericT_co]):
    """Type-check-only base tying an Index subclass to its numpy scalar type.

    ``GenericT_co`` is the numpy scalar type produced by ``to_numpy`` when no
    explicit ``dtype`` is requested.
    """

    @overload
    def to_numpy(  # pyrefly: ignore
        self,
        dtype: None = None,
        copy: bool = False,
        na_value: Scalar = ...,
        **kwargs,
    ) -> np_1darray[GenericT_co]: ...
    @overload
    def to_numpy(
        self,
        dtype: np.dtype[GenericT] | SupportsDType[GenericT] | type[GenericT],
        copy: bool = False,
        na_value: Scalar = ...,
        **kwargs,
    ) -> np_1darray[GenericT]: ...
    @overload
    def to_numpy(
        self,
        dtype: DTypeLike,
        copy: bool = False,
        na_value: Scalar = ...,
        **kwargs,
    ) -> np_1darray: ...

View File

@ -0,0 +1,53 @@
from collections.abc import (
Hashable,
Iterable,
)
from typing import (
final,
)
import numpy as np
from pandas.core import accessor
from pandas.core.indexes.base import Index
from pandas.core.indexes.extension import ExtensionIndex
from typing_extensions import Self
from pandas._typing import (
S1,
DtypeArg,
)
class CategoricalIndex(ExtensionIndex[S1], accessor.PandasDelegate):
    """Index of categorical values (type stub)."""

    # Integer codes into `categories`; -1 marks missing values.
    codes: np.ndarray = ...
    categories: Index = ...
    def __new__(
        cls,
        data: Iterable[S1] = ...,
        categories=...,
        ordered=...,
        dtype=...,
        copy: bool = ...,
        name: Hashable = ...,
    ) -> Self: ...
    def equals(self, other): ...
    @property
    def inferred_type(self) -> str: ...
    @property
    def values(self): ...
    def __contains__(self, key) -> bool: ...
    def __array__(
        self, dtype: DtypeArg = ..., copy: bool | None = ...
    ) -> np.ndarray: ...
    @property
    def is_unique(self) -> bool: ...
    @property
    def is_monotonic_increasing(self) -> bool: ...
    @property
    def is_monotonic_decreasing(self) -> bool: ...
    def unique(self, level=...): ...
    def reindex(self, target, method=..., level=..., limit=..., tolerance=...): ...
    @final
    def get_indexer(self, target, method=..., limit=..., tolerance=...): ...
    def get_indexer_non_unique(self, target): ...
    def delete(self, loc): ...
    def insert(self, loc, item): ...

View File

@ -0,0 +1,40 @@
import numpy as np
from pandas.core.indexes.extension import ExtensionIndex
from pandas.core.indexes.timedeltas import TimedeltaIndex
from typing_extensions import Self
from pandas._libs.tslibs import BaseOffset
from pandas._typing import (
S1,
AxisIndex,
GenericT_co,
TimeUnit,
)
class DatetimeIndexOpsMixin(ExtensionIndex[S1, GenericT_co]):
    """Shared behavior of datetime-like indexes (Datetime/Timedelta/Period)."""

    @property
    def freq(self) -> BaseOffset | None: ...
    @property
    def freqstr(self) -> str | None: ...
    @property
    def is_all_dates(self) -> bool: ...
    def min(
        self, axis: AxisIndex | None = None, skipna: bool = True, *args, **kwargs
    ) -> S1: ...
    def argmin(
        self, axis: AxisIndex | None = None, skipna: bool = True, *args, **kwargs
    ) -> np.int64: ...
    def max(
        self, axis: AxisIndex | None = None, skipna: bool = True, *args, **kwargs
    ) -> S1: ...
    def argmax(
        self, axis: AxisIndex | None = None, skipna: bool = True, *args, **kwargs
    ) -> np.int64: ...
    # Subtracting two datetime-like indexes yields the elementwise differences.
    def __rsub__(  # type: ignore[override]
        self, other: DatetimeIndexOpsMixin
    ) -> TimedeltaIndex: ...
class DatetimeTimedeltaMixin(DatetimeIndexOpsMixin[S1, GenericT_co]):
    """Adds time-unit (resolution) handling for datetime/timedelta indexes."""

    @property
    def unit(self) -> TimeUnit: ...
    def as_unit(self, unit: TimeUnit) -> Self: ...

View File

@ -0,0 +1,177 @@
from collections.abc import (
Hashable,
Sequence,
)
from datetime import (
datetime,
timedelta,
tzinfo as _tzinfo,
)
from typing import (
final,
overload,
)
import numpy as np
from pandas import (
DataFrame,
Index,
Timedelta,
TimedeltaIndex,
Timestamp,
)
from pandas.core.indexes.accessors import DatetimeIndexProperties
from pandas.core.indexes.datetimelike import DatetimeTimedeltaMixin
from pandas.core.series import (
TimedeltaSeries,
TimestampSeries,
)
from typing_extensions import Self
from pandas._libs.tslibs.offsets import DateOffset
from pandas._typing import (
AxesData,
DateAndDatetimeLike,
Dtype,
Frequency,
IntervalClosedType,
TimeUnit,
TimeZones,
)
from pandas.core.dtypes.dtypes import DatetimeTZDtype
from pandas.tseries.offsets import BaseOffset
class DatetimeIndex(
    DatetimeTimedeltaMixin[Timestamp, np.datetime64], DatetimeIndexProperties
):
    """Index of Timestamp values backed by datetime64 data (type stub)."""

    def __new__(
        cls,
        data: AxesData,
        freq: Frequency = ...,
        tz: TimeZones = ...,
        ambiguous: str = ...,
        dayfirst: bool = ...,
        yearfirst: bool = ...,
        dtype: Dtype = ...,
        copy: bool = ...,
        name: Hashable = ...,
    ) -> Self: ...
    def __reduce__(self): ...
    # various ignores needed for mypy, as we do want to restrict what can be used in
    # arithmetic for these types
    @overload
    def __add__(self, other: TimedeltaSeries) -> TimestampSeries: ...
    @overload
    def __add__(
        self, other: timedelta | Timedelta | TimedeltaIndex | BaseOffset
    ) -> DatetimeIndex: ...
    @overload
    def __sub__(self, other: TimedeltaSeries) -> TimestampSeries: ...
    @overload
    def __sub__(
        self, other: timedelta | Timedelta | TimedeltaIndex | BaseOffset
    ) -> DatetimeIndex: ...
    @overload
    def __sub__(
        self, other: datetime | Timestamp | DatetimeIndex
    ) -> TimedeltaIndex: ...
    @final
    def to_series(self, index=..., name: Hashable = ...) -> TimestampSeries: ...
    def snap(self, freq: str = ...): ...
    def slice_indexer(self, start=..., end=..., step=...): ...
    def searchsorted(self, value, side: str = ..., sorter=...): ...
    @property
    def inferred_type(self) -> str: ...
    def indexer_at_time(self, time, asof: bool = ...): ...
    def indexer_between_time(
        self,
        start_time: datetime | str,
        end_time: datetime | str,
        include_start: bool = True,
        include_end: bool = True,
    ): ...
    def to_julian_date(self) -> Index[float]: ...
    def isocalendar(self) -> DataFrame: ...
    @property
    def tzinfo(self) -> _tzinfo | None: ...
    @property
    def dtype(self) -> np.dtype | DatetimeTZDtype: ...
    def shift(
        self, periods: int = 1, freq: DateOffset | Timedelta | str | None = None
    ) -> Self: ...
# `date_range`: exactly two of start/end/periods/freq must determine the range;
# the overloads encode the valid parameter combinations.
@overload
def date_range(
    start: str | DateAndDatetimeLike,
    end: str | DateAndDatetimeLike,
    freq: str | timedelta | Timedelta | BaseOffset | None = None,
    tz: TimeZones = None,
    normalize: bool = False,
    name: Hashable | None = None,
    inclusive: IntervalClosedType = "both",
    unit: TimeUnit | None = None,
) -> DatetimeIndex: ...
@overload
def date_range(
    start: str | DateAndDatetimeLike,
    end: str | DateAndDatetimeLike,
    periods: int,
    tz: TimeZones = None,
    normalize: bool = False,
    name: Hashable | None = None,
    inclusive: IntervalClosedType = "both",
    unit: TimeUnit | None = None,
) -> DatetimeIndex: ...
@overload
def date_range(
    start: str | DateAndDatetimeLike,
    *,
    periods: int,
    freq: str | timedelta | Timedelta | BaseOffset | None = None,
    tz: TimeZones = None,
    normalize: bool = False,
    name: Hashable | None = None,
    inclusive: IntervalClosedType = "both",
    unit: TimeUnit | None = None,
) -> DatetimeIndex: ...
@overload
def date_range(
    *,
    end: str | DateAndDatetimeLike,
    periods: int,
    freq: str | timedelta | Timedelta | BaseOffset | None = None,
    tz: TimeZones = None,
    normalize: bool = False,
    name: Hashable | None = None,
    inclusive: IntervalClosedType = "both",
    unit: TimeUnit | None = None,
) -> DatetimeIndex: ...
# `bdate_range`: business-day range; supplying custom `holidays` requires an
# explicit `freq` (second overload makes both keyword-required).
@overload
def bdate_range(
    start: str | DateAndDatetimeLike | None = ...,
    end: str | DateAndDatetimeLike | None = ...,
    periods: int | None = ...,
    freq: str | timedelta | Timedelta | BaseOffset = ...,
    tz: TimeZones = ...,
    normalize: bool = ...,
    name: Hashable | None = ...,
    weekmask: str | None = ...,
    holidays: None = ...,
    inclusive: IntervalClosedType = ...,
) -> DatetimeIndex: ...
@overload
def bdate_range(
    start: str | DateAndDatetimeLike | None = ...,
    end: str | DateAndDatetimeLike | None = ...,
    periods: int | None = ...,
    *,
    freq: str | timedelta | Timedelta | BaseOffset,
    tz: TimeZones = ...,
    normalize: bool = ...,
    name: Hashable | None = ...,
    weekmask: str | None = ...,
    holidays: Sequence[str | DateAndDatetimeLike],
    inclusive: IntervalClosedType = ...,
) -> DatetimeIndex: ...

View File

@ -0,0 +1,8 @@
from pandas.core.indexes.base import _IndexSubclassBase
from pandas._typing import (
S1,
GenericT_co,
)
# Base class for indexes backed by an ExtensionArray.
class ExtensionIndex(_IndexSubclassBase[S1, GenericT_co]): ...

View File

@ -0,0 +1,9 @@
class FrozenList(list):
    """Immutable list used for e.g. Index.names; supports set-like ops."""

    def union(self, other) -> FrozenList: ...
    def difference(self, other) -> FrozenList: ...
    def __getitem__(self, n): ...
    def __radd__(self, other): ...
    def __eq__(self, other) -> bool: ...
    def __mul__(self, other): ...
    def __reduce__(self): ...
    # Hashable despite subclassing list, since contents are frozen.
    def __hash__(self) -> int: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleVariableOverride]

View File

@ -0,0 +1,367 @@
from collections.abc import (
Hashable,
Sequence,
)
import datetime as dt
from typing import (
Literal,
final,
overload,
)
import numpy as np
import pandas as pd
from pandas import Index
from pandas.core.indexes.extension import ExtensionIndex
from pandas.core.series import (
TimedeltaSeries,
TimestampSeries,
)
from typing_extensions import TypeAlias
from pandas._libs.interval import (
Interval as Interval,
IntervalMixin,
)
from pandas._libs.tslibs.offsets import BaseOffset
from pandas._typing import (
DatetimeLike,
DtypeArg,
FillnaOptions,
IntervalClosedType,
IntervalT,
Label,
MaskType,
np_1darray,
np_ndarray_anyint,
np_ndarray_bool,
npt,
)
from pandas.core.dtypes.dtypes import IntervalDtype as IntervalDtype
# "Edge" inputs accepted by the IntervalIndex constructors, grouped by the
# Interval endpoint type they produce.
_EdgesInt: TypeAlias = (
    Sequence[int]
    | npt.NDArray[np.int64]
    | npt.NDArray[np.int32]
    | npt.NDArray[np.intp]
    | pd.Series[int]
    | Index[int]
)
_EdgesFloat: TypeAlias = (
    Sequence[float] | npt.NDArray[np.float64] | pd.Series[float] | Index[float]
)
_EdgesTimestamp: TypeAlias = (
    Sequence[DatetimeLike]
    | npt.NDArray[np.datetime64]
    | TimestampSeries
    | pd.DatetimeIndex
)
_EdgesTimedelta: TypeAlias = (
    Sequence[pd.Timedelta]
    | npt.NDArray[np.timedelta64]
    | TimedeltaSeries
    | pd.TimedeltaIndex
)
# Scalar endpoint types accepted by ``interval_range``.
_TimestampLike: TypeAlias = pd.Timestamp | np.datetime64 | dt.datetime
_TimedeltaLike: TypeAlias = pd.Timedelta | np.timedelta64 | dt.timedelta
class IntervalIndex(ExtensionIndex[IntervalT, np.object_], IntervalMixin):
    """Immutable index of ``Interval`` objects (type stub).

    ``IntervalT`` fixes the interval endpoint type; the overloaded
    constructors below infer it (int, float, Timestamp or Timedelta) from
    the type of the supplied edges/tuples.
    """

    closed: IntervalClosedType
    def __new__(
        cls,
        data: Sequence[IntervalT],
        closed: IntervalClosedType = ...,
        dtype: IntervalDtype | None = ...,
        copy: bool = ...,
        name: Hashable = ...,
        verify_integrity: bool = ...,
    ) -> IntervalIndex[IntervalT]: ...
    # int overloads come first and deliberately overlap with float so that
    # integer edges resolve to Interval[int].
    @overload
    @classmethod
    def from_breaks( # pyright: ignore[reportOverlappingOverload]
        cls,
        breaks: _EdgesInt,
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[Interval[int]]: ...
    @overload
    @classmethod
    def from_breaks(
        cls,
        breaks: _EdgesFloat,
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[Interval[float]]: ...
    @overload
    @classmethod
    def from_breaks(
        cls,
        breaks: _EdgesTimestamp,
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[Interval[pd.Timestamp]]: ...
    @overload
    @classmethod
    def from_breaks(
        cls,
        breaks: _EdgesTimedelta,
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[Interval[pd.Timedelta]]: ...
    @overload
    @classmethod
    def from_arrays( # pyright: ignore[reportOverlappingOverload]
        cls,
        left: _EdgesInt,
        right: _EdgesInt,
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[Interval[int]]: ...
    @overload
    @classmethod
    def from_arrays(
        cls,
        left: _EdgesFloat,
        right: _EdgesFloat,
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[Interval[float]]: ...
    @overload
    @classmethod
    def from_arrays(
        cls,
        left: _EdgesTimestamp,
        right: _EdgesTimestamp,
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[Interval[pd.Timestamp]]: ...
    @overload
    @classmethod
    def from_arrays(
        cls,
        left: _EdgesTimedelta,
        right: _EdgesTimedelta,
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[Interval[pd.Timedelta]]: ...
    @overload
    @classmethod
    def from_tuples( # pyright: ignore[reportOverlappingOverload]
        cls,
        data: Sequence[tuple[int, int]],
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[pd.Interval[int]]: ...
    # Ignore misc here due to intentional overlap between int and float
    @overload
    @classmethod
    def from_tuples(
        cls,
        data: Sequence[tuple[float, float]],
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[pd.Interval[float]]: ...
    @overload
    @classmethod
    def from_tuples(
        cls,
        data: Sequence[
            tuple[pd.Timestamp, pd.Timestamp]
            | tuple[dt.datetime, dt.datetime]
            | tuple[np.datetime64, np.datetime64]
        ],
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[pd.Interval[pd.Timestamp]]: ...
    @overload
    @classmethod
    def from_tuples(
        cls,
        data: Sequence[
            tuple[pd.Timedelta, pd.Timedelta]
            | tuple[dt.timedelta, dt.timedelta]
            | tuple[np.timedelta64, np.timedelta64]
        ],
        closed: IntervalClosedType = ...,
        name: Hashable = ...,
        copy: bool = ...,
        dtype: IntervalDtype | None = ...,
    ) -> IntervalIndex[pd.Interval[pd.Timedelta]]: ...
    def to_tuples(self, na_tuple: bool = True) -> pd.Index: ...
    # ``in`` narrows statically: only a matching Interval can be contained;
    # any other object is known to yield False.
    @overload
    def __contains__(self, key: IntervalT) -> bool: ... # type: ignore[overload-overlap] # pyright: ignore[reportOverlappingOverload]
    @overload
    def __contains__(self, key: object) -> Literal[False]: ...
    def astype(self, dtype: DtypeArg, copy: bool = True) -> IntervalIndex: ...
    @property
    def inferred_type(self) -> str: ...
    def memory_usage(self, deep: bool = False) -> int: ...
    @property
    def is_overlapping(self) -> bool: ...
    def get_loc(self, key: Label) -> int | slice | np_1darray[np.bool]: ...
    @final
    def get_indexer(
        self,
        target: Index,
        method: FillnaOptions | Literal["nearest"] | None = ...,
        limit: int | None = ...,
        tolerance=...,
    ) -> npt.NDArray[np.intp]: ...
    def get_indexer_non_unique(
        self, target: Index
    ) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]]: ...
    @property
    def left(self) -> Index: ...
    @property
    def right(self) -> Index: ...
    @property
    def mid(self) -> Index: ...
    @property
    def length(self) -> Index: ...
    # Array-like selection keeps the IntervalIndex; scalar position yields
    # the Interval itself.
    @overload # type: ignore[override]
    def __getitem__(
        self,
        idx: (
            slice
            | np_ndarray_anyint
            | Sequence[int]
            | Index
            | MaskType
            | np_ndarray_bool
        ),
    ) -> IntervalIndex[IntervalT]: ...
    @overload
    def __getitem__( # pyright: ignore[reportIncompatibleMethodOverride]
        self, idx: int
    ) -> IntervalT: ...
    # Elementwise comparisons: ndarray[bool] against scalar/index operands,
    # Series[bool] against Series operands.
    @overload # type: ignore[override]
    def __gt__(
        self, other: IntervalT | IntervalIndex[IntervalT]
    ) -> np_1darray[np.bool]: ...
    @overload
    def __gt__( # pyright: ignore[reportIncompatibleMethodOverride]
        self, other: pd.Series[IntervalT]
    ) -> pd.Series[bool]: ...
    @overload # type: ignore[override]
    def __ge__(
        self, other: IntervalT | IntervalIndex[IntervalT]
    ) -> np_1darray[np.bool]: ...
    @overload
    def __ge__( # pyright: ignore[reportIncompatibleMethodOverride]
        self, other: pd.Series[IntervalT]
    ) -> pd.Series[bool]: ...
    @overload # type: ignore[override]
    def __le__(
        self, other: IntervalT | IntervalIndex[IntervalT]
    ) -> np_1darray[np.bool]: ...
    @overload
    def __le__( # pyright: ignore[reportIncompatibleMethodOverride]
        self, other: pd.Series[IntervalT]
    ) -> pd.Series[bool]: ...
    @overload # type: ignore[override]
    def __lt__(
        self, other: IntervalT | IntervalIndex[IntervalT]
    ) -> np_1darray[np.bool]: ...
    @overload
    def __lt__( # pyright: ignore[reportIncompatibleMethodOverride]
        self, other: pd.Series[IntervalT]
    ) -> pd.Series[bool]: ...
    @overload # type: ignore[override]
    def __eq__(self, other: IntervalT | IntervalIndex[IntervalT]) -> np_1darray[np.bool]: ... # type: ignore[overload-overlap] # pyright: ignore[reportOverlappingOverload]
    @overload
    def __eq__(self, other: pd.Series[IntervalT]) -> pd.Series[bool]: ... # type: ignore[overload-overlap]
    @overload
    def __eq__( # pyright: ignore[reportIncompatibleMethodOverride]
        self, other: object
    ) -> Literal[False]: ...
    @overload # type: ignore[override]
    def __ne__(self, other: IntervalT | IntervalIndex[IntervalT]) -> np_1darray[np.bool]: ... # type: ignore[overload-overlap] # pyright: ignore[reportOverlappingOverload]
    @overload
    def __ne__(self, other: pd.Series[IntervalT]) -> pd.Series[bool]: ... # type: ignore[overload-overlap]
    @overload
    def __ne__( # pyright: ignore[reportIncompatibleMethodOverride]
        self, other: object
    ) -> Literal[True]: ...
# interval_range: endpoint type of the produced intervals is inferred from
# the start/end arguments via the overloads below.
# misc here because int and float overlap but interval has distinct types
# int gets hit first and so the correct type is returned
@overload
def interval_range( # pyright: ignore[reportOverlappingOverload]
    start: int | None = ...,
    end: int | None = ...,
    periods: int | None = ...,
    freq: int | None = ...,
    name: Hashable = ...,
    closed: IntervalClosedType = ...,
) -> IntervalIndex[Interval[int]]: ...
@overload
def interval_range(
    start: float | None = ...,
    end: float | None = ...,
    periods: int | None = ...,
    freq: int | None = ...,
    name: Hashable = ...,
    closed: IntervalClosedType = ...,
) -> IntervalIndex[Interval[float]]: ...
@overload
def interval_range(
    start: _TimestampLike,
    end: _TimestampLike | None = ...,
    periods: int | None = ...,
    freq: str | BaseOffset | pd.Timedelta | dt.timedelta | None = ...,
    name: Hashable = ...,
    closed: IntervalClosedType = ...,
) -> IntervalIndex[Interval[pd.Timestamp]]: ...
@overload
def interval_range(
    *,
    start: None = ...,
    end: _TimestampLike,
    periods: int | None = ...,
    freq: str | BaseOffset | pd.Timedelta | dt.timedelta | None = ...,
    name: Hashable = ...,
    closed: IntervalClosedType = ...,
) -> IntervalIndex[Interval[pd.Timestamp]]: ...
@overload
def interval_range(
    start: _TimedeltaLike,
    end: _TimedeltaLike | None = ...,
    periods: int | None = ...,
    freq: str | BaseOffset | pd.Timedelta | dt.timedelta | None = ...,
    name: Hashable = ...,
    closed: IntervalClosedType = ...,
) -> IntervalIndex[Interval[pd.Timedelta]]: ...
@overload
def interval_range(
    *,
    start: None = ...,
    end: _TimedeltaLike,
    periods: int | None = ...,
    freq: str | BaseOffset | pd.Timedelta | dt.timedelta | None = ...,
    name: Hashable = ...,
    closed: IntervalClosedType = ...,
) -> IntervalIndex[Interval[pd.Timedelta]]: ...

View File

@ -0,0 +1,164 @@
from collections.abc import (
Callable,
Hashable,
Iterable,
Sequence,
)
from typing import (
final,
overload,
)
import numpy as np
import pandas as pd
from pandas.core.indexes.base import Index
from typing_extensions import Self
from pandas._typing import (
AnyAll,
Axes,
DropKeep,
Dtype,
HashableT,
IndexLabel,
Level,
MaskType,
NaPosition,
SequenceNotStr,
np_1darray,
np_ndarray_anyint,
)
class MultiIndex(Index):
    """Hierarchical (multi-level) axis index (type stub)."""

    def __new__(
        cls,
        levels: Sequence[SequenceNotStr[Hashable]] = ...,
        codes: Sequence[Sequence[int]] = ...,
        sortorder: int | None = ...,
        names: SequenceNotStr[Hashable] = ...,
        copy: bool = ...,
        name: SequenceNotStr[Hashable] = ...,
        verify_integrity: bool = ...,
    ) -> Self: ...
    # Alternate constructors.
    @classmethod
    def from_arrays(
        cls,
        arrays: Sequence[Axes],
        sortorder: int | None = ...,
        names: SequenceNotStr[Hashable] = ...,
    ) -> Self: ...
    @classmethod
    def from_tuples(
        cls,
        tuples: Iterable[tuple[Hashable, ...]],
        sortorder: int | None = ...,
        names: SequenceNotStr[Hashable] = ...,
    ) -> Self: ...
    @classmethod
    def from_product(
        cls,
        iterables: Sequence[SequenceNotStr[Hashable] | pd.Series | pd.Index | range],
        sortorder: int | None = ...,
        names: SequenceNotStr[Hashable] = ...,
    ) -> Self: ...
    @classmethod
    def from_frame(
        cls,
        df: pd.DataFrame,
        sortorder: int | None = ...,
        names: SequenceNotStr[Hashable] = ...,
    ) -> Self: ...
    @property
    def shape(self): ...
    @property # Should be read-only
    def levels(self) -> list[Index]: ...
    def set_levels(self, levels, *, level=..., verify_integrity: bool = ...): ...
    @property
    def codes(self): ...
    def set_codes(self, codes, *, level=..., verify_integrity: bool = ...): ...
    def copy( # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride] # pyrefly: ignore
        self, names: SequenceNotStr[Hashable] = ..., deep: bool = False
    ) -> Self: ...
    def view(self, cls=...): ...
    def __contains__(self, key) -> bool: ...
    @property
    def dtype(self) -> np.dtype: ...
    @property
    def dtypes(self) -> pd.Series[Dtype]: ...
    def memory_usage(self, deep: bool = False) -> int: ...
    @property
    def nbytes(self) -> int: ...
    def format(
        self,
        name: bool | None = ...,
        formatter: Callable | None = ...,
        na_rep: str | None = ...,
        names: bool = ...,
        space: int = ...,
        sparsify: bool | None = ...,
        adjoin: bool = ...,
    ) -> list: ...
    def __len__(self) -> int: ...
    @property
    def values(self): ...
    @property
    def is_monotonic_increasing(self) -> bool: ...
    @property
    def is_monotonic_decreasing(self) -> bool: ...
    def duplicated(self, keep: DropKeep = "first"): ...
    def dropna(self, how: AnyAll = "any") -> Self: ...
    def get_level_values(self, level: str | int) -> Index: ...
    def unique(self, level=...): ...
    def to_frame( # pyrefly: ignore
        self,
        index: bool = True,
        name: list[HashableT] = ...,
        allow_duplicates: bool = False,
    ) -> pd.DataFrame: ...
    def to_flat_index(self): ...
    def remove_unused_levels(self): ...
    @property
    def nlevels(self) -> int: ...
    @property
    def levshape(self): ...
    def __reduce__(self): ...
    # Array-like selection keeps the MultiIndex; a scalar position yields the
    # tuple of labels at that position.
    @overload # type: ignore[override]
    def __getitem__(
        self,
        idx: slice | np_ndarray_anyint | Sequence[int] | Index | MaskType,
    ) -> Self: ...
    @overload
    def __getitem__( # pyright: ignore[reportIncompatibleMethodOverride]
        self, key: int
    ) -> tuple: ...
    def append(self, other): ... # pyrefly: ignore
    def repeat(self, repeats, axis=...): ...
    def drop(self, codes, level: Level | None = None, errors: str = "raise") -> Self: ... # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    def swaplevel(self, i: int = -2, j: int = -1): ...
    def reorder_levels(self, order): ...
    def sortlevel(
        self,
        level: Level | Sequence[Level] = 0,
        ascending: bool = True,
        sort_remaining: bool = True,
        na_position: NaPosition = "first",
    ): ...
    @final
    def get_indexer(self, target, method=..., limit=..., tolerance=...): ...
    def get_indexer_non_unique(self, target): ...
    def reindex(self, target, method=..., level=..., limit=..., tolerance=...): ...
    def get_slice_bound(
        self, label: Hashable | Sequence[Hashable], side: str
    ) -> int: ...
    def get_loc_level(
        self, key, level: Level | list[Level] | None = None, drop_level: bool = True
    ): ...
    def get_locs(self, seq): ...
    def truncate(
        self, before: IndexLabel | None = None, after: IndexLabel | None = None
    ): ...
    def equals(self, other) -> bool: ...
    def equal_levels(self, other): ...
    def insert(self, loc, item): ...
    def delete(self, loc): ...
    def isin(self, values, level=...) -> np_1darray[np.bool]: ...

View File

@ -0,0 +1,80 @@
from collections.abc import Hashable
import datetime
from typing import (
Any,
overload,
)
import numpy as np
import pandas as pd
from pandas import Index
from pandas.core.indexes.accessors import PeriodIndexFieldOps
from pandas.core.indexes.datetimelike import DatetimeIndexOpsMixin
from pandas.core.indexes.timedeltas import TimedeltaIndex
from typing_extensions import Self
from pandas._libs.tslibs import (
NaTType,
Period,
)
from pandas._libs.tslibs.period import _PeriodAddSub
from pandas._typing import (
AxesData,
Dtype,
Frequency,
np_1darray,
)
class PeriodIndex(DatetimeIndexOpsMixin[pd.Period, np.object_], PeriodIndexFieldOps):
    """Immutable index of ``Period`` values (type stub).

    Subtraction overloads: ``Period``/``PeriodIndex`` operands yield an
    offset ``Index``; timedelta-like or ``DateOffset`` operands keep the
    ``PeriodIndex`` type; ``NaT`` propagates as ``NaT``.
    """

    def __new__(
        cls,
        data: AxesData[Any] | None = None,
        freq: Frequency | None = None,
        dtype: Dtype | None = None,
        copy: bool = False,
        name: Hashable | None = None,
    ) -> Self: ...
    @property
    def values(self) -> np_1darray[np.object_]: ...
    @overload
    def __sub__(self, other: Period) -> Index: ...
    @overload
    def __sub__(self, other: Self) -> Index: ...
    @overload
    def __sub__(self, other: _PeriodAddSub) -> Self: ...
    @overload
    def __sub__(self, other: NaTType) -> NaTType: ...
    @overload
    def __sub__(self, other: TimedeltaIndex | pd.Timedelta) -> Self: ...
    @overload # type: ignore[override]
    def __rsub__(self, other: Period) -> Index: ...
    @overload
    def __rsub__(self, other: Self) -> Index: ...
    @overload
    def __rsub__( # pyright: ignore[reportIncompatibleMethodOverride]
        self, other: NaTType
    ) -> NaTType: ...
    def asof_locs(
        self,
        where: pd.DatetimeIndex | PeriodIndex,
        # np.bool (not np.bool_) for consistency with the other index stubs.
        mask: np_1darray[np.bool],
    ) -> np_1darray[np.intp]: ...
    @property
    def is_full(self) -> bool: ...
    @property
    def inferred_type(self) -> str: ...
    @property
    def freqstr(self) -> str: ...
    def shift(self, periods: int = 1, freq: Frequency | None = None) -> Self: ...
# Return a fixed-frequency PeriodIndex; exactly two of start/end/periods
# must be specified.
def period_range(
    start: (
        str | datetime.datetime | datetime.date | pd.Timestamp | pd.Period | None
    ) = None,
    end: (
        str | datetime.datetime | datetime.date | pd.Timestamp | pd.Period | None
    ) = None,
    periods: int | None = None,
    freq: Frequency | None = None,
    name: Hashable | None = None,
) -> PeriodIndex: ...

View File

@ -0,0 +1,94 @@
from collections.abc import (
Hashable,
Sequence,
)
from typing import (
final,
overload,
)
import numpy as np
from pandas.core.indexes.base import (
Index,
_IndexSubclassBase,
)
from pandas._typing import (
HashableT,
MaskType,
np_1darray,
np_ndarray_anyint,
)
class RangeIndex(_IndexSubclassBase[int, np.int64]):
    """Memory-efficient integer index backed by a ``range`` (type stub)."""

    def __new__(
        cls,
        start: int | RangeIndex | range = ...,
        stop: int = ...,
        step: int = ...,
        dtype=...,
        copy: bool = ...,
        name: Hashable = ...,
    ): ...
    @classmethod
    def from_range(cls, data, name: Hashable = ..., dtype=...): ...
    def __reduce__(self): ...
    @property
    def start(self) -> int: ...
    @property
    def stop(self) -> int: ...
    @property
    def step(self) -> int: ...
    @property
    def nbytes(self) -> int: ...
    def memory_usage(self, deep: bool = ...) -> int: ...
    @property
    def dtype(self) -> np.dtype: ...
    @property
    def is_unique(self) -> bool: ...
    @property
    def is_monotonic_increasing(self) -> bool: ...
    @property
    def is_monotonic_decreasing(self) -> bool: ...
    @property
    def has_duplicates(self) -> bool: ...
    def __contains__(self, key: int | np.integer) -> bool: ...
    @final
    def get_indexer(self, target, method=..., limit=..., tolerance=...): ...
    def tolist(self): ...
    def min(self, axis=..., skipna: bool = ..., *args, **kwargs): ...
    def max(self, axis=..., skipna: bool = ..., *args, **kwargs): ...
    def argsort(self, *args, **kwargs): ...
    def factorize(
        self, sort: bool = False, use_na_sentinel: bool = True
    ) -> tuple[np_1darray[np.intp], RangeIndex]: ...
    def equals(self, other): ...
    @final
    def join(
        self,
        other,
        *,
        how: str = ...,
        level=...,
        return_indexers: bool = ...,
        sort: bool = ...,
    ): ...
    def __len__(self) -> int: ...
    @property
    def size(self) -> int: ...
    def __floordiv__(self, other): ...
    def all(self, *args, **kwargs) -> bool: ...
    def any(self, *args, **kwargs) -> bool: ...
    @final
    def union( # pyrefly: ignore
        self, other: list[HashableT] | Index, sort: bool | None = None
    ) -> Index | Index[int] | RangeIndex: ...
    # Array-like selection may densify to a plain Index; a scalar position
    # yields the int element.
    @overload # type: ignore[override]
    def __getitem__(
        self,
        idx: slice | np_ndarray_anyint | Sequence[int] | Index | MaskType,
    ) -> Index: ...
    @overload
    def __getitem__( # pyright: ignore[reportIncompatibleMethodOverride]
        self, idx: int
    ) -> int: ...

View File

@ -0,0 +1,121 @@
from collections.abc import (
Hashable,
Sequence,
)
import datetime as dt
from typing import (
Literal,
final,
overload,
)
import numpy as np
from pandas import (
DateOffset,
Index,
Period,
)
from pandas.core.indexes.accessors import TimedeltaIndexProperties
from pandas.core.indexes.datetimelike import DatetimeTimedeltaMixin
from pandas.core.indexes.datetimes import DatetimeIndex
from pandas.core.indexes.period import PeriodIndex
from pandas.core.series import TimedeltaSeries
from typing_extensions import Self
from pandas._libs import (
Timedelta,
Timestamp,
)
from pandas._libs.tslibs import BaseOffset
from pandas._typing import (
AxesData,
TimedeltaConvertibleTypes,
num,
)
class TimedeltaIndex(
    DatetimeTimedeltaMixin[Timedelta, np.timedelta64], TimedeltaIndexProperties
):
    """Immutable index of ``Timedelta`` values (type stub).

    Arithmetic overloads encode the result types: adding datetimes gives a
    ``DatetimeIndex``, true division by a timedelta gives ``Index[float]``,
    floor division by a timedelta gives ``Index[int]``.
    """

    def __new__(
        cls,
        data: (
            Sequence[dt.timedelta | Timedelta | np.timedelta64 | float] | AxesData
        ) = ...,
        freq: str | BaseOffset = ...,
        closed: object = ...,
        dtype: Literal["<m8[ns]"] = ...,
        copy: bool = ...,
        name: str = ...,
    ) -> Self: ...
    # various ignores needed for mypy, as we do want to restrict what can be used in
    # arithmetic for these types
    @overload
    def __add__(self, other: Period) -> PeriodIndex: ...
    @overload
    def __add__(self, other: DatetimeIndex) -> DatetimeIndex: ...
    @overload
    def __add__(self, other: dt.timedelta | Timedelta | Self) -> Self: ...
    def __radd__(self, other: dt.datetime | Timestamp | DatetimeIndex) -> DatetimeIndex: ... # type: ignore[override]
    def __sub__(self, other: dt.timedelta | Timedelta | Self) -> Self: ...
    def __mul__(self, other: num) -> Self: ...
    @overload # type: ignore[override]
    def __truediv__(self, other: num | Sequence[float]) -> Self: ...
    @overload
    def __truediv__( # pyright: ignore[reportIncompatibleMethodOverride]
        self, other: dt.timedelta | Sequence[dt.timedelta]
    ) -> Index[float]: ...
    def __rtruediv__(self, other: dt.timedelta | Sequence[dt.timedelta]) -> Index[float]: ... # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    @overload # type: ignore[override]
    def __floordiv__(self, other: num | Sequence[float]) -> Self: ...
    @overload
    def __floordiv__( # pyright: ignore[reportIncompatibleMethodOverride]
        self, other: dt.timedelta | Sequence[dt.timedelta]
    ) -> Index[int]: ...
    def __rfloordiv__(self, other: dt.timedelta | Sequence[dt.timedelta]) -> Index[int]: ... # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
    def searchsorted(self, value, side: str = ..., sorter=...): ...
    @property
    def inferred_type(self) -> str: ...
    @final
    def to_series(self, index=..., name: Hashable = ...) -> TimedeltaSeries: ...
    def shift(self, periods: int = 1, freq=...) -> Self: ...
# Return a fixed-frequency TimedeltaIndex; the overloads enforce that
# exactly two of start/end/periods are supplied (the fourth accepts all
# three with the default freq).
@overload
def timedelta_range(
    start: TimedeltaConvertibleTypes,
    end: TimedeltaConvertibleTypes,
    *,
    freq: str | DateOffset | Timedelta | dt.timedelta | None = None,
    name: Hashable | None = None,
    closed: Literal["left", "right"] | None = None,
    unit: None | str = ...,
) -> TimedeltaIndex: ...
@overload
def timedelta_range(
    *,
    end: TimedeltaConvertibleTypes,
    periods: int,
    freq: str | DateOffset | Timedelta | dt.timedelta | None = None,
    name: Hashable | None = None,
    closed: Literal["left", "right"] | None = None,
    unit: None | str = ...,
) -> TimedeltaIndex: ...
@overload
def timedelta_range(
    start: TimedeltaConvertibleTypes,
    *,
    periods: int,
    freq: str | DateOffset | Timedelta | dt.timedelta | None = None,
    name: Hashable | None = None,
    closed: Literal["left", "right"] | None = None,
    unit: None | str = ...,
) -> TimedeltaIndex: ...
@overload
def timedelta_range(
    start: TimedeltaConvertibleTypes,
    end: TimedeltaConvertibleTypes,
    periods: int,
    *,
    name: Hashable | None = None,
    closed: Literal["left", "right"] | None = None,
    unit: None | str = ...,
) -> TimedeltaIndex: ...