done
This commit is contained in:
@@ -0,0 +1,7 @@  (pandas-stubs/io/json/__init__.pyi — new file, 7 lines)
| from pandas.io.json._json import ( | ||||
|     read_json as read_json, | ||||
| ) | ||||
|  | ||||
# The re-exports below are untyped upstream, so they are commented out:
# to_json as to_json, ujson_dumps as ujson_dumps, ujson_loads as ujson_loads
| from pandas.io.json._table_schema import build_table_schema as build_table_schema | ||||
							
								
								
									
@@ -0,0 +1,240 @@  lib/python3.11/site-packages/pandas-stubs/io/json/_json.pyi (new file, 240 lines)
| from collections import abc | ||||
| from collections.abc import Mapping | ||||
| from types import TracebackType | ||||
| from typing import ( | ||||
|     Generic, | ||||
|     Literal, | ||||
|     overload, | ||||
| ) | ||||
|  | ||||
| from pandas.core.frame import DataFrame | ||||
| from pandas.core.series import Series | ||||
|  | ||||
| from pandas._libs.lib import _NoDefaultDoNotUse | ||||
| from pandas._typing import ( | ||||
|     CompressionOptions, | ||||
|     DtypeArg, | ||||
|     DtypeBackend, | ||||
|     FilePath, | ||||
|     HashableT, | ||||
|     JsonFrameOrient, | ||||
|     JsonSeriesOrient, | ||||
|     NDFrameT, | ||||
|     ReadBuffer, | ||||
|     StorageOptions, | ||||
|     TimeUnit, | ||||
| ) | ||||
|  | ||||
# ``read_json`` overload matrix (8 overloads), split along three axes:
#   typ:       "series" -> Series results; "frame" (the default) -> DataFrame results
#   chunksize: int -> returns a lazy JsonReader; None/omitted -> eager Series/DataFrame
#   engine:    "ujson" (the default) vs "pyarrow" (must be passed explicitly,
#              accepts only byte-oriented sources, and requires lines=True)
# NOTE(review): overload 3 (frame/chunked/ujson) omits ReadBuffer[str] from
# path_or_buf while the other ujson overloads accept it — confirm upstream
# whether that narrowing is intentional.
# NOTE(review): the pyarrow+chunksize overloads assume chunked reading works
# with engine="pyarrow"; some pandas releases reject that combination at
# runtime — confirm against the pandas version these stubs target.
# typ="series", chunked (chunksize: int), engine="ujson" -> JsonReader[Series]
@overload
def read_json(
    path_or_buf: FilePath | ReadBuffer[str] | ReadBuffer[bytes],
    *,
    orient: JsonSeriesOrient | None = ...,
    typ: Literal["series"],
    dtype: bool | Mapping[HashableT, DtypeArg] | None = ...,
    convert_axes: bool | None = ...,
    convert_dates: bool | list[str] = ...,
    keep_default_dates: bool = ...,
    precise_float: bool = ...,
    date_unit: TimeUnit | None = ...,
    encoding: str | None = ...,
    encoding_errors: (
        Literal["strict", "ignore", "replace", "backslashreplace", "surrogateescape"]
        | None
    ) = ...,
    lines: Literal[True],
    chunksize: int,
    compression: CompressionOptions = ...,
    nrows: int | None = ...,
    storage_options: StorageOptions = ...,
    dtype_backend: DtypeBackend | _NoDefaultDoNotUse = ...,
    engine: Literal["ujson"] = ...,
) -> JsonReader[Series]: ...
# typ="series", chunked, engine="pyarrow" (bytes sources only) -> JsonReader[Series]
@overload
def read_json(
    path_or_buf: FilePath | ReadBuffer[bytes],
    *,
    orient: JsonSeriesOrient | None = ...,
    typ: Literal["series"],
    dtype: bool | Mapping[HashableT, DtypeArg] | None = ...,
    convert_axes: bool | None = ...,
    convert_dates: bool | list[str] = ...,
    keep_default_dates: bool = ...,
    precise_float: bool = ...,
    date_unit: TimeUnit | None = ...,
    encoding: str | None = ...,
    encoding_errors: (
        Literal["strict", "ignore", "replace", "backslashreplace", "surrogateescape"]
        | None
    ) = ...,
    lines: Literal[True],
    chunksize: int,
    compression: CompressionOptions = ...,
    nrows: int | None = ...,
    storage_options: StorageOptions = ...,
    dtype_backend: DtypeBackend | _NoDefaultDoNotUse = ...,
    engine: Literal["pyarrow"],
) -> JsonReader[Series]: ...
# typ="frame" (default), chunked, engine="ujson" -> JsonReader[DataFrame]
@overload
def read_json(
    path_or_buf: FilePath | ReadBuffer[bytes],
    *,
    orient: JsonFrameOrient | None = ...,
    typ: Literal["frame"] = ...,
    dtype: bool | Mapping[HashableT, DtypeArg] | None = ...,
    convert_axes: bool | None = ...,
    convert_dates: bool | list[str] = ...,
    keep_default_dates: bool = ...,
    precise_float: bool = ...,
    date_unit: TimeUnit | None = ...,
    encoding: str | None = ...,
    encoding_errors: (
        Literal["strict", "ignore", "replace", "backslashreplace", "surrogateescape"]
        | None
    ) = ...,
    lines: Literal[True],
    chunksize: int,
    compression: CompressionOptions = ...,
    nrows: int | None = ...,
    storage_options: StorageOptions = ...,
    dtype_backend: DtypeBackend | _NoDefaultDoNotUse = ...,
    engine: Literal["ujson"] = ...,
) -> JsonReader[DataFrame]: ...
# typ="frame" (default), chunked, engine="pyarrow" -> JsonReader[DataFrame]
@overload
def read_json(
    path_or_buf: FilePath | ReadBuffer[bytes],
    *,
    orient: JsonFrameOrient | None = ...,
    typ: Literal["frame"] = ...,
    dtype: bool | Mapping[HashableT, DtypeArg] | None = ...,
    convert_axes: bool | None = ...,
    convert_dates: bool | list[str] = ...,
    keep_default_dates: bool = ...,
    precise_float: bool = ...,
    date_unit: TimeUnit | None = ...,
    encoding: str | None = ...,
    encoding_errors: (
        Literal["strict", "ignore", "replace", "backslashreplace", "surrogateescape"]
        | None
    ) = ...,
    lines: Literal[True],
    chunksize: int,
    compression: CompressionOptions = ...,
    nrows: int | None = ...,
    storage_options: StorageOptions = ...,
    dtype_backend: DtypeBackend | _NoDefaultDoNotUse = ...,
    engine: Literal["pyarrow"],
) -> JsonReader[DataFrame]: ...
# typ="series", eager (chunksize: None), engine="ujson" (lines may be bool) -> Series
@overload
def read_json(
    path_or_buf: FilePath | ReadBuffer[str] | ReadBuffer[bytes],
    *,
    orient: JsonSeriesOrient | None = ...,
    typ: Literal["series"],
    dtype: bool | Mapping[HashableT, DtypeArg] | None = ...,
    convert_axes: bool | None = ...,
    convert_dates: bool | list[str] = ...,
    keep_default_dates: bool = ...,
    precise_float: bool = ...,
    date_unit: TimeUnit | None = ...,
    encoding: str | None = ...,
    encoding_errors: (
        Literal["strict", "ignore", "replace", "backslashreplace", "surrogateescape"]
        | None
    ) = ...,
    lines: bool = ...,
    chunksize: None = ...,
    compression: CompressionOptions = ...,
    nrows: int | None = ...,
    storage_options: StorageOptions = ...,
    dtype_backend: DtypeBackend | _NoDefaultDoNotUse = ...,
    engine: Literal["ujson"] = ...,
) -> Series: ...
# typ="series", eager, engine="pyarrow" (requires lines=True) -> Series
@overload
def read_json(
    path_or_buf: FilePath | ReadBuffer[bytes],
    *,
    orient: JsonSeriesOrient | None = ...,
    typ: Literal["series"],
    dtype: bool | Mapping[HashableT, DtypeArg] | None = ...,
    convert_axes: bool | None = ...,
    convert_dates: bool | list[str] = ...,
    keep_default_dates: bool = ...,
    precise_float: bool = ...,
    date_unit: TimeUnit | None = ...,
    encoding: str | None = ...,
    encoding_errors: (
        Literal["strict", "ignore", "replace", "backslashreplace", "surrogateescape"]
        | None
    ) = ...,
    lines: Literal[True],
    chunksize: None = ...,
    compression: CompressionOptions = ...,
    nrows: int | None = ...,
    storage_options: StorageOptions = ...,
    dtype_backend: DtypeBackend | _NoDefaultDoNotUse = ...,
    engine: Literal["pyarrow"],
) -> Series: ...
# typ="frame" (default), eager, engine="ujson" (lines may be bool) -> DataFrame
@overload
def read_json(
    path_or_buf: FilePath | ReadBuffer[str] | ReadBuffer[bytes],
    *,
    orient: JsonFrameOrient | None = ...,
    typ: Literal["frame"] = ...,
    dtype: bool | Mapping[HashableT, DtypeArg] | None = ...,
    convert_axes: bool | None = ...,
    convert_dates: bool | list[str] = ...,
    keep_default_dates: bool = ...,
    precise_float: bool = ...,
    date_unit: TimeUnit | None = ...,
    encoding: str | None = ...,
    encoding_errors: (
        Literal["strict", "ignore", "replace", "backslashreplace", "surrogateescape"]
        | None
    ) = ...,
    lines: bool = ...,
    chunksize: None = ...,
    compression: CompressionOptions = ...,
    nrows: int | None = ...,
    storage_options: StorageOptions = ...,
    dtype_backend: DtypeBackend | _NoDefaultDoNotUse = ...,
    engine: Literal["ujson"] = ...,
) -> DataFrame: ...
# typ="frame" (default), eager, engine="pyarrow" (requires lines=True) -> DataFrame
@overload
def read_json(
    path_or_buf: FilePath | ReadBuffer[bytes],
    *,
    orient: JsonFrameOrient | None = ...,
    typ: Literal["frame"] = ...,
    dtype: bool | Mapping[HashableT, DtypeArg] | None = ...,
    convert_axes: bool | None = ...,
    convert_dates: bool | list[str] = ...,
    keep_default_dates: bool = ...,
    precise_float: bool = ...,
    date_unit: TimeUnit | None = ...,
    encoding: str | None = ...,
    encoding_errors: (
        Literal["strict", "ignore", "replace", "backslashreplace", "surrogateescape"]
        | None
    ) = ...,
    lines: Literal[True],
    chunksize: None = ...,
    compression: CompressionOptions = ...,
    nrows: int | None = ...,
    storage_options: StorageOptions = ...,
    dtype_backend: DtypeBackend | _NoDefaultDoNotUse = ...,
    engine: Literal["pyarrow"],
) -> DataFrame: ...
|  | ||||
class JsonReader(abc.Iterator, Generic[NDFrameT]):
    """Chunked JSON reader returned by ``read_json`` when ``chunksize`` is an int.

    Generic over ``NDFrameT`` — ``Series`` or ``DataFrame`` depending on the
    ``typ`` argument of ``read_json`` (see the overloads above). Usable as an
    iterator of chunks or as a context manager.
    """

    # Consume all remaining input and return it as a single Series/DataFrame.
    def read(self) -> NDFrameT: ...
    # Release the underlying file handle/resources.
    def close(self) -> None: ...
    def __iter__(self) -> JsonReader[NDFrameT]: ...
    # Each step yields the next chunk (presumably up to ``chunksize`` rows —
    # confirm against pandas runtime docs).
    def __next__(self) -> NDFrameT: ...
    # Context-manager protocol: ``with read_json(..., chunksize=n) as reader:``.
    def __enter__(self) -> JsonReader[NDFrameT]: ...
    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None: ...  # returns None, so exceptions are never suppressed
@@ -0,0 +1,14 @@
| from pandas import DataFrame | ||||
|  | ||||
| from pandas._typing import IgnoreRaise | ||||
|  | ||||
# Stub for ``pandas.json_normalize``: flatten semi-structured JSON (a dict or
# a list of dicts) into a DataFrame.
#   record_path:   path to the nested list of records to unroll
#   meta:          fields to carry over as per-record metadata columns
#   meta_prefix / record_prefix: prefixes for the generated column names
#   errors:        "raise" (default) or "ignore" per the IgnoreRaise alias
#   sep:           separator joining nested keys into column names
#   max_level:     maximum nesting depth to normalize; None flattens all levels
def json_normalize(
    data: dict | list[dict],
    record_path: str | list | None = None,
    meta: str | list[str | list[str]] | None = None,
    meta_prefix: str | None = None,
    record_prefix: str | None = None,
    errors: IgnoreRaise = "raise",
    sep: str = ".",
    max_level: int | None = None,
) -> DataFrame: ...
@@ -0,0 +1,13 @@
| from pandas import ( | ||||
|     DataFrame, | ||||
|     Series, | ||||
| ) | ||||
|  | ||||
| from pandas._typing import JSONSerializable | ||||
|  | ||||
# Stub for ``pandas.io.json.build_table_schema``: derive a Table Schema
# mapping (JSON-serializable dict) describing *data*.
#   index:       whether the index is included in the schema
#   primary_key: whether to designate the index as the primary key
#                NOTE(review): some pandas releases default primary_key to
#                None at runtime — confirm ``= True`` matches the stubbed
#                pandas version.
#   version:     whether to include the pandas_version field in the schema
def build_table_schema(
    data: DataFrame | Series,
    index: bool = True,
    primary_key: bool | None = True,
    version: bool = True,
) -> dict[str, JSONSerializable]: ...
		Reference in New Issue
	
	Block a user