Skip to content
21 changes: 15 additions & 6 deletions pandas/core/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -2412,9 +2412,13 @@ def to_hdf(
complib: Optional[str] = None,
append: bool_t = False,
format: Optional[str] = None,
index: bool_t = True,
min_itemsize: Optional[Union[int, Dict[str, int]]] = None,
nan_rep=None,
dropna: Optional[bool_t] = None,
data_columns: Optional[List[str]] = None,
errors: str = "strict",
encoding: str = "UTF-8",
**kwargs,
):
"""
Write the contained data to an HDF5 file using HDFStore.
Expand Down Expand Up @@ -2471,15 +2475,16 @@ def to_hdf(
See the errors argument for :func:`open` for a full list
of options.
encoding : str, default "UTF-8"
min_itemsize : dict or int, optional
Map column names to minimum string sizes for columns.
nan_rep : Any, optional
How to represent null values as str.
Not allowed with append=True.
data_columns : list of columns or True, optional
List of columns to create as indexed data columns for on-disk
queries, or True to use all columns. By default only the axes
of the object are indexed. See :ref:`io.hdf5-query-data-columns`.
Applicable only to format='table'.
fletcher32 : bool, default False
If applying compression, use the fletcher32 checksum.
dropna : bool, default False
If True, rows in which all values are NaN will not be written to the store.

See Also
--------
Expand Down Expand Up @@ -2530,9 +2535,13 @@ def to_hdf(
complib=complib,
append=append,
format=format,
index=index,
min_itemsize=min_itemsize,
nan_rep=nan_rep,
dropna=dropna,
data_columns=data_columns,
errors=errors,
encoding=encoding,
**kwargs,
)

def to_msgpack(self, path_or_buf=None, encoding="utf-8", **kwargs):
Expand Down
90 changes: 82 additions & 8 deletions pandas/io/pytables.py
Original file line number Diff line number Diff line change
Expand Up @@ -258,19 +258,41 @@ def to_hdf(
complib: Optional[str] = None,
append: bool = False,
format: Optional[str] = None,
index: bool = True,
min_itemsize: Optional[Union[int, Dict[str, int]]] = None,
nan_rep=None,
dropna: Optional[bool] = None,
data_columns: Optional[List[str]] = None,
errors: str = "strict",
encoding: str = "UTF-8",
**kwargs,
):
""" store this object, close it if we opened it """

if append:
f = lambda store: store.append(
key, value, format=format, errors=errors, encoding=encoding, **kwargs
key,
value,
format=format,
index=index,
min_itemsize=min_itemsize,
nan_rep=nan_rep,
dropna=dropna,
data_columns=data_columns,
errors=errors,
encoding=encoding,
)
else:
# NB: dropna is not passed to `put`
f = lambda store: store.put(
key, value, format=format, errors=errors, encoding=encoding, **kwargs
key,
value,
format=format,
index=index,
min_itemsize=min_itemsize,
nan_rep=nan_rep,
data_columns=data_columns,
errors=errors,
encoding=encoding,
)

path_or_buf = _stringify_path(path_or_buf)
Expand Down Expand Up @@ -976,7 +998,21 @@ def func(_start, _stop, _where):

return it.get_result(coordinates=True)

def put(self, key: str, value: FrameOrSeries, format=None, append=False, **kwargs):
def put(
self,
key: str,
value: FrameOrSeries,
format=None,
index=True,
append=False,
complib=None,
complevel: Optional[int] = None,
min_itemsize: Optional[Union[int, Dict[str, int]]] = None,
nan_rep=None,
data_columns: Optional[List[str]] = None,
encoding=None,
errors: str = "strict",
):
"""
Store object in HDFStore.

Expand Down Expand Up @@ -1006,7 +1042,20 @@ def put(self, key: str, value: FrameOrSeries, format=None, append=False, **kwarg
if format is None:
format = get_option("io.hdf.default_format") or "fixed"
format = self._validate_format(format)
self._write_to_group(key, value, format=format, append=append, **kwargs)
self._write_to_group(
key,
value,
format=format,
index=index,
append=append,
complib=complib,
complevel=complevel,
min_itemsize=min_itemsize,
nan_rep=nan_rep,
data_columns=data_columns,
encoding=encoding,
errors=errors,
)

def remove(self, key: str, where=None, start=None, stop=None):
"""
Expand Down Expand Up @@ -1067,10 +1116,20 @@ def append(
key: str,
value: FrameOrSeries,
format=None,
axes=None,
index=True,
append=True,
complib=None,
complevel: Optional[int] = None,
columns=None,
min_itemsize: Optional[Union[int, Dict[str, int]]] = None,
nan_rep=None,
chunksize=None,
expectedrows=None,
dropna: Optional[bool] = None,
**kwargs,
data_columns: Optional[List[str]] = None,
encoding=None,
errors: str = "strict",
):
"""
Append to Table in file. Node must already exist and be Table
Expand Down Expand Up @@ -1117,7 +1176,22 @@ def append(
format = get_option("io.hdf.default_format") or "table"
format = self._validate_format(format)
self._write_to_group(
key, value, format=format, append=append, dropna=dropna, **kwargs
key,
value,
format=format,
axes=axes,
index=index,
append=append,
complib=complib,
complevel=complevel,
min_itemsize=min_itemsize,
nan_rep=nan_rep,
chunksize=chunksize,
expectedrows=expectedrows,
dropna=dropna,
data_columns=data_columns,
encoding=encoding,
errors=errors,
)

def append_to_multiple(
Expand Down Expand Up @@ -1578,7 +1652,7 @@ def _write_to_group(
complib=None,
complevel: Optional[int] = None,
fletcher32=None,
min_itemsize=None,
min_itemsize: Optional[Union[int, Dict[str, int]]] = None,
chunksize=None,
expectedrows=None,
dropna=False,
Expand Down