cannot read parquet

#3 opened by PaulLerner

Hi, it's all in the title. I'm getting an "ArrowInvalid" error when trying to read the parquet file:

# using datasets.load_dataset gives the same result
>>> df = pd.read_parquet('ACL-OCL/acl-publication-info.74k.v2.parquet')
---------------------------------------------------------------------------
ArrowInvalid                              Traceback (most recent call last)
Cell In[16], line 1
----> 1 df = pd.read_parquet('/home/paul/open/ACL-OCL/acl-publication-info.74k.v2.parquet')

File ~/anaconda3/envs/matos/lib/python3.10/site-packages/pandas/io/parquet.py:667, in read_parquet(path, engine, columns, storage_options, use_nullable_dtypes, dtype_backend, filesystem, filters, **kwargs)
    664     use_nullable_dtypes = False
    665 check_dtype_backend(dtype_backend)
--> 667 return impl.read(
    668     path,
    669     columns=columns,
    670     filters=filters,
    671     storage_options=storage_options,
    672     use_nullable_dtypes=use_nullable_dtypes,
    673     dtype_backend=dtype_backend,
    674     filesystem=filesystem,
    675     **kwargs,
    676 )

File ~/anaconda3/envs/matos/lib/python3.10/site-packages/pandas/io/parquet.py:274, in PyArrowImpl.read(self, path, columns, filters, use_nullable_dtypes, dtype_backend, storage_options, filesystem, **kwargs)
    267 path_or_handle, handles, filesystem = _get_path_or_handle(
    268     path,
    269     filesystem,
    270     storage_options=storage_options,
    271     mode="rb",
    272 )
    273 try:
--> 274     pa_table = self.api.parquet.read_table(
    275         path_or_handle,
    276         columns=columns,
    277         filesystem=filesystem,
    278         filters=filters,
    279         **kwargs,
    280     )
    281     result = pa_table.to_pandas(**to_pandas_kwargs)
    283     if manager == "array":

File ~/anaconda3/envs/matos/lib/python3.10/site-packages/pyarrow/parquet/core.py:1776, in read_table(source, columns, use_threads, schema, use_pandas_metadata, read_dictionary, memory_map, buffer_size, partitioning, filesystem, filters, use_legacy_dataset, ignore_prefixes, pre_buffer, coerce_int96_timestamp_unit, decryption_properties, thrift_string_size_limit, thrift_container_size_limit, page_checksum_verification)
   1770     warnings.warn(
   1771         "Passing 'use_legacy_dataset' is deprecated as of pyarrow 15.0.0 "
   1772         "and will be removed in a future version.",
   1773         FutureWarning, stacklevel=2)
   1775 try:
-> 1776     dataset = ParquetDataset(
   1777         source,
   1778         schema=schema,
   1779         filesystem=filesystem,
   1780         partitioning=partitioning,
   1781         memory_map=memory_map,
   1782         read_dictionary=read_dictionary,
   1783         buffer_size=buffer_size,
   1784         filters=filters,
   1785         ignore_prefixes=ignore_prefixes,
   1786         pre_buffer=pre_buffer,
   1787         coerce_int96_timestamp_unit=coerce_int96_timestamp_unit,
   1788         thrift_string_size_limit=thrift_string_size_limit,
   1789         thrift_container_size_limit=thrift_container_size_limit,
   1790         page_checksum_verification=page_checksum_verification,
   1791     )
   1792 except ImportError:
   1793     # fall back on ParquetFile for simple cases when pyarrow.dataset
   1794     # module is not available
   1795     if filters is not None:

File ~/anaconda3/envs/matos/lib/python3.10/site-packages/pyarrow/parquet/core.py:1343, in ParquetDataset.__init__(self, path_or_paths, filesystem, schema, filters, read_dictionary, memory_map, buffer_size, partitioning, ignore_prefixes, pre_buffer, coerce_int96_timestamp_unit, decryption_properties, thrift_string_size_limit, thrift_container_size_limit, page_checksum_verification, use_legacy_dataset)
   1339 if single_file is not None:
   1340     fragment = parquet_format.make_fragment(single_file, filesystem)
   1342     self._dataset = ds.FileSystemDataset(
-> 1343         [fragment], schema=schema or fragment.physical_schema,
   1344         format=parquet_format,
   1345         filesystem=fragment.filesystem
   1346     )
   1347     return
   1349 # check partitioning to enable dictionary encoding

File ~/anaconda3/envs/matos/lib/python3.10/site-packages/pyarrow/_dataset.pyx:1367, in pyarrow._dataset.Fragment.physical_schema.__get__()

File ~/anaconda3/envs/matos/lib/python3.10/site-packages/pyarrow/error.pxi:154, in pyarrow.lib.pyarrow_internal_check_status()

File ~/anaconda3/envs/matos/lib/python3.10/site-packages/pyarrow/error.pxi:91, in pyarrow.lib.check_status()

ArrowInvalid: Could not open Parquet input source '<Buffer>': Parquet magic bytes not found in footer. Either the file is corrupted or this is not a parquet file.
