__init__.py
import struct
import thrift
import logging
from ttypes import FileMetaData, CompressionCodec, Encoding, PageHeader, PageType, Type
from thrift.protocol import TCompactProtocol
from thrift.transport import TTransport

logger = logging.getLogger("parquet")


class ParquetFormatException(Exception):
    pass


def _check_header_magic_bytes(fo):
    """Returns true if the file-like obj has the PAR1 magic bytes at the header"""
    fo.seek(0, 0)
    magic = fo.read(4)
    return magic == 'PAR1'


def _check_footer_magic_bytes(fo):
    """Returns true if the file-like obj has the PAR1 magic bytes at the footer"""
    fo.seek(-4, 2)  # seek to four bytes from the end of the file
    magic = fo.read(4)
    return magic == 'PAR1'

def _get_footer_size(fo):
    """Reads the footer size in bytes, which is serialized as little endian"""
    fo.seek(-8, 2)
    tup = struct.unpack("<i", fo.read(4))
    return tup[0]

def _read_footer(fo):
    """Reads the footer from the given file object, returning a FileMetaData
    object. This method assumes that the fo references a valid parquet file"""
    footer_size = _get_footer_size(fo)
    logger.debug("Footer size in bytes: %s", footer_size)
    fo.seek(-(8 + footer_size), 2)  # seek to beginning of footer
    tin = TTransport.TFileObjectTransport(fo)
    pin = TCompactProtocol.TCompactProtocol(tin)
    fmd = FileMetaData()
    fmd.read(pin)
    return fmd
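
# Layout at the end of a parquet file, which _get_footer_size and _read_footer
# above rely on: the thrift-compact-serialized FileMetaData ("footer") is
# followed by its 4-byte little-endian length and the 4-byte magic "PAR1".
#
#   ... column chunks ... | FileMetaData | footer length (4 bytes) | 'PAR1'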

def _read_page_header(fo, offset):
    """Reads the page_header at the given offset"""
    fo.seek(offset, 0)
    tin = TTransport.TFileObjectTransport(fo)
    pin = TCompactProtocol.TCompactProtocol(tin)
    ph = PageHeader()
    ph.read(pin)
    return ph
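
# In the parquet format each page of a column chunk is a thrift-compact
# PageHeader immediately followed by the page's (possibly compressed) bytes;
# _read_page_header only decodes the header at a known offset, so moving from
# one page to the next is the caller's job (advance past the header plus
# compressed_page_size).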

def read_footer(filename):
    """Reads and returns the FileMetaData object for the given file."""
    with open(filename, 'rb') as fo:
        if not _check_header_magic_bytes(fo) or not _check_footer_magic_bytes(fo):
            raise ParquetFormatException(
                "{0} is not a valid parquet file (missing magic bytes)".format(filename))
        return _read_footer(fo)
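
# A minimal usage sketch (not part of the original module); "example.parquet"
# is a hypothetical file name:
#
#     fmd = read_footer("example.parquet")
#     print(fmd.num_rows, len(fmd.row_groups))
#
# read_footer raises ParquetFormatException when either magic-byte check fails.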

def dump_metadata(filename):
    """Prints a summary of the file's metadata, schema, row groups, column
    chunks, and page headers."""
    footer = read_footer(filename)
    print("File: {0}".format(filename))
    print(" version: {0}".format(footer.version))
    print(" num rows: {0}".format(footer.num_rows))
    print(" k/v metadata: ")
    if footer.key_value_metadata and len(footer.key_value_metadata) > 0:
        for kv in footer.key_value_metadata:
            print(" {0}={1}".format(kv.key, kv.value))
    else:
        print(" (none)")
    print(" schema: ")
    for se in footer.schema:
        print(" {name} ({type}): length={type_length}, repetition={repetition_type}, "
              "children={num_children}, converted_type={converted_type}".format(
                  name=se.name,
                  type=Type._VALUES_TO_NAMES[se.type] if se.type else None,
                  type_length=se.type_length,
                  repetition_type=se.repetition_type,
                  num_children=se.num_children,
                  converted_type=se.converted_type))
    print(" row groups: ")
    for rg in footer.row_groups:
        num_rows = rg.num_rows
        bytes = rg.total_byte_size
        print(" rows={num_rows}, bytes={bytes}".format(num_rows=num_rows, bytes=bytes))
        print(" chunks:")
        for cg in rg.columns:
            cmd = cg.meta_data
            print(" type={type} file_offset={offset} compression={codec} "
                  "encodings={encodings} path_in_schema={path_in_schema} "
                  "num_values={num_values} uncompressed_bytes={raw_bytes} "
                  "compressed_bytes={compressed_bytes} data_page_offset={data_page_offset} "
                  "dictionary_page_offset={dictionary_page_offset}".format(
                      type=cmd.type,
                      offset=cg.file_offset,
                      codec=CompressionCodec._VALUES_TO_NAMES[cmd.codec],
                      encodings=",".join([Encoding._VALUES_TO_NAMES[s] for s in cmd.encodings]),
                      path_in_schema=cmd.path_in_schema,
                      num_values=cmd.num_values,
                      raw_bytes=cmd.total_uncompressed_size,
                      compressed_bytes=cmd.total_compressed_size,
                      data_page_offset=cmd.data_page_offset,
                      dictionary_page_offset=cmd.dictionary_page_offset))
            with open(filename, 'rb') as fo:
                # Start at whichever page of the chunk comes first in the file
                # (the dictionary page, if present, precedes the data pages).
                offset = (cmd.data_page_offset
                          if (cmd.dictionary_page_offset is None
                              or cmd.data_page_offset < cmd.dictionary_page_offset)
                          else cmd.dictionary_page_offset)
                values_read = 0
                print(" pages: ")
                while values_read < num_rows:
                    ph = _read_page_header(fo, offset)
                    daph = ph.data_page_header
                    diph = ph.dictionary_page_header
                    type_ = PageType._VALUES_TO_NAMES[ph.type] if ph.type else None
                    raw_bytes = ph.uncompressed_page_size
                    num_values = None
                    if ph.type == PageType.DATA_PAGE:
                        num_values = daph.num_values
                        values_read += num_values
                    if ph.type == PageType.DICTIONARY_PAGE:
                        num_values = diph.num_values
                    encoding = None
                    def_level_encoding = None
                    rep_level_encoding = None
                    if daph:
                        encoding = Encoding._VALUES_TO_NAMES[daph.encoding]
                        def_level_encoding = Encoding._VALUES_TO_NAMES[daph.definition_level_encoding]
                        rep_level_encoding = Encoding._VALUES_TO_NAMES[daph.repetition_level_encoding]
                    print(" page header: type={type} uncompressed_size={raw_bytes} "
                          "num_values={num_values} encoding={encoding} "
                          "def_level_encoding={def_level_encoding} "
                          "rep_level_encoding={rep_level_encoding}".format(
                              type=type_, raw_bytes=raw_bytes, num_values=num_values,
                              encoding=encoding, def_level_encoding=def_level_encoding,
                              rep_level_encoding=rep_level_encoding))