1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
#![allow(clippy::type_complexity)]
mod deserialize;
mod file;
mod row_group;
pub mod schema;
pub mod statistics;
use futures::{AsyncRead, AsyncSeek};
pub use parquet2::{
error::ParquetError,
fallible_streaming_iterator,
metadata::{ColumnChunkMetaData, ColumnDescriptor, RowGroupMetaData},
page::{CompressedDataPage, DataPage, DataPageHeader},
read::{
decompress, get_column_iterator, get_page_iterator as _get_page_iterator,
get_page_stream as _get_page_stream, read_metadata as _read_metadata,
read_metadata_async as _read_metadata_async, BasicDecompressor, ColumnChunkIter,
Decompressor, MutStreamingIterator, PageFilter, PageIterator, ReadColumnIterator, State,
},
schema::types::{
LogicalType, ParquetType, PhysicalType, PrimitiveConvertedType,
TimeUnit as ParquetTimeUnit, TimestampType,
},
types::int96_to_i64_ns,
FallibleStreamingIterator,
};
pub use deserialize::{column_iter_to_arrays, get_page_iterator};
pub use file::{FileReader, RowGroupReader};
pub use row_group::*;
pub(crate) use schema::is_type_nullable;
pub use schema::{infer_schema, FileMetaData};
use std::{
io::{Read, Seek},
sync::Arc,
};
use crate::{array::Array, error::Result};
/// Trait describing a fallible streaming iterator of [`DataPage`]s, used as the
/// input to the deserialization routines that convert pages into arrow arrays.
///
/// The `Send + Sync` bounds allow page decoding to be moved to other threads.
pub trait DataPages:
    FallibleStreamingIterator<Item = DataPage, Error = ParquetError> + Send + Sync
{
}
/// Blanket implementation: every `Send + Sync` fallible streaming iterator of
/// [`DataPage`]s with [`ParquetError`] as its error type is a [`DataPages`].
impl<I: FallibleStreamingIterator<Item = DataPage, Error = ParquetError> + Send + Sync> DataPages
    for I
{
}
/// Alias for a boxed, thread-safe iterator that yields fallibly-deserialized
/// [`Array`]s (one `Result` per decoded chunk).
pub type ArrayIter<'a> = Box<dyn Iterator<Item = Result<Arc<dyn Array>>> + Send + Sync + 'a>;
/// Reads the parquet [`FileMetaData`] from the footer of `reader`.
///
/// # Errors
/// Returns an error if the underlying reader fails or the footer does not
/// contain valid parquet metadata (the underlying [`ParquetError`] is
/// converted into this crate's error type).
pub fn read_metadata<R: Read + Seek>(reader: &mut R) -> Result<FileMetaData> {
    _read_metadata(reader).map_err(Into::into)
}
/// Asynchronously reads the parquet [`FileMetaData`] from the footer of `reader`.
///
/// # Errors
/// Returns an error if the underlying reader fails or the footer does not
/// contain valid parquet metadata (the underlying [`ParquetError`] is
/// converted into this crate's error type).
pub async fn read_metadata_async<R: AsyncRead + AsyncSeek + Send + Unpin>(
    reader: &mut R,
) -> Result<FileMetaData> {
    _read_metadata_async(reader).await.map_err(Into::into)
}