```python
import os
import tempfile

# load one day of chronicle metrics into a polars DataFrame
m = scan_chronicle_metrics("./data", "2023/04/03").collect()

# create a temporary file; a fresh temp file is empty
tf = tempfile.NamedTemporaryFile(suffix=".parquet")
assert os.path.getsize(tf.name) == 0

# write the metrics DataFrame to the temp file
z = write_parquet(m, tf.name)
assert os.path.getsize(tf.name) > 0

# write_parquet returns None
assert z is None
```
# io

> File operations on chronicle parquet files
## write_parquet

```python
write_parquet (x:polars.dataframe.frame.DataFrame, filename:str)
```

Write chronicle data to a parquet file.

|  | Type | Details |
|---|---|---|
| x | DataFrame | polars DataFrame |
| filename | str | Full file name |
| Returns | None |  |
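Since `write_parquet` takes a polars DataFrame and a file name and returns `None`, it is most likely a thin wrapper over polars' native parquet writer. A minimal sketch of such a wrapper, assuming it delegates to `DataFrame.write_parquet` (the name `write_parquet_sketch` is hypothetical, not the package's code):

```python
import polars as pl

def write_parquet_sketch(x: pl.DataFrame, filename: str) -> None:
    """Illustrative wrapper: write chronicle data to a parquet file."""
    # polars writes parquet natively; the implicit None return matches
    # the documented signature above
    x.write_parquet(filename)
```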
## get_s3_bucket_dates

```python
get_s3_bucket_dates (bucket:str, type='logs', version='v1')
```

Get a list of dates for which there are chronicle logs or metrics in an S3 bucket.

|  | Type | Default | Details |
|---|---|---|---|
| bucket | str |  | S3 bucket name, without the “s3://” prefix |
| type | str | logs | “logs” or “metrics” |
| version | str | v1 | “v1” or “v2” |
| Returns | list |  |  |
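The docs do not show how the dates are discovered; presumably the function enumerates date-partitioned keys in the bucket. A minimal sketch with boto3, assuming a key layout like `{version}/{type}/{yyyy}/{mm}/{dd}/...` (the layout and the helper name are assumptions, not the package's actual implementation):

```python
import boto3

def get_s3_bucket_dates_sketch(bucket: str, type: str = "logs",
                               version: str = "v1") -> list:
    """Illustrative only: list 'YYYY/MM/DD' dates that have chronicle data."""
    s3 = boto3.client("s3")
    prefix = f"{version}/{type}/"  # assumed key layout
    dates = set()
    for page in s3.get_paginator("list_objects_v2").paginate(
        Bucket=bucket, Prefix=prefix
    ):
        for obj in page.get("Contents", []):
            # keys look like {prefix}yyyy/mm/dd/<file> under the assumed layout
            parts = obj["Key"][len(prefix):].split("/")
            if len(parts) >= 4:
                dates.add("/".join(parts[:3]))
    return sorted(dates)
```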
= "colorado-posit-chronicle"
bucket "metrics") get_s3_bucket_dates(bucket,