-
Notifications
You must be signed in to change notification settings - Fork 0
/
ParquetToPandasDataFrame.py
47 lines (40 loc) · 1.53 KB
/
ParquetToPandasDataFrame.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
import boto3
import pandas as pd
import logging
from botocore.exceptions import ClientError
import io
# S3 location of the parquet file this script inspects.
bucket_name = "webpresence-geocore-geojson-to-parquet-dev"
file_name = "records.parquet"
# Show every column when printing DataFrames. The full option name is
# required: the bare alias 'max_columns' is ambiguous in pandas >= 1.4
# (it also matches 'styler.render.max_columns') and raises OptionError.
pd.set_option('display.max_columns', None)
def open_S3_file_as_df(bucket_name, file_name):
    """Load a parquet object from S3 into a pandas DataFrame.

    Also prints quick inspection output: the column names and the values
    of the last column.

    :param bucket_name: S3 bucket name
    :param file_name: key of the parquet object inside the bucket
    :return: pandas DataFrame on success, None if the S3 read fails
    """
    try:
        s3 = boto3.resource('s3')
        # 'obj', not 'object' -- avoid shadowing the builtin.
        obj = s3.Object(bucket_name, file_name)
        body = obj.get()['Body'].read()
        df = pd.read_parquet(io.BytesIO(body))
        print(f'Loading {file_name} from {bucket_name} to pandas dataframe')
        # Inspection output: all column names, then the last column.
        print(df.columns.tolist())
        last_column = df.iloc[:, -1]
        print(last_column)
        return df
    except ClientError as e:
        # Log and return None rather than the exception object itself:
        # callers can test `df is None` instead of receiving a ClientError
        # instance where a DataFrame was expected.
        logging.error(e)
        return None
# Run the inspection only when executed as a script, not on import.
if __name__ == "__main__":
    open_S3_file_as_df(bucket_name, file_name)

# TODO(review): draft for logging per-row processing status, kept as
# comments (it was a no-op string literal). 'Transformed' is undefined
# here and the condition never varies per row -- it should test a
# per-row flag (e.g. row['Transformed']) before this is enabled.
#
# df['process_log'] = 'error'
# # Loop through the DataFrame and update the new column based on
# # processing condition 'Fail' or 'Success'.
# for index, row in df.iterrows():
#     if Transformed == True:
#         df.at[index, 'process_log'] = 'Success'  # or 1
#     else:
#         df.at[index, 'process_log'] = 'Fail'  # or 0