FastAPI – return contents of csv file as plain text
Question:
Wondering if anyone can help me out please.
In FastAPI I want to set up an endpoint which returns the contents of a generated csv file as plain text. I don’t want the file to be downloaded.
I’ve tried the following, which works just fine; however, a file download is always initiated.
@app.get('/hosts/last_hour')
def hosts_last_hour():
    """Stream the last hour's generated hosts CSV back as inline text/csv.

    Computes the [now - 1h, now] epoch window, asks process_hosts() to
    generate the CSV file on disk, then streams that file back.  The
    'Content-Disposition: inline' header is the fix for the unwanted
    download prompt: without it, many clients treat an unnamed streamed
    body as an attachment and save it instead of displaying it.
    """
    # Take one timestamp and derive the window from it, so start and end
    # cannot straddle a second boundary (the original called mktime twice).
    epoch_end = time.mktime(datetime.now().timetuple())
    epoch_start = epoch_end - 3600  # one hour before now

    # Side effect: writes output/hosts_traffic_<epoch_start>.csv
    process_hosts(epoch_start, epoch_end)

    def iterate_csv(epoch_start):
        # Yield the file line by line so a large CSV is never fully in memory.
        with open(f'output/hosts_traffic_{int(epoch_start)}.csv', mode='rb') as csv_file:
            yield from csv_file

    headers = {'Content-Disposition': 'inline'}
    return StreamingResponse(iterate_csv(epoch_start), media_type="text/csv", headers=headers)
I need the contents of the file to be sent in the response body as text/csv (I don’t want a download to be initiated and I don’t want the response in JSON format). Any ideas on how to achieve this?
Thanks in advance.
José
Answers:
This will be easy using pandas:
Just read the CSV into a DataFrame using pd.read_csv and then turn the DataFrame into a dict using the to_dict() function.
Do note that pandas DataFrames can cause memory issues.
import pandas as pd
@app.get('/hosts/last_hour')
def hosts_last_hour():
    """Read SampleCSVFile_11kb.csv and return its contents as a dict.

    pandas parses the file into a DataFrame; to_dict() converts it to a
    plain dict, which FastAPI then serializes to JSON in the response.
    """
    frame = pd.read_csv("SampleCSVFile_11kb.csv")
    return frame.to_dict()
If you don’t want to use pandas then do this:
import csv
import json
@app.get('/hosts/last_hour')
def hosts_last_hour():
    """Return the rows of data.csv as a JSON array of objects.

    Each CSV row becomes one dict keyed by the header row.  The list is
    returned directly so FastAPI serializes it to JSON exactly once.
    Returning json.dumps(...) here was a bug: FastAPI would JSON-encode
    the already-encoded string, so clients received one big quoted
    string instead of an array (double encoding).
    """
    rows = []
    with open("data.csv", encoding='utf-8') as csv_file_handler:
        for row in csv.DictReader(csv_file_handler):
            rows.append(row)
    return rows
In case anyone is looking for a similar solution, here’s what worked for me (see this answer for more details):
# 1 MB chunks: large enough to keep read-call overhead low, small enough
# to bound per-request memory while streaming.
CHUNK_SIZE = 1024 * 1024


@app.get('/hosts/')
async def hosts():
    """Stream the generated hosts CSV for the last query period as inline text/csv.

    Computes the [now - MAX_QUERY_PERIOD, now] epoch window, has
    process_hosts() generate the CSV on disk, then streams the file back
    in CHUNK_SIZE pieces without blocking the event loop.
    """
    # Get data for the last query period, in epoch seconds.
    epoch_start = time.mktime(datetime.now().timetuple()) - MAX_QUERY_PERIOD
    epoch_end = time.mktime(datetime.now().timetuple())

    # Side effect: writes output/hosts_traffic_<epoch_start>.csv
    # (The original also assigned an unused local `tenant = TENANT`; removed.)
    process_hosts(epoch_start, epoch_end)

    async def iter_file():
        # aiofiles performs the reads off the event loop thread.
        async with aiofiles.open(f'output/hosts_traffic_{int(epoch_start)}.csv', mode='rb') as csv_file:
            while chunk := await csv_file.read(CHUNK_SIZE):
                yield chunk

    # 'inline' prevents clients from treating the streamed body as a download.
    headers = {'Content-Disposition': 'inline'}
    return StreamingResponse(iter_file(), media_type="text/csv", headers=headers)
Thanks!
Wondering if anyone can help me out please.
In FastAPI I want to set up an endpoint which returns the contents of a generated csv file as plain text. I don’t want the file to be downloaded.
I’ve tried the following, which works just fine; however, a file download is always initiated.
@app.get('/hosts/last_hour')
def hosts_last_hour():
    """Stream the last hour's generated hosts CSV back as inline text/csv.

    Computes the [now - 1h, now] epoch window, asks process_hosts() to
    generate the CSV file on disk, then streams that file back.  The
    'Content-Disposition: inline' header is the fix for the unwanted
    download prompt: without it, many clients treat an unnamed streamed
    body as an attachment and save it instead of displaying it.
    """
    # Take one timestamp and derive the window from it, so start and end
    # cannot straddle a second boundary (the original called mktime twice).
    epoch_end = time.mktime(datetime.now().timetuple())
    epoch_start = epoch_end - 3600  # one hour before now

    # Side effect: writes output/hosts_traffic_<epoch_start>.csv
    process_hosts(epoch_start, epoch_end)

    def iterate_csv(epoch_start):
        # Yield the file line by line so a large CSV is never fully in memory.
        with open(f'output/hosts_traffic_{int(epoch_start)}.csv', mode='rb') as csv_file:
            yield from csv_file

    headers = {'Content-Disposition': 'inline'}
    return StreamingResponse(iterate_csv(epoch_start), media_type="text/csv", headers=headers)
I need the contents of the file to be sent in the response body as text/csv (I don’t want a download to be initiated and I don’t want the response in JSON format). Any ideas on how to achieve this?
Thanks in advance.
José
This will be easy using pandas:
Just read the CSV into a DataFrame using pd.read_csv and then turn the DataFrame into a dict using the to_dict() function.
Do note that pandas DataFrames can cause memory issues.
import pandas as pd
@app.get('/hosts/last_hour')
def hosts_last_hour():
    """Read SampleCSVFile_11kb.csv and return its contents as a dict.

    pandas parses the file into a DataFrame; to_dict() converts it to a
    plain dict, which FastAPI then serializes to JSON in the response.
    """
    frame = pd.read_csv("SampleCSVFile_11kb.csv")
    return frame.to_dict()
If you don’t want to use pandas then do this:
import csv
import json
@app.get('/hosts/last_hour')
def hosts_last_hour():
    """Return the rows of data.csv as a JSON array of objects.

    Each CSV row becomes one dict keyed by the header row.  The list is
    returned directly so FastAPI serializes it to JSON exactly once.
    Returning json.dumps(...) here was a bug: FastAPI would JSON-encode
    the already-encoded string, so clients received one big quoted
    string instead of an array (double encoding).
    """
    rows = []
    with open("data.csv", encoding='utf-8') as csv_file_handler:
        for row in csv.DictReader(csv_file_handler):
            rows.append(row)
    return rows
In case anyone is looking for a similar solution, here’s what worked for me (see this answer for more details):
# 1 MB chunks: large enough to keep read-call overhead low, small enough
# to bound per-request memory while streaming.
CHUNK_SIZE = 1024 * 1024


@app.get('/hosts/')
async def hosts():
    """Stream the generated hosts CSV for the last query period as inline text/csv.

    Computes the [now - MAX_QUERY_PERIOD, now] epoch window, has
    process_hosts() generate the CSV on disk, then streams the file back
    in CHUNK_SIZE pieces without blocking the event loop.
    """
    # Get data for the last query period, in epoch seconds.
    epoch_start = time.mktime(datetime.now().timetuple()) - MAX_QUERY_PERIOD
    epoch_end = time.mktime(datetime.now().timetuple())

    # Side effect: writes output/hosts_traffic_<epoch_start>.csv
    # (The original also assigned an unused local `tenant = TENANT`; removed.)
    process_hosts(epoch_start, epoch_end)

    async def iter_file():
        # aiofiles performs the reads off the event loop thread.
        async with aiofiles.open(f'output/hosts_traffic_{int(epoch_start)}.csv', mode='rb') as csv_file:
            while chunk := await csv_file.read(CHUNK_SIZE):
                yield chunk

    # 'inline' prevents clients from treating the streamed body as a download.
    headers = {'Content-Disposition': 'inline'}
    return StreamingResponse(iter_file(), media_type="text/csv", headers=headers)
Thanks!