Skip to content

Commit 0328990

Browse files
Merge pull request #1194 from DalgoT4D/download-csv-of-chart-data
Add an endpoint to download the chart's data as a CSV file
2 parents 0c3363d + f4432ca commit 0328990

File tree

1 file changed

+90
-1
lines changed

1 file changed

+90
-1
lines changed

ddpui/api/charts_api.py

Lines changed: 90 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,12 +2,14 @@
22

33
from typing import Optional, List, Dict, Any
44
import copy
5+
import csv
56
from datetime import datetime
7+
from io import StringIO
68

79
from ninja import Router, Schema, Field
810
from ninja.errors import HttpError
911
from django.shortcuts import get_object_or_404
10-
from django.http import HttpResponse
12+
from django.http import HttpResponse, StreamingHttpResponse
1113

1214
from ddpui.auth import has_permission
1315
from ddpui.models.org_user import OrgUser
@@ -785,6 +787,93 @@ def generate_map_chart_data(request, payload: ChartDataPayload):
785787
}
786788

787789

790+
@charts_router.post("/download-csv/")
@has_permission(["can_view_charts"])
def download_chart_data_csv(request, payload: ChartDataPayload):
    """Stream chart data as a CSV file download, with all filters/aggregations applied.

    Pages through ``charts_service.get_chart_data_table_preview`` in chunks of
    5000 rows and streams each chunk to the client, so arbitrarily large result
    sets never have to be held in memory at once.

    Raises:
        HttpError(403): the requesting user has no access to the schema.
        HttpError(404): the org has no warehouse configured.
        HttpError(500): the warehouse query fails while streaming.
    """
    orguser: OrgUser = request.orguser

    # Validate user has access to schema/table
    if not has_schema_access(request, payload.schema_name):
        raise HttpError(403, "Access to schema denied")

    org_warehouse = OrgWarehouse.objects.filter(org=orguser.org).first()
    if not org_warehouse:
        raise HttpError(404, "Please set up your warehouse first")

    def stream_chart_data(org_warehouse, payload, page_size=5000):
        """Generator that yields CSV text in chunks: header first, then one chunk per page."""
        page = 0
        output = StringIO()
        try:
            # Fetch first page
            preview_data = charts_service.get_chart_data_table_preview(
                org_warehouse, payload, page=page, limit=page_size
            )
            data = preview_data["data"]
            columns = preview_data["columns"]

            if not columns:
                logger.warning("No columns found in chart data")
                return

            # Write and yield the header row immediately so the browser
            # starts the download before the first data page is serialized.
            writer = csv.DictWriter(output, fieldnames=columns)
            writer.writeheader()
            yield output.getvalue()
            output.truncate(0)
            output.seek(0)

            # Stream pages until no more data
            while data:
                logger.info(f"Streaming chart data page {page} with {len(data)} rows")
                for row in data:
                    writer.writerow(row)

                yield output.getvalue()
                output.truncate(0)
                output.seek(0)

                # A short page is necessarily the last one: skip the extra
                # warehouse round-trip that would just return an empty page.
                if len(data) < page_size:
                    break

                page += 1
                preview_data = charts_service.get_chart_data_table_preview(
                    org_warehouse, payload, page=page, limit=page_size
                )
                data = preview_data["data"]
        except Exception as error:
            logger.exception(
                f"Error streaming chart data for schema {payload.schema_name}.{payload.table_name}: {str(error)}"
            )
            # NOTE(review): if the failure happens after the first chunk was
            # yielded, the 200 status has already been sent and this only
            # truncates the download — confirm that is acceptable to callers.
            raise HttpError(500, "Internal server error")
        finally:
            # Close the buffer on every exit path, not just on success.
            output.close()

    # Generate filename from chart configuration
    chart_type = payload.chart_type or "chart"
    table_name = payload.table_name or "data"
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    filename = f"{chart_type}_{table_name}_{timestamp}.csv"

    # Stream response
    response = StreamingHttpResponse(
        stream_chart_data(org_warehouse, payload, page_size=5000),
        content_type="application/octet-stream",
    )
    # Bug fix: the header previously hard-coded a placeholder instead of
    # interpolating the computed filename, so every download was misnamed.
    response["Content-Disposition"] = f'attachment; filename="{filename}"'

    return response
875+
876+
788877
@charts_router.get("/{chart_id}/", response=ChartResponse)
789878
@has_permission(["can_view_charts"])
790879
def get_chart(request, chart_id: int):

0 commit comments

Comments
 (0)