From 6747cc5e585d09652fde7174cc60971b96a20507 Mon Sep 17 00:00:00 2001 From: Himanshut4d Date: Fri, 31 Oct 2025 13:33:11 +0530 Subject: [PATCH 1/2] csv download on public dashboard working --- ddpui/api/charts_api.py | 122 ++++++++++++++++++++++------------------ ddpui/api/public_api.py | 65 ++++++++++++++++++++- 2 files changed, 132 insertions(+), 55 deletions(-) diff --git a/ddpui/api/charts_api.py b/ddpui/api/charts_api.py index 1eb63bd9..27684f17 100644 --- a/ddpui/api/charts_api.py +++ b/ddpui/api/charts_api.py @@ -787,76 +787,90 @@ def generate_map_chart_data(request, payload: ChartDataPayload): } -@charts_router.post("/download-csv/") -@has_permission(["can_view_charts"]) -def download_chart_data_csv(request, payload: ChartDataPayload): - """Stream and download chart data as CSV with all filters/aggregations applied""" +def stream_chart_data_csv(org_warehouse, payload: ChartDataPayload, page_size=5000): + """ + Common function to stream chart data as CSV - orguser: OrgUser = request.orguser + This function is used by both authenticated and public CSV download endpoints. + It generates CSV data in chunks by paginating through the chart data. 
- # Validate user has access to schema/table - if not has_schema_access(request, payload.schema_name): - raise HttpError(403, "Access to schema denied") + Args: + org_warehouse: OrgWarehouse instance for database connection + payload: ChartDataPayload containing chart configuration and filters + page_size: Number of rows to fetch per page (default 5000) - org_warehouse = OrgWarehouse.objects.filter(org=orguser.org).first() + Yields: + CSV data chunks as strings + """ + page = 0 + header_written = False + output = StringIO() - if not org_warehouse: - raise HttpError(404, "Please set up your warehouse first") + # Fetch first page + try: + preview_data = charts_service.get_chart_data_table_preview( + org_warehouse, payload, page=page, limit=page_size + ) + data = preview_data["data"] + columns = preview_data["columns"] - def stream_chart_data(org_warehouse, payload, page_size=5000): - """Generator that yields CSV data in chunks""" - page = 0 - header_written = False - output = StringIO() + if not columns: + logger.warning("No columns found in chart data") + return - # Fetch first page - try: - preview_data = charts_service.get_chart_data_table_preview( - org_warehouse, payload, page=page, limit=page_size - ) - data = preview_data["data"] - columns = preview_data["columns"] + # Create CSV writer and write headers immediately + writer = csv.DictWriter(output, fieldnames=columns) + writer.writeheader() + header_written = True + + # Yield header + yield output.getvalue() + output.truncate(0) + output.seek(0) - if not columns: - logger.warning("No columns found in chart data") - return + # Stream pages until no more data + while len(data) > 0: + logger.info(f"Streaming chart data page {page} with {len(data)} rows") - # Create CSV writer and write headers immediately - writer = csv.DictWriter(output, fieldnames=columns) - writer.writeheader() - header_written = True + for row in data: + writer.writerow(row) - # Yield header + # Yield current chunk yield output.getvalue() 
output.truncate(0) output.seek(0) - # Stream pages until no more data - while len(data) > 0: - logger.info(f"Streaming chart data page {page} with {len(data)} rows") + # Fetch next page + page += 1 + preview_data = charts_service.get_chart_data_table_preview( + org_warehouse, payload, page=page, limit=page_size + ) + data = preview_data["data"] - for row in data: - writer.writerow(row) + output.close() - # Yield current chunk - yield output.getvalue() - output.truncate(0) - output.seek(0) + except Exception as error: + logger.exception( + f"Error streaming chart data for schema {payload.schema_name}.{payload.table_name}: {str(error)}" + ) + raise HttpError(500, "Internal server error") - # Fetch next page - page += 1 - preview_data = charts_service.get_chart_data_table_preview( - org_warehouse, payload, page=page, limit=page_size - ) - data = preview_data["data"] - output.close() +@charts_router.post("/download-csv/") +@has_permission(["can_view_charts"]) +def download_chart_data_csv(request, payload: ChartDataPayload): + """Stream and download chart data as CSV with all filters/aggregations applied (authenticated)""" - except Exception as error: - logger.exception( - f"Error streaming chart data for schema {payload.schema_name}.{payload.table_name}: {str(error)}" - ) - raise HttpError(500, "Internal server error") + orguser: OrgUser = request.orguser + + # Validate user has access to schema/table + if not has_schema_access(request, payload.schema_name): + raise HttpError(403, "Access to schema denied") + + org_warehouse = OrgWarehouse.objects.filter(org=orguser.org).first() + + if not org_warehouse: + raise HttpError(404, "Please set up your warehouse first") # Generate filename from chart configuration chart_type = payload.chart_type or "chart" @@ -864,9 +878,9 @@ def stream_chart_data(org_warehouse, payload, page_size=5000): timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") filename = f"{chart_type}_{table_name}_{timestamp}.csv" - # Stream response + # Stream 
response using common function response = StreamingHttpResponse( - stream_chart_data(org_warehouse, payload, page_size=5000), + stream_chart_data_csv(org_warehouse, payload, page_size=5000), content_type="application/octet-stream", ) response["Content-Disposition"] = f'attachment; filename="{filename}"' diff --git a/ddpui/api/public_api.py b/ddpui/api/public_api.py index 97dc0615..896668f3 100644 --- a/ddpui/api/public_api.py +++ b/ddpui/api/public_api.py @@ -3,10 +3,13 @@ import json from typing import Optional, List import copy +from datetime import datetime from ninja import Router, Schema from django.utils import timezone from django.db.models import F +from django.http import StreamingHttpResponse +from ninja.errors import HttpError from ddpui.datainsights.warehouse.warehouse_factory import WarehouseFactory from ddpui.models.dashboard import Dashboard @@ -26,7 +29,7 @@ FilterPreviewResponse, FilterOptionResponse as AuthFilterOptionResponse, ) -from ddpui.schemas.chart_schema import ChartDataResponse +from ddpui.schemas.chart_schema import ChartDataResponse, ChartDataPayload logger = CustomLogger("ddpui") @@ -904,3 +907,63 @@ def get_region_geojsons_public(request, region_id: int): except Exception as e: logger.error(f"Public region geojsons error for {region_id}: {str(e)}") return [] + + +@public_router.post("/dashboards/{token}/charts/{chart_id}/download-csv/") +def download_public_chart_data_csv(request, token: str, chart_id: int, payload: ChartDataPayload): + """ + Stream and download chart data as CSV for public dashboards + + PURPOSE: Enables CSV export functionality on public dashboards without authentication. + This is essential for users viewing shared dashboards who want to download the data. 
+ + Args: + token: Public dashboard share token + chart_id: Chart ID to export data from + payload: ChartDataPayload containing chart configuration and filters + + Returns: + StreamingHttpResponse with CSV data + """ + try: + # Verify dashboard is public and get organization + dashboard = Dashboard.objects.get(public_share_token=token, is_public=True) + + # Get the chart and verify it belongs to the dashboard's organization + chart = Chart.objects.filter(id=chart_id, org=dashboard.org).first() + if not chart: + raise HttpError(404, "Chart not found in dashboard's organization") + + # Get organization warehouse + org_warehouse = OrgWarehouse.objects.filter(org=dashboard.org).first() + if not org_warehouse: + raise HttpError(404, "No warehouse configured for organization") + + # Import the common CSV streaming function + from ddpui.api.charts_api import stream_chart_data_csv + + # Generate filename from chart configuration + chart_type = payload.chart_type or "chart" + table_name = payload.table_name or "data" + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + filename = f"{chart_type}_{table_name}_{timestamp}.csv" + + # Stream response using the same common function as authenticated endpoint + response = StreamingHttpResponse( + stream_chart_data_csv(org_warehouse, payload, page_size=5000), + content_type="application/octet-stream", + ) + response["Content-Disposition"] = f'attachment; filename="{filename}"' + + logger.info( + f"Public CSV download for chart {chart_id} on dashboard {dashboard.id} ({dashboard.title})" + ) + + return response + + except Dashboard.DoesNotExist: + logger.warning(f"Public CSV download failed - dashboard not found for token: {token}") + raise HttpError(404, "Dashboard not found or no longer public") + except Exception as e: + logger.error(f"Public CSV download error for chart {chart_id}: {str(e)}") + raise HttpError(500, f"CSV download failed: {str(e)}") From 250f8de8e8a0da8170765fa0896f088cd29be264 Mon Sep 17 00:00:00 2001 From: 
Himanshut4d Date: Fri, 31 Oct 2025 13:45:29 +0530 Subject: [PATCH 2/2] cr-suggestions --- ddpui/api/charts_api.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/ddpui/api/charts_api.py b/ddpui/api/charts_api.py index 27684f17..6023c4dc 100644 --- a/ddpui/api/charts_api.py +++ b/ddpui/api/charts_api.py @@ -803,11 +803,10 @@ def stream_chart_data_csv(org_warehouse, payload: ChartDataPayload, page_size=50 CSV data chunks as strings """ page = 0 - header_written = False output = StringIO() - # Fetch first page try: + # Fetch first page preview_data = charts_service.get_chart_data_table_preview( org_warehouse, payload, page=page, limit=page_size ) @@ -821,7 +820,6 @@ def stream_chart_data_csv(org_warehouse, payload: ChartDataPayload, page_size=50 # Create CSV writer and write headers immediately writer = csv.DictWriter(output, fieldnames=columns) writer.writeheader() - header_written = True # Yield header yield output.getvalue() @@ -847,13 +845,13 @@ def stream_chart_data_csv(org_warehouse, payload: ChartDataPayload, page_size=50 ) data = preview_data["data"] - output.close() - except Exception as error: logger.exception( - f"Error streaming chart data for schema {payload.schema_name}.{payload.table_name}: {str(error)}" + f"Error streaming chart data for schema {payload.schema_name}.{payload.table_name}" ) - raise HttpError(500, "Internal server error") + raise HttpError(500, "Internal server error") from error + finally: + output.close() @charts_router.post("/download-csv/")