
Commit f66e388

Merge remote-tracking branch 'origin/main'
2 parents: 91937c7 + 77f5ca2

File tree: 2 files changed, +130 −55 lines

ddpui/api/charts_api.py

Lines changed: 66 additions & 54 deletions
@@ -787,86 +787,98 @@ def generate_map_chart_data(request, payload: ChartDataPayload):
     }


-@charts_router.post("/download-csv/")
-@has_permission(["can_view_charts"])
-def download_chart_data_csv(request, payload: ChartDataPayload):
-    """Stream and download chart data as CSV with all filters/aggregations applied"""
+def stream_chart_data_csv(org_warehouse, payload: ChartDataPayload, page_size=5000):
+    """
+    Common function to stream chart data as CSV

-    orguser: OrgUser = request.orguser
+    This function is used by both authenticated and public CSV download endpoints.
+    It generates CSV data in chunks by paginating through the chart data.

-    # Validate user has access to schema/table
-    if not has_schema_access(request, payload.schema_name):
-        raise HttpError(403, "Access to schema denied")
+    Args:
+        org_warehouse: OrgWarehouse instance for database connection
+        payload: ChartDataPayload containing chart configuration and filters
+        page_size: Number of rows to fetch per page (default 5000)

-    org_warehouse = OrgWarehouse.objects.filter(org=orguser.org).first()
+    Yields:
+        CSV data chunks as strings
+    """
+    page = 0
+    output = StringIO()

-    if not org_warehouse:
-        raise HttpError(404, "Please set up your warehouse first")
+    try:
+        # Fetch first page
+        preview_data = charts_service.get_chart_data_table_preview(
+            org_warehouse, payload, page=page, limit=page_size
+        )
+        data = preview_data["data"]
+        columns = preview_data["columns"]

-    def stream_chart_data(org_warehouse, payload, page_size=5000):
-        """Generator that yields CSV data in chunks"""
-        page = 0
-        header_written = False
-        output = StringIO()
+        if not columns:
+            logger.warning("No columns found in chart data")
+            return

-        # Fetch first page
-        try:
-            preview_data = charts_service.get_chart_data_table_preview(
-                org_warehouse, payload, page=page, limit=page_size
-            )
-            data = preview_data["data"]
-            columns = preview_data["columns"]
+        # Create CSV writer and write headers immediately
+        writer = csv.DictWriter(output, fieldnames=columns)
+        writer.writeheader()
+
+        # Yield header
+        yield output.getvalue()
+        output.truncate(0)
+        output.seek(0)

-            if not columns:
-                logger.warning("No columns found in chart data")
-                return
+        # Stream pages until no more data
+        while len(data) > 0:
+            logger.info(f"Streaming chart data page {page} with {len(data)} rows")

-            # Create CSV writer and write headers immediately
-            writer = csv.DictWriter(output, fieldnames=columns)
-            writer.writeheader()
-            header_written = True
+            for row in data:
+                writer.writerow(row)

-            # Yield header
+            # Yield current chunk
             yield output.getvalue()
             output.truncate(0)
             output.seek(0)

-            # Stream pages until no more data
-            while len(data) > 0:
-                logger.info(f"Streaming chart data page {page} with {len(data)} rows")
+            # Fetch next page
+            page += 1
+            preview_data = charts_service.get_chart_data_table_preview(
+                org_warehouse, payload, page=page, limit=page_size
+            )
+            data = preview_data["data"]

-                for row in data:
-                    writer.writerow(row)
+    except Exception as error:
+        logger.exception(
+            f"Error streaming chart data for schema {payload.schema_name}.{payload.table_name}"
+        )
+        raise HttpError(500, "Internal server error") from error
+    finally:
+        output.close()

-                # Yield current chunk
-                yield output.getvalue()
-                output.truncate(0)
-                output.seek(0)

-                # Fetch next page
-                page += 1
-                preview_data = charts_service.get_chart_data_table_preview(
-                    org_warehouse, payload, page=page, limit=page_size
-                )
-                data = preview_data["data"]
+@charts_router.post("/download-csv/")
+@has_permission(["can_view_charts"])
+def download_chart_data_csv(request, payload: ChartDataPayload):
+    """Stream and download chart data as CSV with all filters/aggregations applied (authenticated)"""

-            output.close()
+    orguser: OrgUser = request.orguser

-        except Exception as error:
-            logger.exception(
-                f"Error streaming chart data for schema {payload.schema_name}.{payload.table_name}: {str(error)}"
-            )
-            raise HttpError(500, "Internal server error")
+    # Validate user has access to schema/table
+    if not has_schema_access(request, payload.schema_name):
+        raise HttpError(403, "Access to schema denied")
+
+    org_warehouse = OrgWarehouse.objects.filter(org=orguser.org).first()
+
+    if not org_warehouse:
+        raise HttpError(404, "Please set up your warehouse first")

     # Generate filename from chart configuration
     chart_type = payload.chart_type or "chart"
     table_name = payload.table_name or "data"
     timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
     filename = f"{chart_type}_{table_name}_{timestamp}.csv"

-    # Stream response
+    # Stream response using common function
     response = StreamingHttpResponse(
-        stream_chart_data(org_warehouse, payload, page_size=5000),
+        stream_chart_data_csv(org_warehouse, payload, page_size=5000),
         content_type="application/octet-stream",
     )
     response["Content-Disposition"] = f'attachment; filename="{filename}"'
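The key change above is that the paginate-and-yield generator is hoisted out of the view into a module-level `stream_chart_data_csv`, so both endpoints can reuse it: each page of rows is written through `csv.DictWriter` into one `StringIO` buffer, the buffer contents are yielded as a chunk, and the buffer is truncated and reused, keeping memory bounded regardless of result size. Below is a minimal, self-contained sketch of that pattern; `fetch_page` is a hypothetical stand-in for `charts_service.get_chart_data_table_preview`, and the column names and row counts are made up for illustration.

import csv
from io import StringIO


def fetch_page(page, limit):
    """Hypothetical stand-in for charts_service.get_chart_data_table_preview."""
    total_rows = 12  # pretend the underlying query returns 12 rows in all
    start, end = page * limit, min((page + 1) * limit, total_rows)
    rows = [{"region": f"r{i}", "total": i} for i in range(start, end)]
    return {"columns": ["region", "total"], "data": rows}


def stream_csv(page_size=5):
    """Yield CSV text chunk by chunk, reusing a single StringIO buffer."""
    page = 0
    output = StringIO()
    preview = fetch_page(page, page_size)
    columns, data = preview["columns"], preview["data"]
    if not columns:
        return

    writer = csv.DictWriter(output, fieldnames=columns)
    writer.writeheader()
    yield output.getvalue()      # header chunk goes out first
    output.truncate(0)
    output.seek(0)

    while data:
        writer.writerows(data)   # one page of rows per chunk
        yield output.getvalue()
        output.truncate(0)
        output.seek(0)
        page += 1
        data = fetch_page(page, page_size)["data"]


if __name__ == "__main__":
    print("".join(stream_csv()), end="")

Yielding the header as its own chunk means the client starts receiving bytes before the full result set has been paged through, which is what lets StreamingHttpResponse keep large downloads responsive.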

ddpui/api/public_api.py

Lines changed: 64 additions & 1 deletion
@@ -3,10 +3,13 @@
 import json
 from typing import Optional, List
 import copy
+from datetime import datetime

 from ninja import Router, Schema
 from django.utils import timezone
 from django.db.models import F
+from django.http import StreamingHttpResponse
+from ninja.errors import HttpError

 from ddpui.datainsights.warehouse.warehouse_factory import WarehouseFactory
 from ddpui.models.dashboard import Dashboard
@@ -26,7 +29,7 @@
     FilterPreviewResponse,
     FilterOptionResponse as AuthFilterOptionResponse,
 )
-from ddpui.schemas.chart_schema import ChartDataResponse
+from ddpui.schemas.chart_schema import ChartDataResponse, ChartDataPayload

 logger = CustomLogger("ddpui")

@@ -903,3 +906,63 @@ def get_region_geojsons_public(request, region_id: int):
     except Exception as e:
         logger.error(f"Public region geojsons error for {region_id}: {str(e)}")
         return []
+
+
+@public_router.post("/dashboards/{token}/charts/{chart_id}/download-csv/")
+def download_public_chart_data_csv(request, token: str, chart_id: int, payload: ChartDataPayload):
+    """
+    Stream and download chart data as CSV for public dashboards
+
+    PURPOSE: Enables CSV export functionality on public dashboards without authentication.
+    This is essential for users viewing shared dashboards who want to download the data.
+
+    Args:
+        token: Public dashboard share token
+        chart_id: Chart ID to export data from
+        payload: ChartDataPayload containing chart configuration and filters
+
+    Returns:
+        StreamingHttpResponse with CSV data
+    """
+    try:
+        # Verify dashboard is public and get organization
+        dashboard = Dashboard.objects.get(public_share_token=token, is_public=True)
+
+        # Get the chart and verify it belongs to the dashboard's organization
+        chart = Chart.objects.filter(id=chart_id, org=dashboard.org).first()
+        if not chart:
+            raise HttpError(404, "Chart not found in dashboard's organization")
+
+        # Get organization warehouse
+        org_warehouse = OrgWarehouse.objects.filter(org=dashboard.org).first()
+        if not org_warehouse:
+            raise HttpError(404, "No warehouse configured for organization")
+
+        # Import the common CSV streaming function
+        from ddpui.api.charts_api import stream_chart_data_csv
+
+        # Generate filename from chart configuration
+        chart_type = payload.chart_type or "chart"
+        table_name = payload.table_name or "data"
+        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+        filename = f"{chart_type}_{table_name}_{timestamp}.csv"
+
+        # Stream response using the same common function as authenticated endpoint
+        response = StreamingHttpResponse(
+            stream_chart_data_csv(org_warehouse, payload, page_size=5000),
+            content_type="application/octet-stream",
+        )
+        response["Content-Disposition"] = f'attachment; filename="{filename}"'
+
+        logger.info(
+            f"Public CSV download for chart {chart_id} on dashboard {dashboard.id} ({dashboard.title})"
+        )
+
+        return response
+
+    except Dashboard.DoesNotExist:
+        logger.warning(f"Public CSV download failed - dashboard not found for token: {token}")
+        raise HttpError(404, "Dashboard not found or no longer public")
+    except Exception as e:
+        logger.error(f"Public CSV download error for chart {chart_id}: {str(e)}")
+        raise HttpError(500, f"CSV download failed: {str(e)}")
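For context, here is a sketch of how a client might consume the new public endpoint. It is illustrative only: the host, share token, chart id, and payload values are placeholders, and any URL prefix the public router is mounted under is omitted. Only the route shape and the payload fields actually used in the diff (schema_name, table_name, chart_type) come from the commit; ChartDataPayload may require additional fields.

import requests

# Placeholders, not taken from the commit: host, token, chart id, payload values.
BASE_URL = "https://dalgo.example.org"   # hypothetical host; router prefix omitted
TOKEN = "public-share-token"             # the dashboard's public_share_token
CHART_ID = 42

payload = {
    "schema_name": "analytics",
    "table_name": "monthly_sales",
    "chart_type": "bar",
}

url = f"{BASE_URL}/dashboards/{TOKEN}/charts/{CHART_ID}/download-csv/"
with requests.post(url, json=payload, stream=True, timeout=300) as resp:
    resp.raise_for_status()
    # The server streams the CSV in chunks; write them to disk as they arrive.
    with open("chart_data.csv", "wb") as fh:
        for chunk in resp.iter_content(chunk_size=8192):
            fh.write(chunk)

Because the view returns a StreamingHttpResponse, `stream=True` plus `iter_content` lets the client persist the CSV chunk by chunk instead of buffering the whole file in memory.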
