Merged
120 changes: 66 additions & 54 deletions — ddpui/api/charts_api.py

@@ -787,86 +787,98 @@ def generate_map_chart_data(request, payload: ChartDataPayload):
     }
 
 
-@charts_router.post("/download-csv/")
-@has_permission(["can_view_charts"])
-def download_chart_data_csv(request, payload: ChartDataPayload):
-    """Stream and download chart data as CSV with all filters/aggregations applied"""
-
-    orguser: OrgUser = request.orguser
-
-    # Validate user has access to schema/table
-    if not has_schema_access(request, payload.schema_name):
-        raise HttpError(403, "Access to schema denied")
-
-    org_warehouse = OrgWarehouse.objects.filter(org=orguser.org).first()
-
-    if not org_warehouse:
-        raise HttpError(404, "Please set up your warehouse first")
-
-    def stream_chart_data(org_warehouse, payload, page_size=5000):
-        """Generator that yields CSV data in chunks"""
-        page = 0
-        header_written = False
-        output = StringIO()
-
-        # Fetch first page
-        try:
-            preview_data = charts_service.get_chart_data_table_preview(
-                org_warehouse, payload, page=page, limit=page_size
-            )
-            data = preview_data["data"]
-            columns = preview_data["columns"]
-
-            if not columns:
-                logger.warning("No columns found in chart data")
-                return
-
-            # Create CSV writer and write headers immediately
-            writer = csv.DictWriter(output, fieldnames=columns)
-            writer.writeheader()
-            header_written = True
-
-            # Yield header
-            yield output.getvalue()
-            output.truncate(0)
-            output.seek(0)
-
-            # Stream pages until no more data
-            while len(data) > 0:
-                logger.info(f"Streaming chart data page {page} with {len(data)} rows")
-
-                for row in data:
-                    writer.writerow(row)
-
-                # Yield current chunk
-                yield output.getvalue()
-                output.truncate(0)
-                output.seek(0)
-
-                # Fetch next page
-                page += 1
-                preview_data = charts_service.get_chart_data_table_preview(
-                    org_warehouse, payload, page=page, limit=page_size
-                )
-                data = preview_data["data"]
-
-            output.close()
-
-        except Exception as error:
-            logger.exception(
-                f"Error streaming chart data for schema {payload.schema_name}.{payload.table_name}: {str(error)}"
-            )
-            raise HttpError(500, "Internal server error")
+def stream_chart_data_csv(org_warehouse, payload: ChartDataPayload, page_size=5000):
+    """
+    Common function to stream chart data as CSV
+
+    This function is used by both authenticated and public CSV download endpoints.
+    It generates CSV data in chunks by paginating through the chart data.
+
+    Args:
+        org_warehouse: OrgWarehouse instance for database connection
+        payload: ChartDataPayload containing chart configuration and filters
+        page_size: Number of rows to fetch per page (default 5000)
+
+    Yields:
+        CSV data chunks as strings
+    """
+    page = 0
+    output = StringIO()
+
+    try:
+        # Fetch first page
+        preview_data = charts_service.get_chart_data_table_preview(
+            org_warehouse, payload, page=page, limit=page_size
+        )
+        data = preview_data["data"]
+        columns = preview_data["columns"]
+
+        if not columns:
+            logger.warning("No columns found in chart data")
+            return
+
+        # Create CSV writer and write headers immediately
+        writer = csv.DictWriter(output, fieldnames=columns)
+        writer.writeheader()
+
+        # Yield header
+        yield output.getvalue()
+        output.truncate(0)
+        output.seek(0)
+
+        # Stream pages until no more data
+        while len(data) > 0:
+            logger.info(f"Streaming chart data page {page} with {len(data)} rows")
+
+            for row in data:
+                writer.writerow(row)
+
+            # Yield current chunk
+            yield output.getvalue()
+            output.truncate(0)
+            output.seek(0)
+
+            # Fetch next page
+            page += 1
+            preview_data = charts_service.get_chart_data_table_preview(
+                org_warehouse, payload, page=page, limit=page_size
+            )
+            data = preview_data["data"]
+    except Exception as error:
+        logger.exception(
+            f"Error streaming chart data for schema {payload.schema_name}.{payload.table_name}"
+        )
+        raise HttpError(500, "Internal server error") from error
+    finally:
+        output.close()
+
+
+@charts_router.post("/download-csv/")
+@has_permission(["can_view_charts"])
+def download_chart_data_csv(request, payload: ChartDataPayload):
+    """Stream and download chart data as CSV with all filters/aggregations applied (authenticated)"""
+
+    orguser: OrgUser = request.orguser
+
+    # Validate user has access to schema/table
+    if not has_schema_access(request, payload.schema_name):
+        raise HttpError(403, "Access to schema denied")
+
+    org_warehouse = OrgWarehouse.objects.filter(org=orguser.org).first()
+
+    if not org_warehouse:
+        raise HttpError(404, "Please set up your warehouse first")
 
     # Generate filename from chart configuration
     chart_type = payload.chart_type or "chart"
     table_name = payload.table_name or "data"
     timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
     filename = f"{chart_type}_{table_name}_{timestamp}.csv"
 
-    # Stream response
+    # Stream response using common function
     response = StreamingHttpResponse(
-        stream_chart_data(org_warehouse, payload, page_size=5000),
+        stream_chart_data_csv(org_warehouse, payload, page_size=5000),
         content_type="application/octet-stream",
     )
     response["Content-Disposition"] = f'attachment; filename="{filename}"'
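The refactor keeps the streaming approach of the old nested generator: write the CSV header as the first chunk, then buffer one page of rows at a time in a StringIO and flush it. A minimal standalone sketch of that paginate-and-yield pattern follows; fetch_page is a hypothetical stand-in for charts_service.get_chart_data_table_preview and is not part of this PR.

# Sketch of the paginate-and-yield CSV pattern used by stream_chart_data_csv.
# fetch_page() is a made-up paginator standing in for
# charts_service.get_chart_data_table_preview; it is not part of the codebase.
import csv
from io import StringIO


def fetch_page(page, limit):
    """Return up to `limit` rows for `page`; empty list once exhausted."""
    rows = [{"state": f"state_{i}", "count": i} for i in range(12)]
    return {"columns": ["state", "count"], "data": rows[page * limit : (page + 1) * limit]}


def stream_csv(page_size=5):
    output = StringIO()
    page = 0
    preview = fetch_page(page, page_size)
    data, columns = preview["data"], preview["columns"]
    if not columns:
        return

    writer = csv.DictWriter(output, fieldnames=columns)
    writer.writeheader()
    yield output.getvalue()      # header goes out as the first chunk
    output.truncate(0)
    output.seek(0)

    while data:
        writer.writerows(data)   # buffer one page of rows
        yield output.getvalue()  # flush that page as a single chunk
        output.truncate(0)
        output.seek(0)
        page += 1
        data = fetch_page(page, page_size)["data"]


print("".join(stream_csv()))     # header + 12 data rows of CSV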
65 changes: 64 additions & 1 deletion — ddpui/api/public_api.py

@@ -3,10 +3,13 @@
 import json
 from typing import Optional, List
 import copy
+from datetime import datetime
 
 from ninja import Router, Schema
 from django.utils import timezone
 from django.db.models import F
+from django.http import StreamingHttpResponse
+from ninja.errors import HttpError
 
 from ddpui.datainsights.warehouse.warehouse_factory import WarehouseFactory
 from ddpui.models.dashboard import Dashboard
@@ -26,7 +29,7 @@
     FilterPreviewResponse,
     FilterOptionResponse as AuthFilterOptionResponse,
 )
-from ddpui.schemas.chart_schema import ChartDataResponse
+from ddpui.schemas.chart_schema import ChartDataResponse, ChartDataPayload
 
 logger = CustomLogger("ddpui")
@@ -904,3 +907,63 @@ def get_region_geojsons_public(request, region_id: int):
     except Exception as e:
         logger.error(f"Public region geojsons error for {region_id}: {str(e)}")
         return []
+
+
+@public_router.post("/dashboards/{token}/charts/{chart_id}/download-csv/")
+def download_public_chart_data_csv(request, token: str, chart_id: int, payload: ChartDataPayload):
+    """
+    Stream and download chart data as CSV for public dashboards
+
+    PURPOSE: Enables CSV export functionality on public dashboards without authentication.
+    This is essential for users viewing shared dashboards who want to download the data.
+
+    Args:
+        token: Public dashboard share token
+        chart_id: Chart ID to export data from
+        payload: ChartDataPayload containing chart configuration and filters
+
+    Returns:
+        StreamingHttpResponse with CSV data
+    """
+    try:
+        # Verify dashboard is public and get organization
+        dashboard = Dashboard.objects.get(public_share_token=token, is_public=True)
+
+        # Get the chart and verify it belongs to the dashboard's organization
+        chart = Chart.objects.filter(id=chart_id, org=dashboard.org).first()
+        if not chart:
+            raise HttpError(404, "Chart not found in dashboard's organization")
+
+        # Get organization warehouse
+        org_warehouse = OrgWarehouse.objects.filter(org=dashboard.org).first()
+        if not org_warehouse:
+            raise HttpError(404, "No warehouse configured for organization")
+
+        # Import the common CSV streaming function
+        from ddpui.api.charts_api import stream_chart_data_csv
+
+        # Generate filename from chart configuration
+        chart_type = payload.chart_type or "chart"
+        table_name = payload.table_name or "data"
+        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+        filename = f"{chart_type}_{table_name}_{timestamp}.csv"
+
+        # Stream response using the same common function as authenticated endpoint
+        response = StreamingHttpResponse(
+            stream_chart_data_csv(org_warehouse, payload, page_size=5000),
+            content_type="application/octet-stream",
+        )
+        response["Content-Disposition"] = f'attachment; filename="{filename}"'
+
+        logger.info(
+            f"Public CSV download for chart {chart_id} on dashboard {dashboard.id} ({dashboard.title})"
+        )
+
+        return response
+
+    except Dashboard.DoesNotExist:
+        logger.warning(f"Public CSV download failed - dashboard not found for token: {token}")
+        raise HttpError(404, "Dashboard not found or no longer public")
+    except Exception as e:
+        logger.error(f"Public CSV download error for chart {chart_id}: {str(e)}")
+        raise HttpError(500, f"CSV download failed: {str(e)}")