@@ -787,86 +787,98 @@ def generate_map_chart_data(request, payload: ChartDataPayload):
     }
 
 
-@charts_router.post("/download-csv/")
-@has_permission(["can_view_charts"])
-def download_chart_data_csv(request, payload: ChartDataPayload):
-    """Stream and download chart data as CSV with all filters/aggregations applied"""
+def stream_chart_data_csv(org_warehouse, payload: ChartDataPayload, page_size=5000):
+    """
+    Common function to stream chart data as CSV
 
-    orguser: OrgUser = request.orguser
+    This function is used by both authenticated and public CSV download endpoints.
+    It generates CSV data in chunks by paginating through the chart data.
 
-    # Validate user has access to schema/table
-    if not has_schema_access(request, payload.schema_name):
-        raise HttpError(403, "Access to schema denied")
+    Args:
+        org_warehouse: OrgWarehouse instance for database connection
+        payload: ChartDataPayload containing chart configuration and filters
+        page_size: Number of rows to fetch per page (default 5000)
 
-    org_warehouse = OrgWarehouse.objects.filter(org=orguser.org).first()
+    Yields:
+        CSV data chunks as strings
+    """
+    page = 0
+    output = StringIO()
 
-    if not org_warehouse:
-        raise HttpError(404, "Please set up your warehouse first")
+    try:
+        # Fetch first page
+        preview_data = charts_service.get_chart_data_table_preview(
+            org_warehouse, payload, page=page, limit=page_size
+        )
+        data = preview_data["data"]
+        columns = preview_data["columns"]
 
-    def stream_chart_data(org_warehouse, payload, page_size=5000):
-        """Generator that yields CSV data in chunks"""
-        page = 0
-        header_written = False
-        output = StringIO()
+        if not columns:
+            logger.warning("No columns found in chart data")
+            return
 
-        # Fetch first page
-        try:
-            preview_data = charts_service.get_chart_data_table_preview(
-                org_warehouse, payload, page=page, limit=page_size
-            )
-            data = preview_data["data"]
-            columns = preview_data["columns"]
+        # Create CSV writer and write headers immediately
+        writer = csv.DictWriter(output, fieldnames=columns)
+        writer.writeheader()
+
+        # Yield header
+        yield output.getvalue()
+        output.truncate(0)
+        output.seek(0)
 
-            if not columns:
-                logger.warning("No columns found in chart data")
-                return
+        # Stream pages until no more data
+        while len(data) > 0:
+            logger.info(f"Streaming chart data page {page} with {len(data)} rows")
 
-            # Create CSV writer and write headers immediately
-            writer = csv.DictWriter(output, fieldnames=columns)
-            writer.writeheader()
-            header_written = True
+            for row in data:
+                writer.writerow(row)
 
-            # Yield header
+            # Yield current chunk
             yield output.getvalue()
             output.truncate(0)
             output.seek(0)
 
-            # Stream pages until no more data
-            while len(data) > 0:
-                logger.info(f"Streaming chart data page {page} with {len(data)} rows")
+            # Fetch next page
+            page += 1
+            preview_data = charts_service.get_chart_data_table_preview(
+                org_warehouse, payload, page=page, limit=page_size
+            )
+            data = preview_data["data"]
 
-                for row in data:
-                    writer.writerow(row)
+    except Exception as error:
+        logger.exception(
+            f"Error streaming chart data for schema {payload.schema_name}.{payload.table_name}"
+        )
+        raise HttpError(500, "Internal server error") from error
+    finally:
+        output.close()
 
-                # Yield current chunk
-                yield output.getvalue()
-                output.truncate(0)
-                output.seek(0)
 
-                # Fetch next page
-                page += 1
-                preview_data = charts_service.get_chart_data_table_preview(
-                    org_warehouse, payload, page=page, limit=page_size
-                )
-                data = preview_data["data"]
+@charts_router.post("/download-csv/")
+@has_permission(["can_view_charts"])
+def download_chart_data_csv(request, payload: ChartDataPayload):
+    """Stream and download chart data as CSV with all filters/aggregations applied (authenticated)"""
 
-            output.close()
+    orguser: OrgUser = request.orguser
 
-        except Exception as error:
-            logger.exception(
-                f"Error streaming chart data for schema {payload.schema_name}.{payload.table_name}: {str(error)}"
-            )
-            raise HttpError(500, "Internal server error")
+    # Validate user has access to schema/table
+    if not has_schema_access(request, payload.schema_name):
+        raise HttpError(403, "Access to schema denied")
+
+    org_warehouse = OrgWarehouse.objects.filter(org=orguser.org).first()
+
+    if not org_warehouse:
+        raise HttpError(404, "Please set up your warehouse first")
 
     # Generate filename from chart configuration
     chart_type = payload.chart_type or "chart"
     table_name = payload.table_name or "data"
     timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
     filename = f"{chart_type}_{table_name}_{timestamp}.csv"
 
-    # Stream response
+    # Stream response using common function
     response = StreamingHttpResponse(
-        stream_chart_data(org_warehouse, payload, page_size=5000),
+        stream_chart_data_csv(org_warehouse, payload, page_size=5000),
         content_type="application/octet-stream",
     )
     response["Content-Disposition"] = f'attachment; filename="{filename}"'
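
The extracted generator builds every chunk in a single reusable `StringIO` buffer: write rows, yield the buffer's contents, then `truncate(0)`/`seek(0)` so the next page starts from an empty buffer instead of re-sending earlier rows. A minimal standalone sketch of that buffering pattern (the `stream_rows_as_csv` helper and the sample pages are illustrative, not part of this change):

```python
import csv
from io import StringIO


def stream_rows_as_csv(pages):
    """Yield CSV text chunks from an iterable of row-dict "pages".

    Mirrors the buffering pattern in stream_chart_data_csv: write into one
    StringIO, yield its current contents, then truncate(0)/seek(0) so the
    next chunk starts from an empty buffer.
    """
    output = StringIO()
    writer = None
    try:
        for page in pages:
            if not page:
                continue
            if writer is None:
                # The first non-empty page defines the CSV header.
                writer = csv.DictWriter(output, fieldnames=list(page[0].keys()))
                writer.writeheader()
            for row in page:
                writer.writerow(row)
            yield output.getvalue()
            output.truncate(0)
            output.seek(0)
    finally:
        output.close()


# Two "pages" of rows produce two CSV chunks; joined, they form one valid CSV.
pages = [
    [{"state": "KA", "count": 10}, {"state": "MH", "count": 7}],
    [{"state": "TN", "count": 3}],
]
print("".join(stream_rows_as_csv(pages)))
```

The generator in the diff differs only in that it yields the header as its own chunk and fetches each page from `charts_service.get_chart_data_table_preview` rather than from an in-memory list.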
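The new docstring says the generator is shared with a public CSV download endpoint, which is not part of this hunk. A hedged sketch of how such an endpoint could reuse `stream_chart_data_csv`, assuming the django-ninja-style routing used elsewhere in this file and a hypothetical `public_router` plus `get_public_org_warehouse` share-token lookup (neither name appears in this diff):

```python
from django.http import StreamingHttpResponse
from ninja import Router
from ninja.errors import HttpError

# Hypothetical router; ChartDataPayload and stream_chart_data_csv are assumed
# to be imported from the module this diff modifies.
public_router = Router()


def get_public_org_warehouse(token: str):
    """Illustrative stub: resolve the OrgWarehouse behind a public share token."""
    raise NotImplementedError


@public_router.post("/download-csv/{token}/")
def download_public_chart_data_csv(request, token: str, payload: ChartDataPayload):
    """Stream chart data as CSV for a publicly shared chart (no login required)."""
    # No @has_permission here; access is gated by the share token instead.
    org_warehouse = get_public_org_warehouse(token)
    if not org_warehouse:
        raise HttpError(404, "Shared chart not found")

    filename = f"{payload.chart_type or 'chart'}_{payload.table_name or 'data'}.csv"
    response = StreamingHttpResponse(
        stream_chart_data_csv(org_warehouse, payload, page_size=5000),
        content_type="application/octet-stream",
    )
    response["Content-Disposition"] = f'attachment; filename="{filename}"'
    return response
```

Because both endpoints hand the same generator to `StreamingHttpResponse`, the pagination, CSV formatting, and error handling live in one place, which is the point of this refactor.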