Skip to content

Commit

Permalink
[4.3] HELP-41845: generate CSVs providing a consistent cell ordering (2…
Browse files Browse the repository at this point in the history
…600hz#5495)

* HELP-41845: generate CSVs providing a consistent cell ordering

Rather than outputting rows in an ad-hoc manner, apply a consistent
ordering of JSON paths to cells when processing JSON objects that may
vary in the paths present.

* return 204 no content for empty CSVs

* logging

* logline updates

* not really an error anymore

* type cleanup

* handle already-encoded CSVs

* more tests for CSV fetching

* if we have an acc use it

* HELP-41845: handle customized csv chunk response

* maybe not setting headers?

* let's not set content-disposition to a file path

* make fmt
  • Loading branch information
jamesaimonetti authored and icehess committed Feb 19, 2019
1 parent b58f618 commit a13bbc5
Show file tree
Hide file tree
Showing 23 changed files with 949 additions and 241 deletions.
90 changes: 51 additions & 39 deletions applications/crossbar/src/api_resource.erl
Original file line number Diff line number Diff line change
Expand Up @@ -797,11 +797,8 @@ create_from_response(Req, Context) ->
create_from_response(Req, Context, 'undefined') ->
create_from_response(Req, Context, <<"*/*">>);
create_from_response(Req, Context, Accept) ->
CTPs = [F || {F, _} <- cb_context:content_types_provided(Context)],
DefaultFun = case CTPs of
[] -> 'to_json';
[F|_] -> F
end,
DefaultFun = content_type_provided_fun(Context),

case to_fun(Context, Accept, DefaultFun) of
'to_json' -> api_util:create_push_response(Req, Context);
'send_file' -> api_util:create_push_response(Req, Context, fun api_util:create_resp_file/2);
Expand All @@ -810,6 +807,12 @@ create_from_response(Req, Context, Accept) ->
api_util:create_push_response(Req, Context)
end.

%% @doc Choose the default response-rendering callback: the callback from the
%% first entry in the context's content-types-provided list, or 'to_json'
%% when the list is empty.
content_type_provided_fun(Context) ->
    case cb_context:content_types_provided(Context) of
        [] -> 'to_json';
        [{Fun, _ContentTypes}|_] -> Fun
    end.

-spec to_json(cowboy_req:req(), cb_context:context()) ->
{iolist() | kz_term:ne_binary() | 'stop', cowboy_req:req(), cb_context:context()}.
to_json(Req, Context) ->
Expand Down Expand Up @@ -871,6 +874,7 @@ to_binary(Req, Context, 'undefined') ->
],
NewContext = cb_context:setters(Context, Setters),
%% Respond, possibly with 206
lager:debug("replying with ~p", [ErrorCode]),
Req1 = cowboy_req:reply(kz_term:to_binary(ErrorCode), cb_context:resp_headers(NewContext), Content, Req),
{'stop', Req1, NewContext}
end;
Expand Down Expand Up @@ -944,12 +948,16 @@ accept_matches_provided(Major, Minor, CTPs) ->
{iolist(), cowboy_req:req(), cb_context:context()}.
%% @doc Render the response as CSV, setting the content-type response header.
%% Chunked requests are streamed via to_chunk/3; otherwise the `to_csv'
%% binding is folded over the request/context and a pull response is built
%% with the CSV content generator.
%% NOTE(review): diff residue left the superseded single-line
%% create_pull_response/3 call alongside its multi-line replacement, making
%% the clause invalid; only the replacement is kept.
to_csv(Req0, Context0) ->
    case cb_context:fetch(Context0, 'is_chunked') of
        'true' ->
            to_chunk(<<"to_csv">>, Req0, cb_context:add_resp_header(Context0, <<"content-type">>, <<"text/csv">>));
        _ ->
            lager:debug("run: to_csv"),
            Event = to_fun_event_name(<<"to_csv">>, Context0),
            {Req1, Context1} = crossbar_bindings:fold(Event, {Req0, Context0}),
            api_util:create_pull_response(Req1
                                         ,cb_context:add_resp_header(Context1, <<"content-type">>, <<"text/csv">>)
                                         ,fun api_util:create_csv_resp_content/2
                                         )
    end.

-spec to_pdf(cowboy_req:req(), cb_context:context()) ->
Expand Down Expand Up @@ -1005,28 +1013,33 @@ next_chunk_fold(#{chunking_started := StartedChunk
,context := Context0
,chunk_response_type := _ToFun
}=ChunkMap0) ->
lager:debug("(chunked) calling next chunk"),
lager:debug("(chunked) calling next chunk ~s", [_ToFun]),
Context1 = cb_context:store(Context0, 'chunking_started', StartedChunk),
ChunkMap1 = #{context := Context2
,cowboy_req := Req0
,event_name := Event
} = crossbar_view:next_chunk(ChunkMap0#{context := Context1}),
ChunkMap1 = #{context := Context2} = crossbar_view:next_chunk(ChunkMap0#{context := Context1}),

case api_util:succeeded(Context2)
andalso crossbar_bindings:fold(Event, {Req0, Context2})
of
next_chunk_results(ChunkMap1, api_util:succeeded(Context2)).

next_chunk_results(#{context := Context
,chunking_started := StartedChunk
}=ChunkMap, 'false') ->
lager:debug("(chunked) getting next chunk was unsuccessful"),
finish_chunked_response(ChunkMap#{context => reset_context_between_chunks(Context, StartedChunk)});
next_chunk_results(#{context := Context0
,cowboy_req := Req0
,chunk_response_type := _ToFun
,chunking_started := StartedChunk
,event_name := Event
}=ChunkMap, 'true') ->
{Req1, Context1} = crossbar_bindings:fold(Event, {Req0, Context0}),
lager:debug("(chunked) ran '~s'", [_ToFun]),
case api_util:succeeded(Context1) of
'true' ->
process_chunk(ChunkMap#{cowboy_req := Req1
,context := Context1
});
'false' ->
lager:debug("(chunked) getting next chunk was unsuccessful"),
finish_chunked_response(ChunkMap1#{context => reset_context_between_chunks(Context2, StartedChunk)});
{Req1, Context3} ->
lager:debug("(chunked) ran '~s'", [_ToFun]),
case api_util:succeeded(Context3) of
'true' ->
process_chunk(ChunkMap1#{cowboy_req := Req1, context := Context3});
'false' ->
lager:debug("(chunked) '~s' was unsuccessful", [_ToFun]),
finish_chunked_response(ChunkMap1#{context => reset_context_between_chunks(Context3, StartedChunk)})
end
lager:debug("(chunked) '~s' was unsuccessful", [_ToFun]),
finish_chunked_response(ChunkMap#{context => reset_context_between_chunks(Context1, StartedChunk)})
end.

%%------------------------------------------------------------------------------
Expand Down Expand Up @@ -1071,28 +1084,33 @@ process_chunk(#{context := Context
}=ChunkMap) ->
case cb_context:resp_data(Context) of
0 ->
lager:debug("(chunked) ~s did not send data", [ToFun]),
next_chunk_fold(ChunkMap#{context => reset_context_between_chunks(Context, IsStarted)
,chunking_started => IsStarted
}
);
SentLength when is_integer(SentLength) ->
lager:debug("(chunked) ~s sent ~p data", [ToFun, SentLength]),
next_chunk_fold(ChunkMap#{context => reset_context_between_chunks(Context, 'true')
,chunking_started => 'true'
,previous_chunk_length => SentLength
}
);
[] ->
lager:debug("(chunked) ~s did not send data", [ToFun]),
next_chunk_fold(ChunkMap#{context => reset_context_between_chunks(Context, IsStarted)
,chunking_started => IsStarted
,previous_chunk_length => 0 %% the module filtered all queried result
}
);
Resp when is_list(Resp) ->
{StartedChunk, Req1} = send_chunk_response(ToFun, Req, Context),
next_chunk_fold(ChunkMap#{context => reset_context_between_chunks(Context, StartedChunk)
PrevLength = length(Resp),
{StartedChunk, Req1, Context1} = send_chunk_response(ToFun, Req, Context),
lager:debug("(chunked) ~s sent ~p chunked resp", [ToFun, PrevLength]),
next_chunk_fold(ChunkMap#{context => reset_context_between_chunks(Context1, StartedChunk)
,cowboy_req => Req1
,chunking_started => StartedChunk
,previous_chunk_length => length(Resp)
,previous_chunk_length => PrevLength
}
);
_Other ->
Expand Down Expand Up @@ -1135,10 +1153,11 @@ reset_context_between_chunks(Context, _StartedChunk, 'false') ->
end.

%% @doc Emit one chunk of the response in the requested format.
%% Returns whether chunking has started, the (possibly updated) cowboy
%% request, and the context.
%% NOTE(review): the spec carried both the old 2-tuple and new 3-tuple
%% return types from the diff; only the 3-tuple (matching the callees'
%% post-change returns) is kept.
-spec send_chunk_response(kz_term:ne_binary(), cowboy_req:req(), cb_context:context()) ->
          {boolean(), cowboy_req:req(), cb_context:context()}.
send_chunk_response(<<"to_json">>, Req, Context) ->
    api_util:create_json_chunk_response(Req, Context);
send_chunk_response(<<"to_csv">>, Req, Context) ->
    lager:debug("creating CSV chunk"),
    api_util:create_csv_chunk_response(Req, Context).

%%------------------------------------------------------------------------------
Expand All @@ -1155,20 +1174,13 @@ finish_chunked_response(#{chunking_started := 'false'
}) ->
%% chunk is not started, return whatever error's or response data in Context
api_util:create_pull_response(Req, Context);
finish_chunked_response(#{chunking_started := 'false'
,chunk_response_type := <<"to_csv">>
finish_chunked_response(#{chunk_response_type := <<"to_csv">>
,context := Context
,cowboy_req := Req
}) ->
%% chunk is not started, return empty CSV
lager:debug("creating CSV pull response"),
api_util:create_pull_response(Req, Context, fun api_util:create_csv_resp_content/2);
finish_chunked_response(#{chunk_response_type := <<"to_csv">>
,context := Context
,cowboy_req := Req
}) ->
%% Chunk is already started, stopping,
'ok' = cowboy_req:stream_body(<<>>, 'fin', Req),
{'stop', Req, Context};
finish_chunked_response(#{total_queried := TotalQueried
,chunking_started := 'true'
,cowboy_req := Req
Expand Down
Loading

0 comments on commit a13bbc5

Please sign in to comment.