@@ -363,7 +363,7 @@ internal struct LambdaHTTPServer {
                 // we always accept the /invoke request and push them to the pool
                 let requestId = "\(DispatchTime.now().uptimeNanoseconds)"
                 logger[metadataKey: "requestId"] = "\(requestId)"
-                logger.trace("/invoke received invocation, pushing it to the stack")
+                logger.trace("/invoke received invocation, pushing it to the pool and wait for a lambda response")
                 await self.invocationPool.push(LocalServerInvocation(requestId: requestId, request: body))

                 // wait for the lambda function to process the request
@@ -429,12 +429,13 @@ internal struct LambdaHTTPServer {
                         id: requestId,
                         status: .ok,
                         headers: HTTPHeaders([("Content-Type", "application/json")]),
-                        body: body
+                        body: body,
+                        final: true
                     )
                 )

                 // tell the Lambda function we accepted the response
-                return try await sendResponse(.init(id: requestId, status: .accepted), outbound: outbound, logger: logger)
+                return try await sendResponse(.init(id: requestId, status: .accepted, final: true), outbound: outbound, logger: logger)

             // :requestId/error endpoint is called by the lambda posting an error response
             // we accept all requestId and we do not handle the body, we just acknowledge the request
@@ -469,7 +470,7 @@ internal struct LambdaHTTPServer {
     ) async throws {
         var logger = logger
         logger[metadataKey: "requestId"] = "\(response.requestId ?? "nil")"
-        logger.trace("Writing response")
+        logger.trace("Writing response for \(response.status?.code ?? 0)")

         var headers = response.headers ?? HTTPHeaders()
         if let body = response.body {
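For context on the flow these trace messages describe: the /invoke handler hands the request to the lambda through `invocationPool` and then suspends until the lambda posts its result back on /:requestId/response. Below is a minimal sketch of such a push/wait pool, assuming a FIFO with continuation-based waiters; the `SimplePool` name and implementation are illustrative only, since the actual pool type is not part of this diff.

```swift
// A hypothetical stand-in for the invocation/response pool used by the local server:
// push() delivers an element to a suspended pop() if one is waiting, otherwise buffers it;
// pop() returns a buffered element or suspends until the next push().
actor SimplePool<Element: Sendable> {
    private var buffer: [Element] = []
    private var waiters: [CheckedContinuation<Element, Never>] = []

    func push(_ element: Element) {
        if waiters.isEmpty {
            buffer.append(element)
        } else {
            // Hand the element directly to the oldest waiting consumer.
            waiters.removeFirst().resume(returning: element)
        }
    }

    func pop() async -> Element {
        if !buffer.isEmpty {
            return buffer.removeFirst()
        }
        // Nothing buffered yet: suspend until push() resumes us.
        return await withCheckedContinuation { continuation in
            waiters.append(continuation)
        }
    }
}
```

With a pool like this, the /invoke handler would push its invocation and wait on a response pool for the matching requestId, while the /:requestId/response handler pushes the lambda's result (now marked `final: true`) into that pool to resume the waiting request.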