Spark upgrade #61

Merged
merged 2 commits on Aug 16, 2017
10 changes: 10 additions & 0 deletions README.md
@@ -104,6 +104,16 @@ public class LambdaHandler implements RequestHandler<AwsProxyRequest, AwsProxyResponse>
}
```

If you configure an [`initExceptionHandler` method](http://sparkjava.com/documentation#stopping-the-server), make sure you call `System.exit` at the end of the method. The framework keeps a `CountDownLatch` on the request, and unless you forcefully exit the thread, the Lambda function will hang waiting for a latch that is never released.

```java
initExceptionHandler((e) -> {
LOG.error("ignite failed", e);
System.exit(100);
});
```
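
For reference, the snippet below sketches how the exception handler fits into a complete handler class. It is only a sketch: `SparkLambdaContainerHandler.getAwsProxyHandler()` and the `proxy` call come from this library, while the logger, the sample route, the exit code, and the package paths of the `AwsProxyRequest`/`AwsProxyResponse` model classes are assumptions to adapt to your project.

```java
import com.amazonaws.serverless.exceptions.ContainerInitializationException;
import com.amazonaws.serverless.proxy.internal.model.AwsProxyRequest;   // package path assumed
import com.amazonaws.serverless.proxy.internal.model.AwsProxyResponse;  // package path assumed
import com.amazonaws.serverless.proxy.spark.SparkLambdaContainerHandler;
import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static spark.Spark.get;
import static spark.Spark.initExceptionHandler;

public class LambdaHandler implements RequestHandler<AwsProxyRequest, AwsProxyResponse> {
    private static final Logger LOG = LoggerFactory.getLogger(LambdaHandler.class);
    private static SparkLambdaContainerHandler<AwsProxyRequest, AwsProxyResponse> handler;

    static {
        try {
            handler = SparkLambdaContainerHandler.getAwsProxyHandler();

            // register the handler before defining routes; exiting forcefully prevents the
            // function from hanging on the internal CountDownLatch if ignite fails
            initExceptionHandler((e) -> {
                LOG.error("ignite failed", e);
                System.exit(100);
            });

            get("/hello", (req, res) -> "Hello World");
        } catch (ContainerInitializationException e) {
            throw new RuntimeException("Could not initialize Spark container", e);
        }
    }

    @Override
    public AwsProxyResponse handleRequest(AwsProxyRequest request, Context context) {
        return handler.proxy(request, context);
    }
}
```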

# Security context
The `aws-serverless-java-container-core` contains a default implementation of the `SecurityContextWriter` that supports API Gateway's proxy integration. The generated security context uses the API Gateway `$context` object to establish the request security context. The context looks for the following values in order and returns the first matched type:
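
If the default behavior does not fit your application, you can supply your own writer when constructing the container handler. The sketch below is illustrative only: the `writeSecurityContext` method name, the assumption that it returns a JAX-RS `SecurityContext`, the `AwsProxyRequest` accessor and package paths, and the header-based lookup are all assumptions to verify against the version of the library you use.

```java
import java.security.Principal;

import javax.ws.rs.core.SecurityContext;

import com.amazonaws.serverless.proxy.internal.SecurityContextWriter;   // package path assumed
import com.amazonaws.serverless.proxy.internal.model.AwsProxyRequest;   // package path assumed
import com.amazonaws.services.lambda.runtime.Context;

// Illustrative writer that trusts a custom header instead of the API Gateway $context object.
public class HeaderSecurityContextWriter implements SecurityContextWriter<AwsProxyRequest> {

    @Override
    public SecurityContext writeSecurityContext(AwsProxyRequest event, Context lambdaContext) {
        // single-value headers map assumed on the request event
        final String user = event.getHeaders() == null ? null : event.getHeaders().get("x-custom-user");

        return new SecurityContext() {
            @Override
            public Principal getUserPrincipal() {
                if (user == null) {
                    return null;
                }
                return () -> user; // Principal's single abstract method is getName()
            }

            @Override
            public boolean isUserInRole(String role) {
                return false; // no role information in this example
            }

            @Override
            public boolean isSecure() {
                return true; // API Gateway terminates TLS in front of Lambda
            }

            @Override
            public String getAuthenticationScheme() {
                return "CUSTOM_HEADER";
            }
        };
    }
}
```

A writer like this would be passed to the `SparkLambdaContainerHandler` constructor (see the diff below) in place of the default `AwsProxySecurityContextWriter`.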

16 changes: 0 additions & 16 deletions aws-serverless-java-container-core/pom.xml
@@ -50,22 +50,6 @@
<version>1.3.2</version>
</dependency>

<!-- https://mvnrepository.com/artifact/junit/junit -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>

<!-- https://mvnrepository.com/artifact/org.mockito/mockito-all -->
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>1.10.19</version>
<scope>test</scope>
</dependency>

<!-- https://mvnrepository.com/artifact/org.apache.httpcomponents/httpmime -->
<dependency>
<groupId>org.apache.httpcomponents</groupId>
8 changes: 0 additions & 8 deletions aws-serverless-java-container-jersey/pom.xml
@@ -33,14 +33,6 @@
<version>${jersey.version}</version>
</dependency>

<!-- https://mvnrepository.com/artifact/junit/junit -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>

<!-- https://mvnrepository.com/artifact/commons-codec/commons-codec -->
<dependency>
<groupId>commons-codec</groupId>
11 changes: 2 additions & 9 deletions aws-serverless-java-container-spark/pom.xml
@@ -15,7 +15,7 @@
</parent>

<properties>
<spark.version>2.5.3</spark.version>
<spark.version>2.6.0</spark.version>
</properties>

<dependencies>
@@ -31,21 +31,14 @@
<groupId>com.sparkjava</groupId>
<artifactId>spark-core</artifactId>
<version>${spark.version}</version>
<!-- excluding slf4j from spark because it includes 1.7.13, the framework already pulls in 1.7.25 -->
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</exclusion>
</exclusions>
</dependency>

<!-- https://mvnrepository.com/artifact/junit/junit -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
</dependencies>

</project>
SparkLambdaContainerHandler.java
@@ -26,6 +26,7 @@
import org.slf4j.LoggerFactory;
import spark.Service;
import spark.Spark;
import spark.embeddedserver.EmbeddedServerFactory;
import spark.embeddedserver.EmbeddedServers;

import java.lang.reflect.Field;
@@ -91,7 +92,8 @@ public static SparkLambdaContainerHandler<AwsProxyRequest, AwsProxyResponse> get
return new SparkLambdaContainerHandler<>(new AwsProxyHttpServletRequestReader(),
new AwsProxyHttpServletResponseWriter(),
new AwsProxySecurityContextWriter(),
new AwsProxyExceptionHandler());
new AwsProxyExceptionHandler(),
new LambdaEmbeddedServerFactory());
}


@@ -102,31 +104,34 @@ public static SparkLambdaContainerHandler<AwsProxyRequest, AwsProxyResponse> get
public SparkLambdaContainerHandler(RequestReader<RequestType, AwsProxyHttpServletRequest> requestReader,
ResponseWriter<AwsHttpServletResponse, ResponseType> responseWriter,
SecurityContextWriter<RequestType> securityContextWriter,
ExceptionHandler<ResponseType> exceptionHandler)
ExceptionHandler<ResponseType> exceptionHandler,
EmbeddedServerFactory embeddedServerFactory)
throws ContainerInitializationException {
super(requestReader, responseWriter, securityContextWriter, exceptionHandler);

EmbeddedServers.add(LAMBDA_EMBEDDED_SERVER_CODE, new LambdaEmbeddedServerFactory());
EmbeddedServers.add(LAMBDA_EMBEDDED_SERVER_CODE, embeddedServerFactory);

// TODO: This is pretty bad but we are not given access to the embeddedServerIdentifier property of the
// Service object
try {
log.debug("Changing visibility of getInstance method and embeddedServerIdentifier properties");
Method serviceInstanceMethod = Spark.class.getDeclaredMethod("getInstance");
serviceInstanceMethod.setAccessible(true);
Service sparkService = (Service) serviceInstanceMethod.invoke(null);
Field serverIdentifierField = Service.class.getDeclaredField("embeddedServerIdentifier");
serverIdentifierField.setAccessible(true);
serverIdentifierField.set(sparkService, LAMBDA_EMBEDDED_SERVER_CODE);

// remove Jetty from embedded servers
EmbeddedServers.class.getDeclaredMethod("initialize");
} catch (NoSuchFieldException e) {
log.error("Could not fine embeddedServerIdentifier field in Service class", e);
throw new ContainerInitializationException("Cannot find embeddedServerIdentifier field in Service class", e);
} catch (NoSuchMethodException e) {
log.error("Could not find getInstance method in Spark class", e);
throw new ContainerInitializationException("Cannot find getInstance method in Spark class", e);
} catch (IllegalAccessException e) {
log.error("Could not access getInstance method in Spark class", e);
throw new ContainerInitializationException("Cannot access getInstance method in Spark class", e);
} catch (InvocationTargetException e) {
log.error("Could not invoke getInstance method in Spark class", e);
throw new ContainerInitializationException("Cannot invoke getInstance method in Spark class", e);
}
}
@@ -145,10 +150,12 @@ protected AwsHttpServletResponse getContainerResponse(AwsProxyHttpServletRequest
@Override
protected void handleRequest(AwsProxyHttpServletRequest httpServletRequest, AwsHttpServletResponse httpServletResponse, Context lambdaContext)
throws Exception {

// this method of the AwsLambdaServletContainerHandler sets the request context
super.handleRequest(httpServletRequest, httpServletResponse, lambdaContext);

if (embeddedServer == null) {
log.debug("First request, getting new server instance");
embeddedServer = LambdaEmbeddedServerFactory.getServerInstance();

// call the onStartup event if set to give developers a chance to set filters in the context
LambdaEmbeddedServer.java
@@ -15,7 +15,6 @@
import java.io.IOException;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;

public class LambdaEmbeddedServer
implements EmbeddedServer {
@@ -45,19 +44,14 @@ public class LambdaEmbeddedServer
//-------------------------------------------------------------
// Implementation - EmbeddedServer
//-------------------------------------------------------------

@Override
public int ignite(String host,
int port,
SslStores sslStores,
CountDownLatch countDownLatch,
int maxThreads,
int minThreads,
int threadIdleTimeoutMillis) {
public int ignite(String s, int i, SslStores sslStores, int i1, int i2, int i3)
throws Exception {
log.info("Starting Spark server, ignoring port and host");
sparkFilter = new MatcherFilter(applicationRoutes, staticFilesConfiguration, false, hasMultipleHandler);
sparkFilter.init(null);

countDownLatch.countDown();
//countDownLatch.countDown();

return 0;
}
@@ -71,18 +65,31 @@ public void configureWebSockets(Map<String, WebSocketHandlerWrapper> webSocketHa
}


@Override
public void join()
throws InterruptedException {
log.info("Called join method, nothing to do here since Lambda only runs a single event per container");
}


@Override
public void extinguish() {
log.info("Called extinguish method, nothing to do here.");
}


@Override
public int activeThreadCount() {
log.debug("Called activeThreadCount, since Lambda only runs one event per container we always return 1");
return 1;
}

//-------------------------------------------------------------
// Methods - Public
//-------------------------------------------------------------

public void handle(HttpServletRequest request, HttpServletResponse response)
throws IOException, ServletException {
//RouteMatch route = applicationRoutes.find(HttpMethod.get(request.requestMethod()), request.contextPath(), "*/*");
sparkFilter.doFilter(request, response, null);
}
}
LambdaEmbeddedServerFactory.java
@@ -14,6 +14,21 @@ public class LambdaEmbeddedServerFactory implements EmbeddedServerFactory {
private static LambdaEmbeddedServer embeddedServer;


/**
* Empty constructor, applications should always use this constructor.
*/
public LambdaEmbeddedServerFactory() {}


/**
* Constructor used in unit tests to inject a mocked embedded server
* @param server The mocked server
*/
public LambdaEmbeddedServerFactory(LambdaEmbeddedServer server) {
embeddedServer = server;
}


//-------------------------------------------------------------
// Implementation - EmbeddedServerFactory
//-------------------------------------------------------------
HelloWorldSparkTest.java
@@ -7,8 +7,10 @@
import com.amazonaws.serverless.proxy.internal.testutils.AwsProxyRequestBuilder;
import com.amazonaws.serverless.proxy.internal.testutils.MockLambdaContext;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import spark.Spark;

import javax.servlet.http.Cookie;
import javax.ws.rs.core.HttpHeaders;
@@ -22,10 +24,10 @@ public class HelloWorldSparkTest {
private static final String CUSTOM_HEADER_VALUE = "My Header Value";
private static final String BODY_TEXT_RESPONSE = "Hello World";

public static final String COOKIE_NAME = "MyCookie";
public static final String COOKIE_VALUE = "CookieValue";
public static final String COOKIE_DOMAIN = "mydomain.com";
public static final String COOKIE_PATH = "/";
private static final String COOKIE_NAME = "MyCookie";
private static final String COOKIE_VALUE = "CookieValue";
private static final String COOKIE_DOMAIN = "mydomain.com";
private static final String COOKIE_PATH = "/";

private static SparkLambdaContainerHandler<AwsProxyRequest, AwsProxyResponse> handler;

@@ -42,6 +44,11 @@ public static void initializeServer() {
}
}

@AfterClass
public static void stopSpark() {
Spark.stop();
}

@Test
public void basicServer_handleRequest_emptyFilters() {
AwsProxyRequest req = new AwsProxyRequestBuilder().method("GET").path("/hello").build();
InitExceptionHandlerTest.java
@@ -0,0 +1,75 @@
package com.amazonaws.serverless.proxy.spark;


import com.amazonaws.serverless.exceptions.ContainerInitializationException;
import com.amazonaws.serverless.proxy.internal.AwsProxyExceptionHandler;
import com.amazonaws.serverless.proxy.internal.AwsProxySecurityContextWriter;
import com.amazonaws.serverless.proxy.internal.servlet.AwsProxyHttpServletRequestReader;
import com.amazonaws.serverless.proxy.internal.servlet.AwsProxyHttpServletResponseWriter;
import com.amazonaws.serverless.proxy.spark.embeddedserver.LambdaEmbeddedServer;
import com.amazonaws.serverless.proxy.spark.embeddedserver.LambdaEmbeddedServerFactory;

import org.junit.AfterClass;
import org.junit.Test;
import spark.Spark;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.when;
import static spark.Spark.get;
import static spark.Spark.initExceptionHandler;


public class InitExceptionHandlerTest {

private static final String TEST_EXCEPTION_MESSAGE = "test exception";
private static LambdaEmbeddedServer embeddedServer = mock(LambdaEmbeddedServer.class);

@Test
public void initException_mockException_expectHandlerToRun() {
try {

when(embeddedServer.ignite(anyString(), anyInt(), anyObject(), anyInt(), anyInt(), anyInt()))
.thenThrow(new ContainerInitializationException(TEST_EXCEPTION_MESSAGE, null));
LambdaEmbeddedServerFactory serverFactory = new LambdaEmbeddedServerFactory(embeddedServer);
new SparkLambdaContainerHandler<>(new AwsProxyHttpServletRequestReader(),
new AwsProxyHttpServletResponseWriter(),
new AwsProxySecurityContextWriter(),
new AwsProxyExceptionHandler(),
serverFactory);

configureRoutes();

} catch (Exception e) {
e.printStackTrace();
fail("Error while mocking server");
}

}

@AfterClass
public static void stopSpark() {
// un-mock the embedded server to avoid blocking other tests
reset(embeddedServer);
// reset the static variable in the factory so that it will be instantiated again next time
new LambdaEmbeddedServerFactory(null);
Spark.stop();
}

private static void configureRoutes() {
initExceptionHandler((e) -> {
System.out.println("Exception Handler called: " + e.getLocalizedMessage());
assertEquals(TEST_EXCEPTION_MESSAGE, e.getLocalizedMessage());
});

get("/test-route", (req, res) -> {
res.status(200);
return "test";
});
}
}
@@ -8,7 +8,9 @@
import com.amazonaws.serverless.proxy.internal.testutils.MockLambdaContext;
import com.amazonaws.serverless.proxy.spark.filter.CustomHeaderFilter;

import org.junit.AfterClass;
import org.junit.Test;
import spark.Spark;

import javax.servlet.DispatcherType;
import javax.servlet.FilterRegistration;
@@ -57,6 +59,11 @@ public void filters_onStartupMethod_executeFilters() {

}

@AfterClass
public static void stopSpark() {
Spark.stop();
}

private void configureRoutes() {
get("/header-filter", (req, res) -> {
res.status(200);
8 changes: 0 additions & 8 deletions aws-serverless-java-container-spring/pom.xml
@@ -49,14 +49,6 @@
<scope>test</scope>
</dependency>

<!-- https://mvnrepository.com/artifact/junit/junit -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>