
Commit

Merge pull request #101 from scireum/aha/FastETags
Aha/fast e tags
sabieber committed Dec 11, 2018
2 parents 130642d + e1c84a0 commit 03ade13
Showing 3 changed files with 52 additions and 36 deletions.
41 changes: 21 additions & 20 deletions src/main/java/ninja/NinjaController.java
@@ -47,6 +47,12 @@
@Register
public class NinjaController implements Controller {

+ @Part
+ private Storage storage;
+
+ @Part
+ private APILog log;
+
@Override
public void onError(WebContext ctx, HandledException error) {
if (error != null) {
@@ -59,19 +65,13 @@ public void onError(WebContext ctx, HandledException error) {
UserContext.message(Message.error(e.getMessage()));
}
ctx.respondWith()
.template("templates/index.html.pasta",
buckets,
storage.getBasePath(),
storage.getAwsAccessKey(),
storage.getAwsSecretKey());
.template("templates/index.html.pasta",
buckets,
storage.getBasePath(),
storage.getAwsAccessKey(),
storage.getAwsSecretKey());
}

- @Part
- private Storage storage;
-
- @Part
- private APILog log;
-
/**
* Handles requests to /
*
@@ -123,13 +123,13 @@ public void log(WebContext ctx) {
entries.remove(entries.size() - 1);
}
ctx.respondWith()
.template("templates/log.html.pasta",
entries,
canPagePrev,
canPageNext,
(start + 1) + " - " + (start + entries.size()),
Math.max(1, start - pageSize + 1),
start + pageSize + 1);
.template("templates/log.html.pasta",
entries,
canPagePrev,
canPageNext,
(start + 1) + " - " + (start + entries.size()),
Math.max(1, start - pageSize + 1),
start + pageSize + 1);
}

/**
@@ -177,7 +177,7 @@ public void object(WebContext ctx, String bucketName, String id) {
return;
}
Response response = ctx.respondWith();
for (Map.Entry<Object, Object> entry : object.getProperties()) {
for (Map.Entry<Object, Object> entry : object.getProperties().entrySet()) {
response.addHeader(entry.getKey().toString(), entry.getValue().toString());
}
response.file(object.getFile());
@@ -190,6 +190,7 @@ public void object(WebContext ctx, String bucketName, String id) {
* Handles manual object uploads
*
* @param ctx the context describing the current request
+ * @param out the output to write to
* @param bucket the name of the target bucket
*/
@Routed(priority = PriorityCollector.DEFAULT_PRIORITY - 1, value = "/ui/:1/upload", jsonCall = true)
@@ -206,7 +207,7 @@ public void uploadFile(WebContext ctx, JSONStructuredOutput out, String bucket)

Map<String, String> properties = Maps.newTreeMap();
properties.put(HttpHeaderNames.CONTENT_TYPE.toString(),
- ctx.getHeaderValue(HttpHeaderNames.CONTENT_TYPE).asString(MimeHelper.guessMimeType(name)));
+ ctx.getHeaderValue(HttpHeaderNames.CONTENT_TYPE).asString(MimeHelper.guessMimeType(name)));
HashCode hash = Files.hash(object.getFile(), Hashing.md5());
String md5 = BaseEncoding.base64().encode(hash.asBytes());
properties.put("Content-MD5", md5);
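
Note: the upload handler derives its Content-MD5 value from a single MD5 pass over the stored file (the Base64-encoded digest bytes); the same digest, Base16-encoded, is what S3Dispatcher below uses as the ETag. A minimal standalone sketch using the same Guava calls as the diff; the class name and file path are illustrative only, not part of the project:

    import com.google.common.hash.HashCode;
    import com.google.common.hash.Hashing;
    import com.google.common.io.BaseEncoding;
    import com.google.common.io.Files;

    import java.io.File;
    import java.io.IOException;

    public class DigestSketch {

        public static void main(String[] args) throws IOException {
            // Illustrative path, not part of the project.
            File file = new File("example.bin");

            // One MD5 pass over the file contents.
            HashCode hash = Files.hash(file, Hashing.md5());

            // Content-MD5 header value: Base64 of the raw digest bytes.
            String contentMd5 = BaseEncoding.base64().encode(hash.asBytes());

            // ETag value: hex digest, wrapped in quotes when sent as a header.
            String etag = "\"" + BaseEncoding.base16().encode(hash.asBytes()) + "\"";

            System.out.println("Content-MD5: " + contentMd5);
            System.out.println("ETag: " + etag);
        }
    }
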
38 changes: 27 additions & 11 deletions src/main/java/ninja/S3Dispatcher.java
@@ -52,6 +52,7 @@
import java.util.List;
import java.util.Locale;
import java.util.Map;
+ import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Matcher;
@@ -507,6 +508,7 @@ private void putObject(WebContext ctx, Bucket bucket, String id, InputStreamHand
properties.put(name, ctx.getHeader(name));
}
}
+
HashCode hash = Files.hash(object.getFile(), Hashing.md5());
String md5 = BaseEncoding.base64().encode(hash.asBytes());
String contentMd5 = properties.get("Content-MD5");
@@ -518,15 +520,17 @@ private void putObject(WebContext ctx, Bucket bucket, String id, InputStreamHand
return;
}

+ String etag = BaseEncoding.base16().encode(hash.asBytes());
+ properties.put(HTTP_HEADER_NAME_ETAG, etag);
+ object.storeProperties(properties);
Response response = ctx.respondWith();
- response.addHeader(HTTP_HEADER_NAME_ETAG, etag(hash)).status(HttpResponseStatus.OK);
+ response.addHeader(HTTP_HEADER_NAME_ETAG, etag(etag)).status(HttpResponseStatus.OK);
response.addHeader(HttpHeaderNames.ACCESS_CONTROL_EXPOSE_HEADERS, HTTP_HEADER_NAME_ETAG);
signalObjectSuccess(ctx);
}

- private String etag(HashCode hash) {
- return "\"" + hash + "\"";
+ private String etag(String etag) {
+ return "\"" + etag + "\"";
}

/**
@@ -559,14 +563,15 @@ private void copyObject(WebContext ctx, Bucket bucket, String id, String copy) t
Files.copy(src.getPropertiesFile(), object.getPropertiesFile());
}
HashCode hash = Files.hash(object.getFile(), Hashing.md5());
- String etag = etag(hash);
- XMLStructuredOutput structuredOutput = ctx.respondWith().addHeader(HTTP_HEADER_NAME_ETAG, etag).xml();
+ String etag = BaseEncoding.base16().encode(hash.asBytes());
+
+ XMLStructuredOutput structuredOutput = ctx.respondWith().addHeader(HTTP_HEADER_NAME_ETAG, etag(etag)).xml();
structuredOutput.beginOutput("CopyObjectResult");
structuredOutput.beginObject("LastModified");
structuredOutput.text(RFC822_INSTANT.format(object.getLastModifiedInstant()));
structuredOutput.endObject();
structuredOutput.beginObject(HTTP_HEADER_NAME_ETAG);
- structuredOutput.text(etag);
+ structuredOutput.text(etag(etag));
structuredOutput.endObject();
structuredOutput.endOutput();
signalObjectSuccess(ctx);
@@ -586,14 +591,25 @@ private void getObject(WebContext ctx, Bucket bucket, String id, boolean sendFil
return;
}
Response response = ctx.respondWith();
for (Map.Entry<Object, Object> entry : object.getProperties()) {
Properties properties = object.getProperties();
for (Map.Entry<Object, Object> entry : properties.entrySet()) {
response.addHeader(entry.getKey().toString(), entry.getValue().toString());
}
for (Map.Entry<String, String> entry : getOverridenHeaders(ctx).entrySet()) {
response.setHeader(entry.getKey(), entry.getValue());
}
- HashCode hash = Files.hash(object.getFile(), Hashing.md5());
- response.addHeader(HTTP_HEADER_NAME_ETAG, BaseEncoding.base16().encode(hash.asBytes()));
+
+ String etag = properties.getProperty(HTTP_HEADER_NAME_ETAG);
+ if (Strings.isEmpty(etag)) {
+ HashCode hash = Files.hash(object.getFile(), Hashing.md5());
+ etag = BaseEncoding.base16().encode(hash.asBytes());
+ Map<String, String> data = new HashMap<>();
+ properties.forEach((key, value) -> data.put(key.toString(), String.valueOf(value)));
+ data.put(HTTP_HEADER_NAME_ETAG, etag);
+ object.storeProperties(data);
+ }
+
+ response.addHeader(HTTP_HEADER_NAME_ETAG, etag(etag));
response.addHeader(HttpHeaderNames.ACCESS_CONTROL_EXPOSE_HEADERS, HTTP_HEADER_NAME_ETAG);
if (sendFile) {
response.file(object.getFile());
@@ -754,8 +770,8 @@ private File combineParts(String id, String uploadId, List<File> parts) {
try {
if (!file.createNewFile()) {
Storage.LOG.WARN("Failed to create multipart result file %s (%s).",
- file.getName(),
- file.getAbsolutePath());

+ file.getName(), file.getAbsolutePath());
}
try (FileChannel out = new FileOutputStream(file).getChannel()) {
combine(parts, out);
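
These hunks are the core of the change: putObject() now computes the hex MD5 ETag once and stores it in the object's properties, and getObject() reuses the stored value, hashing and backfilling only when no ETag has been persisted yet (for example for objects written before this change). A condensed sketch of that caching idea, using a plain java.util.Properties as a stand-in for the project's StoredObject metadata and assuming the property key is the literal header name "ETag"; persistence is only hinted at in a comment:

    import com.google.common.hash.Hashing;
    import com.google.common.io.BaseEncoding;
    import com.google.common.io.Files;

    import java.io.File;
    import java.io.IOException;
    import java.util.Properties;

    public class EtagCacheSketch {

        private static final String ETAG_KEY = "ETag"; // assumed key, standing in for HTTP_HEADER_NAME_ETAG

        // Returns the cached ETag, hashing the file and remembering the result on first access.
        static String etagFor(File data, Properties metadata) throws IOException {
            String etag = metadata.getProperty(ETAG_KEY);
            if (etag == null || etag.isEmpty()) {
                etag = BaseEncoding.base16().encode(Files.hash(data, Hashing.md5()).asBytes());
                metadata.setProperty(ETAG_KEY, etag);
                // A real implementation would persist the metadata here,
                // as getObject() does via object.storeProperties(data).
            }
            return etag;
        }

        // ETag header values are wrapped in double quotes, mirroring the etag(String) helper.
        static String quoted(String etag) {
            return "\"" + etag + "\"";
        }
    }
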
9 changes: 4 additions & 5 deletions src/main/java/ninja/StoredObject.java
@@ -18,7 +18,6 @@
import java.time.Instant;
import java.util.Map;
import java.util.Properties;
- import java.util.Set;

/**
* Represents a stored object.
@@ -75,8 +74,8 @@ public void delete() {
}
if (!getPropertiesFile().delete()) {
Storage.LOG.WARN("Failed to delete properties file for object %s (%s).",
- getName(),
- getPropertiesFile().getAbsolutePath());
+ getName(),
+ getPropertiesFile().getAbsolutePath());
}
}

@@ -107,14 +106,14 @@ public boolean exists() {
* @return a set of name value pairs representing all properties stored for this object or an empty set if no
* properties could be read.
*/
public Set<Map.Entry<Object, Object>> getProperties() {
public Properties getProperties() {
Properties props = new Properties();
try (FileInputStream in = new FileInputStream(getPropertiesFile())) {
props.load(in);
} catch (IOException e) {
Exceptions.ignore(e);
}
- return props.entrySet();
+ return props;
}

/**
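
With this change getProperties() returns the full java.util.Properties instead of its entry set, so callers can still iterate every stored header and can additionally look up single keys such as the cached ETag. A short usage sketch of the new return type; the class name, file name, and the literal "ETag" key are illustrative assumptions, not the project's API:

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.util.Map;
    import java.util.Properties;

    public class PropertiesUsageSketch {

        // Loads key/value metadata from a properties file, returning empty properties if it cannot be read.
        static Properties load(String path) {
            Properties props = new Properties();
            try (FileInputStream in = new FileInputStream(path)) {
                props.load(in);
            } catch (IOException e) {
                // Mirrors the original behaviour: unreadable metadata simply yields no properties.
            }
            return props;
        }

        public static void main(String[] args) {
            Properties props = load("object.properties"); // illustrative file name

            // Callers that previously consumed the entry set still can:
            for (Map.Entry<Object, Object> entry : props.entrySet()) {
                System.out.println(entry.getKey() + ": " + entry.getValue());
            }

            // ...and single keys, such as the stored ETag, can now be read directly:
            System.out.println("ETag: " + props.getProperty("ETag"));
        }
    }
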
