
Commit

add proto files
mutianf committed May 3, 2022
1 parent 2f242c6 commit 2106861
Showing 12 changed files with 1,042 additions and 70 deletions.
@@ -21,7 +21,9 @@
import com.google.api.gax.rpc.ResponseObserver;
import com.google.api.gax.rpc.ServerStreamingCallable;
import com.google.api.gax.rpc.StreamController;
import com.google.bigtable.v2.ResponseParams;
import com.google.common.base.Preconditions;
import com.google.protobuf.InvalidProtocolBufferException;
import io.grpc.Metadata;
import javax.annotation.Nonnull;

@@ -100,10 +102,12 @@ public void onError(Throwable t) {
Metadata metadata = responseMetadata.getMetadata();
Long latency = Util.getGfeLatency(metadata);
tracer.recordGfeMetadata(latency, t);
Metadata trailers = responseMetadata.getTrailingMetadata();
if (trailers != null) {
tracer.setLocations(
trailers.get(Util.ZONE_HEADER_KEY), trailers.get(Util.CLUSTER_HEADER_KEY));
try {
byte[] trailers = responseMetadata.getTrailingMetadata()
.get(Metadata.Key.of(Util.TRAILER_KEY, Metadata.BINARY_BYTE_MARSHALLER));
ResponseParams decodedTrailers = ResponseParams.parseFrom(trailers);
tracer.setLocations(decodedTrailers.getZoneId(), decodedTrailers.getClusterId());
} catch (NullPointerException | InvalidProtocolBufferException e) {
}

outerObserver.onError(t);
@@ -114,10 +118,12 @@ public void onComplete() {
Metadata metadata = responseMetadata.getMetadata();
Long latency = Util.getGfeLatency(metadata);
tracer.recordGfeMetadata(latency, null);
Metadata trailers = responseMetadata.getTrailingMetadata();
if (trailers != null) {
tracer.setLocations(
trailers.get(Util.ZONE_HEADER_KEY), trailers.get(Util.CLUSTER_HEADER_KEY));
try {
byte[] trailers = responseMetadata.getTrailingMetadata()
.get(Metadata.Key.of(Util.TRAILER_KEY, Metadata.BINARY_BYTE_MARSHALLER));
ResponseParams decodedTrailers = ResponseParams.parseFrom(trailers);
tracer.setLocations(decodedTrailers.getZoneId(), decodedTrailers.getClusterId());
} catch(NullPointerException | InvalidProtocolBufferException e) {
}

outerObserver.onComplete();
@@ -22,8 +22,10 @@
import com.google.api.gax.grpc.GrpcResponseMetadata;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.bigtable.v2.ResponseParams;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.protobuf.InvalidProtocolBufferException;
import io.grpc.Metadata;
import javax.annotation.Nonnull;

@@ -82,10 +84,12 @@ public void onFailure(Throwable throwable) {
Metadata metadata = responseMetadata.getMetadata();
Long latency = Util.getGfeLatency(metadata);
tracer.recordGfeMetadata(latency, throwable);
Metadata trailers = responseMetadata.getTrailingMetadata();
if (trailers != null) {
tracer.setLocations(
trailers.get(Util.ZONE_HEADER_KEY), trailers.get(Util.CLUSTER_HEADER_KEY));
try {
byte[] trailers = responseMetadata.getTrailingMetadata()
.get(Metadata.Key.of(Util.TRAILER_KEY, Metadata.BINARY_BYTE_MARSHALLER));
ResponseParams decodedTrailers = ResponseParams.parseFrom(trailers);
tracer.setLocations(decodedTrailers.getZoneId(), decodedTrailers.getClusterId());
} catch(NullPointerException | InvalidProtocolBufferException e) {
}
}

@@ -94,10 +98,12 @@ public void onSuccess(ResponseT response) {
Metadata metadata = responseMetadata.getMetadata();
Long latency = Util.getGfeLatency(metadata);
tracer.recordGfeMetadata(latency, null);
Metadata trailers = responseMetadata.getTrailingMetadata();
if (trailers != null) {
tracer.setLocations(
trailers.get(Util.ZONE_HEADER_KEY), trailers.get(Util.CLUSTER_HEADER_KEY));
try {
byte[] trailers = responseMetadata.getTrailingMetadata()
.get(Metadata.Key.of(Util.TRAILER_KEY, Metadata.BINARY_BYTE_MARSHALLER));
ResponseParams decodedTrailers = ResponseParams.parseFrom(trailers);
tracer.setLocations(decodedTrailers.getZoneId(), decodedTrailers.getClusterId());
} catch(NullPointerException | InvalidProtocolBufferException e) {
}
}
}
@@ -58,11 +58,7 @@ public class Util {
Metadata.Key.of("server-timing", Metadata.ASCII_STRING_MARSHALLER);
private static final Pattern SERVER_TIMING_HEADER_PATTERN = Pattern.compile(".*dur=(?<dur>\\d+)");

// TODO: update key value
public static final Metadata.Key<String> ZONE_HEADER_KEY =
Metadata.Key.of("bigtable-zone", Metadata.ASCII_STRING_MARSHALLER);
public static final Metadata.Key<String> CLUSTER_HEADER_KEY =
Metadata.Key.of("bigtable-cluster", Metadata.ASCII_STRING_MARSHALLER);
static final String TRAILER_KEY = "x-goog-ext-425905942-bin";

private static final TagValue OK_STATUS = TagValue.create(StatusCode.Code.OK.toString());
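
The new TRAILER_KEY above replaces the two ASCII trailer keys (bigtable-zone and bigtable-cluster) with a single binary trailer that carries a serialized ResponseParams message. Below is a minimal sketch of the decode path the callables above now share, pulled out into a standalone helper for illustration; the helper class, its name, and the null fallback are assumptions, not part of this commit.

import com.google.bigtable.v2.ResponseParams;
import com.google.protobuf.InvalidProtocolBufferException;
import io.grpc.Metadata;

final class TrailerDecodeSketch {
  // Binary trailer key; mirrors Util.TRAILER_KEY defined above.
  private static final Metadata.Key<byte[]> LOCATION_TRAILER_KEY =
      Metadata.Key.of("x-goog-ext-425905942-bin", Metadata.BINARY_BYTE_MARSHALLER);

  // Returns the decoded ResponseParams, or null if the trailer is absent or unparsable.
  static ResponseParams decodeLocation(Metadata trailers) {
    if (trailers == null) {
      return null;
    }
    byte[] bytes = trailers.get(LOCATION_TRAILER_KEY);
    if (bytes == null) {
      return null;
    }
    try {
      return ResponseParams.parseFrom(bytes);
    } catch (InvalidProtocolBufferException e) {
      return null;
    }
  }
}

The tracer callables inline this logic and deliberately swallow NullPointerException and InvalidProtocolBufferException, so a missing or unparsable trailer never fails the caller's RPC.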

@@ -44,12 +44,11 @@ final class BigtableCreateTimeSeriesExporter extends MetricExporter {
BigtableCreateTimeSeriesExporter(
String projectId,
MetricServiceClient metricServiceClient,
MonitoredResource monitoredResource,
@Nullable String metricNamePrefix) {
MonitoredResource monitoredResource) {
this.projectName = ProjectName.newBuilder().setProject(projectId).build();
this.metricServiceClient = metricServiceClient;
this.monitoredResource = monitoredResource;
this.domain = BigtableStackdriverExportUtils.getDomain(metricNamePrefix);
this.domain = "bigtable.googleapis.com/client/";
}

public void export(Collection<Metric> metrics) {
@@ -31,7 +31,6 @@
import com.google.bigtable.veneer.repackaged.io.opencensus.metrics.export.Summary;
import com.google.bigtable.veneer.repackaged.io.opencensus.metrics.export.Value;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
import com.google.common.collect.Maps;
import com.google.monitoring.v3.TimeInterval;
import com.google.monitoring.v3.TimeSeries;
@@ -42,30 +41,13 @@
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.security.SecureRandom;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.Nullable;

class BigtableStackdriverExportUtils {
@VisibleForTesting
static final LabelKey OPENCENSUS_TASK_KEY =
LabelKey.create("opencensus_task", "Opencensus task identifier");

@VisibleForTesting
static final LabelValue OPENCENSUS_TASK_VALUE_DEFAULT =
LabelValue.create(generateDefaultTaskValue());

static final Map<LabelKey, LabelValue> DEFAULT_CONSTANT_LABELS;
@VisibleForTesting static final String CUSTOM_METRIC_DOMAIN = "custom.googleapis.com/";

@VisibleForTesting
static final String CUSTOM_OPENCENSUS_DOMAIN = CUSTOM_METRIC_DOMAIN + "opencensus/";

// TODO: clean up unused other types
@VisibleForTesting static final int MAX_BATCH_EXPORT_SIZE = 200;
private static final Logger logger;

private static final com.google.bigtable.veneer.repackaged.io.opencensus.common.Function<
@@ -310,23 +292,8 @@ static com.google.protobuf.Timestamp convertTimestamp(
}

private BigtableStackdriverExportUtils() {}

static String getDomain(@Nullable String metricNamePrefix) {
String domain;
if (Strings.isNullOrEmpty(metricNamePrefix)) {
domain = CUSTOM_OPENCENSUS_DOMAIN;
} else if (!metricNamePrefix.endsWith("/")) {
domain = metricNamePrefix + '/';
} else {
domain = metricNamePrefix;
}

return domain;
}


static {
DEFAULT_CONSTANT_LABELS =
Collections.singletonMap(OPENCENSUS_TASK_KEY, OPENCENSUS_TASK_VALUE_DEFAULT);
logger = Logger.getLogger(BigtableStackdriverExportUtils.class.getName());
typedValueDoubleFunction =
new com.google.bigtable.veneer.repackaged.io.opencensus.common.Function<
@@ -58,15 +58,14 @@ private BigtableStackdriverStatsExporter(
String projectId,
MetricServiceClient metricServiceClient,
Duration exportInterval,
MonitoredResource monitoredResource,
@Nullable String metricNamePrefix) {
MonitoredResource monitoredResource) {
IntervalMetricReader.Options.Builder intervalMetricReaderOptionsBuilder =
IntervalMetricReader.Options.builder();
intervalMetricReaderOptionsBuilder.setExportInterval(exportInterval);
this.intervalMetricReader =
IntervalMetricReader.create(
new BigtableCreateTimeSeriesExporter(
projectId, metricServiceClient, monitoredResource, metricNamePrefix),
projectId, metricServiceClient, monitoredResource),
MetricReader.create(
com.google.bigtable.veneer.repackaged.io.opencensus.exporter.metrics.util
.MetricReader.Options.builder()
@@ -85,7 +84,6 @@ public static void createAndRegister(StackdriverStatsConfiguration configuration
configuration.getProjectId(),
configuration.getExportInterval(),
configuration.getMonitoredResource(),
configuration.getMetricNamePrefix(),
configuration.getDeadline(),
configuration.getMetricServiceStub());
}
@@ -95,7 +93,6 @@ private static void createInternal(
String projectId,
Duration exportInterval,
MonitoredResource monitoredResource,
@Nullable String metricNamePrefix,
Duration deadline,
@Nullable MetricServiceStub stub)
throws IOException {
@@ -107,7 +104,7 @@ private static void createInternal(
: MetricServiceClient.create(stub);
instance =
new BigtableStackdriverStatsExporter(
projectId, client, exportInterval, monitoredResource, metricNamePrefix);
projectId, client, exportInterval, monitoredResource);
}
}
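
With metricNamePrefix removed from the constructor, createAndRegister, and createInternal, registration now takes only a StackdriverStatsConfiguration, and the metric domain is fixed to bigtable.googleapis.com/client/ inside the exporter. A hypothetical registration call is sketched below, assuming the unshaded OpenCensus StackdriverStatsConfiguration builder; the import paths, project id, export interval, and resource type are placeholders rather than values taken from this commit.

import com.google.api.MonitoredResource;
import io.opencensus.common.Duration;
import io.opencensus.exporter.stats.stackdriver.StackdriverStatsConfiguration;
import java.io.IOException;

final class ExporterRegistrationSketch {
  static void register() throws IOException {
    StackdriverStatsConfiguration configuration =
        StackdriverStatsConfiguration.builder()
            .setProjectId("my-project") // placeholder project id
            .setExportInterval(Duration.create(60, 0)) // export once a minute
            .setMonitoredResource(
                MonitoredResource.newBuilder().setType("bigtable-table").build())
            .build();
    BigtableStackdriverStatsExporter.createAndRegister(configuration);
  }
}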

@@ -27,6 +27,7 @@
import com.google.bigtable.v2.MutateRowResponse;
import com.google.bigtable.v2.ReadRowsRequest;
import com.google.bigtable.v2.ReadRowsResponse;
import com.google.bigtable.v2.ResponseParams;
import com.google.cloud.bigtable.data.v2.BigtableDataSettings;
import com.google.cloud.bigtable.data.v2.FakeServiceHelper;
import com.google.cloud.bigtable.data.v2.models.Query;
@@ -151,8 +152,12 @@ public void sendHeaders(Metadata headers) {

@Override
public void close(Status status, Metadata trailers) {
trailers.put(Util.ZONE_HEADER_KEY, ZONE);
trailers.put(Util.CLUSTER_HEADER_KEY, CLUSTER);
ResponseParams params = ResponseParams.newBuilder()
.setClusterId(CLUSTER)
.setZoneId(ZONE)
.build();
byte[] byteArray = params.toByteArray();
trailers.put(Metadata.Key.of(Util.TRAILER_KEY, Metadata.BINARY_BYTE_MARSHALLER), byteArray);
super.close(status, trailers);
}
},
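
The fake service's close(...) override above writes the trailer in exactly the form the tracer callables parse. A short round-trip, written as it might appear inside a test method (Truth assertions and a throws InvalidProtocolBufferException clause assumed), makes the pairing explicit:

// Server side: the bytes the fake service puts into the trailing metadata.
ResponseParams params =
    ResponseParams.newBuilder().setClusterId(CLUSTER).setZoneId(ZONE).build();
byte[] bytes = params.toByteArray();

// Client side: what the tracer callables recover from those bytes.
ResponseParams decoded = ResponseParams.parseFrom(bytes);
assertThat(decoded.getZoneId()).isEqualTo(ZONE);
assertThat(decoded.getClusterId()).isEqualTo(CLUSTER);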
@@ -57,7 +57,6 @@ public class BigtableCreateTimeSeriesExporterTest {
private static final String tableId = "fake-table";
private static final String zone = "us-east-1";
private static final String cluster = "cluster-1";
private static final String METRIC_PREFIX = "bigtable.googleapis.com/client/";

@Rule public final MockitoRule mockitoRule = MockitoJUnit.rule();

@@ -74,15 +73,14 @@ public void setUp() {
new BigtableCreateTimeSeriesExporter(
projectId,
fakeMetricServiceClient,
MonitoredResource.newBuilder().setType("bigtable-table").build(),
METRIC_PREFIX);
MonitoredResource.newBuilder().setType("bigtable-table").build());
}

@After
public void tearDown() {}

@Test
public void testTimeSeries() throws Exception {
public void testTimeSeries() {
ArgumentCaptor<CreateTimeSeriesRequest> argumentCaptor =
ArgumentCaptor.forClass(CreateTimeSeriesRequest.class);
