diff --git a/google-cloud-bigtable/src/main/java/com/google/cloud/bigtable/data/v2/stub/metrics/BigtableCloudMonitoringExporter.java b/google-cloud-bigtable/src/main/java/com/google/cloud/bigtable/data/v2/stub/metrics/BigtableCloudMonitoringExporter.java
new file mode 100644
index 0000000000..5ca8271791
--- /dev/null
+++ b/google-cloud-bigtable/src/main/java/com/google/cloud/bigtable/data/v2/stub/metrics/BigtableCloudMonitoringExporter.java
@@ -0,0 +1,190 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.cloud.bigtable.data.v2.stub.metrics;
+
+import com.google.api.MonitoredResource;
+import com.google.api.core.ApiFuture;
+import com.google.api.core.ApiFutureCallback;
+import com.google.api.core.ApiFutures;
+import com.google.api.gax.core.FixedCredentialsProvider;
+import com.google.auth.Credentials;
+import com.google.cloud.monitoring.v3.MetricServiceClient;
+import com.google.cloud.monitoring.v3.MetricServiceSettings;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.util.concurrent.MoreExecutors;
+import com.google.monitoring.v3.CreateTimeSeriesRequest;
+import com.google.monitoring.v3.ProjectName;
+import com.google.monitoring.v3.TimeSeries;
+import com.google.protobuf.Empty;
+import io.opentelemetry.sdk.common.CompletableResultCode;
+import io.opentelemetry.sdk.metrics.InstrumentType;
+import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
+import io.opentelemetry.sdk.metrics.data.MetricData;
+import io.opentelemetry.sdk.metrics.export.MetricExporter;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import org.threeten.bp.Duration;
+
+/**
+ * Bigtable Cloud Monitoring OpenTelemetry Exporter.
+ *
+ * <p>The exporter will look for all bigtable owned metrics under bigtable.googleapis.com
+ * instrumentation scope and upload it via the Google Cloud Monitoring API.
+ */
+final class BigtableCloudMonitoringExporter implements MetricExporter {
+
+ private static final Logger logger =
+ Logger.getLogger(BigtableCloudMonitoringExporter.class.getName());
+ private final MetricServiceClient client;
+
+ private final String projectId;
+ private final String taskId;
+ private final MonitoredResource monitoredResource;
+ private final AtomicBoolean isShutdown = new AtomicBoolean(false);
+
+ private static final String RESOURCE_TYPE = "bigtable_client_raw";
+
+ private CompletableResultCode lastExportCode;
+
+ static BigtableCloudMonitoringExporter create(String projectId, Credentials credentials)
+ throws IOException {
+ MetricServiceSettings.Builder settingsBuilder = MetricServiceSettings.newBuilder();
+ settingsBuilder.setCredentialsProvider(FixedCredentialsProvider.create(credentials));
+
+ org.threeten.bp.Duration timeout = Duration.ofMinutes(1);
+ // TODO: createServiceTimeSeries needs special handling if the request failed. Leaving
+ // it as not retried for now.
+ settingsBuilder.createServiceTimeSeriesSettings().setSimpleTimeoutNoRetries(timeout);
+ return new BigtableCloudMonitoringExporter(
+ projectId,
+ MetricServiceClient.create(settingsBuilder.build()),
+ MonitoredResource.newBuilder().setType(RESOURCE_TYPE).build(),
+ BigtableExporterUtils.getDefaultTaskValue());
+ }
+
+ @VisibleForTesting
+ BigtableCloudMonitoringExporter(
+ String projectId,
+ MetricServiceClient client,
+ MonitoredResource monitoredResource,
+ String taskId) {
+ this.client = client;
+ this.monitoredResource = monitoredResource;
+ this.taskId = taskId;
+ this.projectId = projectId;
+ }
+
+  @Override
+  public CompletableResultCode export(Collection<MetricData> collection) {
+    if (isShutdown.get()) {
+      logger.log(Level.WARNING, "Exporter is shutting down");
+      return CompletableResultCode.ofFailure();
+    }
+    if (!collection.stream()
+        .flatMap(metricData -> metricData.getData().getPoints().stream())
+        .allMatch(pd -> projectId.equals(BigtableExporterUtils.getProjectId(pd)))) {
+      logger.log(Level.WARNING, "Metric data has a different projectId. Skip exporting.");
+      return CompletableResultCode.ofFailure();
+    }
+
+    List<TimeSeries> allTimeSeries;
+    try {
+      allTimeSeries =
+          BigtableExporterUtils.convertCollectionToListOfTimeSeries(
+              collection, taskId, monitoredResource);
+    } catch (Throwable e) {
+      logger.log(Level.WARNING, "Failed to convert metric data to cloud monitoring timeseries.", e);
+      return CompletableResultCode.ofFailure();
+    }
+
+    ProjectName projectName = ProjectName.of(projectId);
+    CreateTimeSeriesRequest request =
+        CreateTimeSeriesRequest.newBuilder()
+            .setName(projectName.toString())
+            .addAllTimeSeries(allTimeSeries)
+            .build();
+
+    ApiFuture<Empty> future = this.client.createServiceTimeSeriesCallable().futureCall(request);
+
+    lastExportCode = new CompletableResultCode(); // completed by the callback below
+
+    ApiFutures.addCallback(
+        future,
+        new ApiFutureCallback<Empty>() {
+          @Override
+          public void onFailure(Throwable throwable) {
+            logger.log(Level.WARNING, "createServiceTimeSeries request failed. ", throwable);
+            lastExportCode.fail();
+          }
+
+          @Override
+          public void onSuccess(Empty empty) {
+            lastExportCode.succeed();
+          }
+        },
+        MoreExecutors.directExecutor());
+
+    return lastExportCode;
+  }
+
+ @Override
+ public CompletableResultCode flush() {
+ if (lastExportCode != null) {
+ return lastExportCode;
+ }
+ return CompletableResultCode.ofSuccess();
+ }
+
+ @Override
+ public CompletableResultCode shutdown() {
+ if (!isShutdown.compareAndSet(false, true)) {
+ logger.log(Level.WARNING, "shutdown is called multiple times");
+ return CompletableResultCode.ofSuccess();
+ }
+ CompletableResultCode flushResult = flush();
+ CompletableResultCode shutdownResult = new CompletableResultCode();
+ flushResult.whenComplete(
+ () -> {
+ Throwable throwable = null;
+ try {
+ client.shutdown();
+ } catch (Throwable e) {
+ logger.log(Level.WARNING, "failed to shutdown the monitoring client", e);
+ throwable = e;
+ }
+ if (throwable != null) {
+ shutdownResult.fail();
+ } else {
+ shutdownResult.succeed();
+ }
+ });
+ return CompletableResultCode.ofAll(Arrays.asList(flushResult, shutdownResult));
+ }
+
+ /**
+ * For Google Cloud Monitoring always return CUMULATIVE to keep track of the cumulative value of a
+ * metric over time.
+ */
+ @Override
+ public AggregationTemporality getAggregationTemporality(InstrumentType instrumentType) {
+ return AggregationTemporality.CUMULATIVE;
+ }
+}
diff --git a/google-cloud-bigtable/src/main/java/com/google/cloud/bigtable/data/v2/stub/metrics/BigtableExporterUtils.java b/google-cloud-bigtable/src/main/java/com/google/cloud/bigtable/data/v2/stub/metrics/BigtableExporterUtils.java
new file mode 100644
index 0000000000..7c3dc09fc4
--- /dev/null
+++ b/google-cloud-bigtable/src/main/java/com/google/cloud/bigtable/data/v2/stub/metrics/BigtableExporterUtils.java
@@ -0,0 +1,249 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.cloud.bigtable.data.v2.stub.metrics;
+
+import static com.google.api.Distribution.BucketOptions;
+import static com.google.api.Distribution.BucketOptions.Explicit;
+import static com.google.api.MetricDescriptor.MetricKind;
+import static com.google.api.MetricDescriptor.MetricKind.CUMULATIVE;
+import static com.google.api.MetricDescriptor.MetricKind.GAUGE;
+import static com.google.api.MetricDescriptor.MetricKind.UNRECOGNIZED;
+import static com.google.api.MetricDescriptor.ValueType;
+import static com.google.api.MetricDescriptor.ValueType.DISTRIBUTION;
+import static com.google.api.MetricDescriptor.ValueType.DOUBLE;
+import static com.google.api.MetricDescriptor.ValueType.INT64;
+import static com.google.cloud.bigtable.data.v2.stub.metrics.BuiltinMetricsAttributes.CLIENT_UID;
+import static com.google.cloud.bigtable.data.v2.stub.metrics.BuiltinMetricsAttributes.CLUSTER_ID;
+import static com.google.cloud.bigtable.data.v2.stub.metrics.BuiltinMetricsAttributes.INSTANCE_ID;
+import static com.google.cloud.bigtable.data.v2.stub.metrics.BuiltinMetricsAttributes.PROJECT_ID;
+import static com.google.cloud.bigtable.data.v2.stub.metrics.BuiltinMetricsAttributes.TABLE_ID;
+import static com.google.cloud.bigtable.data.v2.stub.metrics.BuiltinMetricsAttributes.ZONE_ID;
+
+import com.google.api.Distribution;
+import com.google.api.Metric;
+import com.google.api.MonitoredResource;
+import com.google.common.collect.ImmutableSet;
+import com.google.monitoring.v3.Point;
+import com.google.monitoring.v3.TimeInterval;
+import com.google.monitoring.v3.TimeSeries;
+import com.google.monitoring.v3.TypedValue;
+import com.google.protobuf.util.Timestamps;
+import io.opentelemetry.api.common.AttributeKey;
+import io.opentelemetry.api.common.Attributes;
+import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
+import io.opentelemetry.sdk.metrics.data.DoublePointData;
+import io.opentelemetry.sdk.metrics.data.HistogramData;
+import io.opentelemetry.sdk.metrics.data.HistogramPointData;
+import io.opentelemetry.sdk.metrics.data.LongPointData;
+import io.opentelemetry.sdk.metrics.data.MetricData;
+import io.opentelemetry.sdk.metrics.data.MetricDataType;
+import io.opentelemetry.sdk.metrics.data.PointData;
+import io.opentelemetry.sdk.metrics.data.SumData;
+import java.lang.management.ManagementFactory;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Set;
+import java.util.UUID;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+/** Utils to convert OpenTelemetry types to Google Cloud Monitoring types. */
+class BigtableExporterUtils {
+
+ private static final Logger logger = Logger.getLogger(BigtableExporterUtils.class.getName());
+
+ // These metric labels will be promoted to the bigtable_table monitored resource fields
+  private static final Set<AttributeKey<String>> PROMOTED_RESOURCE_LABELS =
+      ImmutableSet.of(PROJECT_ID, INSTANCE_ID, TABLE_ID, CLUSTER_ID, ZONE_ID);
+
+ private BigtableExporterUtils() {}
+
+ /**
+ * In most cases this should look like java-${UUID}@${hostname}. The hostname will be retrieved
+ * from the jvm name and fallback to the local hostname.
+ */
+  static String getDefaultTaskValue() {
+    // Something like '<pid>@<hostname>'
+    final String jvmName = ManagementFactory.getRuntimeMXBean().getName();
+    // If jvm doesn't have the expected format, fallback to the local hostname
+    if (jvmName.indexOf('@') < 1) {
+      String hostname = "localhost";
+      try {
+        hostname = InetAddress.getLocalHost().getHostName();
+      } catch (UnknownHostException e) {
+        logger.log(Level.INFO, "Unable to get the hostname.", e);
+      }
+      // Generate a random number and use the same format "random_number@hostname".
+      return "java-" + UUID.randomUUID() + "@" + hostname;
+    }
+    return "java-" + UUID.randomUUID() + jvmName;
+  }
+
+ static String getProjectId(PointData pointData) {
+ return pointData.getAttributes().get(PROJECT_ID);
+ }
+
+  static List<TimeSeries> convertCollectionToListOfTimeSeries(
+      Collection<MetricData> collection, String taskId, MonitoredResource monitoredResource) {
+    List<TimeSeries> allTimeSeries = new ArrayList<>();
+
+    for (MetricData metricData : collection) {
+      // TODO: scope will be defined in BuiltinMetricsConstants. Update this field in the following
+      // PR.
+      if (!metricData.getInstrumentationScopeInfo().getName().equals("bigtable.googleapis.com")) {
+        continue;
+      }
+      metricData.getData().getPoints().stream()
+          .map(
+              pointData ->
+                  convertPointToTimeSeries(metricData, pointData, taskId, monitoredResource))
+          .forEach(allTimeSeries::add);
+    }
+
+    return allTimeSeries;
+  }
+
+  private static TimeSeries convertPointToTimeSeries(
+      MetricData metricData,
+      PointData pointData,
+      String taskId,
+      MonitoredResource monitoredResource) {
+    Attributes attributes = pointData.getAttributes();
+    MonitoredResource.Builder monitoredResourceBuilder = monitoredResource.toBuilder();
+    Metric.Builder metricBuilder = Metric.newBuilder().setType(metricData.getName());
+
+    for (AttributeKey<?> key : attributes.asMap().keySet()) {
+      if (PROMOTED_RESOURCE_LABELS.contains(key)) {
+        monitoredResourceBuilder.putLabels(key.getKey(), String.valueOf(attributes.get(key)));
+      } else {
+        metricBuilder.putLabels(key.getKey(), String.valueOf(attributes.get(key)));
+      }
+    }
+    metricBuilder.putLabels(CLIENT_UID.getKey(), taskId);
+
+    TimeSeries.Builder builder =
+        TimeSeries.newBuilder()
+            .setResource(monitoredResourceBuilder.build())
+            .setMetricKind(convertMetricKind(metricData))
+            .setMetric(metricBuilder.build())
+            .setValueType(convertValueType(metricData.getType()));
+
+    TimeInterval timeInterval =
+        TimeInterval.newBuilder()
+            .setStartTime(Timestamps.fromNanos(pointData.getStartEpochNanos()))
+            .setEndTime(Timestamps.fromNanos(pointData.getEpochNanos()))
+            .build();
+
+    builder.addPoints(createPoint(metricData.getType(), pointData, timeInterval));
+
+    return builder.build();
+  }
+
+ private static MetricKind convertMetricKind(MetricData metricData) {
+ switch (metricData.getType()) {
+ case HISTOGRAM:
+ case EXPONENTIAL_HISTOGRAM:
+ return convertHistogramType(metricData.getHistogramData());
+ case LONG_GAUGE:
+ case DOUBLE_GAUGE:
+ return GAUGE;
+ case LONG_SUM:
+ return convertSumDataType(metricData.getLongSumData());
+ case DOUBLE_SUM:
+ return convertSumDataType(metricData.getDoubleSumData());
+ default:
+ return UNRECOGNIZED;
+ }
+ }
+
+ private static MetricKind convertHistogramType(HistogramData histogramData) {
+ if (histogramData.getAggregationTemporality() == AggregationTemporality.CUMULATIVE) {
+ return CUMULATIVE;
+ }
+ return UNRECOGNIZED;
+ }
+
+  private static MetricKind convertSumDataType(SumData<?> sum) {
+    if (!sum.isMonotonic()) {
+      return GAUGE;
+    }
+    if (sum.getAggregationTemporality() == AggregationTemporality.CUMULATIVE) {
+      return CUMULATIVE;
+    }
+    return UNRECOGNIZED;
+  }
+
+ private static ValueType convertValueType(MetricDataType metricDataType) {
+ switch (metricDataType) {
+ case LONG_GAUGE:
+ case LONG_SUM:
+ return INT64;
+ case DOUBLE_GAUGE:
+ case DOUBLE_SUM:
+ return DOUBLE;
+ case HISTOGRAM:
+ case EXPONENTIAL_HISTOGRAM:
+ return DISTRIBUTION;
+ default:
+ return ValueType.UNRECOGNIZED;
+ }
+ }
+
+ private static Point createPoint(
+ MetricDataType type, PointData pointData, TimeInterval timeInterval) {
+ Point.Builder builder = Point.newBuilder().setInterval(timeInterval);
+ switch (type) {
+ case HISTOGRAM:
+ case EXPONENTIAL_HISTOGRAM:
+ return builder
+ .setValue(
+ TypedValue.newBuilder()
+ .setDistributionValue(convertHistogramData((HistogramPointData) pointData))
+ .build())
+ .build();
+ case DOUBLE_GAUGE:
+ case DOUBLE_SUM:
+ return builder
+ .setValue(
+ TypedValue.newBuilder()
+ .setDoubleValue(((DoublePointData) pointData).getValue())
+ .build())
+ .build();
+ case LONG_GAUGE:
+ case LONG_SUM:
+ return builder
+ .setValue(TypedValue.newBuilder().setInt64Value(((LongPointData) pointData).getValue()))
+ .build();
+ default:
+ logger.log(Level.WARNING, "unsupported metric type");
+ return builder.build();
+ }
+ }
+
+ private static Distribution convertHistogramData(HistogramPointData pointData) {
+ return Distribution.newBuilder()
+ .setCount(pointData.getCount())
+ .setMean(pointData.getCount() == 0L ? 0.0D : pointData.getSum() / pointData.getCount())
+ .setBucketOptions(
+ BucketOptions.newBuilder()
+ .setExplicitBuckets(Explicit.newBuilder().addAllBounds(pointData.getBoundaries())))
+ .addAllBucketCounts(pointData.getCounts())
+ .build();
+ }
+}
diff --git a/google-cloud-bigtable/src/main/java/com/google/cloud/bigtable/data/v2/stub/metrics/BuiltinMetricsAttributes.java b/google-cloud-bigtable/src/main/java/com/google/cloud/bigtable/data/v2/stub/metrics/BuiltinMetricsAttributes.java
new file mode 100644
index 0000000000..e34659444b
--- /dev/null
+++ b/google-cloud-bigtable/src/main/java/com/google/cloud/bigtable/data/v2/stub/metrics/BuiltinMetricsAttributes.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.cloud.bigtable.data.v2.stub.metrics;
+
+import io.opentelemetry.api.common.AttributeKey;
+
+class BuiltinMetricsAttributes {
+
+  static final AttributeKey<String> PROJECT_ID = AttributeKey.stringKey("project_id");
+  static final AttributeKey<String> INSTANCE_ID = AttributeKey.stringKey("instance");
+  static final AttributeKey<String> TABLE_ID = AttributeKey.stringKey("table");
+  static final AttributeKey<String> CLUSTER_ID = AttributeKey.stringKey("cluster");
+  static final AttributeKey<String> ZONE_ID = AttributeKey.stringKey("zone");
+
+  static final AttributeKey<String> APP_PROFILE = AttributeKey.stringKey("app_profile");
+  static final AttributeKey<Boolean> STREAMING = AttributeKey.booleanKey("streaming");
+  static final AttributeKey<String> METHOD = AttributeKey.stringKey("method");
+  static final AttributeKey<String> STATUS = AttributeKey.stringKey("status");
+  static final AttributeKey<String> CLIENT_NAME = AttributeKey.stringKey("client_name");
+  static final AttributeKey<String> CLIENT_UID = AttributeKey.stringKey("client_uid");
+}
diff --git a/google-cloud-bigtable/src/test/java/com/google/cloud/bigtable/data/v2/stub/metrics/BigtableCloudMonitoringExporterTest.java b/google-cloud-bigtable/src/test/java/com/google/cloud/bigtable/data/v2/stub/metrics/BigtableCloudMonitoringExporterTest.java
new file mode 100644
index 0000000000..0f3bed1d90
--- /dev/null
+++ b/google-cloud-bigtable/src/test/java/com/google/cloud/bigtable/data/v2/stub/metrics/BigtableCloudMonitoringExporterTest.java
@@ -0,0 +1,233 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.cloud.bigtable.data.v2.stub.metrics;
+
+import static com.google.cloud.bigtable.data.v2.stub.metrics.BuiltinMetricsAttributes.APP_PROFILE;
+import static com.google.cloud.bigtable.data.v2.stub.metrics.BuiltinMetricsAttributes.CLIENT_UID;
+import static com.google.cloud.bigtable.data.v2.stub.metrics.BuiltinMetricsAttributes.CLUSTER_ID;
+import static com.google.cloud.bigtable.data.v2.stub.metrics.BuiltinMetricsAttributes.INSTANCE_ID;
+import static com.google.cloud.bigtable.data.v2.stub.metrics.BuiltinMetricsAttributes.PROJECT_ID;
+import static com.google.cloud.bigtable.data.v2.stub.metrics.BuiltinMetricsAttributes.TABLE_ID;
+import static com.google.cloud.bigtable.data.v2.stub.metrics.BuiltinMetricsAttributes.ZONE_ID;
+import static com.google.common.truth.Truth.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import com.google.api.Distribution;
+import com.google.api.MonitoredResource;
+import com.google.api.core.ApiFuture;
+import com.google.api.core.ApiFutures;
+import com.google.api.gax.rpc.UnaryCallable;
+import com.google.cloud.monitoring.v3.MetricServiceClient;
+import com.google.cloud.monitoring.v3.stub.MetricServiceStub;
+import com.google.common.collect.ImmutableList;
+import com.google.monitoring.v3.CreateTimeSeriesRequest;
+import com.google.monitoring.v3.TimeSeries;
+import com.google.protobuf.Empty;
+import io.opentelemetry.api.common.Attributes;
+import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
+import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
+import io.opentelemetry.sdk.metrics.data.HistogramPointData;
+import io.opentelemetry.sdk.metrics.data.LongPointData;
+import io.opentelemetry.sdk.metrics.data.MetricData;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramData;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramPointData;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData;
+import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData;
+import io.opentelemetry.sdk.resources.Resource;
+import java.util.Arrays;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnit;
+import org.mockito.junit.MockitoRule;
+
+public class BigtableCloudMonitoringExporterTest {
+ private static final String projectId = "fake-project";
+ private static final String instanceId = "fake-instance";
+ private static final String appProfileId = "default";
+ private static final String tableId = "fake-table";
+ private static final String zone = "us-east-1";
+ private static final String cluster = "cluster-1";
+
+ private static final String taskId = "fake-task-id";
+
+ @Rule public final MockitoRule mockitoRule = MockitoJUnit.rule();
+
+ @Mock private MetricServiceStub mockMetricServiceStub;
+ private MetricServiceClient fakeMetricServiceClient;
+ private BigtableCloudMonitoringExporter exporter;
+
+ private Attributes attributes;
+ private Resource resource;
+ private InstrumentationScopeInfo scope;
+
+ @Before
+ public void setUp() {
+
+ fakeMetricServiceClient = new FakeMetricServiceClient(mockMetricServiceStub);
+
+ exporter =
+ new BigtableCloudMonitoringExporter(
+ projectId,
+ fakeMetricServiceClient,
+ MonitoredResource.newBuilder().setType("bigtable-table").build(),
+ taskId);
+
+ attributes =
+ Attributes.builder()
+ .put(PROJECT_ID, projectId)
+ .put(INSTANCE_ID, instanceId)
+ .put(TABLE_ID, tableId)
+ .put(CLUSTER_ID, cluster)
+ .put(ZONE_ID, zone)
+ .put(APP_PROFILE, appProfileId)
+ .build();
+
+ resource = Resource.create(Attributes.empty());
+
+ scope = InstrumentationScopeInfo.create("bigtable.googleapis.com");
+ }
+
+ @After
+ public void tearDown() {}
+
+  @Test
+  public void testExportingSumData() {
+    ArgumentCaptor<CreateTimeSeriesRequest> argumentCaptor =
+        ArgumentCaptor.forClass(CreateTimeSeriesRequest.class);
+
+    UnaryCallable<CreateTimeSeriesRequest, Empty> mockCallable = mock(UnaryCallable.class);
+    when(mockMetricServiceStub.createServiceTimeSeriesCallable()).thenReturn(mockCallable);
+    ApiFuture<Empty> future = ApiFutures.immediateFuture(Empty.getDefaultInstance());
+    when(mockCallable.futureCall(argumentCaptor.capture())).thenReturn(future);
+
+    long fakeValue = 11L;
+
+    long startEpoch = 10;
+    long endEpoch = 15;
+    LongPointData longPointData =
+        ImmutableLongPointData.create(startEpoch, endEpoch, attributes, fakeValue);
+
+    MetricData longData =
+        ImmutableMetricData.createLongSum(
+            resource,
+            scope,
+            "bigtable/test/long",
+            "description",
+            "1",
+            ImmutableSumData.create(
+                true, AggregationTemporality.CUMULATIVE, ImmutableList.of(longPointData)));
+
+    exporter.export(Arrays.asList(longData));
+
+    CreateTimeSeriesRequest request = argumentCaptor.getValue();
+
+    assertThat(request.getTimeSeriesList()).hasSize(1);
+
+    TimeSeries timeSeries = request.getTimeSeriesList().get(0);
+
+    assertThat(timeSeries.getResource().getLabelsMap())
+        .containsExactly(
+            PROJECT_ID.getKey(), projectId,
+            INSTANCE_ID.getKey(), instanceId,
+            TABLE_ID.getKey(), tableId,
+            CLUSTER_ID.getKey(), cluster,
+            ZONE_ID.getKey(), zone);
+
+    assertThat(timeSeries.getMetric().getLabelsMap()).hasSize(2);
+    assertThat(timeSeries.getMetric().getLabelsMap())
+        .containsAtLeast(APP_PROFILE.getKey(), appProfileId);
+    assertThat(timeSeries.getMetric().getLabelsMap()).containsAtLeast(CLIENT_UID.getKey(), taskId);
+    assertThat(timeSeries.getPoints(0).getValue().getInt64Value()).isEqualTo(fakeValue);
+    assertThat(timeSeries.getPoints(0).getInterval().getStartTime().getNanos())
+        .isEqualTo(startEpoch);
+    assertThat(timeSeries.getPoints(0).getInterval().getEndTime().getNanos()).isEqualTo(endEpoch);
+  }
+
+  @Test
+  public void testExportingHistogramData() {
+    ArgumentCaptor<CreateTimeSeriesRequest> argumentCaptor =
+        ArgumentCaptor.forClass(CreateTimeSeriesRequest.class);
+
+    UnaryCallable<CreateTimeSeriesRequest, Empty> mockCallable = mock(UnaryCallable.class);
+    when(mockMetricServiceStub.createServiceTimeSeriesCallable()).thenReturn(mockCallable);
+    ApiFuture<Empty> future = ApiFutures.immediateFuture(Empty.getDefaultInstance());
+    when(mockCallable.futureCall(argumentCaptor.capture())).thenReturn(future);
+
+    long startEpoch = 10;
+    long endEpoch = 15;
+    HistogramPointData histogramPointData =
+        ImmutableHistogramPointData.create(
+            startEpoch,
+            endEpoch,
+            attributes,
+            3d,
+            true,
+            1d, // min
+            true,
+            2d, // max
+            Arrays.asList(1.0),
+            Arrays.asList(1L, 2L));
+
+    MetricData histogramData =
+        ImmutableMetricData.createDoubleHistogram(
+            resource,
+            scope,
+            "bigtable/test/histogram",
+            "description",
+            "ms",
+            ImmutableHistogramData.create(
+                AggregationTemporality.CUMULATIVE, ImmutableList.of(histogramPointData)));
+
+    exporter.export(Arrays.asList(histogramData));
+
+    CreateTimeSeriesRequest request = argumentCaptor.getValue();
+
+    assertThat(request.getTimeSeriesList()).hasSize(1);
+
+    TimeSeries timeSeries = request.getTimeSeriesList().get(0);
+
+    assertThat(timeSeries.getResource().getLabelsMap())
+        .containsExactly(
+            PROJECT_ID.getKey(), projectId,
+            INSTANCE_ID.getKey(), instanceId,
+            TABLE_ID.getKey(), tableId,
+            CLUSTER_ID.getKey(), cluster,
+            ZONE_ID.getKey(), zone);
+
+    assertThat(timeSeries.getMetric().getLabelsMap()).hasSize(2);
+    assertThat(timeSeries.getMetric().getLabelsMap())
+        .containsAtLeast(APP_PROFILE.getKey(), appProfileId);
+    assertThat(timeSeries.getMetric().getLabelsMap()).containsAtLeast(CLIENT_UID.getKey(), taskId);
+    Distribution distribution = timeSeries.getPoints(0).getValue().getDistributionValue();
+    assertThat(distribution.getCount()).isEqualTo(3);
+    assertThat(timeSeries.getPoints(0).getInterval().getStartTime().getNanos())
+        .isEqualTo(startEpoch);
+    assertThat(timeSeries.getPoints(0).getInterval().getEndTime().getNanos()).isEqualTo(endEpoch);
+  }
+
+ private static class FakeMetricServiceClient extends MetricServiceClient {
+
+ protected FakeMetricServiceClient(MetricServiceStub stub) {
+ super(stub);
+ }
+ }
+}